diff --git a/.drone.yml b/.drone.yml index 2358391285dcb..0593379f84d57 100644 --- a/.drone.yml +++ b/.drone.yml @@ -18,14 +18,14 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - ./bin/build verify-drone @@ -69,21 +69,13 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner -- commands: - - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd - depends_on: [] - environment: - CGO_ENABLED: 0 - image: golang:1.22.4-alpine - name: compile-build-cmd - commands: - go install github.com/bazelbuild/buildtools/buildifier@latest - buildifier --lint=warn -mode=check -r . - depends_on: - - compile-build-cmd - image: golang:1.22.4-alpine + depends_on: [] + image: golang:1.22.7-alpine name: lint-starlark trigger: event: @@ -120,7 +112,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -222,7 +214,7 @@ steps: name: clone-enterprise - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -311,7 +303,7 @@ steps: name: clone-enterprise - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -321,7 +313,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It 
is required that generated jsonnet is committed and in sync with its inputs.' @@ -330,14 +322,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - apk add --update build-base shared-mime-info shared-mime-info-lang @@ -345,7 +337,7 @@ steps: -timeout=5m depends_on: - wire-install - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: test-backend - commands: - apk add --update build-base @@ -354,7 +346,7 @@ steps: | grep -o '\(.*\)/' | sort -u) depends_on: - wire-install - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: test-backend-integration trigger: event: @@ -399,14 +391,14 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - apk add --update curl jq bash @@ -433,16 +425,16 @@ steps: - apk add --update make - make gen-go depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - go run scripts/modowners/modowners.go check go.mod - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: validate-modfile - commands: - apk add --update make - make swagger-validate - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: validate-openapi-spec trigger: event: @@ -488,11 +480,11 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - mkdir -p bin - - curl -fL -o bin/grabpl 
https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/grabpl + - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/grabpl - chmod +x bin/grabpl image: byrnedo/alpine-curl:0.1.8 name: grabpl @@ -501,7 +493,7 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -511,7 +503,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' @@ -520,14 +512,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - yarn install --immutable || yarn install --immutable @@ -560,7 +552,7 @@ steps: from_secret: drone_token - commands: - /src/grafana-build artifacts -a targz:grafana:linux/amd64 -a targz:grafana:linux/arm64 - -a targz:grafana:linux/arm/v7 --go-version=1.22.4 --yarn-cache=$$YARN_CACHE_FOLDER + -a targz:grafana:linux/arm/v7 --go-version=1.22.7 --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER --grafana-dir=$$PWD > packages.txt depends_on: - compile-build-cmd @@ -586,7 +578,7 @@ steps: GF_APP_MODE: development GF_SERVER_HTTP_PORT: "3001" GF_SERVER_ROUTER_LOGGING: "1" - image: alpine:3.19.1 + image: alpine:3.20.3 name: grafana-server - commands: - ./bin/build e2e-tests --port 3001 --suite dashboards-suite @@ -765,7 +757,7 @@ steps: - /src/grafana-build artifacts -a docker:grafana:linux/amd64 -a 
docker:grafana:linux/amd64:ubuntu -a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7 -a docker:grafana:linux/arm/v7:ubuntu --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER - --go-version=1.22.4 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.19.1 --tag-format='{{ + --go-version=1.22.7 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.20.3 --tag-format='{{ .version_base }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt - find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i @@ -935,7 +927,7 @@ steps: name: clone-enterprise - commands: - mkdir -p bin - - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/grabpl + - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/grabpl - chmod +x bin/grabpl image: byrnedo/alpine-curl:0.1.8 name: grabpl @@ -944,11 +936,11 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -958,7 +950,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' 
@@ -967,14 +959,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - dockerize -wait tcp://postgres:5432 -timeout 120s @@ -995,7 +987,7 @@ steps: GRAFANA_TEST_DB: postgres PGPASSWORD: grafanatest POSTGRES_HOST: postgres - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: postgres-integration-tests - commands: - dockerize -wait tcp://mysql57:3306 -timeout 120s @@ -1016,7 +1008,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql57 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-5.7-integration-tests - commands: - dockerize -wait tcp://mysql80:3306 -timeout 120s @@ -1037,7 +1029,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql80 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-8.0-integration-tests - commands: - dockerize -wait tcp://redis:6379 -timeout 120s @@ -1053,7 +1045,7 @@ steps: - wait-for-redis environment: REDIS_URL: redis://redis:6379/0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: redis-integration-tests - commands: - dockerize -wait tcp://memcached:11211 -timeout 120s @@ -1069,7 +1061,7 @@ steps: - wait-for-memcached environment: MEMCACHED_HOSTS: memcached:11211 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: memcached-integration-tests - commands: - dockerize -wait tcp://mimir_backend:8080 -timeout 120s @@ -1085,7 +1077,7 @@ steps: environment: AM_TENANT_ID: test AM_URL: http://mimir_backend:8080 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: remote-alertmanager-integration-tests trigger: event: @@ -1136,7 +1128,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 
name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1173,7 +1165,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue trigger: event: @@ -1209,13 +1201,6 @@ platform: os: linux services: [] steps: -- commands: - - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd - depends_on: [] - environment: - CGO_ENABLED: 0 - image: golang:1.22.4-alpine - name: compile-build-cmd - commands: - apt-get update -yq && apt-get install shellcheck - shellcheck -e SC1071 -e SC2162 scripts/**/*.sh @@ -1286,7 +1271,7 @@ steps: environment: GITHUB_TOKEN: from_secret: github_token - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: swagger-gen trigger: event: @@ -1382,7 +1367,7 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -1393,7 +1378,7 @@ steps: - CODEGEN_VERIFY=1 make gen-cue depends_on: - clone-enterprise - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' @@ -1403,14 +1388,14 @@ steps: - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: - clone-enterprise - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - apk add --update build-base @@ -1418,7 +1403,7 @@ steps: - go test -v -run=^$ -benchmem -timeout=1h -count=8 -bench=. 
${GO_PACKAGES} depends_on: - wire-install - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: sqlite-benchmark-integration-tests - commands: - apk add --update build-base @@ -1430,7 +1415,7 @@ steps: GRAFANA_TEST_DB: postgres PGPASSWORD: grafanatest POSTGRES_HOST: postgres - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: postgres-benchmark-integration-tests - commands: - apk add --update build-base @@ -1441,7 +1426,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql57 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-5.7-benchmark-integration-tests - commands: - apk add --update build-base @@ -1452,7 +1437,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql80 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-8.0-benchmark-integration-tests trigger: event: @@ -1493,7 +1478,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1530,7 +1515,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue trigger: branch: main @@ -1569,7 +1554,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1627,7 +1612,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -1693,7 +1678,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -1703,7 +1688,7 @@ steps: - apk add 
--update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' @@ -1712,14 +1697,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - apk add --update build-base shared-mime-info shared-mime-info-lang @@ -1727,7 +1712,7 @@ steps: -timeout=5m depends_on: - wire-install - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: test-backend - commands: - apk add --update build-base @@ -1736,7 +1721,7 @@ steps: | grep -o '\(.*\)/' | sort -u) depends_on: - wire-install - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: test-backend-integration trigger: branch: main @@ -1774,29 +1759,29 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - apk add --update make - make gen-go depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - go run scripts/modowners/modowners.go check go.mod - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: validate-modfile - commands: - apk add --update make - make swagger-validate - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: validate-openapi-spec - commands: - ./bin/build verify-drone @@ -1840,11 +1825,11 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: 
alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - mkdir -p bin - - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/grabpl + - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/grabpl - chmod +x bin/grabpl image: byrnedo/alpine-curl:0.1.8 name: grabpl @@ -1853,7 +1838,7 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -1863,7 +1848,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' @@ -1872,14 +1857,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - yarn install --immutable || yarn install --immutable @@ -1912,7 +1897,7 @@ steps: name: build-frontend-packages - commands: - /src/grafana-build artifacts -a targz:grafana:linux/amd64 -a targz:grafana:linux/arm64 - -a targz:grafana:linux/arm/v7 --go-version=1.22.4 --yarn-cache=$$YARN_CACHE_FOLDER + -a targz:grafana:linux/arm/v7 --go-version=1.22.7 --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER --grafana-dir=$$PWD > packages.txt depends_on: - update-package-json-version @@ -1937,7 +1922,7 @@ steps: GF_APP_MODE: development GF_SERVER_HTTP_PORT: "3001" GF_SERVER_ROUTER_LOGGING: "1" - image: alpine:3.19.1 + image: alpine:3.20.3 name: grafana-server - commands: - ./bin/build e2e-tests --port 3001 --suite 
dashboards-suite @@ -2152,7 +2137,7 @@ steps: - /src/grafana-build artifacts -a docker:grafana:linux/amd64 -a docker:grafana:linux/amd64:ubuntu -a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7 -a docker:grafana:linux/arm/v7:ubuntu --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER - --go-version=1.22.4 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.19.1 --tag-format='{{ + --go-version=1.22.7 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.20.3 --tag-format='{{ .version_base }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt - find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i @@ -2349,7 +2334,7 @@ services: steps: - commands: - mkdir -p bin - - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/grabpl + - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/grabpl - chmod +x bin/grabpl image: byrnedo/alpine-curl:0.1.8 name: grabpl @@ -2358,11 +2343,11 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -2372,7 +2357,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' 
@@ -2381,14 +2366,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - dockerize -wait tcp://postgres:5432 -timeout 120s @@ -2409,7 +2394,7 @@ steps: GRAFANA_TEST_DB: postgres PGPASSWORD: grafanatest POSTGRES_HOST: postgres - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: postgres-integration-tests - commands: - dockerize -wait tcp://mysql57:3306 -timeout 120s @@ -2430,7 +2415,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql57 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-5.7-integration-tests - commands: - dockerize -wait tcp://mysql80:3306 -timeout 120s @@ -2451,7 +2436,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql80 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-8.0-integration-tests - commands: - dockerize -wait tcp://redis:6379 -timeout 120s @@ -2467,7 +2452,7 @@ steps: - wait-for-redis environment: REDIS_URL: redis://redis:6379/0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: redis-integration-tests - commands: - dockerize -wait tcp://memcached:11211 -timeout 120s @@ -2483,7 +2468,7 @@ steps: - wait-for-memcached environment: MEMCACHED_HOSTS: memcached:11211 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: memcached-integration-tests - commands: - dockerize -wait tcp://mimir_backend:8080 -timeout 120s @@ -2499,7 +2484,7 @@ steps: environment: AM_TENANT_ID: test AM_URL: http://mimir_backend:8080 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: remote-alertmanager-integration-tests trigger: branch: main @@ -2553,7 +2538,7 @@ steps: name: identify-runner - commands: - $$ProgressPreference = "SilentlyContinue" - - 
Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/windows/grabpl.exe + - Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/windows/grabpl.exe -OutFile grabpl.exe image: grafana/ci-wix:0.1.1 name: windows-init @@ -2679,11 +2664,11 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - mkdir -p bin - - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/grabpl + - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/grabpl - chmod +x bin/grabpl image: byrnedo/alpine-curl:0.1.8 name: grabpl @@ -2692,7 +2677,7 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - ./bin/build artifacts docker fetch --edition oss @@ -2717,31 +2702,32 @@ steps: - |2- bash -c ' + IMAGE_TAG=$(echo "$${TAG}" | sed -e "s/+/-/g") debug= if [[ -n $${DRY_RUN} ]]; then debug=echo; fi docker login -u $${DOCKER_USER} -p $${DOCKER_PASSWORD} # Push the grafana-image-tags images - $$debug docker push grafana/grafana-image-tags:$${TAG}-amd64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-arm64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-armv7 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 + 
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 # Create the grafana manifests - $$debug docker manifest create grafana/grafana:${TAG} grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7 + $$debug docker manifest create grafana/grafana:$${IMAGE_TAG} grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 - $$debug docker manifest create grafana/grafana:${TAG}-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker manifest create grafana/grafana:$${IMAGE_TAG}-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 # Push the grafana manifests - $$debug docker manifest push grafana/grafana:$${TAG} - $$debug docker manifest push grafana/grafana:$${TAG}-ubuntu + $$debug docker manifest push grafana/grafana:$${IMAGE_TAG} + $$debug docker manifest push grafana/grafana:$${IMAGE_TAG}-ubuntu # if LATEST is set, then also create & push latest if [[ -n $${LATEST} ]]; then - $$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7 - $$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 + $$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 
grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 $$debug docker manifest push grafana/grafana:latest $$debug docker manifest push grafana/grafana:latest-ubuntu @@ -2812,11 +2798,11 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - mkdir -p bin - - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/grabpl + - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/grabpl - chmod +x bin/grabpl image: byrnedo/alpine-curl:0.1.8 name: grabpl @@ -2825,7 +2811,7 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - ./bin/build artifacts docker fetch --edition oss @@ -2848,31 +2834,32 @@ steps: - |2- bash -c ' + IMAGE_TAG=$(echo "$${TAG}" | sed -e "s/+/-/g") debug= if [[ -n $${DRY_RUN} ]]; then debug=echo; fi docker login -u $${DOCKER_USER} -p $${DOCKER_PASSWORD} # Push the grafana-image-tags images - $$debug docker push grafana/grafana-image-tags:$${TAG}-amd64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-arm64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-armv7 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 # Create the 
grafana manifests - $$debug docker manifest create grafana/grafana:${TAG} grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7 + $$debug docker manifest create grafana/grafana:$${IMAGE_TAG} grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 - $$debug docker manifest create grafana/grafana:${TAG}-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker manifest create grafana/grafana:$${IMAGE_TAG}-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 # Push the grafana manifests - $$debug docker manifest push grafana/grafana:$${TAG} - $$debug docker manifest push grafana/grafana:$${TAG}-ubuntu + $$debug docker manifest push grafana/grafana:$${IMAGE_TAG} + $$debug docker manifest push grafana/grafana:$${IMAGE_TAG}-ubuntu # if LATEST is set, then also create & push latest if [[ -n $${LATEST} ]]; then - $$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7 - $$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 + $$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 
grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 $$debug docker manifest push grafana/grafana:latest $$debug docker manifest push grafana/grafana:latest-ubuntu @@ -2961,10 +2948,11 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - - ./bin/build artifacts packages --tag $${DRONE_TAG} --src-bucket $${PRERELEASE_BUCKET} + - ./bin/build artifacts packages --artifacts-editions=oss --tag $${DRONE_TAG} --src-bucket + $${PRERELEASE_BUCKET} depends_on: - compile-build-cmd environment: @@ -2974,19 +2962,6 @@ steps: from_secret: prerelease_bucket image: grafana/grafana-ci-deploy:1.3.3 name: publish-artifacts -- commands: - - ./bin/build artifacts static-assets --tag ${DRONE_TAG} --static-asset-editions=grafana-oss - depends_on: - - compile-build-cmd - environment: - GCP_KEY: - from_secret: gcp_grafanauploads_base64 - PRERELEASE_BUCKET: - from_secret: prerelease_bucket - STATIC_ASSET_EDITIONS: - from_secret: static_asset_editions - image: grafana/grafana-ci-deploy:1.3.3 - name: publish-static-assets - commands: - ./bin/build artifacts storybook --tag ${DRONE_TAG} depends_on: @@ -3006,7 +2981,6 @@ steps: -f latest=$${LATEST} --repo=grafana/grafana release-pr.yml depends_on: - publish-artifacts - - publish-static-assets environment: GH_CLI_URL: https://github.com/cli/cli/releases/download/v2.50.0/gh_2.50.0_linux_amd64.tar.gz GITHUB_TOKEN: @@ -3046,7 +3020,7 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - yarn install --immutable || yarn install --immutable @@ -3138,6 +3112,7 @@ platform: services: [] steps: - commands: + - export version=$(echo ${TAG} | sed -e "s/+security-/-/g") - 'echo "Step 1: Updating package lists..."' - apt-get update >/dev/null 2>&1 - 'echo "Step 2: Installing prerequisites..."' @@ -3151,26 +3126,26 @@ steps: - echo "deb 
[signed-by=/etc/apt/keyrings/grafana.gpg] https://apt.grafana.com stable main" | tee -a /etc/apt/sources.list.d/grafana.list - 'echo "Step 5: Installing Grafana..."' - - for i in $(seq 1 10); do + - for i in $(seq 1 60); do - ' if apt-get update >/dev/null 2>&1 && DEBIAN_FRONTEND=noninteractive apt-get - install -yq grafana=${TAG} >/dev/null 2>&1; then' + install -yq grafana=$version >/dev/null 2>&1; then' - ' echo "Command succeeded on attempt $i"' - ' break' - ' else' - ' echo "Attempt $i failed"' - - ' if [ $i -eq 10 ]; then' + - ' if [ $i -eq 60 ]; then' - ' echo ''All attempts failed''' - ' exit 1' - ' fi' - - ' echo "Waiting 60 seconds before next attempt..."' - - ' sleep 60' + - ' echo "Waiting 30 seconds before next attempt..."' + - ' sleep 30' - ' fi' - done - 'echo "Step 6: Verifying Grafana installation..."' - - 'if dpkg -s grafana | grep -q "Version: ${TAG}"; then' - - ' echo "Successfully verified Grafana version ${TAG}"' + - 'if dpkg -s grafana | grep -q "Version: $version"; then' + - ' echo "Successfully verified Grafana version $version"' - else - - ' echo "Failed to verify Grafana version ${TAG}"' + - ' echo "Failed to verify Grafana version $version"' - ' exit 1' - fi - echo "Verification complete." @@ -3198,37 +3173,38 @@ steps: sslcacert=/etc/pki/tls/certs/ca-bundle.crt ' > /etc/yum.repos.d/grafana.repo - 'echo "Step 5: Checking RPM repository..."' - - dnf list available grafana-${TAG} + - export version=$(echo "${TAG}" | sed -e "s/+security-/^security_/g") + - dnf list available grafana-$version - if [ $? -eq 0 ]; then - ' echo "Grafana package found in repository. 
Installing from repo..."' - - for i in $(seq 1 5); do - - ' if dnf install -y --nogpgcheck grafana-${TAG} >/dev/null 2>&1; then' + - for i in $(seq 1 60); do + - ' if dnf install -y --nogpgcheck grafana-$version >/dev/null 2>&1; then' - ' echo "Command succeeded on attempt $i"' - ' break' - ' else' - ' echo "Attempt $i failed"' - - ' if [ $i -eq 5 ]; then' + - ' if [ $i -eq 60 ]; then' - ' echo ''All attempts failed''' - ' exit 1' - ' fi' - - ' echo "Waiting 60 seconds before next attempt..."' - - ' sleep 60' + - ' echo "Waiting 30 seconds before next attempt..."' + - ' sleep 30' - ' fi' - done - ' echo "Verifying GPG key..."' - ' rpm --import https://rpm.grafana.com/gpg.key' - ' rpm -qa gpg-pubkey* | xargs rpm -qi | grep -i grafana' - else - - ' echo "Grafana package version ${TAG} not found in repository."' + - ' echo "Grafana package version $version not found in repository."' - ' dnf repolist' - ' dnf list available grafana*' - ' exit 1' - fi - 'echo "Step 6: Verifying Grafana installation..."' - - if rpm -q grafana | grep -q "${TAG}"; then - - ' echo "Successfully verified Grafana version ${TAG}"' + - if rpm -q grafana | grep -q "$version"; then + - ' echo "Successfully verified Grafana version $version"' - else - - ' echo "Failed to verify Grafana version ${TAG}"' + - ' echo "Failed to verify Grafana version $version"' - ' exit 1' - fi - echo "Verification complete." 
@@ -3270,7 +3246,7 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - depends_on: - compile-build-cmd @@ -3315,6 +3291,7 @@ steps: from_secret: packages_service_account target_bucket: grafana-packages - commands: + - export version=$(echo ${TAG} | sed -e "s/+security-/-/g") - 'echo "Step 1: Updating package lists..."' - apt-get update >/dev/null 2>&1 - 'echo "Step 2: Installing prerequisites..."' @@ -3328,26 +3305,26 @@ steps: - echo "deb [signed-by=/etc/apt/keyrings/grafana.gpg] https://apt.grafana.com stable main" | tee -a /etc/apt/sources.list.d/grafana.list - 'echo "Step 5: Installing Grafana..."' - - for i in $(seq 1 10); do + - for i in $(seq 1 60); do - ' if apt-get update >/dev/null 2>&1 && DEBIAN_FRONTEND=noninteractive apt-get - install -yq grafana=${TAG} >/dev/null 2>&1; then' + install -yq grafana=$version >/dev/null 2>&1; then' - ' echo "Command succeeded on attempt $i"' - ' break' - ' else' - ' echo "Attempt $i failed"' - - ' if [ $i -eq 10 ]; then' + - ' if [ $i -eq 60 ]; then' - ' echo ''All attempts failed''' - ' exit 1' - ' fi' - - ' echo "Waiting 60 seconds before next attempt..."' - - ' sleep 60' + - ' echo "Waiting 30 seconds before next attempt..."' + - ' sleep 30' - ' fi' - done - 'echo "Step 6: Verifying Grafana installation..."' - - 'if dpkg -s grafana | grep -q "Version: ${TAG}"; then' - - ' echo "Successfully verified Grafana version ${TAG}"' + - 'if dpkg -s grafana | grep -q "Version: $version"; then' + - ' echo "Successfully verified Grafana version $version"' - else - - ' echo "Failed to verify Grafana version ${TAG}"' + - ' echo "Failed to verify Grafana version $version"' - ' exit 1' - fi - echo "Verification complete." 
@@ -3376,37 +3353,38 @@ steps: sslcacert=/etc/pki/tls/certs/ca-bundle.crt ' > /etc/yum.repos.d/grafana.repo - 'echo "Step 5: Checking RPM repository..."' - - dnf list available grafana-${TAG} + - export version=$(echo "${TAG}" | sed -e "s/+security-/^security_/g") + - dnf list available grafana-$version - if [ $? -eq 0 ]; then - ' echo "Grafana package found in repository. Installing from repo..."' - - for i in $(seq 1 5); do - - ' if dnf install -y --nogpgcheck grafana-${TAG} >/dev/null 2>&1; then' + - for i in $(seq 1 60); do + - ' if dnf install -y --nogpgcheck grafana-$version >/dev/null 2>&1; then' - ' echo "Command succeeded on attempt $i"' - ' break' - ' else' - ' echo "Attempt $i failed"' - - ' if [ $i -eq 5 ]; then' + - ' if [ $i -eq 60 ]; then' - ' echo ''All attempts failed''' - ' exit 1' - ' fi' - - ' echo "Waiting 60 seconds before next attempt..."' - - ' sleep 60' + - ' echo "Waiting 30 seconds before next attempt..."' + - ' sleep 30' - ' fi' - done - ' echo "Verifying GPG key..."' - ' rpm --import https://rpm.grafana.com/gpg.key' - ' rpm -qa gpg-pubkey* | xargs rpm -qi | grep -i grafana' - else - - ' echo "Grafana package version ${TAG} not found in repository."' + - ' echo "Grafana package version $version not found in repository."' - ' dnf repolist' - ' dnf list available grafana*' - ' exit 1' - fi - 'echo "Step 6: Verifying Grafana installation..."' - - if rpm -q grafana | grep -q "${TAG}"; then - - ' echo "Successfully verified Grafana version ${TAG}"' + - if rpm -q grafana | grep -q "$version"; then + - ' echo "Successfully verified Grafana version $version"' - else - - ' echo "Failed to verify Grafana version ${TAG}"' + - ' echo "Failed to verify Grafana version $version"' - ' exit 1' - fi - echo "Verification complete." 
@@ -3471,7 +3449,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.19.1 + ALPINE_BASE: alpine:3.20.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -3488,7 +3466,7 @@ steps: from_secret: gcp_key_base64 GITHUB_TOKEN: from_secret: github_token - GO_VERSION: 1.22.4 + GO_VERSION: 1.22.7 GPG_PASSPHRASE: from_secret: packages_gpg_passphrase GPG_PRIVATE_KEY: @@ -3546,13 +3524,13 @@ steps: depends_on: [] environment: CGO_ENABLED: 0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: compile-build-cmd - commands: - ./bin/build whatsnew-checker depends_on: - compile-build-cmd - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: whats-new-checker trigger: event: @@ -3572,143 +3550,6 @@ volumes: clone: retries: 3 depends_on: [] -environment: - EDITION: oss -image_pull_secrets: -- gcr -- gar -kind: pipeline -name: release-test-frontend -node: - type: no-parallel -platform: - arch: amd64 - os: linux -services: [] -steps: -- commands: - - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 - name: identify-runner -- commands: - - yarn install --immutable || yarn install --immutable - depends_on: [] - image: node:20.9.0-alpine - name: yarn-install -- commands: - - apk add --update git bash - - yarn betterer ci - depends_on: - - yarn-install - image: node:20.9.0-alpine - name: betterer-frontend -- commands: - - yarn run ci:test-frontend - depends_on: - - yarn-install - environment: - TEST_MAX_WORKERS: 50% - image: node:20.9.0-alpine - name: test-frontend -trigger: - event: - exclude: - - promote - ref: - exclude: - - refs/tags/*-cloud* - include: - - refs/tags/v* -type: docker -volumes: -- host: - path: /var/run/docker.sock - name: docker ---- -clone: - retries: 3 -depends_on: [] -environment: - EDITION: oss -image_pull_secrets: -- gcr -- gar -kind: pipeline -name: release-test-backend -node: - type: no-parallel -platform: - arch: amd64 - os: linux -services: [] -steps: -- commands: 
- - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 - name: identify-runner -- commands: - - '# It is required that code generated from Thema/CUE be committed and in sync - with its inputs.' - - '# The following command will fail if running code generators produces any diff - in output.' - - apk add --update make - - CODEGEN_VERIFY=1 make gen-cue - depends_on: [] - image: golang:1.22.4-alpine - name: verify-gen-cue -- commands: - - '# It is required that generated jsonnet is committed and in sync with its inputs.' - - '# The following command will fail if running code generators produces any diff - in output.' - - apk add --update make - - CODEGEN_VERIFY=1 make gen-jsonnet - depends_on: [] - image: golang:1.22.4-alpine - name: verify-gen-jsonnet -- commands: - - apk add --update make - - make gen-go - depends_on: - - verify-gen-cue - image: golang:1.22.4-alpine - name: wire-install -- commands: - - apk add --update build-base shared-mime-info shared-mime-info-lang - - go list -f '{{.Dir}}/...' 
-m | xargs go test -tags requires_buildifer -short -covermode=atomic - -timeout=5m - depends_on: - - wire-install - image: golang:1.22.4-alpine - name: test-backend -- commands: - - apk add --update build-base - - go test -count=1 -covermode=atomic -timeout=5m -run '^TestIntegration' $(find - ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' - | grep -o '\(.*\)/' | sort -u) - depends_on: - - wire-install - image: golang:1.22.4-alpine - name: test-backend-integration -trigger: - event: - exclude: - - promote - ref: - exclude: - - refs/tags/*-cloud* - include: - - refs/tags/v* -type: docker -volumes: -- host: - path: /var/run/docker.sock - name: docker ---- -clone: - retries: 3 -depends_on: -- release-test-backend -- release-test-frontend image_pull_secrets: - gcr - gar @@ -3727,7 +3568,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.19.1 + ALPINE_BASE: alpine:3.20.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -3744,7 +3585,7 @@ steps: from_secret: gcp_key_base64 GITHUB_TOKEN: from_secret: github_token - GO_VERSION: 1.22.4 + GO_VERSION: 1.22.7 GPG_PASSPHRASE: from_secret: packages_gpg_passphrase GPG_PRIVATE_KEY: @@ -3798,7 +3639,7 @@ steps: name: identify-runner - commands: - $$ProgressPreference = "SilentlyContinue" - - Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/windows/grabpl.exe + - Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/windows/grabpl.exe -OutFile grabpl.exe image: grafana/ci-wix:0.1.1 name: windows-init @@ -3889,9 +3730,7 @@ volumes: --- clone: retries: 3 -depends_on: -- release-test-backend -- release-test-frontend +depends_on: [] image_pull_secrets: - gcr - gar @@ -3910,7 +3749,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.19.1 + ALPINE_BASE: alpine:3.20.3 CDN_DESTINATION: 
from_secret: rgm_cdn_destination DESTINATION: @@ -3927,7 +3766,7 @@ steps: from_secret: gcp_key_base64 GITHUB_TOKEN: from_secret: github_token - GO_VERSION: 1.22.4 + GO_VERSION: 1.22.7 GPG_PASSPHRASE: from_secret: packages_gpg_passphrase GPG_PRIVATE_KEY: @@ -4012,7 +3851,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - yarn install --immutable || yarn install --immutable @@ -4067,7 +3906,7 @@ services: [] steps: - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -4077,7 +3916,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' @@ -4086,14 +3925,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - apk add --update build-base shared-mime-info shared-mime-info-lang @@ -4101,7 +3940,7 @@ steps: -timeout=5m depends_on: - wire-install - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: test-backend - commands: - apk add --update build-base @@ -4110,7 +3949,7 @@ steps: | grep -o '\(.*\)/' | sort -u) depends_on: - wire-install - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: test-backend-integration trigger: cron: @@ -4148,7 +3987,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.19.1 + ALPINE_BASE: alpine:3.20.3 CDN_DESTINATION: from_secret: 
rgm_cdn_destination DESTINATION: @@ -4165,7 +4004,7 @@ steps: from_secret: gcp_key_base64 GITHUB_TOKEN: from_secret: github_token - GO_VERSION: 1.22.4 + GO_VERSION: 1.22.7 GPG_PASSPHRASE: from_secret: packages_gpg_passphrase GPG_PRIVATE_KEY: @@ -4295,7 +4134,7 @@ steps: environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.19.1 + ALPINE_BASE: alpine:3.20.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4312,7 +4151,7 @@ steps: from_secret: gcp_key_base64 GITHUB_TOKEN: from_secret: github_token - GO_VERSION: 1.22.4 + GO_VERSION: 1.22.7 GPG_PASSPHRASE: from_secret: packages_gpg_passphrase GPG_PRIVATE_KEY: @@ -4401,11 +4240,11 @@ steps: - commands: - 'dagger run --silent /src/grafana-build artifacts -a $${ARTIFACTS} --grafana-ref=$${GRAFANA_REF} --enterprise-ref=$${ENTERPRISE_REF} --grafana-repo=$${GRAFANA_REPO} --version=$${VERSION} ' - - --go-version=1.22.4 + - --go-version=1.22.7 environment: _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: from_secret: dagger_token - ALPINE_BASE: alpine:3.19.1 + ALPINE_BASE: alpine:3.20.3 CDN_DESTINATION: from_secret: rgm_cdn_destination DESTINATION: @@ -4422,7 +4261,7 @@ steps: from_secret: gcp_key_base64 GITHUB_TOKEN: from_secret: github_token - GO_VERSION: 1.22.4 + GO_VERSION: 1.22.7 GPG_PASSPHRASE: from_secret: packages_gpg_passphrase GPG_PRIVATE_KEY: @@ -4512,20 +4351,20 @@ steps: - commands: [] depends_on: - clone - image: golang:1.22.4-windowsservercore-1809 + image: golang:1.22.7-windowsservercore-1809 name: windows-init - commands: - go install github.com/google/wire/cmd/wire@v0.5.0 - wire gen -tags oss ./pkg/server depends_on: - windows-init - image: golang:1.22.4-windowsservercore-1809 + image: golang:1.22.7-windowsservercore-1809 name: wire-install - commands: - go test -tags requires_buildifer -short -covermode=atomic -timeout=5m ./pkg/... 
depends_on: - wire-install - image: golang:1.22.4-windowsservercore-1809 + image: golang:1.22.7-windowsservercore-1809 name: test-backend trigger: event: @@ -4602,13 +4441,13 @@ services: steps: - commands: - mkdir -p bin - - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.50/grabpl + - curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v3.0.56/grabpl - chmod +x bin/grabpl image: byrnedo/alpine-curl:0.1.8 name: grabpl - commands: - echo $DRONE_RUNNER_NAME - image: alpine:3.19.1 + image: alpine:3.20.3 name: identify-runner - commands: - '# It is required that code generated from Thema/CUE be committed and in sync @@ -4618,7 +4457,7 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-cue depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-cue - commands: - '# It is required that generated jsonnet is committed and in sync with its inputs.' @@ -4627,14 +4466,14 @@ steps: - apk add --update make - CODEGEN_VERIFY=1 make gen-jsonnet depends_on: [] - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: verify-gen-jsonnet - commands: - apk add --update make - make gen-go depends_on: - verify-gen-cue - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: wire-install - commands: - dockerize -wait tcp://postgres:5432 -timeout 120s @@ -4655,7 +4494,7 @@ steps: GRAFANA_TEST_DB: postgres PGPASSWORD: grafanatest POSTGRES_HOST: postgres - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: postgres-integration-tests - commands: - dockerize -wait tcp://mysql57:3306 -timeout 120s @@ -4676,7 +4515,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql57 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-5.7-integration-tests - commands: - dockerize -wait tcp://mysql80:3306 -timeout 120s @@ -4697,7 +4536,7 @@ steps: environment: GRAFANA_TEST_DB: mysql MYSQL_HOST: mysql80 - 
image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: mysql-8.0-integration-tests - commands: - dockerize -wait tcp://redis:6379 -timeout 120s @@ -4713,7 +4552,7 @@ steps: - wait-for-redis environment: REDIS_URL: redis://redis:6379/0 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: redis-integration-tests - commands: - dockerize -wait tcp://memcached:11211 -timeout 120s @@ -4729,7 +4568,7 @@ steps: - wait-for-memcached environment: MEMCACHED_HOSTS: memcached:11211 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: memcached-integration-tests - commands: - dockerize -wait tcp://mimir_backend:8080 -timeout 120s @@ -4745,7 +4584,7 @@ steps: environment: AM_TENANT_ID: test AM_URL: http://mimir_backend:8080 - image: golang:1.22.4-alpine + image: golang:1.22.7-alpine name: remote-alertmanager-integration-tests trigger: event: @@ -5100,12 +4939,12 @@ steps: - commands: - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM docker:27-cli - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM alpine/git:2.40.1 - - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM golang:1.22.4-alpine + - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM golang:1.22.7-alpine - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM node:20.9.0-alpine - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM node:20-bookworm - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM google/cloud-sdk:431.0.0 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM grafana/grafana-ci-deploy:1.3.3 - - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM alpine:3.19.1 + - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM alpine:3.20.3 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM ubuntu:22.04 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM byrnedo/alpine-curl:0.1.8 - trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM plugins/slack @@ -5137,12 +4976,12 @@ steps: - commands: - trivy --exit-code 1 --severity HIGH,CRITICAL docker:27-cli - trivy --exit-code 1 --severity HIGH,CRITICAL 
alpine/git:2.40.1 - - trivy --exit-code 1 --severity HIGH,CRITICAL golang:1.22.4-alpine + - trivy --exit-code 1 --severity HIGH,CRITICAL golang:1.22.7-alpine - trivy --exit-code 1 --severity HIGH,CRITICAL node:20.9.0-alpine - trivy --exit-code 1 --severity HIGH,CRITICAL node:20-bookworm - trivy --exit-code 1 --severity HIGH,CRITICAL google/cloud-sdk:431.0.0 - trivy --exit-code 1 --severity HIGH,CRITICAL grafana/grafana-ci-deploy:1.3.3 - - trivy --exit-code 1 --severity HIGH,CRITICAL alpine:3.19.1 + - trivy --exit-code 1 --severity HIGH,CRITICAL alpine:3.20.3 - trivy --exit-code 1 --severity HIGH,CRITICAL ubuntu:22.04 - trivy --exit-code 1 --severity HIGH,CRITICAL byrnedo/alpine-curl:0.1.8 - trivy --exit-code 1 --severity HIGH,CRITICAL plugins/slack @@ -5257,7 +5096,7 @@ name: gar --- get: name: pat - path: infra/data/ci/github/grafanabot + path: ci/data/repo/grafana/grafana/grafanabot kind: secret name: github_token --- @@ -5424,6 +5263,6 @@ kind: secret name: gcr_credentials --- kind: signature -hmac: faa6a717a8a8140633e39cefbd43432d1121392ac118cefb3985395857868889 +hmac: ec762bbe26644048a3c3b06db1da7a391b8ab9cd3d88607a86d0795af6d6158b ... 
\ No newline at end of file diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3758fd58fb92e..b137ac052cb9f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -43,12 +43,12 @@ /docs/sources/dashboards/ @imatwawana /docs/sources/datasources/ @jdbaldry /docs/sources/explore/ @grafana/explore-squad @lwandz13 -/docs/sources/fundamentals @chri2547 -/docs/sources/getting-started/ @chri2547 -/docs/sources/introduction/ @chri2547 +/docs/sources/fundamentals @irenerl24 +/docs/sources/getting-started/ @irenerl24 +/docs/sources/introduction/ @irenerl24 /docs/sources/panels-visualizations/ @imatwawana /docs/sources/release-notes/ @Eve832 @GrafanaWriter -/docs/sources/setup-grafana/ @chri2547 +/docs/sources/setup-grafana/ @irenerl24 /docs/sources/upgrade-guide/ @imatwawana /docs/sources/whatsnew/ @imatwawana diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index 48246cc7f425a..bf378c4ad4558 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -66,8 +66,15 @@ jobs: sparse-checkout: | .github/workflows CHANGELOG.md + .nvmrc + .prettierignore + .prettierrc.js fetch-depth: 0 fetch-tags: true + - name: Setup nodejs environment + uses: actions/setup-node@v4 + with: + node-version-file: .nvmrc - name: "Configure git user" run: | git config --local user.name "github-actions[bot]" @@ -111,9 +118,11 @@ jobs: fi git diff CHANGELOG.md - git add CHANGELOG.md + + - name: "Prettify CHANGELOG.md" + run: npx prettier --write CHANGELOG.md - name: "Commit changelog changes" - run: git commit --allow-empty -m "Update changelog" CHANGELOG.md + run: git add CHANGELOG.md && git commit --allow-empty -m "Update changelog" CHANGELOG.md - name: "git push" if: ${{ inputs.dry_run }} != true run: git push diff --git a/.github/workflows/publish-technical-documentation-next.yml b/.github/workflows/publish-technical-documentation-next.yml index 56fc984700e91..6b2cd7489b385 100644 --- 
a/.github/workflows/publish-technical-documentation-next.yml +++ b/.github/workflows/publish-technical-documentation-next.yml @@ -1,38 +1,21 @@ -name: "publish-technical-documentation-next" +name: publish-technical-documentation-next on: push: branches: - - "main" + - main paths: - "docs/sources/**" workflow_dispatch: jobs: sync: if: github.repository == 'grafana/grafana' - runs-on: "ubuntu-latest" + permissions: + contents: read + id-token: write + runs-on: ubuntu-latest steps: - - name: "Checkout Grafana repo" - uses: "actions/checkout@v4" - - - name: "Clone website-sync Action" - # WEBSITE_SYNC_TOKEN is a fine-grained GitHub Personal Access Token that expires. - # It must be regenerated in the grafanabot GitHub account and requires a Grafana organization - # GitHub administrator to update the organization secret. - # The IT helpdesk can update the organization secret. - run: "git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.WEBSITE_SYNC_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync" - - - name: "Publish to website repository (next)" - uses: "./.github/actions/website-sync" - id: "publish-next" + - uses: actions/checkout@v4 + - uses: grafana/writers-toolkit/publish-technical-documentation@publish-technical-documentation/v1 with: - repository: "grafana/website" - branch: "master" - host: "github.com" - # PUBLISH_TO_WEBSITE_TOKEN is a fine-grained GitHub Personal Access Token that expires. - # It must be regenerated in the grafanabot GitHub account and requires a Grafana organization - # GitHub administrator to update the organization secret. - # The IT helpdesk can update the organization secret. 
- github_pat: "grafanabot:${{ secrets.PUBLISH_TO_WEBSITE_TOKEN }}" - source_folder: "docs/sources" - target_folder: "content/docs/grafana/next" + website_directory: content/docs/grafana/next diff --git a/.github/workflows/publish-technical-documentation-release.yml b/.github/workflows/publish-technical-documentation-release.yml index c22bb41cb4897..13f7c93df59fa 100644 --- a/.github/workflows/publish-technical-documentation-release.yml +++ b/.github/workflows/publish-technical-documentation-release.yml @@ -1,4 +1,4 @@ -name: "publish-technical-documentation-release" +name: publish-technical-documentation-release on: push: @@ -12,63 +12,18 @@ on: jobs: sync: if: github.repository == 'grafana/grafana' - runs-on: "ubuntu-latest" + permissions: + contents: read + id-token: write + runs-on: ubuntu-latest steps: - - name: "Checkout Grafana repo" - uses: "actions/checkout@v4" + - uses: actions/checkout@v4 with: fetch-depth: 0 - - - name: "Checkout Actions library" - uses: "actions/checkout@v4" + - uses: grafana/writers-toolkit/publish-technical-documentation-release@publish-technical-documentation-release/v1 with: - repository: "grafana/grafana-github-actions" - path: "./actions" - - - name: "Install Actions from library" - run: "npm install --production --prefix ./actions" - - - name: "Determine if there is a matching release tag" - id: "has-matching-release-tag" - uses: "./actions/has-matching-release-tag" - with: - ref_name: "${{ github.ref_name }}" release_tag_regexp: "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$" release_branch_regexp: "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.x$" - - - name: "Determine technical documentation version" - if: "steps.has-matching-release-tag.outputs.bool == 'true'" - uses: "./actions/docs-target" - id: "target" - with: - ref_name: "${{ github.ref_name }}" - - - name: "Clone website-sync Action" - if: "steps.has-matching-release-tag.outputs.bool == 'true'" - # WEBSITE_SYNC_TOKEN is a fine-grained GitHub Personal Access Token that expires. 
- # It must be regenerated in the grafanabot GitHub account and requires a Grafana organization - # GitHub administrator to update the organization secret. - # The IT helpdesk can update the organization secret. - run: "git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.WEBSITE_SYNC_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync" - - - name: "Switch to HEAD of version branch for tags" - # Tags aren't necessarily made to the HEAD of the version branch. - # The documentation to be published is always on the HEAD of the version branch. - if: "steps.has-matching-release-tag.outputs.bool == 'true' && github.ref_type == 'tag'" - run: "git switch --detach origin/${{ steps.target.outputs.target }}.x" - - - name: "Publish to website repository (release)" - if: "steps.has-matching-release-tag.outputs.bool == 'true'" - uses: "./.github/actions/website-sync" - id: "publish-release" - with: - repository: "grafana/website" - branch: "master" - host: "github.com" - # PUBLISH_TO_WEBSITE_TOKEN is a fine-grained GitHub Personal Access Token that expires. - # It must be regenerated in the grafanabot GitHub account and requires a Grafana organization - # GitHub administrator to update the organization secret. - # The IT helpdesk can update the organization secret. 
- github_pat: "grafanabot:${{ secrets.PUBLISH_TO_WEBSITE_TOKEN }}" - source_folder: "docs/sources" - target_folder: "content/docs/grafana/${{ steps.target.outputs.target }}" + release_branch_with_patch_regexp: "^v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)$" + website_directory: content/docs/grafana + version_suffix: "" diff --git a/.github/workflows/release-pr.yml b/.github/workflows/release-pr.yml index f960bb5ca745e..476a1447201bf 100644 --- a/.github/workflows/release-pr.yml +++ b/.github/workflows/release-pr.yml @@ -69,6 +69,10 @@ jobs: fetch-depth: '0' fetch-tags: 'false' path: .grafana-main + - name: Setup nodejs environment + uses: actions/setup-node@v4 + with: + node-version-file: .nvmrc - name: Configure git user run: | git config --local user.name "github-actions[bot]" @@ -115,7 +119,9 @@ jobs: rm -f CHANGELOG.part changelog_items.md git diff CHANGELOG.md - + + - name: "Prettify CHANGELOG.md" + run: npx prettier --write CHANGELOG.md - name: Commit CHANGELOG.md changes run: git add CHANGELOG.md && git commit --allow-empty -m "Update changelog" CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md index f9823cdeeff8f..628358117bff2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,80 @@ + + +# 11.1.7+security-01 (2024-10-17) + +### Bug fixes + +- **SQL Expressions**: Fixes CVE-2024-9264 + + + + +# 11.1.7 (2024-10-01) + +### Features and enhancements + +- **Chore:** Bump Go to 1.22.7 [#93355](https://github.com/grafana/grafana/pull/93355), [@hairyhenderson](https://github.com/hairyhenderson) +- **Chore:** Bump Go to 1.22.7 (Enterprise) + +### Bug fixes + +- **Alerting:** Fix preview of silences when label name contains spaces [#93050](https://github.com/grafana/grafana/pull/93050), [@tomratcliffe](https://github.com/tomratcliffe) +- **Alerting:** Make query wrapper match up datasource UIDs if necessary [#93115](https://github.com/grafana/grafana/pull/93115), [@tomratcliffe](https://github.com/tomratcliffe) +- **AzureMonitor:** Deduplicate resource 
picker rows [#93704](https://github.com/grafana/grafana/pull/93704), [@aangelisc](https://github.com/aangelisc) +- **AzureMonitor:** Improve resource picker efficiency [#93439](https://github.com/grafana/grafana/pull/93439), [@aangelisc](https://github.com/aangelisc) +- **AzureMonitor:** Remove Basic Logs retention warning [#93122](https://github.com/grafana/grafana/pull/93122), [@aangelisc](https://github.com/aangelisc) +- **Correlations:** Limit access to correlations page to users who can access Explore [#93675](https://github.com/grafana/grafana/pull/93675), [@ifrost](https://github.com/ifrost) +- **Plugins:** Avoid returning 404 for `AutoEnabled` apps [#93487](https://github.com/grafana/grafana/pull/93487), [@wbrowne](https://github.com/wbrowne) + + + + +# 11.1.6+security-01 (2024-10-17) + +### Bug fixes + +- **SQL Expressions**: Fixes CVE-2024-9264 + + + + +# 11.1.6 (2024-09-26) + +### Features and enhancements + +- **Chore:** Update swagger ui (4.3.0 to 5.17.14) [#92341](https://github.com/grafana/grafana/pull/92341), [@ryantxu](https://github.com/ryantxu) + +### Bug fixes + +- **Templating:** Fix searching non-latin template variables [#92892](https://github.com/grafana/grafana/pull/92892), [@leeoniya](https://github.com/leeoniya) +- **TutorialCard:** Fix link to tutorial not opening [#92646](https://github.com/grafana/grafana/pull/92646), [@eledobleefe](https://github.com/eledobleefe) +- **Alerting:** Fixed CVE-2024-8118. 
+ +### Plugin development fixes & changes + +- **Bugfix:** QueryField typeahead missing background color [#92316](https://github.com/grafana/grafana/pull/92316), [@mckn](https://github.com/mckn) + + + + +# 11.1.5 (2024-08-27) + +### Bug fixes + +- **Alerting:** Fix permissions for prometheus rule endpoints [#91414](https://github.com/grafana/grafana/pull/91414), [@yuri-tceretian](https://github.com/yuri-tceretian) +- **Alerting:** Fix persisting result fingerprint that is used by recovery threshold [#91290](https://github.com/grafana/grafana/pull/91290), [@yuri-tceretian](https://github.com/yuri-tceretian) +- **Auditing:** Fix a possible crash when audit logger parses responses for failed requests (Enterprise) +- **RBAC:** Fix an issue with server admins not being able to manage users in orgs that they don't belong to [#92273](https://github.com/grafana/grafana/pull/92273), [@IevaVasiljeva](https://github.com/IevaVasiljeva) +- **RBAC:** Fix an issue with server admins not being able to manage users in orgs that they dont belong to (Enterprise) +- **RBAC:** Fix seeder failures when inserting duplicated permissions (Enterprise) +- **Snapshots:** Fix panic when snapshot_remove_expired is true [#91232](https://github.com/grafana/grafana/pull/91232), [@ryantxu](https://github.com/ryantxu) +- **VizTooltip:** Fix positioning at bottom and right edges on mobile [#92137](https://github.com/grafana/grafana/pull/92137), [@leeoniya](https://github.com/leeoniya) + +### Plugin development fixes & changes + +- **Bugfix:** QueryField typeahead missing background color [#92316](https://github.com/grafana/grafana/pull/92316), [@mckn](https://github.com/mckn) + + # 11.1.4 (2024-08-14) diff --git a/Dockerfile b/Dockerfile index 35111861e33f5..506f8308cd14f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ ARG BASE_IMAGE=alpine:3.19.1 ARG JS_IMAGE=node:20-alpine ARG JS_PLATFORM=linux/amd64 -ARG GO_IMAGE=golang:1.22.4-alpine +ARG GO_IMAGE=golang:1.22.7-alpine ARG 
GO_SRC=go-builder ARG JS_SRC=js-builder diff --git a/Makefile b/Makefile index fdc1037664c72..762e0e56be2df 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ include .bingo/Variables.mk GO = go -GO_VERSION = 1.22.4 +GO_VERSION = 1.22.7 GO_FILES ?= ./pkg/... ./pkg/apiserver/... ./pkg/apimachinery/... ./pkg/promlib/... SH_FILES ?= $(shell find ./scripts -name *.sh) GO_RACE := $(shell [ -n "$(GO_RACE)" -o -e ".go-race-enabled-locally" ] && echo 1 ) diff --git a/apps/.gitkeep b/apps/.gitkeep new file mode 100644 index 0000000000000..d5e8e8f26a041 --- /dev/null +++ b/apps/.gitkeep @@ -0,0 +1 @@ +# workaround for grafana-build quirk diff --git a/conf/defaults.ini b/conf/defaults.ini index 9a5d332dba0f3..ebaa160ecaf00 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -1689,6 +1689,9 @@ ha_engine_address = "127.0.0.1:6379" # ha_engine_password allows setting an optional password to authenticate with the engine ha_engine_password = "" +# ha_prefix is a prefix for keys in the HA engine. It's used to separate keys for different Grafana instances. +ha_prefix = + #################################### Grafana Image Renderer Plugin ########################## [plugin.grafana-image-renderer] # Instruct headless browser instance to use a default timezone when not provided by Grafana, e.g. when rendering panel image of alert. diff --git a/conf/sample.ini b/conf/sample.ini index fc54e3d0a716f..5f086e544cd44 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -1546,6 +1546,9 @@ max_annotations_to_keep = # ha_engine_password allows setting an optional password to authenticate with the engine ;ha_engine_password = "" +# ha_prefix is a prefix for keys in the HA engine. It's used to separate keys for different Grafana instances. +;ha_prefix = + #################################### Grafana Image Renderer Plugin ########################## [plugin.grafana-image-renderer] # Instruct headless browser instance to use a default timezone when not provided by Grafana, e.g. 
when rendering panel image of alert. diff --git a/docs/make-docs b/docs/make-docs index 170e361431ac7..2dc6726d9198c 100755 --- a/docs/make-docs +++ b/docs/make-docs @@ -6,6 +6,15 @@ # [Semantic versioning](https://semver.org/) is used to help the reader identify the significance of changes. # Changes are relevant to this script and the support docs.mk GNU Make interface. # +# ## 8.1.0 (2024-08-22) +# +# ### Added +# +# - Additional website mounts for projects that use the website repository. +# +# Mounts are required for `make docs` to work in the website repository or with the website project. +# The Makefile is also mounted for convenient development of the procedure that repository. +# # ## 8.0.1 (2024-07-01) # # ### Fixed @@ -727,6 +736,9 @@ POSIX_HERESTRING _repo="$(repo_path website)" volumes="--volume=${_repo}/config:/hugo/config:z" + volumes="${volumes} --volume=${_repo}/content/guides:/hugo/content/guides:z" + volumes="${volumes} --volume=${_repo}/content/whats-new:/hugo/content/whats-new:z" + volumes="${volumes} --volume=${_repo}/Makefile:/hugo/Makefile:z" volumes="${volumes} --volume=${_repo}/layouts:/hugo/layouts:z" volumes="${volumes} --volume=${_repo}/scripts:/hugo/scripts:z" fi diff --git a/docs/sources/alerting/alerting-rules/create-grafana-managed-rule.md b/docs/sources/alerting/alerting-rules/create-grafana-managed-rule.md index ff5156f3df9cb..b125829db887a 100644 --- a/docs/sources/alerting/alerting-rules/create-grafana-managed-rule.md +++ b/docs/sources/alerting/alerting-rules/create-grafana-managed-rule.md @@ -253,7 +253,7 @@ You can configure the alert instance state when its evaluation returns no data: | No Data configuration | Description | | --------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | No Data | The default option. 
Sets alert instance state to `No data`.
The alert rule also creates a new alert instance `DatasourceNoData` with the name and UID of the alert rule, and UID of the datasource that returned no data as labels. | -| Alerting | Sets alert instance state to `Alerting`. It transitions from `Pending` to `Alerting` after the [pending period](ref:pending-period) has finished. | +| Alerting | Sets the alert instance state to `Pending` and then transitions to `Alerting` once the [pending period](ref:pending-period) ends. If you set the pending period to 0, the alert instance state is immediately set to `Alerting`. | | Normal | Sets alert instance state to `Normal`. | | Keep Last State | Maintains the alert instance in its last state. Useful for mitigating temporary issues, refer to [Keep last state](ref:keep-last-state). | diff --git a/docs/sources/alerting/alerting-rules/create-mimir-loki-managed-rule.md b/docs/sources/alerting/alerting-rules/create-mimir-loki-managed-rule.md index c21dc2e9fa737..cb1677d5eb84e 100644 --- a/docs/sources/alerting/alerting-rules/create-mimir-loki-managed-rule.md +++ b/docs/sources/alerting/alerting-rules/create-mimir-loki-managed-rule.md @@ -43,33 +43,35 @@ refs: # Configure data source-managed alert rules -Create alert rules for an external Grafana Mimir or Loki instance that has ruler API enabled; these are called data source-managed alert rules. +Create data source-managed alert rules for Grafana Mimir or Grafana Loki data sources, which have been configured to support rule creation. + +To configure your Grafana Mimir or Loki data source for alert rule creation, enable either the Loki Ruler API or the Mimir Ruler API. + +For more information, refer to [Loki Ruler API](/docs/loki//api/#ruler) or [Mimir Ruler API](/docs/mimir//references/http-api/#ruler). **Note**: -Alert rules for an external Grafana Mimir or Loki instance can be edited or deleted by users with Editor or Admin roles. 
+Alert rules for a Grafana Mimir or Loki instance can be edited or deleted by users with Editor or Admin roles. If you delete an alerting resource created in the UI, you can no longer retrieve it. To make a backup of your configuration and to be able to restore deleted alerting resources, create your alerting resources using file provisioning, Terraform, or the Alerting API. ## Before you begin -- Verify that you have write permission to the Prometheus or Loki data source. Otherwise, you will not be able to create or update Grafana Mimir managed alert rules. +- Verify that you have write permission to the Mimir or Loki data source. Otherwise, you cannot create or update Grafana Mimir or Loki-managed alert rules. -- For Grafana Mimir and Loki data sources, enable the Ruler API by configuring their respective services. +- Enable the Mimir or Loki Ruler API. - **Loki** - The `local` rule storage type, default for the Loki data source, supports only viewing of rules. To edit rules, configure one of the other rule storage types. - **Grafana Mimir** - use the `/prometheus` prefix. The Prometheus data source supports both Grafana Mimir and Prometheus, and Grafana expects that both the [Query API](/docs/mimir/latest/operators-guide/reference-http-api/#querier--query-frontend) and [Ruler API](/docs/mimir/latest/operators-guide/reference-http-api/#ruler) are under the same URL. You cannot provide a separate URL for the Ruler API. -Watch this video to learn more about how to create a Mimir managed alert rule: {{< vimeo 720001865 >}} +Watch this video to learn more about how to create a Mimir-managed alert rule: {{< vimeo 720001865 >}} {{% admonition type="note" %}} -If you do not want to manage alert rules for a particular Loki or Prometheus data source, go to its settings and clear the **Manage alerts via Alerting UI** checkbox. 
+If you do not want to manage alert rules for a particular Loki or Mimir data source, go to its settings and clear the **Manage alerts via Alerting UI** checkbox. {{% /admonition %}} -In the following sections, we’ll guide you through the process of creating your data source-managed alert rules. - To create a data source-managed alert rule, use the in-product alert creation flow and follow these steps to help you. ## Set alert rule name diff --git a/docs/sources/alerting/configure-notifications/manage-contact-points/integrations/configure-email.md b/docs/sources/alerting/configure-notifications/manage-contact-points/integrations/configure-email.md index 8295288797b94..81b6ccac4d8e2 100644 --- a/docs/sources/alerting/configure-notifications/manage-contact-points/integrations/configure-email.md +++ b/docs/sources/alerting/configure-notifications/manage-contact-points/integrations/configure-email.md @@ -75,6 +75,9 @@ To set up email integration, complete the following steps. 1. Enter a contact point name. 1. From the Integration list, select **Email**. 1. Enter the email addresses you want to send notifications to. + + E-mail addresses are case sensitive. Ensure that the e-mail address entered is correct. + 1. Click **Test** to check that your integration works. 1. Click **Save contact point**. 
diff --git a/docs/sources/alerting/configure-notifications/template-notifications/create-notification-templates.md b/docs/sources/alerting/configure-notifications/template-notifications/create-notification-templates.md index 42c39fcda40fe..0e6d8a78eb30a 100644 --- a/docs/sources/alerting/configure-notifications/template-notifications/create-notification-templates.md +++ b/docs/sources/alerting/configure-notifications/template-notifications/create-notification-templates.md @@ -53,7 +53,7 @@ To create a notification template that contains more than one template: ## Preview notification templates -Preview how your notification templates will look before using them in your contact points, helping you understand the result of the template you are creating as well as enabling you to fix any errors before saving it. +Preview how your notification templates should look before using them in your contact points, helping you understand the result of the template you are creating as well as enabling you to fix any errors before saving it. **Note:** This feature is only for Grafana Alertmanager. @@ -81,7 +81,7 @@ To preview your notification templates: c. Click **Add alert data**. - d. Click **Refresh preview** to see what your template content will look like and the corresponding payload data. + d. Click **Refresh preview** to see what your template content should look like and the corresponding payload data. If there are any errors in your template, they are displayed in the Preview and you can correct them before saving. @@ -162,6 +162,86 @@ Resolved alerts: {{ template "email.message" . }} ``` +## Group multiple alert instances into one email notification + +To make alerts more concise, you can group multiple instances of a firing alert into a single email notification in a table format. This way, you avoid long, repetitive emails and make alerts easier to digest. + +Follow these steps to create a custom notification template that consolidates alert instances into a table. 
+ +1. Modify the alert rule to include an annotation that is referenced in the notification template later on. +1. Enter a name for the **custom annotation**: In this example, _ServerInfo_. +1. Enter the following code as the value for the annotation. It retrieves the server's instance name and a corresponding metric value, formatted as a table row: + + ``` + {{ index $labels "instance" }}{{- "\t" -}}{{ index $values "A"}}{{- "\n" -}} + ``` + + This line of code returns the labels and their values in the form of a table. Assuming $labels has `{"instance": "node1"}` and $values has `{"A": "123"}`, the output would be: + + ``` + node1 123 + ``` + +1. Create a notification template that references the _ServerInfo_ annotation. + + ```go + {{ define "Table" }} + {{- "\nHost\t\tValue\n" -}} + {{ range .Alerts -}} + {{ range .Annotations.SortedPairs -}} + {{ if (eq .Name "ServerInfo") -}} + {{ .Value -}} + {{- end }} + {{- end }} + {{- end }} + {{ end }} + ``` + + The notification template outputs a list of server information from the "ServerInfo" annotation for each alert instance. + +1. Navigate to your contact point in Grafana +1. In the **Message** field, reference the template by name (see **Optional Email settings** section): + + ``` + {{ template "Table" . }} + ``` + + This generates a neatly formatted table in the email, grouping information for all affected servers into a single notification. + +## Conditional notification template + +Template alert notifications based on a label. In this example the label represents a namespace. + +1. Use the following code in your notification template to display different messages based on the namespace: + + ```go + {{ define "my_conditional_notification" }} + {{ if eq .CommonLabels.namespace "namespace-a" }} + Alert: CPU limits have reached 80% in namespace-a. + {{ else if eq .CommonLabels.namespace "namespace-b" }} + Alert: CPU limits have reached 80% in namespace-b. 
+ {{ else if eq .CommonLabels.namespace "namespace-c" }} + Alert: CPU limits have reached 80% in namespace-c. + {{ else }} + Alert: CPU limits have reached 80% for {{ .CommonLabels.namespace }} namespace. + {{ end }} + {{ end }} + ``` + + `.CommonLabels` is a map containing the labels that are common to all the alerts firing. + + Make sure to replace the `.namespace` label with a label that exists in your alert rule. + +1. Replace `namespace-a`, `namespace-b`, and `namespace-c` with your specific namespace values. +1. Navigate to your contact point in Grafana +1. In the **Message** field, reference the template by name (see **Optional settings** section): + + ``` + {{ template "my_conditional_notification" . }} + ``` + + This template alters the content of alert notifications depending on the namespace value. + ## Template the title of a Slack message Template the title of a Slack message to contain the number of firing and resolved alerts: diff --git a/docs/sources/alerting/fundamentals/_index.md b/docs/sources/alerting/fundamentals/_index.md index 3ef02d5c1b5a1..d8ec2362cbdeb 100644 --- a/docs/sources/alerting/fundamentals/_index.md +++ b/docs/sources/alerting/fundamentals/_index.md @@ -57,7 +57,11 @@ refs: # Introduction to Alerting -Whether you’re just starting out or you're a more experienced user of Grafana Alerting, learn more about the fundamentals and available features that help you create, manage, and respond to alerts; and improve your team’s ability to resolve issues quickly. For a hands-on introduction, refer to our [tutorial to get started with Grafana Alerting](http://grafana.com/tutorials/alerting-get-started/). +Whether you’re just starting out or you're a more experienced user of Grafana Alerting, learn more about the fundamentals and available features that help you create, manage, and respond to alerts; and improve your team’s ability to resolve issues quickly. 
+ +{{< admonition type="tip" >}} +For a hands-on introduction, refer to our [tutorial to get started with Grafana Alerting](http://grafana.com/tutorials/alerting-get-started/). +{{< /admonition >}} The following diagram gives you an overview of Grafana Alerting and introduces you to some of the fundamental features that are the principles of how Grafana Alerting works. diff --git a/docs/sources/alerting/fundamentals/alert-rule-evaluation/state-and-health.md b/docs/sources/alerting/fundamentals/alert-rule-evaluation/state-and-health.md index 6b516e80a92ff..4ea464f6eed89 100644 --- a/docs/sources/alerting/fundamentals/alert-rule-evaluation/state-and-health.md +++ b/docs/sources/alerting/fundamentals/alert-rule-evaluation/state-and-health.md @@ -72,6 +72,15 @@ In [Configure no data and error handling](ref:no-data-and-error-handling), you c {{< figure src="/media/docs/alerting/alert-rule-configure-no-data-and-error.png" alt="A screenshot of the `Configure no data and error handling` option in Grafana Alerting." max-width="500px" >}} +To reduce the number of **No Data** or **Error** state alerts received, try the following. + +1. Use the **Keep last state** option. For more information, refer to the section below. This option allows the alert to retain its last known state when there is no data available, rather than switching to a **No Data** state. +1. For **No Data** alerts, you can optimize your alert rule by expanding the time range of the query. However, if the time range is too big, it affects the performance of the query and can lead to errors due to timeout. + + To minimize timeouts resulting in the **Error** state, reduce the time range to request less data every evaluation cycle. + +1. Change the default [evaluation time out](https://grafana.com/docs/grafana/latest/setup-grafana/configure-grafana/#evaluation_timeout). The default is set at 30 seconds. 
To increase the default evaluation timeout, open a support ticket from the [Cloud Portal](https://grafana.com/docs/grafana-cloud/account-management/support/#grafana-cloud-support-options). Note that this should be a last resort, because it may affect the performance of all alert rules and cause missed evaluations if the timeout is too long. + #### Keep last state The "Keep Last State" option helps mitigate temporary data source issues, preventing alerts from unintentionally firing, resolving, and re-firing. diff --git a/docs/sources/alerting/fundamentals/alert-rules/_index.md b/docs/sources/alerting/fundamentals/alert-rules/_index.md index 5cdc3b5442aac..204eab737dea5 100644 --- a/docs/sources/alerting/fundamentals/alert-rules/_index.md +++ b/docs/sources/alerting/fundamentals/alert-rules/_index.md @@ -69,7 +69,7 @@ Grafana supports two different alert rule types: Grafana-managed alert rules and ## Grafana-managed alert rules -Grafana-managed alert rules are the most flexible alert rule type. They allow you to create alerts that can act on data from any of the [supported data sources](#supported-data-sources), and use multiple data sources in a single alert rule. +Grafana-managed alert rules are the most flexible alert rule type. They allow you to create alert rules that can act on data from any of the [supported data sources](#supported-data-sources), and use multiple data sources in a single alert rule. You can also add expressions to transform your data and set alert conditions. Using images in alert notifications is also supported. Additionally, you can also add [expressions to transform your data](ref:expression-queries), set custom alert conditions, and include [images in alert notifications](ref:notification-images). 
@@ -87,9 +87,11 @@ Find the public data sources supporting Alerting in the [Grafana Plugins directo ## Data source-managed alert rules -Data source-managed alert rules can improve query performance via [recording rules](#recording-rules) and ensure high-availability and fault tolerance when implementing a distributed architecture. +Data source-managed alert rules can be used for Grafana Mimir or Grafana Loki data sources which have been configured to support rule creation. -They are only supported for Prometheus-based or Loki data sources with the Ruler API enabled. For more information, refer to the [Loki Ruler API](/docs/loki//api/#ruler) or [Mimir Ruler API](/docs/mimir//references/http-api/#ruler). +They can improve query performance via [recording rules](#recording-rules) and ensure high-availability and fault tolerance when implementing a distributed architecture. + +They are only supported for Grafana Mimir or Grafana Loki data sources with the Ruler API enabled. For more information, refer to the [Loki Ruler API](/docs/loki//api/#ruler) or [Mimir Ruler API](/docs/mimir//references/http-api/#ruler). 
{{< figure src="/media/docs/alerting/mimir-managed-alerting-architecture-v2.png" max-width="750px" caption="Mimir-managed alerting architecture" >}} diff --git a/docs/sources/alerting/set-up/configure-high-availability/_index.md b/docs/sources/alerting/set-up/configure-high-availability/_index.md index 539b4c1fc9bad..0c9e15c23b464 100644 --- a/docs/sources/alerting/set-up/configure-high-availability/_index.md +++ b/docs/sources/alerting/set-up/configure-high-availability/_index.md @@ -18,6 +18,17 @@ labels: - oss title: Configure high availability weight: 600 +refs: + state-history: + - pattern: /docs/grafana/ + destination: /docs/grafana//alerting/manage-notifications/view-state-health/ + - pattern: /docs/grafana-cloud/ + destination: /docs/grafana-cloud/alerting-and-irm/alerting/manage-notifications/view-state-health/ + meta-monitoring: + - pattern: /docs/grafana/ + destination: /docs/grafana//alerting/monitor/ + - pattern: /docs/grafana-cloud/ + destination: /docs/grafana-cloud/alerting-and-irm/alerting/monitor/ --- # Configure high availability @@ -28,18 +39,13 @@ Grafana Alerting uses the Prometheus model of separating the evaluation of alert When running multiple instances of Grafana, all alert rules are evaluated on all instances. You can think of the evaluation of alert rules as being duplicated by the number of running Grafana instances. This is how Grafana Alerting makes sure that as long as at least one Grafana instance is working, alert rules are still be evaluated and notifications for alerts are still sent. -You can find this duplication in state history and it is a good way to confirm if you are using high availability. +You can find this duplication in state history and it is a good way to [verify your high availability setup](#verify-your-high-availability-setup). -While the alert generator evaluates all alert rules on all instances, the alert receiver makes a best-effort attempt to avoid sending duplicate notifications. 
Alertmanager chooses availability over consistency, which may result in occasional duplicated or out-of-order notifications. It takes the opinion that duplicate or out-of-order notifications are better than no notifications. +While the alert generator evaluates all alert rules on all instances, the alert receiver makes a best-effort attempt to avoid duplicate notifications. The alertmanagers use a gossip protocol to share information between them to prevent sending duplicated notifications. -The Alertmanager uses a gossip protocol to share information about notifications between Grafana instances. It also gossips silences, which means a silence created on one Grafana instance is replicated to all other Grafana instances. Both notifications and silences are persisted to the database periodically, and during graceful shut down. +Alertmanager chooses availability over consistency, which may result in occasional duplicated or out-of-order notifications. It takes the opinion that duplicate or out-of-order notifications are better than no notifications. -{{% admonition type="note" %}} - -If using a mix of `execute_alerts=false` and `execute_alerts=true` on the HA nodes, since the alert state is not shared amongst the Grafana instances, the instances with `execute_alerts=false` do not show any alert status. -This is because the HA settings (`ha_peers`, etc) only apply to the alert notification delivery (i.e. de-duplication of alert notifications, and silences, as mentioned above). - -{{% /admonition %}} +Alertmanagers also gossip silences, which means a silence created on one Grafana instance is replicated to all other Grafana instances. Both notifications and silences are persisted to the database periodically, and during graceful shut down. 
## Enable alerting high availability using Memberlist @@ -54,8 +60,11 @@ Since gossiping of notifications and silences uses both TCP and UDP port `9094`, You must have at least one (1) Grafana instance added to the `ha_peers` section. 1. Set `[ha_listen_address]` to the instance IP address using a format of `host:port` (or the [Pod's](https://kubernetes.io/docs/concepts/workloads/pods/) IP in the case of using Kubernetes). By default, it is set to listen to all interfaces (`0.0.0.0`). +1. Set `[ha_advertise_address]` to the instance's hostname or IP address in the format "host:port". Use this setting when the instance is behind NAT (Network Address Translation), such as in Docker Swarm or Kubernetes service, where external and internal addresses differ. This address helps other cluster instances communicate with it. The setting is optional. 1. Set `[ha_peer_timeout]` in the `[unified_alerting]` section of the custom.ini to specify the time to wait for an instance to send a notification via the Alertmanager. The default value is 15s, but it may increase if Grafana servers are located in different geographic regions or if the network latency between them is high. +For a demo, see this [example using Docker Compose](https://github.com/grafana/alerting-ha-docker-examples/tree/main/memberlist). + ## Enable alerting high availability using Redis As an alternative to Memberlist, you can use Redis for high availability. This is useful if you want to have a central @@ -67,20 +76,9 @@ database for HA and cannot support the meshing of all Grafana servers. 1. Optional: Set the username and password if authentication is enabled on the Redis server using `ha_redis_username` and `ha_redis_password`. 1. Optional: Set `ha_redis_prefix` to something unique if you plan to share the Redis server with multiple Grafana instances. 1. 
Optional: Set `ha_redis_tls_enabled` to `true` and configure the corresponding `ha_redis_tls_*` fields to secure communications between Grafana and Redis with Transport Layer Security (TLS). +1. Set `[ha_advertise_address]` to `ha_advertise_address = "${POD_IP}:9094"` This is required if the instance doesn't have an IP address that is part of RFC 6890 with a default route. -The following metrics can be used for meta monitoring, exposed by the `/metrics` endpoint in Grafana: - -| Metric | Description | -| ---------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -| alertmanager_cluster_messages_received_total | Total number of cluster messages received. | -| alertmanager_cluster_messages_received_size_total | Total size of cluster messages received. | -| alertmanager_cluster_messages_sent_total | Total number of cluster messages sent. | -| alertmanager_cluster_messages_sent_size_total | Total number of cluster messages received. | -| alertmanager_cluster_messages_publish_failures_total | Total number of messages that failed to be published. | -| alertmanager_cluster_members | Number indicating current number of members in cluster. | -| alertmanager_peer_position | Position the Alertmanager instance believes it's in. The position determines a peer's behavior in the cluster. | -| alertmanager_cluster_pings_seconds | Histogram of latencies for ping messages. | -| alertmanager_cluster_pings_failures_total | Total number of failed pings. | +For a demo, see this [example using Docker Compose](https://github.com/grafana/alerting-ha-docker-examples/tree/main/redis). 
## Enable alerting high availability using Kubernetes @@ -148,3 +146,58 @@ The following metrics can be used for meta monitoring, exposed by the `/metrics` ha_advertise_address = "${POD_IP}:9094" ha_peer_timeout = 15s ``` + +## Verify your high availability setup + +When running multiple Grafana instances, all alert rules are evaluated on every instance. This multiple evaluation of alert rules is visible in the [state history](ref:state-history) and provides a straightforward way to verify that your high availability configuration is working correctly. + +{{% admonition type="note" %}} + +If using a mix of `execute_alerts=false` and `execute_alerts=true` on the HA nodes, since the alert state is not shared amongst the Grafana instances, the instances with `execute_alerts=false` do not show any alert status. + +The HA settings (`ha_peers`, etc.) apply only to communication between alertmanagers, synchronizing silences and attempting to avoid duplicate notifications, as described in the introduction. + +{{% /admonition %}} + +You can also confirm your high availability setup by monitoring Alertmanager metrics exposed by Grafana. + +| Metric | Description | +| ---------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | +| alertmanager_cluster_members | Number indicating current number of members in cluster. | +| alertmanager_cluster_messages_received_total | Total number of cluster messages received. | +| alertmanager_cluster_messages_received_size_total | Total size of cluster messages received. | +| alertmanager_cluster_messages_sent_total | Total number of cluster messages sent. | +| alertmanager_cluster_messages_sent_size_total | Total size of cluster messages sent. | +| alertmanager_cluster_messages_publish_failures_total | Total number of messages that failed to be published. 
| +| alertmanager_cluster_pings_seconds | Histogram of latencies for ping messages. | +| alertmanager_cluster_pings_failures_total | Total number of failed pings. | +| alertmanager_peer_position | The position an Alertmanager instance believes it holds, which defines its role in the cluster. Peers should be numbered sequentially, starting from zero. | + +You can confirm the number of Grafana instances in your alerting high availability setup by querying the `alertmanager_cluster_members` and `alertmanager_peer_position` metrics. + +Note that these alerting high availability metrics are exposed via the `/metrics` endpoint in Grafana, and are not automatically collected or displayed. If you have a Prometheus instance connected to Grafana, add a `scrape_config` to scrape Grafana metrics and then query these metrics in Explore. + +```yaml +- job_name: grafana + honor_timestamps: true + scrape_interval: 15s + scrape_timeout: 10s + metrics_path: /metrics + scheme: http + follow_redirects: true + static_configs: + - targets: + - grafana:3000 +``` + +For more information on monitoring alerting metrics, refer to [Alerting meta-monitoring](ref:meta-monitoring). For a demo, see [alerting high availability examples using Docker Compose](https://github.com/grafana/alerting-ha-docker-examples/). + +## Prevent duplicate notifications + +In high-availability mode, each Grafana instance runs its own pre-configured alertmanager to handle alert notifications. + +When multiple Grafana instances are running, all alert rules are evaluated on each instance. By default, each instance sends firing alerts to its respective alertmanager. This results in notification handling being duplicated across all running Grafana instances. + +Alertmanagers in HA mode communicate with each other to coordinate notification delivery. However, this setup can sometimes lead to duplicated or out-of-order notifications. 
By design, HA prioritizes sending duplicate notifications over the risk of missing notifications. + +To avoid duplicate notifications, you can configure a shared alertmanager to manage notifications for all Grafana instances. For more information, refer to [add an external alertmanager](/docs/grafana//alerting/set-up/configure-alertmanager/). diff --git a/docs/sources/alerting/set-up/configure-rbac/access-roles/index.md b/docs/sources/alerting/set-up/configure-rbac/access-roles/index.md index c841f96f5a6ee..0b06882eb3d9f 100644 --- a/docs/sources/alerting/set-up/configure-rbac/access-roles/index.md +++ b/docs/sources/alerting/set-up/configure-rbac/access-roles/index.md @@ -43,19 +43,19 @@ Fixed roles provide users more granular access to create, view, and update Alert Details of the fixed roles and the access they provide for Grafana Alerting are below. -| Fixed role | Permissions | Description | -| -------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------ | -| `fixed:alerting.instances:writer` | All permissions from `fixed:alerting.instances:reader` and
`alert.instances:create`
`alert.instances:write` for organization scope
`alert.instances.external:write` for scope `datasources:*` | Create, update and expire all silences. | -| `fixed:alerting.instances:reader` | `alert.instances:read` for organization scope
`alert.instances.external:read` for scope `datasources:*` | Read all alerts and silences. | -| `fixed:alerting.notifications:writer` | All permissions from `fixed:alerting.notifications:reader` and
`alert.notifications:write`for organization scope
`alert.notifications.external:read` for scope `datasources:*` | Create, update, and delete contact points, templates, mute timings and notification policies for Grafana and external Alertmanager. | -| `fixed:alerting.notifications:reader` | `alert.notifications:read` for organization scope
`alert.notifications.external:read` for scope `datasources:*` | Read all Grafana and Alertmanager contact points, templates, and notification policies. | -| `fixed:alerting.rules:writer` | All permissions from `fixed:alerting.rules:reader` and
`alert.rule:create`
`alert.rule:write`
`alert.rule:delete`
`alert.silences:create`
`alert.silences:write` for scope `folders:*`
`alert.rules.external:write` for scope `datasources:*` | Create, update, and delete all alert rules and manage rule-specific silences. | -| `fixed:alerting.rules:reader` | `alert.rule:read`, `alert.silences:read` for scope `folders:*`
`alert.rules.external:read` for scope `datasources:*`
`alert.notifications.time-intervals:read`
`alert.notifications.receivers:list` | Read all alert rules and read rule-specific silences. | -| `fixed:alerting:writer` | All permissions from `fixed:alerting.rules:writer`
`fixed:alerting.instances:writer`
`fixed:alerting.notifications:writer` | Create, update, and delete all alert rules, silences, contact points, templates, mute timings, and notification policies. | -| `fixed:alerting:reader` | All permissions from `fixed:alerting.rules:reader`
`fixed:alerting.instances:reader`
`fixed:alerting.notifications:reader` | Read-only permissions for all alert rules, alerts, contact points, and notification policies. | -| `fixed:alerting.provisioning.secrets:reader` | `alert.provisioning:read` and `alert.provisioning.secrets:read` | Read-only permissions for Provisioning API and let export resources with decrypted secrets. | -| `fixed:alerting.provisioning:writer` | `alert.provisioning:read` and `alert.provisioning:write` | Create, update and delete Grafana alert rules, notification policies, contact points, templates, etc via provisioning API. | -| `fixed:alerting.provisioning.status:writer` | `alert.provisioning.provenance:write` | Set provenance status to alert rules, notification policies, contact points, etc. Should be used together with regular writer roles. | +| Display name in UI / Fixed role | Permissions | Description | +| ---------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Silences Writer: `fixed:alerting.instances:writer` | All permissions from `fixed:alerting.instances:reader` and
`alert.instances:create`
`alert.instances:write` for organization scope
`alert.instances.external:write` for scope `datasources:*` | Add and update silences in Grafana and external providers. | +| Instances and Silences Reader: `fixed:alerting.instances:reader` | `alert.instances:read` for organization scope
`alert.instances.external:read` for scope `datasources:*` | Read alert instances and silences in Grafana and external providers. | +| Notifications Writer: `fixed:alerting.notifications:writer` | All permissions from `fixed:alerting.notifications:reader` and
`alert.notifications:write` for organization scope
`alert.notifications.external:write` for scope `datasources:*` | Add, update, and delete notification policies and contact points in Grafana and external providers. | +| Notifications Reader: `fixed:alerting.notifications:reader` | `alert.notifications:read` for organization scope
`alert.notifications.external:read` for scope `datasources:*` | Read notification policies and contact points in Grafana and external providers. | +| Rules Writer: `fixed:alerting.rules:writer` | All permissions from `fixed:alerting.rules:reader` and
`alert.rule:create`
`alert.rule:write`
`alert.rule:delete`
`alert.silences:create`
`alert.silences:write` for scope `folders:*`
`alert.rules.external:write` for scope `datasources:*` | Create, update, and delete all alert rules and manage rule-specific silences. | +| Rules Reader: `fixed:alerting.rules:reader` | `alert.rule:read`, `alert.silences:read` for scope `folders:*`
`alert.rules.external:read` for scope `datasources:*`
`alert.notifications.time-intervals:read`
`alert.notifications.receivers:list` | Read all alert rules and rule-specific silences in Grafana and external providers. | +| Full access: `fixed:alerting:writer` | All permissions from `fixed:alerting.rules:writer`
`fixed:alerting.instances:writer`
`fixed:alerting.notifications:writer` | Add, update, and delete alert rules, silences, contact points, and notification policies in Grafana and external providers. | +| Full read-only access: `fixed:alerting:reader` | All permissions from `fixed:alerting.rules:reader`
`fixed:alerting.instances:reader`
`fixed:alerting.notifications:reader` | Read alert rules, alert instances, silences, contact points, and notification policies in Grafana and external providers. | +| Read via Provisioning API + Export Secrets: `fixed:alerting.provisioning.secrets:reader` | `alert.provisioning:read` and `alert.provisioning.secrets:read` | Read alert rules, alert instances, silences, contact points, and notification policies using the provisioning API and use export with decrypted secrets. | +| Access to alert rules provisioning API: `fixed:alerting.provisioning:writer` | `alert.provisioning:read` and `alert.provisioning:write` | Manage all alert rules, notification policies, contact points, templates, in the organization using the provisioning API. | +| Set provisioning status: `fixed:alerting.provisioning.status:writer` | `alert.provisioning.provenance:write` | Set provisioning rules for Alerting resources. Should be used together with other regular roles (Notifications Writer and/or Rules Writer.) | ## Create custom roles diff --git a/docs/sources/dashboards/build-dashboards/view-dashboard-json-model/index.md b/docs/sources/dashboards/build-dashboards/view-dashboard-json-model/index.md index e639d4d54a7ea..762da8a801c9a 100644 --- a/docs/sources/dashboards/build-dashboards/view-dashboard-json-model/index.md +++ b/docs/sources/dashboards/build-dashboards/view-dashboard-json-model/index.md @@ -57,7 +57,6 @@ In the following JSON, id is shown as null which is the default value assigned t "to": "now" }, "timepicker": { - "time_options": [], "refresh_intervals": [] }, "templating": { @@ -136,17 +135,6 @@ The grid has a negative gravity that moves panels up if there is empty space abo "now": true, "hidden": false, "nowDelay": "", - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ], "refresh_intervals": [ "5s", "10s", @@ -174,7 +162,6 @@ Usage of the fields is explained below: | **now** | | | **hidden** | whether timepicker is hidden or not 
| | **nowDelay** | override the now time by entering a time delay. Use this option to accommodate known delays in data aggregation to avoid null values. | -| **time_options** | options available in the time picker dropdown | | **refresh_intervals** | interval options available in the refresh picker dropdown | | **status** | | | **type** | | diff --git a/docs/sources/dashboards/create-reports/index.md b/docs/sources/dashboards/create-reports/index.md index 425070ce01647..ae06d3becc9ce 100644 --- a/docs/sources/dashboards/create-reports/index.md +++ b/docs/sources/dashboards/create-reports/index.md @@ -335,11 +335,11 @@ You can customize the branding options. Report branding: -- **Company logo:** Company logo displayed in the report PDF. It can be configured by specifying a URL, or by uploading a file. Defaults to the Grafana logo. +- **Company logo:** Company logo displayed in the report PDF. It can be configured by specifying a URL, or by uploading a file. The maximum file size is 16 MB. Defaults to the Grafana logo. Email branding: -- **Company logo:** Company logo displayed in the report email. It can be configured by specifying a URL, or by uploading a file. Defaults to the Grafana logo. +- **Company logo:** Company logo displayed in the report email. It can be configured by specifying a URL, or by uploading a file. The maximum file size is 16 MB. Defaults to the Grafana logo. - **Email footer:** Toggle to enable the report email footer. Select **Sent by** or **None**. - **Footer link text:** Text of the link in the report email footer. Defaults to `Grafana`. - **Footer link URL:** Link of the report email footer. 
diff --git a/docs/sources/datasources/loki/query-editor/index.md b/docs/sources/datasources/loki/query-editor/index.md index 3ff47a25215fd..37750aadd1044 100644 --- a/docs/sources/datasources/loki/query-editor/index.md +++ b/docs/sources/datasources/loki/query-editor/index.md @@ -171,6 +171,8 @@ The following options are the same for both **Builder** and **Code** mode: - **Line limit** -Defines the upper limit for the number of log lines returned by a query. The default is `1000` +- **Direction** - Determines the search order. **Backward** is a backward search starting at the end of the time range. **Forward** is a forward search starting at the beginning of the time range. The default is **Backward** + - **Step** Sets the step parameter of Loki metrics queries. The default value equals to the value of `$__interval` variable, which is calculated using the time range and the width of the graph (the number of pixels). - **Resolution** Deprecated. Sets the step parameter of Loki metrics range queries. With a resolution of `1/1`, each pixel corresponds to one data point. `1/2` retrieves one data point for every other pixel, `1/10` retrieves one data point per 10 pixels, and so on. Lower resolutions perform better. diff --git a/docs/sources/datasources/mysql/_index.md b/docs/sources/datasources/mysql/_index.md index 9a538f06ebb4f..8148733ef3f3e 100644 --- a/docs/sources/datasources/mysql/_index.md +++ b/docs/sources/datasources/mysql/_index.md @@ -73,7 +73,7 @@ For instructions on how to add a data source to Grafana, refer to the [administr Only users with the organization administrator role can add data sources. Administrators can also [configure the data source via YAML](#provision-the-data-source) with Grafana's provisioning system. 
-{{< docs/play title="MySQL: Cities of the World Sample Data Set" url="https://play.grafana.org/d/8JOvPQr7k/" >}} +{{< docs/play title="MySQL Overview" url="https://play.grafana.org/d/edyh1ib7db6rkb/mysql-overview" >}} ## Configure the data source diff --git a/docs/sources/datasources/prometheus/_index.md b/docs/sources/datasources/prometheus/_index.md index e5fa61ef1d618..433e14aedf8cc 100644 --- a/docs/sources/datasources/prometheus/_index.md +++ b/docs/sources/datasources/prometheus/_index.md @@ -155,7 +155,7 @@ We also bundle a dashboard within Grafana so you can start viewing your metrics 1. Navigate to the data source's [configuration page](ref:configure-prometheus-data-source). 1. Select the **Dashboards** tab. -This displays dashboards for Grafana and Prometheus. + This displays dashboards for Grafana and Prometheus. 1. Select **Import** for the dashboard to import. diff --git a/docs/sources/developers/http_api/_index.md b/docs/sources/developers/http_api/_index.md index 806ab1f865051..28ec50df49242 100644 --- a/docs/sources/developers/http_api/_index.md +++ b/docs/sources/developers/http_api/_index.md @@ -80,40 +80,41 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk - [Admin API]({{< relref "admin/" >}}) - [Alerting API (unstable)](https://editor.swagger.io/?url=https://raw.githubusercontent.com/grafana/grafana/main/pkg/services/ngalert/api/tooling/post.json) -- [Alerting Provisioning API]({{< relref "alerting_provisioning/" >}}) +- [Alerting provisioning API]({{< relref "alerting_provisioning/" >}}) - [Annotations API]({{< relref "annotations/" >}}) - [Correlations API]({{< relref "correlations/" >}}) - [Dashboard API]({{< relref "dashboard/" >}}) -- [Dashboard Permissions API]({{< relref "dashboard_permissions/" >}}) -- [Dashboard Versions API]({{< relref "dashboard_versions/" >}}) +- [Dashboard permissions API]({{< relref "dashboard_permissions/" >}}) +- [Dashboard versions API]({{< relref "dashboard_versions/" >}}) - [Data 
source API]({{< relref "data_source/" >}}) - [Folder API]({{< relref "folder/" >}}) -- [Folder Permissions API]({{< relref "folder_permissions/" >}}) -- [Folder/Dashboard Search API]({{< relref "folder_dashboard_search/" >}}) -- [Library Element API]({{< relref "library_element/" >}}) +- [Folder permissions API]({{< relref "folder_permissions/" >}}) +- [Folder/Dashboard search API]({{< relref "folder_dashboard_search/" >}}) +- [Library element API]({{< relref "library_element/" >}}) - [Organization API]({{< relref "org/" >}}) - [Other API]({{< relref "other/" >}}) - [Playlists API]({{< relref "playlist/" >}}) - [Preferences API]({{< relref "preferences/" >}}) -- [Short URL API]({{< relref "short_url/" >}}) +- [Public dashboard API]({{< relref "dashboard_public/" >}}) - [Query history API]({{< relref "query_history/" >}}) +- [Service account API]({{< relref "serviceaccount/" >}}) +- [Short URL API]({{< relref "short_url/" >}}) - [Snapshot API]({{< relref "snapshot/" >}}) +- [SSO settings API]({{< relref "sso-settings/" >}}) - [Team API]({{< relref "team/" >}}) - [User API]({{< relref "user/" >}}) ## Deprecated HTTP APIs -- [Alerting Notification Channels API]({{< relref "alerting_notification_channels/" >}}) -- [Alerting API]({{< relref "alerting/" >}}) - [Authentication API]({{< relref "auth/" >}}) ## Grafana Enterprise HTTP APIs Grafana Enterprise includes all of the Grafana OSS APIs as well as those that follow: -- [Role-based access control API]({{< relref "access_control/" >}}) - [Data source permissions API]({{< relref "datasource_permissions/" >}}) -- [Team sync API]({{< relref "team_sync/" >}}) - [License API]({{< relref "licensing/" >}}) -- [Reporting API]({{< relref "reporting/" >}}) - [Query and resource caching API]({{< relref "query_and_resource_caching/" >}}) +- [Reporting API]({{< relref "reporting/" >}}) +- [Role-based access control API]({{< relref "access_control/" >}}) +- [Team sync API]({{< relref "team_sync/" >}}) diff --git 
a/docs/sources/developers/http_api/dashboard_versions.md b/docs/sources/developers/http_api/dashboard_versions.md index 5e0621433590e..fc98f7fe5e836 100644 --- a/docs/sources/developers/http_api/dashboard_versions.md +++ b/docs/sources/developers/http_api/dashboard_versions.md @@ -217,31 +217,7 @@ Content-Length: 1300 "from": "now-6h", "to": "now" }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, + "timepicker": {}, "timezone": "browser", "title": "test", "version": 1 @@ -328,31 +304,7 @@ Content-Length: 1300 "from": "now-6h", "to": "now" }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, + "timepicker": {}, "timezone": "browser", "title": "test", "version": 1 diff --git a/docs/sources/developers/http_api/preferences.md b/docs/sources/developers/http_api/preferences.md index a3f5b6b1afd25..cfcc79fdab9a4 100644 --- a/docs/sources/developers/http_api/preferences.md +++ b/docs/sources/developers/http_api/preferences.md @@ -13,7 +13,7 @@ labels: products: - enterprise - oss -title: 'HTTP Preferences API ' +title: 'Preferences API' --- # User and Org Preferences API diff --git a/docs/sources/developers/http_api/snapshot.md b/docs/sources/developers/http_api/snapshot.md index 54360b768ddd9..8329d4b62b25f 100644 --- a/docs/sources/developers/http_api/snapshot.md +++ b/docs/sources/developers/http_api/snapshot.md @@ -13,7 +13,7 @@ labels: products: - enterprise - oss -title: 'HTTP Snapshot API ' +title: 'Snapshot API' --- # Snapshot API diff --git a/docs/sources/explore/correlations-editor-in-explore.md b/docs/sources/explore/correlations-editor-in-explore.md index 04ba3c709c831..500532261580b 100644 --- 
a/docs/sources/explore/correlations-editor-in-explore.md +++ b/docs/sources/explore/correlations-editor-in-explore.md @@ -4,7 +4,7 @@ labels: - enterprise - oss title: Correlations Editor in Explore -weight: 400 +weight: 20 --- # Correlations Editor in Explore diff --git a/docs/sources/explore/explore-inspector.md b/docs/sources/explore/explore-inspector.md index fe685328f9884..6880f292e8236 100644 --- a/docs/sources/explore/explore-inspector.md +++ b/docs/sources/explore/explore-inspector.md @@ -8,7 +8,7 @@ labels: keywords: - Explore title: Query inspector in Explore -weight: 40 +weight: 15 --- # Query inspector in Explore diff --git a/docs/sources/explore/logs-integration.md b/docs/sources/explore/logs-integration.md index 3e8372e67c36b..11caec96f2f88 100644 --- a/docs/sources/explore/logs-integration.md +++ b/docs/sources/explore/logs-integration.md @@ -9,7 +9,7 @@ labels: - enterprise - oss title: Logs in Explore -weight: 15 +weight: 25 --- # Logs in Explore diff --git a/docs/sources/explore/simplified-exploration/_index.md b/docs/sources/explore/simplified-exploration/_index.md new file mode 100644 index 0000000000000..b778b3b346350 --- /dev/null +++ b/docs/sources/explore/simplified-exploration/_index.md @@ -0,0 +1,18 @@ +--- +description: Use your telemetry data to explore and determine the root cause of issues without performing queries. +keywords: + - Simplified exploration + - queryless + - Explore apps +title: Simplified exploration +menuTitle: Simplified exploration +weight: 100 +--- + +# Simplified exploration + +Introducing the Grafana Explore apps, designed for effortless data exploration through intuitive, queryless interactions. + +Easily explore telemetry signals with these specialized tools, tailored specifically for the Grafana databases to provide quick and accurate insights. 
+ +{{< section >}} diff --git a/docs/sources/explore/explore-metrics.md b/docs/sources/explore/simplified-exploration/metrics/index.md similarity index 83% rename from docs/sources/explore/explore-metrics.md rename to docs/sources/explore/simplified-exploration/metrics/index.md index be3fc570f6cd7..127dd6cb2651e 100644 --- a/docs/sources/explore/explore-metrics.md +++ b/docs/sources/explore/simplified-exploration/metrics/index.md @@ -6,7 +6,8 @@ labels: - oss title: Explore Metrics aliases: -canonical: https://grafana.com/docs/grafana/latest/explore/explore-metrics/ + - ../explore-metrics/ # /docs/grafana/latest/explore/explore-metrics/ +canonical: https://grafana.com/docs/grafana/latest/explore/simplified-exploration/metrics/ description: Explore Metrics lets you browse Prometheus-compatible metrics using an intuitive, queryless experience. weight: 200 --- @@ -15,18 +16,14 @@ weight: 200 Grafana Explore Metrics is a query-less experience for browsing **Prometheus-compatible** metrics. Quickly find related metrics with just a few simple clicks, without needing to write PromQL queries to retrieve metrics. -{{% admonition type="caution" %}} -Explore Metrics is currently in [public preview](/docs/release-life-cycle/). Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available. -{{% /admonition %}} - With Explore Metrics, you can: -- Easily slice and dice metrics based on their labels, so you can immediately see anomalies and identify issues -- See the right visualization for your metric based on its type (gauge vs. 
counter, for example) without building it yourself -- Surface other metrics relevant to the current metric -- “Explore in a drawer” - expand a drawer over a dashboard with more content so you don’t lose your place -- View a history of user steps when navigating through metrics and their filters - +- Easily segment metrics based on their labels, so you can immediately spot anomalies and identify issues. +- Automatically display the optimal visualization for each metric type (gauge vs. counter, for example) without manual setup. +- Uncover related metrics relevant to the one you're viewing. +- “Explore in a drawer” - overlay additional content on your dashboard without losing your current view. +- View a history of user steps when navigating through metrics and their filters. +- Seamlessly pivot to related telemetry, including log data. {{< docs/play title="Explore Metrics" url="https://play.grafana.org/explore/metrics/trail?from=now-1h&to=now&var-ds=grafanacloud-demoinfra-prom&var-filters=&refresh=&metricPrefix=all" >}} diff --git a/docs/sources/explore/trace-integration.md b/docs/sources/explore/trace-integration.md index da14c571cbc6a..f9b0779b12d37 100644 --- a/docs/sources/explore/trace-integration.md +++ b/docs/sources/explore/trace-integration.md @@ -8,7 +8,7 @@ labels: - enterprise - oss title: Traces in Explore -weight: 20 +weight: 40 --- # Traces in Explore diff --git a/docs/sources/panels-visualizations/visualizations/alert-list/index.md b/docs/sources/panels-visualizations/visualizations/alert-list/index.md index d40d3acf325d5..e0bcecfcc9ab9 100644 --- a/docs/sources/panels-visualizations/visualizations/alert-list/index.md +++ b/docs/sources/panels-visualizations/visualizations/alert-list/index.md @@ -50,6 +50,10 @@ Once you’ve created a [dashboard](https://grafana.com/docs/grafana/}} +{{< figure src="/static/img/docs/bar-chart-panel/barchart_small_example.png" max-width="1000px" alt="Bar chart" >}} + +You can use the bar chart visualization if you need 
to show: + +- Population distribution by age or location +- CPU usage per application +- Sales per division +- Server cost distribution + +## Configure a bar chart + +The following video shows you how to create and configure a bar chart visualization: + +{{< youtube id="qyKE9-71KkE" >}} {{< docs/play title="Grafana Bar Charts and Pie Charts" url="https://play.grafana.org/d/ktMs4D6Mk/" >}} ## Supported data formats -Only one data frame is supported and it must have at least one string field that will be used as the category for an X or Y axis and one or more numerical fields. +To create a bar chart visualization, you need a dataset containing one string or time field (or column) and at least one numeric field, though preferably more than one to make best use of the visualization. + +The text or time field is used to label the bars or values in each row of data and the numeric fields are represented by proportionally sized bars. -Example: +### Example 1 -| Browser | Market share | -| ------- | ------------ | -| Chrome | 50 | -| IE | 17.5 | +| Group | Value1 | Value2 | Value3 | +| ----- | ------ | ------ | ------ | +| uno | 5 | 3 | 2 | -If you have more than one numerical field the visualization will show grouped bars. +![Bar chart single row example](/media/docs/grafana/panels-visualizations/screenshot-grafana-11.1-barchart-example1.png 'Bar chart single row example') -### Visualizing time series or multiple result sets +If you have more than one text or time field, by default, the visualization uses the first one, but you can change this in the x-axis option as described in the [Bar chart options](#bar-chart-options) section. -If you have multiple time series or tables you first need to join them using a join or reduce transform. For example if you -have multiple time series and you want to compare their last and max value add the **Reduce** transform and specify **Max** and **Last** as options under **Calculations**. 
+### Example 2 -{{< figure src="/static/img/docs/bar-chart-panel/bar-chart-time-series-v8-0.png" max-width="1025px" caption="Bar chart time series example" >}} +If your dataset contains multiple rows, the visualization displays multiple bar chart groups where each group contains multiple bars representing all the numeric values for a row. + +| Group | Value1 | Value2 | Value3 | +| ----- | ------ | ------ | ------ | +| uno | 5 | 3 | 2 | +| dos | 10 | 6 | 4 | +| tres | 20 | 8 | 2 | + +![Bar chart multiple row example](/media/docs/grafana/panels-visualizations/screenshot-grafana-11.1-barchart-example2.png 'Bar chart multiple row example') + +While the first field can be time-based and you can use a bar chart to plot time-series data, for large amounts of time-series data, we recommend that you use the [time series visualization](https://grafana.com/docs/grafana/latest/panels-visualizations/visualizations/time-series/) and configure it to be displayed as bars. + +We recommend that you only use one dataset in a bar chart because using multiple datasets can result in unexpected behavior. ## Panel options @@ -75,6 +101,10 @@ have multiple time series and you want to compare their last and max value add t Use these options to refine your visualization. +### X Axis + +Specify which field is used for the x-axis. + ### Orientation - **Auto** - Grafana decides the bar orientation based on what the panel dimensions. diff --git a/docs/sources/panels-visualizations/visualizations/bar-gauge/index.md b/docs/sources/panels-visualizations/visualizations/bar-gauge/index.md index 3e08403717374..59111ee41a7ee 100644 --- a/docs/sources/panels-visualizations/visualizations/bar-gauge/index.md +++ b/docs/sources/panels-visualizations/visualizations/bar-gauge/index.md @@ -25,14 +25,76 @@ refs: # Bar gauge -Bar gauges simplify your data by reducing every field to a single value. You choose how Grafana calculates the reduction. 
- -This panel can show one or more bar gauges depending on how many series, rows, or columns your query returns. +Bar gauges simplify your data by reducing every field to a single value. You choose how Grafana calculates the reduction. This visualization can show one or more bar gauges depending on how many series, rows, or columns your query returns. {{< figure src="/static/img/docs/v66/bar_gauge_cover.png" max-width="1025px" alt="Bar gauge panel" >}} +The bar gauge visualization displays values as bars with various lengths or fills proportional to the values they represent. They differ from traditional bar charts in that they act as gauges displaying metrics between ranges. One example is a thermometer displaying body temperature in a bar filling up. + +You can use a bar gauge visualization when you need to show: + +- Key performance indicators (KPIs) +- System health +- Savings goals +- Attendance +- Process completion rates + +## Configure a bar gauge visualization + +The following video shows you how to create and configure a bar gauge visualization: + +{{< youtube id="7PhDysObEXA" >}} + {{< docs/play title="Bar Gauge" url="https://play.grafana.org/d/vmie2cmWz/" >}} +## Supported data formats + +To create a bar gauge visualization, you need a dataset querying at least one numeric field. Every numeric field in the dataset is displayed as a bar gauge. Text or time fields aren't required but if they're present, they're used for labeling. + +### Example 1 + +| Label | Value1 | Value2 | Value3 | +| ----- | ------ | ------ | ------ | +| Row1 | 5 | 3 | 2 | + +![Bar gauge with single row of data](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-bargauge-example1.png) + +The minimum and maximum range for the bar gauges is automatically pulled from the largest and smallest numeric values in the dataset. You can also manually define the minimum and maximum values as indicated in the [Standard options](#standard-options) section. 
+ +You can also define the minimum and maximum from the dataset provided. + +### Example 2 + +| Label | Value | Max | Min | +| ----- | ----- | --- | --- | +| Row1 | 3 | 6 | 1 | + +![Bar gauge with single row of data including maximum and minimum](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-bargauge-example2.png) + +If you don’t want to show gauges for the min and max values, you can configure only one field to be displayed as described in the [Value options](#value-options) section. + +![Bar gauge, single row of data with max and min displaying value](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-bargauge-example3.png) + +Even if the min and max aren’t displayed, the visualization still pulls the range from the data set. + +### Example 3 + +The bar gauge visualization also supports multiple records (rows) in the dataset. + +| Label | Value1 | Value2 | Value3 | +| ----- | ------ | ------ | ------ | +| Row1 | 5 | 3 | 2 | +| Row2 | 10 | 6 | 4 | +| Row3 | 20 | 8 | 2 | + +![Bar gauge with multiple rows of data displaying last row](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-bargauge-example4.png) + +By default, the visualization is configured to [calculate](#value-options) a single value per column or series and to display only the last set of data. However, it derives the minimum and maximum from the full dataset even if those values aren’t visible. In this example, that means only the last row of data is displayed in the gauges and the minimum and maximum values are defined as 2 and 20, pulled from the whole dataset. + +If you want to show one gauge per cell you can change the [Show](#show) setting from [Calculate](#calculate) to [All values](#all-values) and each bar is labeled by concatenating the text column with each value's column name. 
+ +![Bar gauge with multiple rows of data displaying all the values](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-bargauge-example5.png) + ## Panel options {{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} @@ -156,6 +218,10 @@ This option only applies when bar size is set to manual. {{< docs/shared lookup="visualizations/thresholds-options-2.md" source="grafana" version="" >}} +Last, colors of the bar gauge thresholds can be configured as described above. + +![Bar gauge with colored thresholds configured](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-bargauge-example6.png) + ## Field overrides {{< docs/shared lookup="visualizations/overrides-options.md" source="grafana" version="" >}} diff --git a/docs/sources/panels-visualizations/visualizations/canvas/index.md b/docs/sources/panels-visualizations/visualizations/canvas/index.md index 943281c9ad675..18ae79749aa8f 100644 --- a/docs/sources/panels-visualizations/visualizations/canvas/index.md +++ b/docs/sources/panels-visualizations/visualizations/canvas/index.md @@ -30,11 +30,26 @@ refs: # Canvas -Canvases combine the power of Grafana with the flexibility of custom elements. Canvases are extensible form-built visualizations that allow you to explicitly place elements within static and dynamic layouts. This empowers you to design custom visualizations and overlay data in ways that aren't possible with standard Grafana panels, all within Grafana's UI. If you've used popular UI and web design tools, then designing canvases will feel very familiar. +Canvases combine the power of Grafana with the flexibility of custom elements. +They are extensible visualizations that allow you to add and arrange elements wherever you want within unstructured static and dynamic layouts. +This lets you design custom visualizations and overlay data in ways that aren't possible with standard Grafana visualizations, all within the Grafana UI. 
-> We would love your feedback on Canvas. Please check out the [open Github issues](https://github.com/grafana/grafana/issues?page=1&q=is%3Aopen+is%3Aissue+label%3Aarea%2Fpanel%2Fcanvas) and [submit a new feature request](https://github.com/grafana/grafana/issues/new?assignees=&labels=type%2Ffeature-request,area%2Fpanel%2Fcanvas&title=Canvas:&projects=grafana-dataviz&template=1-feature_requests.md) as needed. +{{< video-embed src="/static/img/docs/canvas-panel/canvas-beta-overview-9-2-0.mp4" max-width="750px" alt="Canvas beta overview" >}} -{{< video-embed src="/static/img/docs/canvas-panel/canvas-beta-overview-9-2-0.mp4" max-width="750px" caption="Canvas beta overview" >}} +If you've used popular UI and web design tools, then designing canvases will feel very familiar. +With all of these dynamic elements, there's almost no limit to what a canvas can display. + +{{< admonition type="note" >}} +We'd love your feedback on the canvas visualization. Please check out the [open Github issues](https://github.com/grafana/grafana/issues?page=1&q=is%3Aopen+is%3Aissue+label%3Aarea%2Fpanel%2Fcanvas) and [submit a new feature request](https://github.com/grafana/grafana/issues/new?assignees=&labels=type%2Ffeature-request,area%2Fpanel%2Fcanvas&title=Canvas:&projects=grafana-dataviz&template=1-feature_requests.md) as needed. +{{< /admonition >}} + +## Supported data formats + +The canvas visualization is unique in that it doesn't have any specific data requirements. You can even start adding and configuring visual elements without providing any data. However, any data you plan to consume should be accessible through supported Grafana data sources and structured in a way that ensures smooth integration with your custom elements. + +If your canvas is going to update in real time, your data should support refreshes at your desired intervals without degrading the user experience. 
+ +You can tie [Elements](#elements) and [Connections](#connections) to data through options like text, colors, and background pattern images, etc. available in the canvas visualization. ## Elements diff --git a/docs/sources/panels-visualizations/visualizations/gauge/index.md b/docs/sources/panels-visualizations/visualizations/gauge/index.md index d9528a6b42710..0c8553138d913 100644 --- a/docs/sources/panels-visualizations/visualizations/gauge/index.md +++ b/docs/sources/panels-visualizations/visualizations/gauge/index.md @@ -25,16 +25,94 @@ refs: # Gauge -Gauges are single-value visualizations that can repeat a gauge for every series, column or row. +Gauges are single-value visualizations that allow you to quickly visualize where a value falls within a defined or calculated min and max range. With repeat options, you can display multiple gauges, each corresponding to a different series, column, or row. {{< figure src="/static/img/docs/v66/gauge_panel_cover.png" max-width="1025px" alt="A gauge visualization">}} -{{< docs/play title="Grafana Gauge Visualization" url="https://play.grafana.org/d/KIhkVD6Gk/" >}} +You can use gauges if you need to track: + +- Service level objectives (SLOs) +- How full a piece of equipment is +- How fast a vehicle is moving within a set of limits +- Network latency +- Equipment state with setpoint and alarm thresholds +- CPU consumption (0-100%) +- RAM availability + +## Configure a gauge visualization The following video provides beginner steps for creating gauge panels. You'll learn the data requirements and caveats, special customizations, and much more: {{< youtube id="QwXj3y_YpnE" >}} +{{< docs/play title="Grafana Gauge Visualization" url="https://play.grafana.org/d/KIhkVD6Gk/" >}} + +## Supported data formats + +To create a gauge visualization, you need a dataset containing at least one numeric field. These values are identified by the field name.
Additional text fields aren’t required but can be used for identification and labeling. + +### Example - One value + +| GaugeName | GaugeValue | +| --------- | ---------- | +| MyGauge | 5 | + +![Gauge with single numeric value](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example1.png 'Gauge with single numeric value') + +This dataset generates a visualization with one empty gauge showing the numeric value. This is because the gauge visualization automatically defines the upper and lower range from the minimum and maximum values in the dataset. This dataset has only one value, so it’s set as both minimum and maximum. + +If you only have one value, but you want to define a different minimum and maximum, you can set them manually in the [Standard options](#standard-options) settings to generate a more typical looking gauge. + +![Gauge with single numeric value and hardcoded max and min](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example2.png 'Gauge with single numeric value and hardcoded max-min') + +### Example - One row, multiple values + +The gauge visualization can support multiple fields in a dataset. + +| Identifier | value1 | value2 | value3 | +| ---------- | ------ | ------ | ------ | +| Gauges | 5 | 3 | 10 | + +![Gauge visualization with multiple numeric values in a single row](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example3.png 'Gauge with multiple numeric values in a single row') + +When there are multiple values in the dataset, the visualization displays multiple gauges and automatically defines the minimum and maximum. In this case, those are 3 and 10. Because the minimum and maximum values are defined, each gauge is shaded in to show that value in relation to the minimum and maximum. + +### Example - Multiple rows and values + +The gauge visualization can display datasets with multiple rows of data or even multiple datasets. 
+ +| Identifier | value1 | value2 | value3 | +| ---------- | ------ | ------ | ------ | +| Gauges | 5 | 3 | 10 | +| Indicators | 6 | 9 | 15 | +| Defaults | 1 | 4 | 8 | + +![Gauge visualization with multiple rows and columns of numeric values showing the last row](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example6.png 'Gauge viz with multiple rows and columns of numeric values showing the last row') + +By default, the visualization is configured to [calculate](#value-options) a single value per column or series and to display only the last row of data. However, it derives the minimum and maximum from the full dataset, even if those values aren’t visible. + +In this example, that means only the last row of data is displayed in the gauges and the minimum and maximum values are 1 and 10. The value 1 is displayed because it’s in the last row, while 10 is not. + +If you want to show one gauge per table cell, you can change the **Show** setting from **Calculate** to **All values**, and each gauge is labeled by concatenating the text column with each value's column name. + +![Gauge visualization with multiple rows and columns of numeric values showing all the values](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example7.png 'Gauge viz with multiple rows and columns of numeric values showing all the values') + +### Example - Defined min and max + +You can also define minimum and maximum values as part of the dataset. + +| Identifier | value | max | min | +| ---------- | ----- | --- | --- | +| Gauges | 5 | 10 | 2 | + +![Gauge visualization with numeric values defining max and minimum](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example4.png 'Gauge with numeric values defining max and minimum') + +If you don’t want to display gauges for the `min` and `max` values, you can configure only one field to be displayed as described in the [value options](#value-options) section. 
+ +![Gauge visualization with numeric values defining max and minimum but hidden](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example5.png 'Gauge with numeric values defining max and minimum but hidden') + +Even when minimum and maximum values aren’t displayed, the visualization still pulls the range from them. + ## Panel options {{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} @@ -135,6 +213,10 @@ Adjust the sizes of the gauge text. {{< docs/shared lookup="visualizations/thresholds-options-2.md" source="grafana" version="" >}} +Last, gauge colors and thresholds (the outer bar markers) of the gauge can be configured as described above. + +![Gauge viz with multiple rows and columns of numeric values showing all the values and thresholds defined for 0-6-11](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.2-gauge-example8.png 'Gauge viz with multiple rows and columns of numeric values showing all the values and thresholds defined for 0-6-11') + ## Field overrides {{< docs/shared lookup="visualizations/overrides-options.md" source="grafana" version="" >}} diff --git a/docs/sources/panels-visualizations/visualizations/geomap/index.md b/docs/sources/panels-visualizations/visualizations/geomap/index.md index d64d6bda9697d..b42d1f78a6c9f 100644 --- a/docs/sources/panels-visualizations/visualizations/geomap/index.md +++ b/docs/sources/panels-visualizations/visualizations/geomap/index.md @@ -49,13 +49,28 @@ refs: # Geomap -Geomaps allow you to view and customize the world map using geospatial data. You can configure various overlay styles and map view settings to easily focus on the important location-based characteristics of the data. +Geomaps allow you to view and customize the world map using geospatial data. It's the ideal visualization if you have data that includes location information and you want to see it displayed in a map. -> We would love your feedback on geomaps. 
Please check out the [open Github issues](https://github.com/grafana/grafana/issues?page=1&q=is%3Aopen+is%3Aissue+label%3Aarea%2Fpanel%2Fgeomap) and [submit a new feature request](https://github.com/grafana/grafana/issues/new?assignees=&labels=type%2Ffeature-request,area%2Fpanel%2Fgeomap&title=Geomap:&projects=grafana-dataviz&template=1-feature_requests.md) as needed. +You can configure and overlay [map layers](#layer-type), like heatmaps and networks, and blend included basemaps or your own custom maps. This helps you to easily focus on the important location-based characteristics of the data. -{{< figure src="/static/img/docs/geomap-panel/geomap-example-8-1-0.png" max-width="1200px" caption="Geomap panel" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-example-8-1-0.png" max-width="1200px" alt="Geomap visualization" >}} -Pan the map, while it's in focus, by using the arrow keys. Zoom in and out by using the `+` and `-` keys. +When a geomap is in focus, in addition to typical mouse controls, you can pan around using the arrow keys or zoom in and out using the plus (`+`) and minus (`-`) keys or icons. + +Geomaps are also useful when you have location data that’s changing in real time and you want to visualize where an element is moving, using auto-refresh. + +You can use a geomap visualization if you need to: + +- Track your fleet of vehicles and associated metrics +- Show the locations and statuses of data centers or other connected assets in a network +- Display geographic trends in a heatmap +- Visualize the relationship of your locations' HVAC consumption or solar production with the sun's location + +{{< admonition type="note" >}} +We'd love your feedback on the geomap visualization. 
Please check out the [open GitHub issues](https://github.com/grafana/grafana/issues?page=1&q=is%3Aopen+is%3Aissue+label%3Aarea%2Fpanel%2Fgeomap) and [submit a new feature request](https://github.com/grafana/grafana/issues/new?assignees=&labels=type%2Ffeature-request,area%2Fpanel%2Fgeomap&title=Geomap:&projects=grafana-dataviz&template=1-feature_requests.md) as needed. +{{< /admonition >}} + +## Configure a geomap visualization The following video provides beginner steps for creating geomap visualizations. You'll learn the data requirements and caveats, special customizations, preconfigured displays and much more: @@ -63,42 +78,105 @@ The following video provides beginner steps for creating geomap visualizations. {{< docs/play title="Geomap Examples" url="https://play.grafana.org/d/panel-geomap/" >}} -## Panel options +## Supported data formats + +To create a geomap visualization, you need datasets containing fields with location information. + +The supported location formats are: + +- Latitude and longitude +- Geohash +- Lookup codes: country, US states, or airports + +To learn more, refer to [Location mode](#location-mode). + +Geomaps also support additional fields with various data types to define things like labels, numbers, heat sizes, and colors. + +### Example - Latitude and longitude + +If you plan to use latitude and longitude coordinates, the dataset must include at least two fields (or columns): one called `latitude` (you can also use `lat`), and one called `longitude` (also `lon` or `lng`). When you use this naming convention, the visualization automatically detects the fields and displays the elements. The order of the fields doesn't matter as long as there is one latitude and one longitude. 
+ +| Name | latitude | longitude | value | +| --------------- | --------- | --------- | ----- | +| Disneyland | 33.8121 | -117.9190 | 4 | +| DisneyWorld | 28.3772 | -81.5707 | 10 | +| EuroDisney | 48.867374 | 2.784018 | 3 | +| Tokyo Disney | 35.6329 | 139.8804 | 70 | +| Shanghai Disney | 31.1414 | 121.6682 | 1 | + +If your latitude and longitude fields are named differently, you can specify them, as indicated in the [Location mode](#location-mode) section. + +### Example - Geohash + +If your location data is in geohash format, the visualization requires at least one field (or column) containing location data. + +If the field is named `geohash`, the visualization automatically detects the location and displays the elements. The order of the fields doesn't matter and the data set can have multiple other numeric, text, and time fields. + +| Name | geohash | trips | +| --------- | ------------ | ----- | +| Cancun | d5f21 | 8 | +| Honolulu | 87z9ps | 0 | +| Palm Cove | rhzxudynb014 | 1 | +| Mykonos | swdj02ey9gyx | 3 | + +If your field containing geohash location data is not named as above, you can configure the visualization to use geohash and specify which field to use, as explained in the [Location mode](#location-mode) section. + +### Example - Lookup codes + +The geomap visualization can identify locations based on country, airport, or US state codes. + +For this configuration, the dataset must contain at least one field (or column) containing the location code. + +If the field is named `lookup`, the visualization automatically detects it and displays points based on country codes. + +| Year | lookup | gdp | +| ---- | ------ | --------- | +| 2016 | MEX | 104171935 | +| 2016 | DEU | 94393454 | +| 2016 | FRA | 83654250 | +| 2016 | BRA | 80921527 | +| 2016 | CAN | 79699762 | + +The other location types— airport codes or US state codes—aren't automatically detected. 
+ +If you want to use other codes or give the field a custom name, you can follow the steps in the [Location mode](#location-mode) section. + +## Configuration options + +### Panel options {{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} -## Map View +### Map view options The map view controls the initial view of the map when the dashboard loads. -### Initial View +#### Initial View The initial view configures how the geomap renders when the panel is first loaded. -- **View** sets the center for the map when the panel first loads. - - **Fit to data** fits the map view based on the data extents of Map layers and updates when data changes. - - **Data** option allows selection of extent based on data from "All layers", a single "Layer", or the "Last value" from a selected layer. - - **Layer** can be selected if fitting data from a single "Layer" or the "Last value" of a layer. - - **Padding** sets padding in relative percent beyond data extent (not available when looking at "Last value" only). - - **Max Zoom** sets the maximum zoom level when fitting data. - - **Coordinates** sets the map view based on: - - **Latitude** - - **Longitude** - - Default Views are also available including: - - **(0°, 0°)** - - **North America** - - **South America** - - **Europe** - - **Africa** - - **West Asia** - - **South Asia** - - **South-East Asia** - - **East Asia** - - **Australia** - - **Oceania** -- **Zoom** sets the initial zoom level. - -### Share view +- **View** - Sets the center for the map when the panel first loads. Refer to the table following this list for view selections. +- **Zoom** - Sets the initial zoom level. +- **Use current map settings** - Use the settings of the current map to set the center. + + +| View selection | Description | +|---|---| +| Fit to data | fits the map view based on the data extents of Map layers and updates when data changes.
  • **Data** - option allows selection of extent based on data from "All layers", a single "Layer", or the "Last value" from a selected layer.
  • **Layer** - can be selected if fitting data from a single "Layer" or the "Last value" of a layer.
  • **Padding** - sets padding in relative percent beyond data extent (not available when looking at "Last value" only).
  • **Max zoom** - sets the maximum zoom level when fitting data.
  • | +| (0°, 0°) | | +| Coordinates | sets the map view based on: **Latitude** and **Longitude**. | + + +Default Views are also available including: + + +| | | | | | +| ------------- | ------------- | ------ | ------ | --------- | +| North America | South America | Europe | Africa | West Asia | +| South Asia | South-East Asia | East Asia | Australia | Oceania | + + +#### Share view The **Share view** option allows you to link the movement and zoom actions of multiple map visualizations within the same dashboard. The map visualizations that have this option enabled act in tandem when one of them is moved or zoomed, leaving the other ones independent. @@ -106,11 +184,28 @@ The **Share view** option allows you to link the movement and zoom actions of mu You might need to reload the dashboard for this feature to work. {{< /admonition >}} -## Map layers +### Map layers options Geomaps support showing multiple layers. Each layer determines how you visualize geospatial data on top of the base map. -### Types +There are three options that you need to set for all maps: + +- [Layer type](#layer-type) +- [Data](#data) +- [Location mode](#location-mode) + +Other options are dependent on your map layer type and are described within the layer type section. + +The layer controls allow you to create layers, change their name, reorder and delete layers. + +- **Add layer** creates an additional, configurable data layer for the geomap. When you add a layer, you are prompted to select a layer type. You can change the layer type at any point during panel configuration. See the **Layer Types** section above for details on each layer type. +- **Edit layer name (pencil icon)** renames the layer. +- **Trash Bin** deletes the layer. +- **Reorder (six dots/grab handle)** allows you to change the layer order. Data on higher layers will appear above data on lower layers. The visualization will update the layer order as you drag and drop to help simplify choosing a layer order. 
+ +You can add multiple layers of data to a single geomap in order to create rich, detailed visualizations. + +#### Layer type There are seven map layer types to choose from in a geomap. @@ -121,6 +216,10 @@ There are seven map layer types to choose from in a geomap. - [Route (Beta)](#route-layer-beta) render data points as a route. - [Photos (Beta)](#photos-layer-beta) renders a photo at each data point. - [Network (Beta)](#network-layer-beta) visualizes a network graph from the data. +- [Open Street Map](#open-street-map-layer) adds a map from a collaborative free geographic world database. +- [CARTO basemap](#carto-basemap-layer) adds a layer from CARTO Raster basemaps. +- [ArcGIS MapServer](#arcgis-mapserver-layer) adds a layer from an ESRI ArcGIS MapServer. +- [XYZ Tile layer](#xyz-tile-layer) adds a map from a generic tile layer. {{% admonition type="note" %}} Beta is equivalent to the [public preview](/docs/release-life-cycle/) release stage. @@ -131,9 +230,7 @@ There are also two experimental (or alpha) layer types. - **Icon at last point (alpha)** renders an icon at the last data point. - **Dynamic GeoJSON (alpha)** styles a GeoJSON file based on query results. -{{% admonition type="note" %}} -To enable experimental layers: -Set `enable_alpha` to `true` in your configuration file: +To enable experimental layers. Set `enable_alpha` to `true` in your configuration file: ``` [panels] @@ -146,29 +243,11 @@ To enable the experimental layers using Docker, run the following command: docker run -p 3000:3000 -e "GF_PANELS_ENABLE_ALPHA=true" grafana/grafana: ``` -{{% /admonition %}} - -{{% admonition type="note" %}} -[Basemap layer types](#types-1) can also be added as layers. You can specify an opacity. -{{% /admonition %}} - -### Layer Controls - -The layer controls allow you to create layers, change their name, reorder and delete layers. - -- **Add layer** creates an additional, configurable data layer for the geomap. 
When you add a layer, you are prompted to select a layer type. You can change the layer type at any point during panel configuration. See the **Layer Types** section above for details on each layer type. -- The layer controls allow you to rename, delete, and reorder the layers of the visualization. - - **Edit layer name (pencil icon)** renames the layer. - - **Trash Bin** deletes the layer. - - **Reorder (six dots/grab handle)** allows you to change the layer order. Data on higher layers will appear above data on lower layers. The visualization will update the layer order as you drag and drop to help simplify choosing a layer order. - -You can add multiple layers of data to a single geomap in order to create rich, detailed visualizations. - -### Data +#### Data Geomaps need a source of geographical data gathered from a data source query which can return multiple datasets. By default Grafana picks the first dataset, but this drop-down allows you to pick other datasets if the query returns more than one. -### Location mode +#### Location mode There are four options to map the data returned by the selected query: @@ -181,110 +260,30 @@ There are four options to map the data returned by the selected query: - **Geohash** specifies that your query holds geohash data. You will be prompted to select a string data field for the geohash from your database query. - **Lookup** specifies that your query holds location name data that needs to be mapped to a value. You will be prompted to select the lookup field from your database query and a gazetteer. The gazetteer is the directory that is used to map your queried data to a geographical point. -## Basemap layer - -A basemap layer provides the visual foundation for a mapping application. It typically contains data with global coverage. Several base layer options -are available each with specific configuration options to style the base map. - -### Types - -There are four basemap layer types to choose from in a geomap. 
- -- [Open Street Map](#open-street-map-layer) adds a map from a collaborative free geographic world database. -- [CARTO](#carto-layer) adds a layer from CARTO Raster basemaps. -- [ArcGIS](#arcgis-layer) adds a layer from an ESRI ArcGIS MapServer. -- [XYZ](#xyz-tile-layer) adds a map from a generic tile layer. - -### Default - -The default base layer uses the [CARTO](#carto-layer) map. You can define custom default base layers in the `.ini` configuration file. - -![Basemap layer options](/static/img/docs/geomap-panel/geomap-baselayer-8-1-0.png) - -#### Configure the default base layer with provisioning - -You can configure the default base map using config files with Grafana’s provisioning system. For more information on all the settings, refer to the [provisioning docs page](ref:provisioning-docs-page). - -Use the JSON configuration option `default_baselayer_config` to define the default base map. There are currently four base map options to choose from: `carto`, `esri-xyz`, `osm-standard`, `xyz`. Here are some provisioning examples for each base map option. - -- **carto** loads the CartoDB tile server. You can choose from `auto`, `dark`, and `light` theme for the base map and can be set as shown below. The `showLabels` tag determines whether or not Grafana shows the Country details on top of the map. Here is an example: - -```ini -geomap_default_baselayer = `{ - "type": "carto", - "config": { - "theme": "auto", - "showLabels": true - } -}` -``` - -- **esri-xyz** loads the ESRI tile server. There are already multiple server instances implemented to show the various map styles: `world-imagery`, `world-physical`, `topo`, `usa-topo`, and `ocean`. The `custom` server option allows you to configure your own ArcGIS map server. 
Here are some examples: - -```ini -geomap_default_baselayer = `{ - "type": "esri-xyz", - "config": { - "server": "world-imagery" - } -}` -``` - -```ini -geomap_default_baselayer = `{ - "type": "esri-xyz", - "config": { - "server": "custom", - "url": "[tile server url]", - "attribution": "[tile server attribution]" - } -}` -``` - -- **osm-standard** loads the OpenStreetMap tile server. There are no additional configurations needed and the `config` fields can be left blank. Here is an example: - -```ini -default_baselayer_config = `{ - "type": "osm-standard", - "config": {} -}` -``` - -- **xyz** loads a custom tile server defined by the user. Set a valid tile server `url`, with {z}/{x}/{y} for this option in order to properly load a default base map. Here is an example: - -```ini -default_baselayer_config = `{ - "type": "xyz", - "config": { - "attribution": "Open street map", - "url": "https://tile.openstreetmap.org/{z}/{x}/{y}.png" - } -}` -``` - -`enable_custom_baselayers` allows you to enable or disable custom open source base maps that are already implemented. The default is `true`. - -## Markers layer +#### Markers layer The markers layer allows you to display data points as different marker shapes such as circles, squares, triangles, stars, and more. ![Markers Layer](/static/img/docs/geomap-panel/geomap-markers-8-1-0.png) -{{< figure src="/media/docs/grafana/panels-visualizations/geomap-markers-options-11-1-0.png" max-width="350px" alt="Markers layer options" >}} - -- **Data** and **Location mode** configure the data settings for the layer. For more information, refer to [Data](#data) and [Location mode](#location-mode). -- **Size** configures the size of the markers. The default is `Fixed size`, which makes all marker sizes the same regardless of the data; however, there is also an option to size the markers based on data corresponding to a selected field. `Min` and `Max` marker sizes have to be set such that the markers can scale within this range. 
-- **Symbol** allows you to choose the symbol, icon, or graphic to aid in providing additional visual context to your data. Choose from assets that are included with Grafana such as simple symbols or the Unicon library. You can also specify a URL containing an image asset. The image must be a scalable vector graphic (SVG). -- **Symbol Vertical Align** configures the vertical alignment of the symbol relative to the data point. Note that the symbol's rotation angle is applied first around the data point, then the vertical alignment is applied relative to the rotation of the symbol. -- **Symbol Horizontal Align** configures the horizontal alignment of the symbol relative to the data point. Note that the symbol's rotation angle is applied first around the data point, then the horizontal alignment is applied relative to the rotation of the symbol. -- **Color** configures the color of the markers. The default `Fixed color` sets all markers to a specific color. There is also an option to have conditional colors depending on the selected field data point values and the color scheme set in the `Standard options` section. -- **Fill opacity** configures the transparency of each marker. -- **Rotation angle** configures the rotation angle of each marker. The default is `Fixed value`, which makes all markers rotate to the same angle regardless of the data; however, there is also an option to set the rotation of the markers based on data corresponding to a selected field. -- **Text label** configures a text label for each marker. -- **Show legend** allows you to toggle the legend for the layer. -- **Display tooltip** allows you to toggle tooltips for the layer. - -## Heatmap layer + +| Option | Description | +| ------ | ----------- | +| Data | Configure the data settings for the layer. For more information, refer to [Data](#data). | +| Location | Configure the data settings for the layer. For more information, refer to [Location mode](#location-mode). 
| +| Size | Configures the size of the markers. The default is `Fixed size`, which makes all marker sizes the same regardless of the data; however, there is also an option to size the markers based on data corresponding to a selected field. `Min` and `Max` marker sizes have to be set such that the markers can scale within this range. | +| Symbol | Allows you to choose the symbol, icon, or graphic to aid in providing additional visual context to your data. Choose from assets that are included with Grafana such as simple symbols or the Unicon library. You can also specify a URL containing an image asset. The image must be a scalable vector graphic (SVG). | +| Symbol Vertical Align | Configures the vertical alignment of the symbol relative to the data point. Note that the symbol's rotation angle is applied first around the data point, then the vertical alignment is applied relative to the rotation of the symbol. | +| Symbol Horizontal Align | Configures the horizontal alignment of the symbol relative to the data point. Note that the symbol's rotation angle is applied first around the data point, then the horizontal alignment is applied relative to the rotation of the symbol. | +| Color | Configures the color of the markers. The default `Fixed color` sets all markers to a specific color. There is also an option to have conditional colors depending on the selected field data point values and the color scheme set in the `Standard options` section. | +| Fill opacity | Configures the transparency of each marker. | +| Rotation angle | Configures the rotation angle of each marker. The default is `Fixed value`, which makes all markers rotate to the same angle regardless of the data; however, there is also an option to set the rotation of the markers based on data corresponding to a selected field. | +| Text label | Configures a text label for each marker. | +| Show legend | Allows you to toggle the legend for the layer. 
| +| Display tooltip | Allows you to toggle tooltips for the layer. | + + +#### Heatmap layer The heatmap layer clusters various data points to visualize locations with different densities. To add a heatmap layer: @@ -295,72 +294,69 @@ Similar to `Markers`, you are prompted with various options to determine which data points to visualize and how you want to visualize them. ![Heatmap Layer](/static/img/docs/geomap-panel/geomap-heatmap-8-1-0.png) -{{< figure src="/media/docs/grafana/panels-visualizations/geomap-heatmap-options-11-1-0.png" max-width="350px" alt="Heatmap layer options" >}} - -- **Data** and **Location mode** configure the data settings for the layer. For more information, refer to [Data](#data) and [Location mode](#location-mode). -- **Weight values** configure the intensity of the heatmap clusters. `Fixed value` keeps a constant weight value throughout all data points. This value should be in the range of 0~1. Similar to Markers, there is an alternate option in the drop-down to automatically scale the weight values depending on data values. -- **Radius** configures the size of the heatmap clusters. -- **Blur** configures the amount of blur on each cluster. -- **Opacity** configures the opacity of each cluster. -- **Display tooltip** allows you to toggle tooltips for the layer. + +| Option | Description | +| ------ | ----------- | +| Data | Configure the data settings for the layer. For more information, refer to [Data](#data). | +| Location | Configure the data settings for the layer. For more information, refer to [Location mode](#location-mode). | +| Weight values | Configures the intensity of the heatmap clusters. `Fixed value` keeps a constant weight value throughout all data points. This value should be in the range of 0~1. Similar to Markers, there is an alternate option in the drop-down to automatically scale the weight values depending on data values. | +| Radius | Configures the size of the heatmap clusters. | +| Blur | Configures the amount of blur on each cluster. 
| +| Opacity | Configures the opacity of each cluster. | +| Display tooltip | Allows you to toggle tooltips for the layer. | + -## GeoJSON layer +#### GeoJSON layer The GeoJSON layer allows you to select and load a static GeoJSON file from the filesystem. -- **GeoJSON URL** provides a choice of GeoJSON files that ship with Grafana. -- **Default Style** controls which styles to apply when no rules above match. - - **Color** configures the color of the default style - - **Opacity** configures the default opacity -- **Style Rules** apply styles based on feature properties - - **Rule** allows you to select a _feature_, _condition_, and _value_ from the GeoJSON file in order to define a rule. The trash bin icon can be used to delete the current rule. - - **Color** configures the color of the style for the current rule - - **Opacity** configures the transparency level for the current rule -- **Add style rule** creates additional style rules. -- **Display tooltip** allows you to toggle tooltips for the layer. + +| Option | Description | +| ------ | ----------- | +| GeoJSON URL | Provides a choice of GeoJSON files that ship with Grafana. | +| Default Style | Controls which styles to apply when no rules above match.
    • **Color** - configures the color of the default style
    • **Opacity** - configures the default opacity
    | +| Style Rules | Apply styles based on feature properties
    • **Rule** - allows you to select a _feature_, _condition_, and _value_ from the GeoJSON file in order to define a rule. The trash bin icon can be used to delete the current rule.
    • **Color** - configures the color of the style for the current rule
    • **Opacity** - configures the transparency level for the current rule
    • | +| Display tooltip | Allows you to toggle tooltips for the layer. | + -{{% admonition type="note" %}} Styles can be set within the "properties" object of the GeoJSON with support for the following geometries: -- Polygon, MultiPolygon +**Polygon, MultiPolygon** - - **"fill"** - The color of the interior of the polygon(s) - - **"fill-opacity"** - The opacity of the interior of the polygon(s) - - **"stroke-width"** - The width of the line component of the polygon(s) +- **"fill"** - The color of the interior of the polygon(s) +- **"fill-opacity"** - The opacity of the interior of the polygon(s) +- **"stroke-width"** - The width of the line component of the polygon(s) -- Point, MultiPoint +**Point, MultiPoint** - - **"marker-color"** - The color of the point(s) - - **"marker-size"** - The size of the point(s) +- **"marker-color"** - The color of the point(s) +- **"marker-size"** - The size of the point(s) -- LineString, MultiLineString - - **"stroke"** - The color of the line(s) - - **"stroke-width"** - The width of the line(s) +**LineString, MultiLineString** -{{% /admonition %}} +- **"stroke"** - The color of the line(s) +- **"stroke-width"** - The width of the line(s) -## Night / Day layer +#### Night / Day layer The Night / Day layer displays night and day regions based on the current time range. -{{< figure src="/static/img/docs/geomap-panel/geomap-day-night-9-1-0.png" max-width="1200px" caption="Geomap panel Night / Day" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-day-night-9-1-0.png" max-width="1200px" alt="Geomap panel Night / Day" >}} -### Options + +| Option | Description | +| ------ | ----------- | +| Data | Configures the data set for the layer. For more information, refer to [Data](#data). | +| Show | Toggles the time source from panel time range. | +| Night region color | Picks the color for the night region. | +| Display sun | Toggles the sun icon. | +| Opacity | Set the opacity from `0` (transparent) to `1` (opaque). 
| +| Display tooltip | Allows you to toggle tooltips for the layer. | + -- **Data** configures the data set for the layer. For more information, refer to [Data](#data). -- **Show** toggles the time source from panel time range. -- **Night region color** picks the color for the night region. -- **Display sun** toggles the sun icon. -- **Opacity** set the opacity from `0` (transparent) to `1` (opaque). -- **Display tooltip** allows you to toggle tooltips for the layer. +[Extensions for OpenLayers - DayNight](https://viglino.github.io/ol-ext/examples/layer/map.daynight.html) -{{< figure src="/static/img/docs/geomap-panel/geomap-day-night-options-9-1-0.png" max-width="1200px" caption="Geomap panel Night / Day options" >}} - -### More information - -- [**Extensions for OpenLayers - DayNight**](https://viglino.github.io/ol-ext/examples/layer/map.daynight.html) - -## Route layer (Beta) +#### Route layer (Beta) {{% admonition type="caution" %}} The Route layer is currently in [public preview](/docs/release-life-cycle/). Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available. @@ -368,28 +364,28 @@ The Route layer is currently in [public preview](/docs/release-life-cycle/). Gra The Route layer renders data points as a route. -{{< figure src="/media/docs/grafana/geomap-route-layer-basic-9-4-0.png" max-width="1200px" caption="Geomap panel Route" >}} - -### Options +{{< figure src="/media/docs/grafana/geomap-route-layer-basic-9-4-0.png" max-width="1200px" alt="Geomap panel Route" >}} -- **Data** and **Location mode** configure the data settings for the layer. For more information, refer to [Data](#data) and [Location mode](#location-mode). -- **Size** sets the route thickness. Fixed value by default. When field data is selected you can set the Min and Max range in which field data can scale. -- **Color** sets the route color. Set to `Fixed color` by default. You can also tie the color to field data. 
-- **Fill opacity** configures the opacity of the route. -- **Text label** configures a text label for each route. -- **Arrow** sets the arrow styling to display along route, in order of data. - - **None** - - **Forward** - - **Reverse** -- **Display tooltip** allows you to toggle tooltips for the layer. +The layer can also render a route with arrows. -{{< figure src="/media/docs/grafana/geomap-route-layer-arrow-size-9-4-0.png" max-width="1200px" caption="Geomap panel Route arrows with size" >}} +{{< figure src="/media/docs/grafana/geomap-route-layer-arrow-size-9-4-0.png" max-width="1200px" alt="Geomap panel Route arrows with size" >}} -### More information + +| Option | Description | +| ------ | ----------- | +| Data | Configures the data settings for the layer. For more information, refer to [Data](#data). | +| Location | Configures the data settings for the layer. For more information, refer to [Location mode](#location-mode). | +| Size | Sets the route thickness. Fixed value by default. When field data is selected you can set the Min and Max range in which field data can scale. | +| Color | Sets the route color. Set to `Fixed color` by default. You can also tie the color to field data. | +| Fill opacity | Configures the opacity of the route. | +| Text label | Configures a text label for each route. | +| Arrow | Sets the arrow styling to display along route, in order of data. Choose from: **None**, **Forward**, and **Reverse**. | +| Display tooltip | Allows you to toggle tooltips for the layer. | + -- [**Extensions for OpenLayers - Flow Line Style**](http://viglino.github.io/ol-ext/examples/style/map.style.gpxline.html) +[Extensions for OpenLayers - Flow Line Style](http://viglino.github.io/ol-ext/examples/style/map.style.gpxline.html) -## Photos layer (Beta) +#### Photos layer (Beta) {{% admonition type="caution" %}} The Photos layer is currently in [public preview](/docs/release-life-cycle/). 
Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available. @@ -397,33 +393,26 @@ The Photos layer is currently in [public preview](/docs/release-life-cycle/). Gr The Photos layer renders a photo at each data point. -{{< figure src="/static/img/docs/geomap-panel/geomap-photos-9-3-0.png" max-width="1200px" caption="Geomap panel Photos" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-photos-9-3-0.png" max-width="1200px" alt="Geomap panel Photos" >}} -### Options + +| Option | Description | +| ------ | ----------- | +| Data | Configure the data settings for the layer. For more information, refer to [Data](#data). | +| Location | Configure the data settings for the layer. For more information, refer to [Location mode](#location-mode). | +| Image Source field | Allows you to select a string field containing image data in either of the following formats:
      • **Image URLs**
      • **Base64 encoded** - Image binary ("data:image/png;base64,...")
      | +| Kind | Sets the frame style around the images. Choose from: **Square**, **Circle**, **Anchored**, and **Folio**. | +| Crop | Toggles whether the images are cropped to fit. | +| Shadow | Toggles a box shadow behind the images. | +| Border | Sets the border size around images. | +| Border color | Sets the border color around images. | +| Radius | Sets the overall size of images in pixels. | +| Display tooltip | Allows you to toggle tooltips for the layer. | + -- **Data** and **Location mode** configure the data settings for the layer. For more information, refer to [Data](#data) and [Location mode](#location-mode). -- **Image Source field** allows you to select a string field containing image data in either of the following formats: - - **Image URLs** - - **Base64 encoded** - Image binary ("data:image/png;base64,...") -- **Kind** sets the frame style around the images. Choose from: - - **Square** - - **Circle** - - **Anchored** - - **Folio** -- **Crop** toggles whether the images are cropped to fit. -- **Shadow** toggles a box shadow behind the images. -- **Border** sets the border size around images. -- **Border color** sets the border color around images. -- **Radius** sets the overall size of images in pixels. -- **Display tooltip** allows you to toggle tooltips for the layer. +[Extensions for OpenLayers - Image Photo Style](http://viglino.github.io/ol-ext/examples/style/map.style.photo.html) -{{< figure src="/static/img/docs/geomap-panel/geomap-photos-options-9-3-0.png" max-width="1200px" caption="Geomap panel Photos options" >}} - -### More information - -- [**Extensions for OpenLayers - Image Photo Style**](http://viglino.github.io/ol-ext/examples/style/map.style.photo.html) - -## Network layer (Beta) +#### Network layer (Beta) {{% admonition type="caution" %}} The Network layer is currently in [public preview](/docs/release-life-cycle/). 
Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available. @@ -431,176 +420,256 @@ The Network layer is currently in [public preview](/docs/release-life-cycle/). G The Network layer renders a network graph. This layer supports the same [data format supported by the node graph visualization](ref:data-format) with the addition of [geospatial data](#location-mode) included in the nodes data. The geospatial data is used to locate and render the nodes on the map. -{{< figure src="/media/docs/grafana/screenshot-grafana-10-1-geomap-network-layer-v2.png" max-width="750px" caption="Geomap network layer" >}} -{{< video-embed src="/media/docs/grafana/screen-recording-10-1-geomap-network-layer-from-node-graph.mp4" max-width="750px" caption="Node graph to Geomap network layer" >}} - -### Options +{{< figure src="/media/docs/grafana/screenshot-grafana-10-1-geomap-network-layer-v2.png" max-width="750px" alt="Geomap network layer" >}} + +You can convert node graph data to a network layer: +{{< video-embed src="/media/docs/grafana/screen-recording-10-1-geomap-network-layer-from-node-graph.mp4" max-width="750px" alt="Node graph to Geomap network layer" >}} + + +| Option | Description | +| ------ | ----------- | +| Data | Configure the data settings for the layer. For more information, refer to [Data](#data). | +| Location | Configure the data settings for the layer. For more information, refer to [Location mode](#location-mode). | +| Arrow | Sets the arrow direction to display for each edge, with forward meaning source to target. Choose from: **None**, **Forward**, **Reverse** and **Both**. | +| Show legend | Allows you to toggle the legend for the layer. **Note:** The legend currently only supports node data. | +| Display tooltip | Allows you to toggle tooltips for the layer. | + + +##### Node styles options + + +| Option | Description | +| ------ | ----------- | +| Size | Configures the size of the nodes. 
The default is `Fixed size`, which makes all node sizes the same regardless of the data; however, there is also an option to size the nodes based on data corresponding to a selected field. `Min` and `Max` node sizes have to be set such that the nodes can scale within this range. | +| Symbol | Allows you to choose the symbol, icon, or graphic to aid in providing additional visual context to your data. Choose from assets that are included with Grafana such as simple symbols or the Unicon library. You can also specify a URL containing an image asset. The image must be a scalable vector graphic (SVG). | +| Color | Configures the color of the nodes. The default `Fixed color` sets all nodes to a specific color. There is also an option to have conditional colors depending on the selected field data point values and the color scheme set in the `Standard options` section. | +| Fill opacity | Configures the transparency of each node. | +| Rotation angle | Configures the rotation angle of each node. The default is `Fixed value`, which makes all nodes rotate to the same angle regardless of the data; however, there is also an option to set the rotation of the nodes based on data corresponding to a selected field. | +| Text label | Configures a text label for each node. | + + +##### Edge styles options + + +| Option | Description | +| ------ | ----------- | +| Size | Configures the line width of the edges. The default is `Fixed size`, which makes all edge line widths the same regardless of the data; however, there is also an option to size the edges based on data corresponding to a selected field. `Min` and `Max` edge sizes have to be set such that the edges can scale within this range. | +| Color | Configures the color of the edges. The default `Fixed color` sets all edges to a specific color. There is also an option to have conditional colors depending on the selected field data point values and the color scheme set in the `Standard options` section. 
| +| Fill opacity | Configures the transparency of each edge. | +| Text label | Configures a text label for each edge. | + + +#### Open Street Map layer -- **Data** and **Location mode** configure the data settings for the layer. For more information, refer to [Data](#data) and [Location mode](#location-mode). -- **Arrow** sets the arrow direction to display for each edge, with forward meaning source to target. Choose from: - - **None** - - **Forward** - - **Reverse** - - **Both** -- **Show legend** allows you to toggle the legend for the layer. **Note:** The legend currently only supports node data. -- **Display tooltip** allows you to toggle tooltips for the layer. - -#### Node styles +A map from a collaborative free geographic world database. -- **Size** configures the size of the nodes. The default is `Fixed size`, which makes all node sizes the same regardless of the data; however, there is also an option to size the nodes based on data corresponding to a selected field. `Min` and `Max` node sizes have to be set such that the nodes can scale within this range. -- **Symbol** allows you to choose the symbol, icon, or graphic to aid in providing additional visual context to your data. Choose from assets that are included with Grafana such as simple symbols or the Unicon library. You can also specify a URL containing an image asset. The image must be a scalable vector graphic (SVG). -- **Color** configures the color of the nodes. The default `Fixed color` sets all nodes to a specific color. There is also an option to have conditional colors depending on the selected field data point values and the color scheme set in the `Standard options` section. -- **Fill opacity** configures the transparency of each node. -- **Rotation angle** configures the rotation angle of each node. 
The default is `Fixed value`, which makes all nodes rotate to the same angle regardless of the data; however, there is also an option to set the rotation of the nodes based on data corresponding to a selected field. -- **Text label** configures a text label for each node. +{{< figure src="/static/img/docs/geomap-panel/geomap-osm-9-1-0.png" max-width="1200px" alt="Geomap panel Open Street Map" >}} -#### Edge styles +- **Opacity** from 0 (transparent) to 1 (opaque) +- **Display tooltip** - allows you to toggle tooltips for the layer. -- **Size** configures the line width of the edges. The default is `Fixed size`, which makes all edge line widths the same regardless of the data; however, there is also an option to size the edges based on data corresponding to a selected field. `Min` and `Max` eges sizes have to be set such that the edges can scale within this range. -- **Color** configures the color of the edges. The default `Fixed color` sets all edges to a specific color. There is also an option to have conditional colors depending on the selected field data point values and the color scheme set in the `Standard options` section. -- **Fill opacity** configures the transparency of each edge. -- **Text label** configures a text label for each edge. +[About Open Street Map](https://www.openstreetmap.org/about) -## CARTO layer +#### CARTO basemap layer A CARTO layer is from CARTO Raster basemaps. 
-### Options - - **Theme** - Auto - Light - {{< figure src="/static/img/docs/geomap-panel/geomap-carto-light-9-1-0.png" max-width="1200px" caption="Geomap panel CARTO light example" >}} + {{< figure src="/static/img/docs/geomap-panel/geomap-carto-light-9-1-0.png" max-width="1200px" alt="Geomap panel CARTO light example" >}} - Dark - {{< figure src="/static/img/docs/geomap-panel/geomap-carto-dark-9-1-0.png" max-width="1200px" caption="Geomap panel CARTO dark example" >}} + {{< figure src="/static/img/docs/geomap-panel/geomap-carto-dark-9-1-0.png" max-width="1200px" alt="Geomap panel CARTO dark example" >}} - **Show labels** shows the Country details on top of the map. - **Opacity** from 0 (transparent) to 1 (opaque) +- **Display tooltip** - allows you to toggle tooltips for the layer. -{{< figure src="/static/img/docs/geomap-panel/geomap-carto-options-9-1-0.png" max-width="1200px" caption="Geomap panel CARTO options" >}} +[About CARTO](https://carto.com/about-us/) -### More Information +#### ArcGIS MapServer layer -- [**About CARTO**](https://carto.com/about-us/) +An ArcGIS layer is a layer from an ESRI ArcGIS MapServer. -## XYZ tile layer +- **Server Instance** to select the map type. 
+ - World Street Map + {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-wsm-9-1-0.png" max-width="1200px" alt="Geomap panel ArcGIS World Street Map" >}} + - World Imagery + {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-wi-9-1-0.png" max-width="1200px" alt="Geomap panel ArcGIS World Imagery" >}} + - World Physical + {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-wp-9-1-0.png" max-width="1200px" alt="Geomap panel ArcGIS World Physical" >}} + - Topographic + {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-topographic-9-1-0.png" max-width="1200px" alt="Geomap panel ArcGIS Topographic" >}} + - USA Topographic + {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-usa-topographic-9-1-0.png" max-width="1200px" alt="Geomap panel ArcGIS USA Topographic" >}} + - World Ocean + {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-ocean-9-1-0.png" max-width="1200px" alt="Geomap panel ArcGIS World Ocean" >}} + - Custom MapServer (see [XYZ](#xyz-tile-layer) for formatting) + - URL template + - Attribution +- **Opacity** from 0 (transparent) to 1 (opaque) +- **Display tooltip** - allows you to toggle tooltips for the layer. -The XYZ tile layer is a map from a generic tile layer. +##### More Information -{{< figure src="/static/img/docs/geomap-panel/geomap-xyz-9-1-0.png" max-width="1200px" caption="Geomap panel xyz example" >}} +- [ArcGIS Services](https://services.arcgisonline.com/arcgis/rest/services) +- [About ESRI](https://www.esri.com/en-us/about/about-esri/overview) -### Options +#### XYZ Tile layer -- **URL template** +The XYZ Tile layer is a map from a generic tile layer. 
- > **Note:** Set a valid tile server url, with {z}/{x}/{y} for example: https://tile.openstreetmap.org/{z}/{x}/{y}.png +{{< figure src="/static/img/docs/geomap-panel/geomap-xyz-9-1-0.png" max-width="1200px" alt="Geomap panel xyz example" >}} +- **URL template** - Set a valid tile server url, with {z}/{x}/{y} for example: https://tile.openstreetmap.org/{z}/{x}/{y}.png - **Attribution** sets the reference string for the layer if displayed in [map controls](#show-attribution) - **Opacity** from 0 (transparent) to 1 (opaque) -{{< figure src="/static/img/docs/geomap-panel/geomap-xyz-options-9-1-0.png" max-width="1200px" caption="Geomap panel xyz options" >}} +##### More information -### More information +- [Tiled Web Map Wikipedia](https://en.wikipedia.org/wiki/Tiled_web_map) +- [List of Open Street Map Tile Servers](https://wiki.openstreetmap.org/wiki/Tile_servers) -- [**Tiled Web Map Wikipedia**](https://en.wikipedia.org/wiki/Tiled_web_map) -- [**List of Open Street Map Tile Servers**](https://wiki.openstreetmap.org/wiki/Tile_servers) +### Basemap layer options -## Open Street Map layer +A basemap layer provides the visual foundation for a mapping application. It typically contains data with global coverage. Several base layer options +are available each with specific configuration options to style the base map. -A map from a collaborative free geographic world database. +Basemap layer types can also be added as layers. You can specify an opacity. -{{< figure src="/static/img/docs/geomap-panel/geomap-osm-9-1-0.png" max-width="1200px" caption="Geomap panel Open Street Map" >}} +There are four basemap layer types to choose from in a geomap. -### Options +- [Open Street Map](#open-street-map-layer) adds a map from a collaborative free geographic world database. +- [CARTO basemap](#carto-basemap-layer) adds a layer from CARTO Raster basemaps. +- [ArcGIS MapServer](#arcgis-mapserver-layer) adds a layer from an ESRI ArcGIS MapServer. 
+- [XYZ Tile layer](#xyz-tile-layer) adds a map from a generic tile layer. -- **Opacity** from 0 (transparent) to 1 (opaque) +The default basemap layer uses the CARTO map. You can define custom default base layers in the `.ini` configuration file. -{{< figure src="/static/img/docs/geomap-panel/geomap-osm-options-9-1-0.png" max-width="1200px" caption="Geomap panel Open Street Map options" >}} +![Basemap layer options](/static/img/docs/geomap-panel/geomap-baselayer-8-1-0.png) + +#### Configure the default base layer with provisioning -### More Information +You can configure the default base map using config files with Grafana’s provisioning system. For more information on all the settings, refer to the [provisioning docs page](ref:provisioning-docs-page). -- [**About Open Street Map**](https://www.openstreetmap.org/about) +Use the JSON configuration option `default_baselayer_config` to define the default base map. There are currently four base map options to choose from: `carto`, `esri-xyz`, `osm-standard`, `xyz`. Here are some provisioning examples for each base map option. -## ArcGIS layer +- **carto** loads the CartoDB tile server. You can choose from `auto`, `dark`, and `light` theme for the base map and can be set as shown below. The `showLabels` tag determines whether or not Grafana shows the Country details on top of the map. Here is an example: -An ArcGIS layer is a layer from an ESRI ArcGIS MapServer. +```ini +geomap_default_baselayer = `{ + "type": "carto", + "config": { + "theme": "auto", + "showLabels": true + } +}` +``` -### Options +- **esri-xyz** loads the ESRI tile server. There are already multiple server instances implemented to show the various map styles: `world-imagery`, `world-physical`, `topo`, `usa-topo`, and `ocean`. The `custom` server option allows you to configure your own ArcGIS map server. Here are some examples: -- **Server Instance** to select the map type. 
- - World Street Map - {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-wsm-9-1-0.png" max-width="1200px" caption="Geomap panel ArcGIS World Street Map" >}} - - World Imagery - {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-wi-9-1-0.png" max-width="1200px" caption="Geomap panel ArcGIS World Imagery" >}} - - World Physical - {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-wp-9-1-0.png" max-width="1200px" caption="Geomap panel ArcGIS World Physical" >}} - - Topographic - {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-topographic-9-1-0.png" max-width="1200px" caption="Geomap panel ArcGIS Topographic" >}} - - USA Topographic - {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-usa-topographic-9-1-0.png" max-width="1200px" caption="Geomap panel ArcGIS USA Topographic" >}} - - World Ocean - {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-ocean-9-1-0.png" max-width="1200px" caption="Geomap panel ArcGIS World Ocean" >}} - - Custom MapServer (see [XYZ](#xyz-tile-layer) for formatting) - - URL template - - Attribution -- **Opacity** from 0 (transparent) to 1 (opaque) +{{< tabs >}} +{{< tab-content name="World imagery" >}} - {{< figure src="/static/img/docs/geomap-panel/geomap-arcgis-options-9-1-0.png" max-width="1200px" caption="Geomap panel ArcGIS options" >}} +```ini +geomap_default_baselayer = `{ + "type": "esri-xyz", + "config": { + "server": "world-imagery" + } +}` +``` -### More Information +{{< /tab-content >}} +{{< tab-content name="Custom" >}} -- [**ArcGIS Services**](https://services.arcgisonline.com/arcgis/rest/services) -- [**About ESRI**](https://www.esri.com/en-us/about/about-esri/overview) +```ini +geomap_default_baselayer = `{ + "type": "esri-xyz", + "config": { + "server": "custom", + "url": "[tile server url]", + "attribution": "[tile server attribution]" + } +}` +``` -## Map Controls +{{< /tab-content >}} +{{< /tabs >}} -The map controls section contains various options for map 
information and tool overlays. -{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-9-1-0.png" max-width="1200px" caption="Geomap panel map controls" >}} +- **osm-standard** loads the OpenStreetMap tile server. There are no additional configurations needed and the `config` fields can be left blank. Here is an example: + +```ini +default_baselayer_config = `{ + "type": "osm-standard", + "config": {} +}` +``` + +- **xyz** loads a custom tile server defined by the user. Set a valid tile server `url`, with {z}/{x}/{y} for this option in order to properly load a default base map. Here is an example: + +```ini +default_baselayer_config = `{ + "type": "xyz", + "config": { + "attribution": "Open street map", + "url": "https://tile.openstreetmap.org/{z}/{x}/{y}.png" + } +}` +``` -### Zoom +`enable_custom_baselayers` allows you to enable or disable custom open source base maps that are already implemented. The default is `true`. -This section describes each of the zoom controls. +### Map controls options + +The map controls section contains various options for map information and tool overlays. + + +| Option | Description | +| ------ | ----------- | +| [Show zoom control](#show-zoom-control) | Displays zoom controls in the upper left corner. | +| [Mouse wheel zoom](#mouse-wheel-zoom) | Enables the mouse wheel to be used for zooming in or out. | +| [Show attribution](#show-attribution) | Displays attribution for basemap layers. | +| [Show scale](#show-scale) | Displays scale information in the bottom left corner in meters (m) or kilometers (km). | +| [Show measure tools](#show-measure-tools) | Displays measure tools in the upper right corner. This includes the [Length](#length) and [Area](#area) options. | +| [Show debug](#show-debug) | Displays debug information in the upper right corner. | +| [Tooltip](#tooltip) | Controls display of tooltips. | + #### Show zoom control Displays zoom controls in the upper left corner. 
This control can be useful when using systems that don't have a mouse. -{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-zoom-9-1-0.png" max-width="1200px" caption="Geomap panel zoom" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-zoom-9-1-0.png" max-width="1200px" alt="Geomap panel zoom" >}} #### Mouse wheel zoom Enables the mouse wheel to be used for zooming in or out. -### Show attribution +#### Show attribution Displays attribution for basemap layers. -{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-attribution-9-1-0.png" max-width="1200px" caption="Geomap panel attribution" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-attribution-9-1-0.png" max-width="1200px" alt="Geomap panel attribution" >}} -### Show scale +#### Show scale -Displays scale information in the bottom left corner. +Displays scale information in the bottom left corner in meters (m) or kilometers (km). -{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-scale-9-1-0.png" max-width="1200px" caption="Geomap panel scale" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-scale-9-1-0.png" max-width="1200px" alt="Geomap panel scale" >}} -{{% admonition type="note" %}} -Currently only displays units in [m]/[km]. -{{% /admonition %}} - -### Show measure tools +#### Show measure tools Displays measure tools in the upper right corner. Measurements appear only when this control is open. 
-{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-measure-9-1-0.png" max-width="1200px" caption="Geomap panel measure" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-measure-9-1-0.png" max-width="1200px" alt="Geomap panel measure" >}} - **Click** to start measuring - **Continue clicking** to continue measurement - **Double-click** to end measurement -{{% admonition type="note" %}} When you change measurement type or units, the previous measurement is removed from the map. If the control is closed and then re-opened, the most recent measurement is displayed. A measurement can be modified by clicking and dragging on it. -{{% /admonition %}} -#### Length +##### Length Get the spherical length of a geometry. This length is the sum of the great circle distances between coordinates. For multi-part geometries, the length is the sum of the length of each part. Geometries are assumed to be in 'EPSG:3857'. @@ -609,9 +678,9 @@ Get the spherical length of a geometry. This length is the sum of the great circ - **Miles (mi)** - **Nautical miles (nmi)** -{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-measure-length-9-1-0.png" max-width="1200px" caption="Geomap panel measure length" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-measure-length-9-1-0.png" max-width="1200px" alt="Geomap panel measure length" >}} -#### Area +##### Area Get the spherical area of a geometry. This area is calculated assuming that polygon edges are segments of great circles on a sphere. Geometries are assumed to be in 'EPSG:3857'. @@ -622,38 +691,38 @@ Get the spherical area of a geometry. 
This area is calculated assuming that poly - **Acres (acre)** - **Hectare (ha)** -{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-measure-area-9-1-0.png" max-width="1200px" caption="Geomap panel measure area" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-measure-area-9-1-0.png" max-width="1200px" alt="Geomap panel measure area" >}} -### Show debug +#### Show debug Displays debug information in the upper right corner. This can be useful for debugging or validating a data source. - **Zoom** displays current zoom level of the map. - **Center** displays the current **longitude**, **latitude** of the map center. -{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-debug-9-1-0.png" max-width="1200px" caption="Geomap panel debug" >}} +{{< figure src="/static/img/docs/geomap-panel/geomap-map-controls-debug-9-1-0.png" max-width="1200px" alt="Geomap panel debug" >}} -### Tooltip +#### Tooltip - **None** displays tooltips only when a data point is clicked. - **Details** displays tooltips when a mouse pointer hovers over a data point. 
-## Standard options +### Standard options {{< docs/shared lookup="visualizations/standard-options.md" source="grafana" version="" >}} -## Data links +### Data links {{< docs/shared lookup="visualizations/datalink-options.md" source="grafana" version="" >}} -## Value mappings +### Value mappings {{< docs/shared lookup="visualizations/value-mappings-options.md" source="grafana" version="" >}} -## Thresholds +### Thresholds {{< docs/shared lookup="visualizations/thresholds-options-2.md" source="grafana" version="" >}} -## Field overrides +### Field overrides {{< docs/shared lookup="visualizations/overrides-options.md" source="grafana" version="" >}} diff --git a/docs/sources/panels-visualizations/visualizations/histogram/index.md b/docs/sources/panels-visualizations/visualizations/histogram/index.md index f72c8ea4e2c3a..6412d37d09a73 100644 --- a/docs/sources/panels-visualizations/visualizations/histogram/index.md +++ b/docs/sources/panels-visualizations/visualizations/histogram/index.md @@ -100,6 +100,10 @@ The data is converted as follows: Use the following options to refine your histogram visualization. +### Bucket count + +Specifies the number of bins used to group your data in the histogram, affecting the granularity of the displayed distribution. Leave this empty for automatic bucket count of 30. + ### Bucket size The size of the buckets. Leave this empty for automatic bucket sizing (~10% of the full range). @@ -112,6 +116,14 @@ If the first bucket should not start at zero. A non-zero offset has the effect o This will merge all series and fields into a combined histogram. +### Stacking + +Controls how multiple series are displayed in the histogram. Choose from the following: + +- **Off** - Series are not stacked, but instead shown side by side. +- **Normal** - Series are stacked on top of each other, showing cumulative values. +- **100%** - Series are stacked to fill 100% of the chart, showing the relative proportion of each series. 
+ ### Line width Controls line width of the bars. @@ -126,17 +138,12 @@ Set the mode of the gradient fill. Fill gradient is based on the line color. To Gradient display is influenced by the **Fill opacity** setting. -#### None - -No gradient fill. This is the default setting. - -#### Opacity - -Transparency of the gradient is calculated based on the values on the Y-axis. The opacity of the fill is increasing with the values on the Y-axis. - -#### Hue +Choose from the following: -Gradient color is generated based on the hue of the line color. +- **None** - No gradient fill. This is the default setting. +- **Opacity** - Transparency of the gradient is calculated based on the values on the Y-axis. The opacity of the fill is increasing with the values on the Y-axis. +- **Hue** - Gradient color is generated based on the hue of the line color. +- **Scheme** - The selected [color palette](https://grafana.com/docs/grafana/latest/panels-visualizations/configure-standard-options/#color-scheme) is applied to the histogram bars. ## Standard options diff --git a/docs/sources/panels-visualizations/visualizations/logs/index.md b/docs/sources/panels-visualizations/visualizations/logs/index.md index b467b43d6bdff..c2d73352c8acc 100644 --- a/docs/sources/panels-visualizations/visualizations/logs/index.md +++ b/docs/sources/panels-visualizations/visualizations/logs/index.md @@ -19,7 +19,7 @@ description: Configure options for Grafana's logs visualization title: Logs weight: 100 refs: - supported-log-levels-and-mappings-of-log-level-abbreviation-and-expressions: + log-levels: - pattern: /docs/grafana/ destination: /docs/grafana//explore/logs-integration/#log-level - pattern: /docs/grafana-cloud/ @@ -28,27 +28,51 @@ refs: # Logs -The logs visualization shows log lines from data sources that support logs, such as Elastic, Influx, and Loki. Typically you would use this visualization next to a graph visualization to display the log output of a related process. 
+_Logs_ are structured records of events or messages generated by a system or application—that is, a series of text records with status updates from your system or app. They generally include timestamps, messages, and context information like the severity of the logged event. + +The logs visualization displays these records from data sources that support logs, such as Elastic, Influx, and Loki. The logs visualization has colored indicators of log status, as well as collapsible log events that help you analyze the information generated. {{< figure src="/static/img/docs/v64/logs-panel.png" max-width="1025px" alt="Logs panel" >}} {{< docs/play title="Logs Panel" url="https://play.grafana.org/d/6NmftOxZz/" >}} -The logs visualization shows the result of queries that were entered in the Query tab. The results of multiple queries are merged and sorted by time. You can scroll inside the panel if the data source returns more lines than can be displayed at any one time. +Typically, you use logs with a graph visualization to display the log output of a related process. If you have an incident in your application or systems, such as a website disruption or code failure, you can use the logs visualization to help you figure out what went wrong, when, and even why. -To limit the number of lines rendered, you can use the **Max data points** setting in the **Query options**. If it is not set, then the data source will usually enforce a default limit. +## Configure a log visualization The following video provides a walkthrough of creating a logs visualization. You'll also learn how to customize some settings and log visualization caveats: {{< youtube id="jSSi_x-fD_8" >}} +## Supported data formats + +The logs visualization works best with log-type datasets such as queries from data sources like Loki, Elastic, and InfluxDB. + +You can also build log-formatted data from other data sources as long as the first field is a time type followed by string, number, and time fields.
The leading time field is used to sort and timestamp the logs and if the data contains other time-type fields, they’re included as elements of the logged record. + +The second field is used as the log record title regardless of whether it’s a time, numeric, or string field. Usually the second field is a text field containing multiple string elements, but if the message level (or `lvl`) is present, the visualization uses the values in it to add colors to the record, as described in [Log levels integration](ref:log-levels). + +Subsequent fields are collapsed inside of each log record and you can open them by clicking the expand (`>`) icon. + +To limit the number of log lines rendered in the visualization, you can use the **Max data points** setting in the panel **Query options**. If that option isn't set, then the data source typically enforces its own default limit. + +### Example + +| Time | TitleMessage | Element1 | Element2 | Element3 | +| ------------------- | -------------------- | -------- | -------- | ------------------- | +| 2023-02-01 12:00:00 | title=Log1 lvl=info | 1 | server2 | 2023-02-01 11:00:00 | +| 2023-02-01 11:30:00 | title=Log1 lvl=error | 1 | server2 | 2023-02-01 11:00:00 | +| 2023-02-01 11:00:00 | title=Log1 lvl=trace | 1 | server2 | 2023-02-01 11:00:00 | + +![Logs Example](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-logs-example.png 'Logs Example') + ## Panel options {{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} ## Log level -For logs where a **level** label is specified, we use the value of the label to determine the log level and update color accordingly. If the log doesn't have a level label specified, we try to find out if its content matches any of the supported expressions (see below for more information). The log level is always determined by the first match. In case Grafana is not able to determine a log level, it will be visualized with **unknown** log level. 
See [supported log levels and mappings of log level abbreviation and expressions](ref:supported-log-levels-and-mappings-of-log-level-abbreviation-and-expressions). +For logs where a **level** label is specified, we use the value of the label to determine the log level and update color accordingly. If the log doesn't have a level label specified, we try to find out if its content matches any of the supported expressions (see below for more information). The log level is always determined by the first match. In case Grafana is not able to determine a log level, it will be visualized with **unknown** log level. See [supported log levels and mappings of log level abbreviation and expressions](ref:log-levels). ## Log details diff --git a/docs/sources/panels-visualizations/visualizations/node-graph/index.md b/docs/sources/panels-visualizations/visualizations/node-graph/index.md index eabc6d2e60b66..3c8355aa4f7f1 100644 --- a/docs/sources/panels-visualizations/visualizations/node-graph/index.md +++ b/docs/sources/panels-visualizations/visualizations/node-graph/index.md @@ -21,10 +21,24 @@ weight: 100 # Node graph -Node graphs can visualize directed graphs or networks. They use a directed force layout to effectively position the nodes, so they can display complex infrastructure maps, hierarchies, or execution diagrams. +Node graphs are useful when you need to visualize elements that are related to each other. This is done by displaying circles—or _nodes_—for each element you want to visualize, connected by lines—or _edges_. The visualization uses a directed force layout that positions the nodes into a network of connected circles. + +Node graphs display useful information about each node, as well as the relationships between them, allowing you to visualize complex infrastructure maps, hierarchies, or execution diagrams. 
![Node graph visualization](/static/img/docs/node-graph/node-graph-8-0.png 'Node graph') +The appearance of nodes and edges can also be customized in several ways including color, borders, and line style. + +You can use a node graph visualization if you need to show: + +- Solution topologies +- Networks +- Infrastructure +- Organizational charts +- Critical path diagrams +- Family trees +- Mind maps + ## Configure a node graph visualization The following video provides beginner steps for creating node panel visualizations. You'll learn the data requirements and caveats, special customizations, and much more: @@ -33,6 +47,38 @@ The following video provides beginner steps for creating node panel visualizatio {{< docs/play title="Node graph panel" url="https://play.grafana.org/d/bdodfbi3d57uoe/" >}} +## Supported data formats + +To create node graphs, you need two datasets: one containing the records for the displayed elements (nodes) and one dataset containing the records for the connections between those elements (edges). + +### Nodes dataset + +The nodes dataset must contain one alphanumeric ID field that gives each element a unique identifier. The visualization also accepts other optional fields for titles, subtitles, main and secondary stats, arc information for how much of the circle border to paint, details, colors, icons, node size, and indicators for element highlighting. For more information and naming conventions for these fields, refer to the [Nodes data frame structure](#nodes-data-frame-structure) section.
+ +#### Example + +| id | title | subtitle | mainstat | secondarystat | color | icon | highlighted | + | ----- | ----- | -------- | -------- | ------------- | ----- | ---- | ----------- | + | node1 | PC | Windows | AMD | 16gbRAM | blue | | true | + | node2 | PC | Linux | Intel | 32gbRAM | green | eye | false | + | node3 | Mac | MacOS | M3 | 16gbRAM | gray | apps | false | + | node4 | Alone | SoLonely | JustHere | NotConnected | red | | false | + +If the icon field contains a value, it’s displayed instead of the title and subtitle. For a list of available icons, refer to [Icons Overview](https://developers.grafana.com/ui/latest/index.html?path=/story/docs-overview-icon--icons-overview). + +### Edges dataset + +Similar to the nodes dataset, the edges dataset needs one unique ID field for each relationship, followed by two fields containing the source and the target nodes of the edge; that is, the nodes the edge connects. Other optional fields are main and secondary stats, context menu elements, line thickness, highlight indications, line colors, and configurations to turn the connection into a dashed line. For more information and naming conventions for these fields, refer to the [Edges data frame structure](#edges-data-frame-structure) section. + +#### Example + +| id | source | target | mainstat | secondarystat | thickness | highlighted | color | + | ----- | ------ | ------ | -------- | -------------- | --------- | ----------- | ------ | + | edge1 | node1 | node2 | TheMain | TheSub | 3 | true | cyan | + | edge2 | node3 | node2 | Main2 | Sub2 | 1 | false | orange | + +If a node lacks edge connections, it’s displayed on its own outside of the network.
+ ## Panel options {{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} diff --git a/docs/sources/panels-visualizations/visualizations/pie-chart/index.md b/docs/sources/panels-visualizations/visualizations/pie-chart/index.md index 01f49e3de1aca..9f56def9011f0 100644 --- a/docs/sources/panels-visualizations/visualizations/pie-chart/index.md +++ b/docs/sources/panels-visualizations/visualizations/pie-chart/index.md @@ -23,9 +23,18 @@ refs: # Pie chart -{{< figure src="/static/img/docs/pie-chart-panel/pie-chart-example.png" max-width="1200px" lightbox="true" caption="Pie charts" >}} +A pie chart is a graph that displays data as segments of a circle proportional to the whole, making it look like a sliced pie. Each slice corresponds to a value or measurement. -Pie charts display reduced series, or values in a series, from one or more queries, as they relate to each other, in the form of slices of a pie. The arc length, area and central angle of a slice are all proportional to the slices value, as it relates to the sum of all values. This type of chart is best used when you want a quick comparison of a small set of values in an aesthetically pleasing form. +{{< figure src="/static/img/docs/pie-chart-panel/pie-chart-example.png" max-width="1200px" lightbox="true" alt="Pie charts" >}} + +The pie chart visualization is ideal when you have data that adds up to a total and you want to show the proportion of each value compared to other slices, as well as to the whole of the pie. 
+ +You can use a pie chart if you need to compare: + +- Browser share distribution in the market +- Incident causes per category +- Network traffic sources +- User demographics ## Configure a pie chart visualization @@ -35,6 +44,60 @@ The following video guides you through the creation steps and common customizati {{< docs/play title="Grafana Bar Charts and Pie Charts" url="https://play.grafana.org/d/ktMs4D6Mk/" >}} +## Supported data formats + +The pie chart is different from other visualizations in that it will only display one pie, regardless of the number of datasets, fields, or records queried in it. + +To create a pie chart visualization, you need a dataset containing a set of numeric values either in rows, columns, or both. + +### Example - One row + +The easiest way to provide data for a pie chart visualization is in a dataset with a single record (or row) containing the fields (or columns) that you want in the pie, as in the following example. The default settings of the pie chart visualization automatically display each column as a slice of the pie. + +| Value1 | Value2 | Value3 | Optional | +| ------ | ------ | ------ | -------- | +| 5 | 3 | 2 | Sums10 | + +![Pie chart visualization with multiple values in a single row](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-pie-example1.png) + +### Example - Multiple rows + +If you need to use numeric data that's in multiple rows, the default **Show** parameter of the visualization [Value options](#value-options) is set to **Calculate** and use data from the last row. + +| Value | Label | +| ----- | ------ | +| 5 | Value1 | +| 3 | Value2 | +| 2 | Value3 | + +![Pie chart visualization with multiple row values showing the last one](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-pie-example2.png) + +By default, the visualization is configured to [calculate](#value-options) a single value per column or series and to display only the last row of data. 
+ +To allow values in multiple rows to be displayed, change the **Show** setting in the [Value options](#value-options) from **Calculate** to **All values**. + +![Pie chart visualization with multiple row values showing all values](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-pie-example3.png) + +### Example - Multiple rows and columns + +If your dataset contains multiple rows and columns with numeric data, by default only the last row's values are summed. + +| Value1 | Value2 | Value3 | Optional | +| ------ | ------ | ------ | -------- | +| 5 | 3 | 2 | Sums10 | +| 10 | 6 | 4 | Sums20 | +| 20 | 8 | 2 | Sums30 | + +![Pie chart visualization with multiple rows and columns showing the last one](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-pie-example4.png) + +If you want to display all the cells, change the **Show** setting in the [Value options](#value-options) from **Calculate** to **All values**. This also labels the elements by concatenating all the text fields (if you have any) with the column name. + +![Pie chart visualization with multiple rows and columns showing the all values](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-pie-example5.png) + +If you want to display only the values from a given field (or column), once the **Show** setting in the [Value options](#value-options) is set to **All values**, set the **Fields** option to the column you wish to sum in the display. The value labels are also concatenated as indicated before. 
+ +![Pie chart visualization with multiple rows and columns showing values from one column](/media/docs/grafana/panels-visualizations/screenshot-grafana-12.1-pie-example6.png) + ## Panel options {{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} @@ -96,10 +159,6 @@ The following example shows a pie chart with **Name** and **Percent** labels dis ![Pie chart labels](/static/img/docs/pie-chart-panel/pie-chart-labels-7-5.png) -## Standard options - -{{< docs/shared lookup="visualizations/standard-options.md" source="grafana" version="" >}} - ## Tooltip options {{< docs/shared lookup="visualizations/tooltip-options-1.md" source="grafana" version="" >}} @@ -137,6 +196,14 @@ Select values to display in the legend. You can select more than one. - **Percent:** The percentage of the whole. - **Value:** The raw numerical value. +## Standard options + +{{< docs/shared lookup="visualizations/standard-options.md" source="grafana" version="" >}} + +## Data links + +{{< docs/shared lookup="visualizations/datalink-options.md" source="grafana" version="" >}} + ## Value mappings {{< docs/shared lookup="visualizations/value-mappings-options.md" source="grafana" version="" >}} diff --git a/docs/sources/panels-visualizations/visualizations/status-history/index.md b/docs/sources/panels-visualizations/visualizations/status-history/index.md index 67326723ea591..b7825b94b828a 100644 --- a/docs/sources/panels-visualizations/visualizations/status-history/index.md +++ b/docs/sources/panels-visualizations/visualizations/status-history/index.md @@ -107,6 +107,10 @@ Use these options to refine the visualization. Controls whether values are rendered inside the value boxes. Auto will render values if there is sufficient space. +### Row height + +Controls the height of boxes. 1 = maximum space and 0 = minimum space. + ### Column width Controls the width of boxes. 1 = maximum space and 0 = minimum space. @@ -119,10 +123,6 @@ Controls line width of state regions. 
Controls the opacity of state regions. -## Standard options - -{{< docs/shared lookup="visualizations/standard-options.md" source="grafana" version="" >}} - ## Legend options {{< docs/shared lookup="visualizations/legend-options-2.md" source="grafana" version="" >}} @@ -131,6 +131,10 @@ Controls the opacity of state regions. {{< docs/shared lookup="visualizations/tooltip-options-1.md" source="grafana" version="" >}} +## Standard options + +{{< docs/shared lookup="visualizations/standard-options.md" source="grafana" version="" >}} + ## Data links {{< docs/shared lookup="visualizations/datalink-options.md" source="grafana" version="" >}} diff --git a/docs/sources/panels-visualizations/visualizations/table/index.md b/docs/sources/panels-visualizations/visualizations/table/index.md index 6d203faabf935..e1b6ecdaaaf1b 100644 --- a/docs/sources/panels-visualizations/visualizations/table/index.md +++ b/docs/sources/panels-visualizations/visualizations/table/index.md @@ -54,13 +54,36 @@ refs: destination: /docs/grafana//panels-visualizations/configure-overrides/ - pattern: /docs/grafana-cloud/ destination: /docs/grafana-cloud/visualizations/panels-visualizations/configure-overrides/ + data-transformation: + - pattern: /docs/grafana/ + destination: /docs/grafana//panels-visualizations/query-transform-data/transform-data/ + - pattern: /docs/grafana-cloud/ + destination: /docs/grafana-cloud/visualizations/panels-visualizations/query-transform-data/transform-data/ + build-query: + - pattern: /docs/grafana/ + destination: /docs/grafana//panels-visualizations/query-transform-data/ + - pattern: /docs/grafana-cloud/ + destination: /docs/grafana-cloud/visualizations/panels-visualizations/query-transform-data/ --- # Table Tables are a highly flexible visualization designed to display data in columns and rows. They support various data types, including tables, time series, annotations, and raw JSON data. 
The table visualization can even take multiple data sets and provide the option to switch between them. With this versatility, it's the preferred visualization for viewing multiple data types, aiding in your data analysis needs. -{{< figure src="/static/img/docs/tables/table_visualization.png" max-width="1200px" lightbox="true" caption="Table visualization" >}} +{{< figure src="/static/img/docs/tables/table_visualization.png" max-width="1200px" lightbox="true" alt="Table visualization" >}} + +You can use a table visualization to show datasets such as: + +- Common database queries like logs, traces, metrics +- Financial reports +- Customer lists +- Product catalogs + +Any information you might want to put in a spreadsheet can often be best visualized in a table. + +Tables also provide different styles to visualize data inside the table cells such as colored text and cell backgrounds, gauges, sparklines, data links, JSON code, and images. + +## Configure a table visualization The following video provides a visual walkthrough of the options you can set in a table visualization. If you want to see a configuration in action, check out the video: @@ -72,243 +95,265 @@ The following video provides a visual walkthrough of the options you can set in Annotations and alerts are not currently supported for tables. {{< /admonition >}} -## Sort column - -Click a column title to change the sort order from default to descending to ascending. Each time you click, the sort order changes to the next option in the cycle. You can sort multiple columns by holding the `shift` key and clicking the column name. - -![Sort descending](/static/img/docs/tables/sort-descending.png 'Sort descending') +## Supported data formats -## Data set selector +The table visualization supports any data that has a column-row structure. -If the data queried contains multiple data sets, a table displays a drop-down list at the bottom, so you can select the data set you want to visualize. 
+### Example -![Table visualization with multiple data sets](/media/docs/grafana/panels-visualizations/TablePanelMultiSet.png) +``` +Column1, Column2, Column3 +value1 , value2 , value3 +value4 , value5 , value6 +value7 , value8 , value9 +``` -## Panel options +If a cell is missing or the table cell-row structure is not complete, the table visualization won’t display any of the data: -{{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} - -## Table options +``` +Column1, Column2, Column3 +value1 , value2 , value3 +gap1 , gap2 +value4 , value5 , value6 +``` -{{% admonition type="note" %}} -If you are using a table created before Grafana 7.0, then you need to migrate to the new table version in order to see these options. To migrate, on the Panel tab, click **Table** visualization. Grafana updates the table version and you can then access all table options. -{{% /admonition %}} +If you need to hide columns, you can do so using [data transformations](ref:data-transformation), [field overrides](#field-overrides), or by [building a query](ref:build-query) that returns only the needed columns. -### Show header +If you’re using a cell type such as sparkline or JSON, the data requirements may differ in a way that’s specific to that type. For more info refer to [Cell type](#cell-type). -Show or hide column names imported from your data source. +## Debugging in tables -### Column width +The table visualization helps with debugging when you need to know exactly what results your query is returning and why other visualizations might not be working. This functionality is also accessible in most visualizations by toggling on the **Table view** switch at the top of the panel: -By default, Grafana automatically calculates the column width based on the table size and the minimum column width. This field option can override the setting and define the width for all columns in pixels. 
+![The Table view switch](/media/docs/grafana/panels-visualizations/screenshot-table-view-on-11.2.png) -For example, if you enter `100` in the field, then when you click outside the field, all the columns will be set to 100 pixels wide. +## Turn on column filtering -### Minimum column width +1. In Grafana, navigate to the dashboard with the table with the columns that you want to filter. +1. On the table panel you want to filter, open the panel editor. +1. Expand the **Table** options section. +1. Toggle on the [**Column filter** switch](#table-options). -By default, the minimum width of the table column is 150 pixels. This field option can override that default and will define the new minimum column width for the table in pixels. +A filter icon appears next to each column title. -For example, if you enter `75` in the field, then when you click outside the field, all the columns will scale to no smaller than 75 pixels wide. +{{< figure src="/static/img/docs/tables/column-filter-with-icon.png" max-width="350px" alt="Column filtering turned on" class="docs-image--no-shadow" >}} -For small-screen devices, such as smartphones or tablets, reduce the default `150` pixel value to`50` to allow table-based panels to render correctly in dashboards. +### Filter column values -### Column alignment +To filter column values, click the filter (funnel) icon next to a column title. Grafana displays the filter options for that column. -Choose how Grafana should align cell contents: +{{< figure src="/static/img/docs/tables/filter-column-values.png" max-width="300px" alt="Filter column values" class="docs-image--no-shadow" >}} -- Auto (default) -- Left -- Center -- Right +Click the check box next to the values that you want to display. Enter text in the search field at the top to show those values in the display so that you can select them rather than scroll to find them.
-### Column filter +Choose from several operators to display column values: -You can temporarily change how column data is displayed. For example, you can order values from highest to lowest or hide specific values. For more information, refer to [Filter table columns](#filter-table-columns). +- **Contains** - Matches a regex pattern (operator by default). +- **Expression** - Evaluates a boolean expression. The character `$` represents the column value in the expression (for example, "$ >= 10 && $ <= 12"). +- The typical comparison operators: `=`, `!=`, `<`, `<=`, `>`, `>=`. -### Pagination +Click the check box above the **Ok** and **Cancel** buttons to add or remove all displayed values to/from the filter. -Use this option to enable or disable pagination. It is a front-end option that does not affect queries. When enabled, the page size automatically adjusts to the height of the table. +### Clear column filters -## Cell options +Columns with filters applied have a blue funnel displayed next to the title. -### Cell type +{{< figure src="/static/img/docs/tables/filtered-column.png" max-width="100px" alt="Filtered column" class="docs-image--no-shadow" >}} -By default, Grafana automatically chooses display settings. You can override the settings by choosing one of the following options to set the default for all fields. Additional configuration is available for some cell types. +To remove the filter, click the blue funnel icon and then click **Clear filter**. -{{% admonition type="note" %}} -If you set these in the Field tab, then the type will apply to all fields, including the time field. Many options will work best if you set them in the Override tab so that they can be restricted to one or more fields. -{{% /admonition %}} +## Sort columns -#### Auto +Click a column title to change the sort order from default to descending to ascending. Each time you click, the sort order changes to the next option in the cycle. 
You can sort multiple columns by holding the `shift` key and clicking the column name. -The **Auto** cell type automatically displays values, with sensible defaults applied. +{{< figure src="/static/img/docs/tables/sort-descending.png" max-width="350px" alt="Sort descending" class="docs-image--no-shadow" >}} -#### Color text +## Dataset selector -If thresholds are set, then the field text is displayed in the appropriate threshold color. +If the data queried contains multiple datasets, a table displays a drop-down list at the bottom, so you can select the dataset you want to visualize. -{{< figure src="/static/img/docs/tables/color-text.png" max-width="500px" caption="Color text" class="docs-image--no-shadow" >}} +{{< figure src="/media/docs/grafana/panels-visualizations/TablePanelMultiSet.png" max-width="650px" alt="Table visualization with multiple datasets" class="docs-image--no-shadow" >}} -#### Color background (gradient or solid) +## Configuration options -If thresholds are set, then the field background is displayed in the appropriate threshold color. +### Panel options -{{< figure src="/static/img/docs/tables/color-background.png" max-width="500px" caption="Color background" class="docs-image--no-shadow" >}} +{{< docs/shared lookup="visualizations/panel-options.md" source="grafana" version="" >}} -Toggle the **Apply to entire row** switch, to apply the background color that's configured for the cell to the whole row. +### Table options -{{< figure src="/static/img/docs/tables/colored-rows.png" max-width="500px" alt="Colored row background" class="docs-image--no-shadow" >}} +{{% admonition type="note" %}} +If you are using a table created before Grafana 7.0, then you need to migrate to the new table version in order to see these options. To migrate, on the Panel tab, click **Table** visualization. Grafana updates the table version and you can then access all table options. 
+{{% /admonition %}} -#### Gauge +| Option | Description | +| -------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Show table header | Show or hide column names imported from your data source. | +| Cell height | Set the height of the cell. Choose from **Small**, **Medium**, and **Large**. | +| Enable pagination | Toggle the switch to control how many table rows are visible at once. When switched on, the page size automatically adjusts to the height of the table. This option doesn't affect queries. | +| Minimum column width | Define the lower limit of the column width, in pixels. By default, the minimum width of the table column is 150 pixels. For small-screen devices, such as smartphones or tablets, reduce the default `150` pixel value to `50` to allow table-based panels to render correctly in dashboards. | +| Column width | Define a column width, in pixels, rather than allowing the width to be set automatically. By default, Grafana calculates the column width based on the table size and the minimum column width. | +| Column alignment | Set how Grafana should align cell contents. Choose from: **Auto** (default), **Left**, **Center**, and **Right**. | +| Column filter | Temporarily change how column data is displayed. For example, you can order values from highest to lowest or hide specific values. For more information, refer to [Filter table columns](#filter-table-columns). | -Cells can be displayed as a graphical gauge, with several different presentation types. +### Table footer options -{{< admonition type="note" >}} -The maximum and minimum values of the gauges are configured automatically from the smallest and largest values in your whole data set. 
If you don't want the max/min values to be pulled from the whole data set, you can configure them for each column with field overrides. -{{< /admonition >}} +Toggle the **Show table footer** switch on and off to control the display of the footer. When the toggle is switched on, you can use the table footer to show [calculations](ref:calculations) on fields. -##### Basic +After you activate the table footer, make selections in the following options: -The basic mode will show a simple gauge with the threshold levels defining the color of gauge. +- **Calculation** - The calculation that you want to apply. +- **Fields** - The fields to which you want to apply the calculations. The system applies the calculation to all numeric fields if you do not select a field. +- **Count rows** - This option is displayed if you select the **Count** calculation. If you want to show the number of rows in the dataset instead of the number of values in the selected fields, toggle on the **Count rows** switch. -{{< figure src="/static/img/docs/tables/basic-gauge.png" max-width="500px" caption="Gradient gauge" class="docs-image--no-shadow" >}} +### Cell options -##### Gradient +Cell options allow you to control how data is displayed in a table. -The threshold levels define a gradient. +#### Cell type -{{< figure src="/static/img/docs/tables/gradient-gauge.png" max-width="500px" caption="Gradient gauge" class="docs-image--no-shadow" >}} +By default, Grafana automatically chooses display settings. You can override the settings by choosing one of the following options to set the default for all fields. Additional configuration is available for some cell types. -##### LCD +{{% admonition type="note" %}} +If you set these in the Field tab, then the type will apply to all fields, including the time field. Many options will work best if you set them in the Override tab so that they can be restricted to one or more fields.
+{{% /admonition %}} -The gauge is split up in small cells that are lit or unlit. +| Cell type | Description | +| ----------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Auto | The **Auto** cell type automatically displays values, with sensible defaults applied. | +| [Sparkline](#sparkline) | Shows values rendered as a sparkline. | +| [Colored text](#colored-text) | If thresholds are set, then the field text is displayed in the appropriate threshold color. | +| [Colored background](#colored-background) | If thresholds are set, then the field background is displayed in the appropriate threshold color. | +| [Gauge](#gauge) | Cells can be displayed as a graphical gauge, with several different presentation types. You can set the [Gauge display mode](#gauge-display-mode) and the [Value display](#value-display) options. | +| Data links | If you've configured data links, when the cell type is **Auto** mode, the cell text becomes clickable. If you change the cell type to **Data links**, the cell text reflects the titles of the configured data links. To control the application of data link text more granularly use a **Cell option > Cell type > Data links** field override. | +| [JSON View](#json-view) | Shows value formatted as code. | +| [Image](#image) | If you have a field value that is an image URL or a base64 encoded image you can configure the table to display it as an image. | -{{< figure src="/static/img/docs/tables/lcd-gauge.png" max-width="500px" caption="LCD gauge" class="docs-image--no-shadow" >}} +##### Sparkline -##### Label Options +Shows values rendered as a sparkline. 
You can show sparklines using the [Time series to table transformation](ref:time-series-to-table-transformation) on data with multiple time series to process it into a format the table can show. -Additionally, labels displayed alongside of the gauges can be set to be colored by value, match the theme text color, or be hidden. +{{< figure src="/static/img/docs/tables/sparkline2.png" max-width="500px" alt="Sparkline" class="docs-image--no-shadow" >}} -**Value Color** +You can customize sparklines with many of the same options as the [time series visualization](ref:time-series-panel) including line style and width, fill opacity, gradient mode, and more. You can also change the color of the sparkline by updating the [color scheme](ref:color-scheme) in the **Standard options** section of the panel configuration. -{{< figure src="/static/img/docs/tables/value-color-mode.png" max-width="500px" caption="Color Label by Value" class="docs-image--no-shadow" >}} +##### Colored text -**Text Color** +If thresholds are set, then the field text is displayed in the appropriate threshold color. -{{< figure src="/static/img/docs/tables/text-color-mode.png" max-width="500px" caption="Color Label by theme color" class="docs-image--no-shadow" >}} +{{< figure src="/static/img/docs/tables/color-text.png" max-width="500px" alt="Color text" class="docs-image--no-shadow" >}} -**Hidden** +{{< admonition type="note" >}} +This is an experimental feature. +{{< /admonition >}} -{{< figure src="/static/img/docs/tables/hidden-mode.png" max-width="500px" caption="Hide Label" class="docs-image--no-shadow" >}} +##### Colored background -#### Data links +If thresholds are set, then the field background is displayed in the appropriate threshold color. -If you've configured data links, when the cell type is **Auto** mode, the cell text becomes clickable. If you change the cell type to **Data links**, the cell text reflects the titles of the configured data links. 
To control the application of data link text more granularly use a **Cell option > Cell type > Data links** field override. +{{< figure src="/static/img/docs/tables/color-background.png" max-width="500px" alt="Color background" class="docs-image--no-shadow" >}} -#### JSON view +Choose between **Basic** and **Gradient** to set the **Background display mode**. -Shows value formatted as code. If a value is an object the JSON view allowing browsing the JSON object will appear on hover. +Toggle the **Apply to entire row** switch, to apply the background color that's configured for the cell to the whole row. -{{< figure src="/static/img/docs/tables/json-view.png" max-width="500px" caption="JSON view" class="docs-image--no-shadow" >}} +{{< figure src="/static/img/docs/tables/colored-rows.png" max-width="500px" alt="Colored row background" class="docs-image--no-shadow" >}} -#### Image +##### Gauge -> Only available in Grafana 7.3+ +Cells can be displayed as a graphical gauge, with several different presentation types controlled by the gauge display mode and the value display. -If you have a field value that is an image URL or a base64 encoded image you can configure the table to display it as an image. +{{< admonition type="note" >}} +The maximum and minimum values of the gauges are configured automatically from the smallest and largest values in your whole data set. If you don't want the max/min values to be pulled from the whole data set, you can configure them for each column with field overrides. +{{< /admonition >}} -{{< figure src="/static/img/docs/v73/table_hover.gif" max-width="900px" caption="Table hover" >}} +###### Gauge display mode -#### Sparkline +You can set three gauge display modes. -Shows values rendered as a sparkline. You can show sparklines using the [Time series to table transformation](ref:time-series-to-table-transformation) on data with multiple time series to process it into a format the table can show. 
+- **Basic** - Shows a simple gauge with the threshold levels defining the color of gauge. -{{< figure src="/static/img/docs/tables/sparkline2.png" max-width="500px" caption="Sparkline" class="docs-image--no-shadow" >}} + {{< figure src="/static/img/docs/tables/basic-gauge.png" max-width="500px" alt="Gradient gauge" class="docs-image--no-shadow" >}} -You can be customize sparklines with many of the same options as the [Time series panel](ref:time-series-panel) including line width, fill opacity, and more. You can also change the color of the sparkline by updating the [color scheme](ref:color-scheme) in the _Standard options_ section of the panel configuration. +- **Gradient** - The threshold levels define a gradient. -### Wrap text + {{< figure src="/static/img/docs/tables/gradient-gauge.png" max-width="500px" alt="Gradient gauge" class="docs-image--no-shadow" >}} -{{< admonition type="note" >}} -Text wrapping is in [public preview](https://grafana.com/docs/release-life-cycle/#public-preview), however, it’s available to use by default. We’d love hear from you about how this new feature is working. To provide feedback, you can open an issue in the [Grafana GitHub repository](https://github.com/grafana/grafana). -{{< /admonition >}} +- **Retro LCD** - The gauge is split up in small cells that are lit or unlit. -Toggle the **Wrap text** switch to wrap text in the cell with the longest content in your table. To wrap the text in a specific column only, use the Wrap Text option in a [field override](ref:field-override). + {{< figure src="/static/img/docs/tables/lcd-gauge.png" max-width="500px" alt="LCD gauge" class="docs-image--no-shadow" >}} -### Cell value inspect +###### Value display -Enables value inspection from table cell. The raw value is presented in a modal window. +Labels displayed alongside of the gauges can be set to be colored by value, match the theme text color, or be hidden. 
-{{% admonition type="note" %}} -Cell value inspection is only available when cell display mode is set to Auto, Color text, Color background or JSON View. -{{% /admonition %}} +- **Value color** -## Turn on column filtering + {{< figure src="/static/img/docs/tables/value-color-mode.png" max-width="500px" alt="Color Label by Value" class="docs-image--no-shadow" >}} -1. In Grafana, navigate to the dashboard with the table with the columns that you want to filter. -1. On the table panel you want to filter, open the panel editor. -1. Click the **Field** tab. -1. In Table options, turn on the **Column filter** option. +- **Text color** -A filter icon appears next to each column title. + {{< figure src="/static/img/docs/tables/text-color-mode.png" max-width="500px" alt="Color Label by theme color" class="docs-image--no-shadow" >}} -{{< figure src="/static/img/docs/tables/column-filter-with-icon.png" max-width="500px" caption="Column filtering turned on" class="docs-image--no-shadow" >}} +- **Hidden** -### Filter column values + {{< figure src="/static/img/docs/tables/hidden-mode.png" max-width="500px" alt="Hide Label" class="docs-image--no-shadow" >}} -To filter column values, click the filter (funnel) icon next to a column title. Grafana displays the filter options for that column. +##### JSON View -{{< figure src="/static/img/docs/tables/filter-column-values.png" max-width="500px" caption="Filter column values" class="docs-image--no-shadow" >}} +Shows value formatted as code. If a value is an object the JSON view allowing browsing the JSON object will appear on hover. -Click the check box next to the values that you want to display. Enter text in the search field at the top to show those values in the display so that you can select them rather than scroll to find them. 
+{{< figure src="/static/img/docs/tables/json-view.png" max-width="350px" alt="JSON view" class="docs-image--no-shadow" >}} -Choose from several operators to display column values: +##### Image -- **Contains** - Matches a regex pattern (operator by default). -- **Expression** - Evaluates a boolean expression. The character `$` represents the column value in the expression (for example, "$ >= 10 && $ <= 12"). -- The typical comparison operators: `=`, `!=`, `<`, `<=`, `>`, `>=`. +{{< admonition type="note" >}} +Only available in Grafana 7.3+ +{{< /admonition >}} -Click the check box above the **Ok** and **Cancel** buttons to add or remove all displayed values to/from the filter. +If you have a field value that is an image URL or a base64 encoded image you can configure the table to display it as an image. -### Clear column filters +{{< figure src="/static/img/docs/v73/table_hover.gif" max-width="900px" alt="Table hover" >}} -Columns with filters applied have a blue funnel displayed next to the title. +- **Alt text** - Set the alternative text of an image. The text will be available for screen readers and in cases when images can't be loaded. +- **Title text** - Set the text that's displayed when the image is hovered over with a cursor. -{{< figure src="/static/img/docs/tables/filtered-column.png" max-width="500px" caption="Filtered column" class="docs-image--no-shadow" >}} +#### Wrap text -To remove the filter, click the blue funnel icon and then click **Clear filter**. +{{< admonition type="note" >}} +Text wrapping is in [public preview](https://grafana.com/docs/release-life-cycle/#public-preview), however, it’s available to use by default. We’d love hear from you about how this new feature is working. To provide feedback, you can open an issue in the [Grafana GitHub repository](https://github.com/grafana/grafana). +{{< /admonition >}} -## Table footer +Toggle the **Wrap text** switch to wrap text in the cell with the longest content in your table. 
To wrap the text in a specific column only, use the Wrap Text option in a [field override](ref:field-override). -You can use the table footer to show [calculations](ref:calculations) on fields. +This option isn't available when you set the cell type to **Gauge** or Data links,JSON View, Image. -After you enable the table footer: +#### Cell value inspect -1. Select the **Calculation** -2. Select the **Fields** that you want to calculate +Enables value inspection from table cells. When the **Cell inspect value** switch is toggled on, clicking the inspect icon in a cell opens the **Inspect value** drawer. -The system applies the calculation to all numeric fields if you do not select a field. +The **Inspect value** drawer has two tabs, **Plain text** and **Code editor**. Grafana attempts to automatically detect the type of data in the cell and opens the drawer with the associated tab showing. However, you can switch back and forth between tabs. -### Count rows +Cell value inspection is only available when the **Cell type** selection is **Auto**, **Colored text**, **Colored background**, or **JSON View**. -If you want to show the number of rows in the dataset instead of the number of values in the selected fields, select the **Count** calculation and enable **Count rows**. +This option isn't available when you set the cell type to **Gauge** or Data links, Image, . 
-## Standard options +### Standard options {{< docs/shared lookup="visualizations/standard-options.md" source="grafana" version="" >}} -## Data links +### Data links {{< docs/shared lookup="visualizations/datalink-options.md" source="grafana" version="" >}} -## Value mappings +### Value mappings {{< docs/shared lookup="visualizations/value-mappings-options.md" source="grafana" version="" >}} -## Thresholds +### Thresholds {{< docs/shared lookup="visualizations/thresholds-options-2.md" source="grafana" version="" >}} -## Field overrides +### Field overrides {{< docs/shared lookup="visualizations/overrides-options.md" source="grafana" version="" >}} diff --git a/docs/sources/panels-visualizations/visualizations/time-series/index.md b/docs/sources/panels-visualizations/visualizations/time-series/index.md index 5a1907da8d2a1..e7f21000554d4 100644 --- a/docs/sources/panels-visualizations/visualizations/time-series/index.md +++ b/docs/sources/panels-visualizations/visualizations/time-series/index.md @@ -64,11 +64,16 @@ refs: destination: /docs/grafana//panels-visualizations/panel-editor-overview/#data-section - pattern: /docs/grafana-cloud/ destination: /docs/grafana-cloud/visualizations/panels-visualizations/panel-editor-overview/#data-section + data-transformation: + - pattern: /docs/grafana/ + destination: /docs/grafana//panels-visualizations/panel-editor-overview/#data-section + - pattern: /docs/grafana-cloud/ + destination: /docs/grafana-cloud/visualizations/panels-visualizations/panel-editor-overview/#data-section --- # Time series -Time series visualizations are the default way to visualize data points over intervals of time, as a graph. They can render series as lines, points, or bars and are versatile enough to display almost any time-series data. +Time series visualizations are the default way to show the variations of a set of data values over time. Each data point is matched to a timestamp and this _time series_ is displayed as a graph. 
The visualization can render series as lines, points, or bars and it's versatile enough to display almost any type of [time-series data](https://grafana.com/docs/grafana//fundamentals/timeseries/). {{< figure src="/static/img/docs/time-series-panel/time_series_small_example.png" max-width="1200px" alt="Time series" >}} @@ -76,6 +81,14 @@ Time series visualizations are the default way to visualize data points over int You can migrate from the legacy Graph visualization to the time series visualization. To migrate, open the panel and click the **Migrate** button in the side pane. {{< /admonition >}} +A time series visualization displays an x-y graph with time progression on the x-axis and the magnitude of the values on the y-axis. This visualization is ideal for displaying large numbers of timed data points that would be hard to track in a table or list. + +You can use the time series visualization if you need track: + +- Temperature variations throughout the day +- The daily progress of your retirement account +- The distance you jog each day over the course of a year + ## Configure a time series visualization The following video guides you through the creation steps and common customizations of time series visualizations, and is great for beginners: @@ -86,7 +99,72 @@ The following video guides you through the creation steps and common customizati ## Supported data formats -Time series visualizations require time-series data—a sequence of measurements, ordered in time, and formatted as a table—where every row in the table represents one individual measurement at a specific time. Learn more about [time-series data](https://grafana.com/docs/grafana//fundamentals/timeseries/). +Time series visualizations require time-series data—a sequence of measurements, ordered in time, and formatted as a table—where every row in the table represents one individual measurement at a specific time. 
Learn more about [time-series data](https://grafana.com/docs/grafana//fundamentals/timeseries/). + +The dataset must contain at least one numeric field, and in the case of multiple numeric fields, each one is plotted as a new line, point, or bar labeled with the field name in the tooltip. + +### Example 1 + +In the following example, there are three numeric fields represented by three lines in the chart: + +| Time | value1 | value2 | value3 | +| ------------------- | ------ | ------ | ------ | +| 2022-11-01 10:00:00 | 1 | 2 | 3 | +| 2022-11-01 11:00:00 | 4 | 5 | 6 | +| 2022-11-01 12:00:00 | 7 | 8 | 9 | +| 2022-11-01 13:00:00 | 4 | 5 | 6 | + +![Time series line chart with multiple numeric fields](/media/docs/grafana/panels-visualizations/screenshot-grafana-11.1-timeseries-example1v2.png 'Time series line chart with multiple numeric fields') + +If the time field isn't automatically detected, you might need to convert the data to a time format using a [data transformation](ref:data-transformation). + +### Example 2 + +The time series visualization also supports multiple datasets. If all datasets are in the correct format, the visualization plots the numeric fields of all datasets and labels them using the column name of the field. 
+ +#### Query1 + +| Time | value1 | value2 | value3 | +| ------------------- | ------ | ------ | ------ | +| 2022-11-01 10:00:00 | 1 | 2 | 3 | +| 2022-11-01 11:00:00 | 4 | 5 | 6 | +| 2022-11-01 12:00:00 | 7 | 8 | 9 | + +#### Query2 + +| timestamp | number1 | number2 | number3 | +| ------------------- | ------- | ------- | ------- | +| 2022-11-01 10:30:00 | 11 | 12 | 13 | +| 2022-11-01 11:30:00 | 14 | 15 | 16 | +| 2022-11-01 12:30:00 | 17 | 18 | 19 | +| 2022-11-01 13:30:00 | 14 | 15 | 16 | + +![Time series line chart with two datasets](/media/docs/grafana/panels-visualizations/screenshot-grafana-11.1-timeseries-example2v2.png 'Time series line chart with two datasets') + +### Example 3 + +If you want to more easily compare events between different, but overlapping, time frames, you can do this by using a time offset while querying the compared dataset: + +#### Query1 + +| Time | value1 | value2 | value3 | +| ------------------- | ------ | ------ | ------ | +| 2022-11-01 10:00:00 | 1 | 2 | 3 | +| 2022-11-01 11:00:00 | 4 | 5 | 6 | +| 2022-11-01 12:00:00 | 7 | 8 | 9 | + +#### Query2 + +| timestamp(-30min) | number1 | number2 | number3 | +| ------------------- | ------- | ------- | ------- | +| 2022-11-01 10:30:00 | 11 | 12 | 13 | +| 2022-11-01 11:30:00 | 14 | 15 | 16 | +| 2022-11-01 12:30:00 | 17 | 18 | 19 | +| 2022-11-01 13:30:00 | 14 | 15 | 16 | + +![Time Series Example with second Data Set offset](/media/docs/grafana/panels-visualizations/screenshot-grafana-11.1-timeseries-example3v2.png 'Time Series Example with second Data Set offset') + +When you add the offset, the resulting visualization makes the datasets appear to be occurring at the same time so that you can compare them more easily. 
## Alert rules diff --git a/docs/sources/setup-grafana/configure-grafana/_index.md b/docs/sources/setup-grafana/configure-grafana/_index.md index dae820f5ec6ce..88b3755746179 100644 --- a/docs/sources/setup-grafana/configure-grafana/_index.md +++ b/docs/sources/setup-grafana/configure-grafana/_index.md @@ -39,7 +39,7 @@ On Windows, the `sample.ini` file is located in the same directory as `defaults. ### macOS -By default, the configuration file is located at `/usr/local/etc/grafana/grafana.ini`. For a Grafana instance installed using Homebrew, edit the `grafana.ini` file directly. Otherwise, add a configuration file named `custom.ini` to the `conf` folder to override the settings defined in `conf/defaults.ini`. +By default, the configuration file is located at `/opt/homebrew/etc/grafana/grafana.ini` or `/usr/local/etc/grafana/grafana.ini`. For a Grafana instance installed using Homebrew, edit the `grafana.ini` file directly. Otherwise, add a configuration file named `custom.ini` to the `conf` folder to override the settings defined in `conf/defaults.ini`. ## Remove comments in the .ini files @@ -1325,7 +1325,7 @@ Either "OpportunisticStartTLS", "MandatoryStartTLS", "NoStartTLS". Default is `e ### enable_tracing -Enable trace propagation in e-mail headers, using the `traceparent`, `tracestate` and (optionally) `baggage` fields. Default is `false`. To enable, you must first configure tracing in one of the `tracing.oentelemetry.*` sections. +Enable trace propagation in e-mail headers, using the `traceparent`, `tracestate` and (optionally) `baggage` fields. Default is `false`. To enable, you must first configure tracing in one of the `tracing.opentelemetry.*` sections.
      diff --git a/docs/sources/setup-grafana/configure-security/configure-authentication/saml/index.md b/docs/sources/setup-grafana/configure-security/configure-authentication/saml/index.md index afd59a7f0d51c..dac259eb5acd8 100644 --- a/docs/sources/setup-grafana/configure-security/configure-authentication/saml/index.md +++ b/docs/sources/setup-grafana/configure-security/configure-authentication/saml/index.md @@ -183,13 +183,13 @@ Grafana supports user authentication through Okta, which is useful when you want - In the **Single sign on URL** field, use the `/saml/acs` endpoint URL of your Grafana instance, for example, `https://grafana.example.com/saml/acs`. - In the **Audience URI (SP Entity ID)** field, use the `/saml/metadata` endpoint URL, for example, `https://grafana.example.com/saml/metadata`. - Leave the default values for **Name ID format** and **Application username**. - - In the **ATTRIBUTE STATEMENTS (OPTIONAL)** section, enter the SAML attributes to be shared with Grafana, for example: + - In the **ATTRIBUTE STATEMENTS (OPTIONAL)** section, enter the SAML attributes to be shared with Grafana. The attribute names in Okta need to match exactly what is defined within Grafana, for example: - | Attribute name (in Grafana) | Value (in Okta profile) | - | --------------------------- | -------------------------------------- | - | Login | `user.login` | - | Email | `user.email` | - | DisplayName | `user.firstName + " " + user.lastName` | + | Attribute name (in Grafana) | Name and value (in Okta profile) | + | --------------------------- | -------------------------------------------------- | + | Login | Login `user.login` | + | Email | Email `user.email` | + | DisplayName | DisplayName `user.firstName + " " + user.lastName` | - In the **GROUP ATTRIBUTE STATEMENTS (OPTIONAL)** section, enter a group attribute name (for example, `Group`) and set filter to `Matches regex .*` to return all user groups. 
diff --git a/docs/sources/setup-grafana/image-rendering/troubleshooting/index.md b/docs/sources/setup-grafana/image-rendering/troubleshooting/index.md index 34d220c51b28b..65c63cdaf2508 100644 --- a/docs/sources/setup-grafana/image-rendering/troubleshooting/index.md +++ b/docs/sources/setup-grafana/image-rendering/troubleshooting/index.md @@ -97,6 +97,14 @@ On a minimal CentOS 8 installation, the following dependencies are required for libXcomposite libXdamage libXtst cups libXScrnSaver pango atk adwaita-cursor-theme adwaita-icon-theme at at-spi2-atk at-spi2-core cairo-gobject colord-libs dconf desktop-file-utils ed emacs-filesystem gdk-pixbuf2 glib-networking gnutls gsettings-desktop-schemas gtk-update-icon-cache gtk3 hicolor-icon-theme jasper-libs json-glib libappindicator-gtk3 libdbusmenu libdbusmenu-gtk3 libepoxy liberation-fonts liberation-narrow-fonts liberation-sans-fonts liberation-serif-fonts libgusb libindicator-gtk3 libmodman libproxy libsoup libwayland-cursor libwayland-egl libxkbcommon m4 mailx nettle patch psmisc redhat-lsb-core redhat-lsb-submod-security rest spax time trousers xdg-utils xkeyboard-config alsa-lib libX11-xcb ``` +**RHEL:** + +On a minimal RHEL 8 installation, the following dependencies are required for the image rendering to function: + +```bash +linux-vdso.so.1 libdl.so.2 libpthread.so.0 libgobject-2.0.so.0 libglib-2.0.so.0 libnss3.so libnssutil3.so libsmime3.so libnspr4.so libatk-1.0.so.0 libatk-bridge-2.0.so.0 libcups.so.2 libgio-2.0.so.0 libdrm.so.2 libdbus-1.so.3 libexpat.so.1 libxcb.so.1 libxkbcommon.so.0 libm.so.6 libX11.so.6 libXcomposite.so.1 libXdamage.so.1 libXext.so.6 libXfixes.so.3 libXrandr.so.2 libgbm.so.1 libpango-1.0.so.0 libcairo.so.2 libasound.so.2 libatspi.so.0 libgcc_s.so.1 libc.so.6 /lib64/ld-linux-x86-64.so.2 libgnutls.so.30 libpcre.so.1 libffi.so.6 libplc4.so libplds4.so librt.so.1 libgmodule-2.0.so.0 libgssapi_krb5.so.2 libkrb5.so.3 libk5crypto.so.3 libcom_err.so.2 libavahi-common.so.3 libavahi-client.so.3 
libcrypt.so.1 libz.so.1 libselinux.so.1 libresolv.so.2 libmount.so.1 libsystemd.so.0 libXau.so.6 libXrender.so.1 libthai.so.0 libfribidi.so.0 libpixman-1.so.0 libfontconfig.so.1 libpng16.so.16 libxcb-render.so.0 libidn2.so.0 libunistring.so.2 libtasn1.so.6 libnettle.so.6 libhogweed.so.4 libgmp.so.10 libkrb5support.so.0 libkeyutils.so.1 libpcre2-8.so.0 libuuid.so.1 liblz4.so.1 libgcrypt.so.20 libbz2.so.1 +``` + ## Certificate signed by internal certificate authorities In many cases, Grafana runs on internal servers and uses certificates that have not been signed by a CA ([Certificate Authority](https://en.wikipedia.org/wiki/Certificate_authority)) known to Chrome, and therefore cannot be validated. Chrome internally uses NSS ([Network Security Services](https://en.wikipedia.org/wiki/Network_Security_Services)) for cryptographic operations such as the validation of certificates. diff --git a/docs/sources/setup-grafana/installation/_index.md b/docs/sources/setup-grafana/installation/_index.md index c3cc2ae4fb346..2f852fc3f2238 100644 --- a/docs/sources/setup-grafana/installation/_index.md +++ b/docs/sources/setup-grafana/installation/_index.md @@ -61,7 +61,7 @@ Grafana requires a database to store its configuration data, such as users, data Grafana supports the following databases: - [SQLite 3](https://www.sqlite.org/index.html) -- [MySQL 5.7+](https://www.mysql.com/support/supportedplatforms/database.html) +- [MySQL 8.0+](https://www.mysql.com/support/supportedplatforms/database.html) - [PostgreSQL 12+](https://www.postgresql.org/support/versioning/) By default Grafana uses an embedded SQLite database, which is stored in the Grafana installation location. 
diff --git a/docs/sources/setup-grafana/installation/debian/index.md b/docs/sources/setup-grafana/installation/debian/index.md index ddefa5b79887e..1aeb3203e4b9d 100644 --- a/docs/sources/setup-grafana/installation/debian/index.md +++ b/docs/sources/setup-grafana/installation/debian/index.md @@ -111,7 +111,7 @@ Complete any of the following steps to uninstall Grafana. To uninstall Grafana, run the following commands in a terminal window: -1. If you configured Grafana to run with systemd, stop the systemd servivce for Grafana server: +1. If you configured Grafana to run with systemd, stop the systemd service for Grafana server: ```shell sudo systemctl stop grafana-server diff --git a/docs/sources/setup-grafana/installation/helm/index.md b/docs/sources/setup-grafana/installation/helm/index.md index 566f73d5610b4..d34e5c35f0ed8 100644 --- a/docs/sources/setup-grafana/installation/helm/index.md +++ b/docs/sources/setup-grafana/installation/helm/index.md @@ -278,6 +278,67 @@ To install plugins in the Grafana Helm Charts, complete the following steps: 1. Search for the above plugins and they should be marked as installed. +### Configure a Private CA (Certificate Authority) + +In many enterprise networks, TLS certificates are issued by a private certificate authority and are not trusted by default (using the provided OS trust chain). + +If your Grafana instance needs to interact with services exposing certificates issued by these private CAs, then you need to ensure Grafana trusts the root certificate. + +You might need to configure this if you: + +- have plugins that require connectivity to other self hosted systems. For example, if you've installed the Grafana Enterprise Metrics, Logs, or Traces (GEM, GEL, GET) plugins, and your GEM (or GEL/GET) cluster is using a private certificate. +- want to connect to data sources which are listening on HTTPS with a private certificate. 
+- are using a backend database for persistence, or caching service that uses private certificates for encryption in transit. + +In some cases you can specify a self-signed certificate within Grafana (such as in some data sources), or choose to skip TLS certificate validation (this is not recommended unless absolutely necessary). + +A simple solution which should work across your entire instance (plugins, data sources, and backend connections) is to add your self-signed CA certificate to your Kubernetes deployment. + +1. Create a ConfigMap containing the certificate, and deploy it to your Kubernetes cluster + + ```yaml + # grafana-ca-configmap.yaml + --- + apiVersion: v1 + kind: ConfigMap + metadata: + name: grafana-ca-cert + data: + ca.pem: | + -----BEGIN CERTIFICATE----- + (rest of the CA cert) + -----END CERTIFICATE----- + ``` + + ```bash + kubectl apply --filename grafana-ca-configmap.yaml --namespace monitoring + ``` + +1. Open the Helm `values.yaml` file in your favorite editor. + +1. Find the line that says `extraConfigmapMounts:` and under that section, specify the additional ConfigMap that you want to mount. + + ```yaml + ....... + ............ + ...... + extraConfigmapMounts: + - name: ca-certs-configmap + mountPath: /etc/ssl/certs/ca.pem + subPath: ca.pem + configMap: grafana-ca-cert + readOnly: true + ....... + ............ + ...... + ``` + +1. Save the changes and use the `helm upgrade` command to update your Grafana deployment and mount the new ConfigMap: + + ```bash + helm upgrade my-grafana grafana/grafana --values values.yaml --namespace monitoring + ``` + ## Troubleshooting This section includes troubleshooting tips you might find helpful when deploying Grafana on Kubernetes via Helm. 
diff --git a/docs/sources/shared/alerts/alerting_provisioning.md b/docs/sources/shared/alerts/alerting_provisioning.md index 6146a1cf36961..3e78039d0e882 100644 --- a/docs/sources/shared/alerts/alerting_provisioning.md +++ b/docs/sources/shared/alerts/alerting_provisioning.md @@ -1621,9 +1621,9 @@ Status: Accepted ### Duration -| Name | Type | Go type | Default | Description | Example | -| -------- | ------------------------- | ------- | ------- | ----------- | ------- | -| Duration | int64 (formatted integer) | int64 | | | | +| Name | Type | Go type | Default | Description | Example | +| -------- | ------ | ------- | ------- | ----------- | ------- | +| Duration | string | int64 | | | | ### EmbeddedContactPoint diff --git a/docs/sources/shared/visualizations/thresholds-options-2.md b/docs/sources/shared/visualizations/thresholds-options-2.md index d2ea9c2a44a2c..f6defc3928ec8 100644 --- a/docs/sources/shared/visualizations/thresholds-options-2.md +++ b/docs/sources/shared/visualizations/thresholds-options-2.md @@ -8,11 +8,11 @@ comments: | A threshold is a value or limit you set for a metric that’s reflected visually when it’s met or exceeded. Thresholds are one way you can conditionally style and color your visualizations based on query results. -Set the following options: +For each threshold, set the following options: -- **Value** - Set the value for each threshold. -- **Thresholds mode** - Choose from: - - **Absolute** - - **Percentage** +| Option | Description | +| --------------- | -------------------------------------------- | +| Value | Set the value for each threshold. | +| Thresholds mode | Choose from **Absolute** and **Percentage**. | To learn more, refer to [Configure thresholds](../../configure-thresholds/). 
diff --git a/docs/sources/tutorials/provision-dashboards-and-data-sources/index.md b/docs/sources/tutorials/provision-dashboards-and-data-sources/index.md index 5e03387a8dda4..0dfc09eadb6c1 100644 --- a/docs/sources/tutorials/provision-dashboards-and-data-sources/index.md +++ b/docs/sources/tutorials/provision-dashboards-and-data-sources/index.md @@ -232,10 +232,7 @@ For more information on how to configure dashboard providers, refer to [Dashboar "from": "now-6h", "to": "now" }, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], - "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] - }, + "timepicker": {}, "timezone": "browser", "title": "Cluster", "version": 0 diff --git a/docs/sources/upgrade-guide/when-to-upgrade/index.md b/docs/sources/upgrade-guide/when-to-upgrade/index.md new file mode 100644 index 0000000000000..33fe3211ed605 --- /dev/null +++ b/docs/sources/upgrade-guide/when-to-upgrade/index.md @@ -0,0 +1,115 @@ +--- +description: Strategies for upgrading your self-managed Grafana instance +keywords: + - grafana + - configuration + - documentation + - upgrade +title: Strategies for upgrading your self-managed Grafana instance +menuTitle: Upgrade strategies +weight: 1000 +--- + +# Strategies for upgrading your self-managed Grafana instance + +At Grafana Labs, we believe in shipping features early and often, and in recent years we’ve increased our commitment to that philosophy. + +We no longer wait for the yearly major release to give you access to the next big improvement. Instead, we regularly make new features, bug fixes, and security patches available to our self-managing users ([Grafana OSS](https://grafana.com/oss/grafana/) and [Grafana Enterprise](https://grafana.com/products/enterprise/)) throughout the year. 
+ +Having a dependable release process provides users like you with the best Grafana experience possible, and it provides the flexibility to upgrade in a manner that works best for you and your organization. + +## What to expect from each release type + +We split Grafana OSS and Grafana Enterprise releases into three main categories: + +- **Minor release (every other month)**: These releases can include new features, deprecation notices, notices about upcoming breaking changes, previously announced breaking changes, bug fixes, and security vulnerability patches. +- **Major release (once a year, in April/May)**: These are like a minor release, but accompanied by [GrafanaCON](https://grafana.com/events/grafanacon/) and a comprehensive upgrade guide for users who like to upgrade only once a year. +- **Patching release (every month)**: These include bug fixes for currently supported versions, as well as any security vulnerability patches. + +You can choose your cadence: For frequent self-managed updates, you should follow the minor release (for example, upgrade 10.1 to 10.2), which also gives you access to the latest features. If you need a longer period to review our new releases, you should follow the major releases. Both strategies get patching releases with security fixes (high severity security fixes also result in ad-hoc patch releases). We’ll get into additional guidance on upgrade cadences later in this guide. + +## How to find the specifics for a release + +We love sharing all our great features with you so you can leverage Grafana to its fullest. We also understand that great release documentation allows you to upgrade with confidence. +Whether it’s knowing that a bug has been fixed, seeing that a security vulnerability is patched, or understanding how to mitigate the impact of breaking changes, proper documentation allows you to make informed decisions about when to upgrade your local Grafana instances. 
+ +We provide release documentation in multiple places to address different needs: + +- [**What’s new**](https://grafana.com/docs/grafana/latest/whatsnew/?pg=blog&plcmt=body-txt) outlines new features debuting in each major and minor release. +- [**Breaking changes**](https://grafana.com/docs/grafana/latest/breaking-changes/?pg=blog&plcmt=body-txt) notify you of updates included in major releases that could impact you and provide mitigation recommendations when needed. +- [**Upgrade guides**](https://grafana.com/docs/grafana/latest/upgrade-guide/?pg=blog&plcmt=body-txt) instruct you on how to upgrade to a newer minor or major version. +- And finally, a [**changelog**](https://github.com/grafana/grafana/blob/main/CHANGELOG.md) is generated for every release (major, minor, patching, security) and outlines all changes included in that release. + +## When to expect releases + +Currently, Grafana is on a monthly release cycle. Here’s a look at scheduled releases for 2024: + +| **Anticipated release date** | **Grafana versions** | **Release type** | +| ---------------------------- | -------------------- | ------------------ | +| May 14, 2024 | 11 | Major and patching | +| June 25, 2024 | 11.1 | Minor and patching | +| July 23, 2024 | 11.1.x | Patching | +| Aug. 27, 2024 | 11.2 | Minor and patching | +| Sept. 24, 2024 | 11.2.x | Patching | +| Oct. 22, 2024 | 11.3 | Minor and patching | +| November/December 2024 | To be determined | To be determined | + +### A few important notes + +- The schedule above outlines how we plan release dates. However, unforeseen events and circumstances may cause dates to change. +- High severity security and feature degradation incidents will result in ad-hoc releases that are not scheduled ahead of time. +- Patching releases are for the current (last released) minor version of Grafana. Additional older versions of Grafana may be included if there is a critical bug or security vulnerability that needs to be patched. 
+- A Grafana release freeze occurs for a week in November and again during the end of December. This does not apply to changes that may be required during the course of an operational or security incident. + +## What to know about version support + +Self-managed Grafana users have control over when they upgrade to a new version of Grafana. To help you make an informed decision about whether it's time to upgrade, it’s important that you understand the level of support provided for your current version. + +For self-managed Grafana (both Enterprise and OSS), the support for versions is as follows: + +- Support for each minor release extends to nine months after the release date. +- Support for the last minor release of a major version is extended an additional six months, for a total of 15 months of support after the release date. + +Here is an overview of projected version support through 2024: + +| **Version** | **Release date** | **Support end of life (EOL)** | +| ------------------------- | ----------------------- | ----------------------------- | +| 10.3 | January 2024 | October 2024 | +| 10.4 (Last minor of 10.0) | March 2024 | June 2025 (extended support) | +| 11.0 | May 2024 | February 2025 | +| 11.1 | June 2024 | March 2025 | +| 11.2 | August 2024 (tentative) | May 2025 (tentative) | + +{{< admonition type="note" >}} +Grafana 9.5.x was the last supported minor for the 9.0 major release and is no longer supported as of July 2024. +{{< /admonition >}} + +## How are these versions supported? + +The level of support changes as new versions of Grafana are released. Here are a few details to keep in mind: + +- The current (most recently released) version of Grafana gets the highest level of support. Releases for this version include all the new features along with all bug fixes. +- All supported versions receive security patches for vulnerabilities impacting that version. 
+- All supported versions receive patches for bugs that cause critical feature degradation incidents. + +Keeping all this in mind, users that want to receive the most recent features and all bug fixes should be on the current (most recently released) version of Grafana. + +### What is a critical feature degradation? + +A critical feature degradation usually meets one of the following criteria: + +- Major functionality is universally unavailable (for example, cannot create dashboards, unable to authenticate). +- Major (critical) impact to a significant amount of customers. +- Major escalated incident for one or many customers. + +## Self-managing upgrade strategies + +Based on your needs, you can choose your ideal upgrade strategy. Here’s what that might look like in practice: + +| **Strategy/cadence** | **Advantages/disadvantages** | **Example upgrade procedure** | +| ----------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Minor / bi-monthly (11.1 to 11.2)** | Our recommended strategy. It combines up-to-date, secure releases with access to latest features as soon as they're released.
      • Small changelog to review
      • Highest compatibility with actively maintained plugins
      • Easy migration to [Grafana Cloud](https://grafana.com/products/cloud)
      |
      • **June 2024**: You review the 11.1 changelog and deploy the release to testing
      • **July 2024**: You deploy 11.1 to production
      • **August 2024**: 11.2 is released
      | +| **Major / yearly (10.0 to 11.0)** | Yearly upgrade path that still gives access to up-to-date features presented at GrafanaCON.
      • Big changelog to review
      • High compatibility with plugins
      • Relatively easy migration to [Grafana Cloud](https://grafana.com/products/cloud)
      |
      • **April 2024**: 11.0 is released, you start a big changelog review
      • **May 2024**: You deploy 11.0 to testing
      • **June 2024**: You deploy 11.0 to production
      • **April 2025**: 12.0 is released
      | +| **Previous major / yearly (9.5 to 10.4)** | Release with extended support timeline
      • Limited compatibility with actively developed plugins
      • Big changelog to review
      • Migrations to Grafana Cloud might require professional support
      |
      • **April 2024**: 11.0 is released, marking the previous minor (10.4.x) with extended support, you start a big changelog review (9.5.x to 10.4.x)
      • **May 2024**: You deploy 10.4.x to testing
      • **June 2024**: You deploy 10.4.x to production
      | + +For each strategy, you should stay informed about patch releases that fix security vulnerabilities (released monthly, plus ad-hoc releases). Follow the “minor” strategy for the most flexibility, as you can also occasionally lengthen the cadence to a full quarter and still rely on your currently deployed minor release being supported with security fixes. diff --git a/go.mod b/go.mod index c81a561bdf267..a67363ab19878 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/grafana/grafana -go 1.22.4 +go 1.22.7 // Override docker/docker to avoid: // go: github.com/drone-runners/drone-runner-docker@v1.8.2 requires @@ -144,7 +144,6 @@ require ( github.com/redis/go-redis/v9 v9.1.0 // @grafana/alerting-backend github.com/robfig/cron/v3 v3.0.1 // @grafana/grafana-backend-group github.com/russellhaering/goxmldsig v1.4.0 // @grafana/grafana-backend-group - github.com/scottlepp/go-duck v0.0.21 // @grafana/grafana-app-platform-squad github.com/spf13/cobra v1.8.0 // @grafana/grafana-app-platform-squad github.com/spf13/pflag v1.0.5 // @grafana-app-platform-squad github.com/spyzhov/ajson v0.9.0 // @grafana/grafana-app-platform-squad @@ -222,7 +221,6 @@ require ( github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect github.com/DATA-DOG/go-sqlmock v1.5.2 // @grafana/grafana-search-and-storage github.com/FZambia/eagle v0.1.0 // indirect - github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c // indirect github.com/Masterminds/goutils v1.1.1 // indirect github.com/NYTimes/gziphandler v1.1.1 // indirect github.com/RoaringBitmap/roaring v0.9.4 // indirect @@ -230,8 +228,6 @@ require ( github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 // indirect github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230512164433-5d1fd1a340c9 // indirect - github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 // indirect - 
github.com/apache/thrift v0.18.1 // indirect github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/armon/go-metrics v0.4.1 // indirect @@ -334,7 +330,6 @@ require ( github.com/jpillora/backoff v1.0.0 // indirect github.com/jszwedko/go-datemath v0.1.1-0.20230526204004-640a500621d6 // indirect github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect - github.com/klauspost/asmfmt v1.3.2 // indirect github.com/klauspost/compress v1.17.8 // indirect github.com/klauspost/cpuid/v2 v2.2.7 // indirect github.com/kr/text v0.2.0 // indirect @@ -346,8 +341,6 @@ require ( github.com/mattn/go-ieproxy v0.0.3 // indirect github.com/mattn/go-runewidth v0.0.15 // indirect github.com/miekg/dns v1.1.59 // indirect - github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect - github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect diff --git a/go.sum b/go.sum index a5290f1a726a1..5e7f11b8fa269 100644 --- a/go.sum +++ b/go.sum @@ -1369,7 +1369,6 @@ github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJc github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0/go.mod h1:NBanQUfSWiWn3QEpWDTCU0IjBECKOYvl2R8xdRtMtiM= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0/go.mod h1:1fXstnBMas5kzG+S3q8UoJcmyU6nUeunJcMDHcRYHhs= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.2 h1:FDif4R1+UUR+00q6wquyX90K7A8dN+R5E8GEadoP7sU= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.2/go.mod h1:aiYBYui4BJ/BJCAIKs92XiPyQfTaBWqvHujDwKb6CBU= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc= 
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg= @@ -1379,7 +1378,6 @@ github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2/go.mod h1:eWRD7oawr1Mu1sLC github.com/Azure/azure-sdk-for-go/sdk/internal v1.2.0/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0/go.mod h1:s4kgfzA0covAXNicZHDMN58jExvcng2mC/DepXiF1EI= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0 h1:jBQA3cKT4L2rWMpgE7Yt3Hwh2aUj8KXjIGLxjHeYNNo= github.com/Azure/azure-sdk-for-go/sdk/internal v1.8.0/go.mod h1:4OG6tQ9EOP/MT0NMjDlRzWoVFxfu9rN9B2X+tlSVktg= @@ -1459,7 +1457,6 @@ github.com/GoogleCloudPlatform/cloudsql-proxy v1.29.0/go.mod h1:spvB9eLJH9dutlbP github.com/HdrHistogram/hdrhistogram-go v1.1.0/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo= github.com/HdrHistogram/hdrhistogram-go v1.1.2 h1:5IcZpTvzydCQeHzK4Ef/D5rrSqwxob0t8PQPMybUNFM= github.com/HdrHistogram/hdrhistogram-go v1.1.2/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo= -github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= github.com/KimMachineGun/automemlimit v0.6.0/go.mod h1:T7xYht7B8r6AG/AqFcUdc7fzd2bIdBKmepfP2S1svPY= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= @@ -1528,8 +1525,6 @@ github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kd github.com/antlr/antlr4/runtime/Go/antlr/v4 
v4.0.0-20230512164433-5d1fd1a340c9 h1:goHVqTbFX3AIo0tzGr14pgfAW2ZfPChKO21Z9MGf/gk= github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230512164433-5d1fd1a340c9/go.mod h1:pSwJ0fSY5KhvocuWSx4fz3BA8OrA1bQn+K1Eli3BRwM= github.com/apache/arrow/go/arrow v0.0.0-20210223225224-5bea62493d91/go.mod h1:c9sxoIT3YgLxH4UhLOCKaBlEojuMhVYpk4Ntv3opUTQ= -github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 h1:q4dksr6ICHXqG5hm0ZW5IHyeEJXoIJSOZeBLmWPNeIQ= -github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40/go.mod h1:Q7yQnSMnLvcXlZ8RV+jwz/6y1rQTqbX6C82SndT52Zs= github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= github.com/apache/arrow/go/v12 v12.0.0/go.mod h1:d+tV/eHZZ7Dz7RPrFKtPK02tpr+c9/PEd/zm8mDS9Vg= @@ -1541,14 +1536,10 @@ github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= github.com/apache/thrift v0.17.0/go.mod h1:OLxhMRJxomX+1I/KUw03qoV3mMz16BwaKI+d4fPBx7Q= -github.com/apache/thrift v0.18.1 h1:lNhK/1nqjbwbiOPDBPFJVKxgDEGSepKuTh6OLiXW8kg= -github.com/apache/thrift v0.18.1/go.mod h1:rdQn/dCcDKEWjjylUeueum4vQEjG2v8v2PqriUnbr+I= github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= -github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA= -github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod 
h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= @@ -1826,7 +1817,6 @@ github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5 github.com/dlmiddlecote/sqlstats v1.0.2 h1:gSU11YN23D/iY50A2zVYwgXgy072khatTsIW6UPjUtI= github.com/dlmiddlecote/sqlstats v1.0.2/go.mod h1:0CWaIh/Th+z2aI6Q9Jpfg/o21zmGxWhbByHgQSCUQvY= github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= -github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/docker/distribution v2.7.0+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= @@ -2148,7 +2138,6 @@ github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl76 github.com/google/cel-go v0.17.7 h1:6ebJFzu1xO2n7TLtN+UBqShGBhlD85bhvglh5DpcfqQ= github.com/google/cel-go v0.17.7/go.mod h1:HXZKzB0LXqer5lHHgfWAnlYwJaQBDKMjxjulNQzhwhY= github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v24.3.25+incompatible h1:CX395cjN9Kke9mmalRoL3d81AtFUxJM+yDthflgJGkI= @@ -2601,10 +2590,8 @@ github.com/kisielk/errcheck v1.1.0/go.mod 
h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvW github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE= -github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.15.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= @@ -2729,9 +2716,7 @@ github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJys github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= github.com/miekg/dns v1.1.59 h1:C9EXc/UToRwKLhK5wKU/I4QVsBUc8kE6MkHBkeypWZs= github.com/miekg/dns v1.1.59/go.mod h1:nZpewl5p6IvctfgrckopVx2OlSEHPRO/U4SYkRklrEk= -github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= -github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= github.com/minio/highwayhash v1.0.1/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= @@ -2927,7 +2912,6 @@ 
github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= @@ -3068,8 +3052,6 @@ github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0 github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/scaleway/scaleway-sdk-go v1.0.0-beta.26 h1:F+GIVtGqCFxPxO46ujf8cEOP574MBoRm3gNbPXECbxs= github.com/scaleway/scaleway-sdk-go v1.0.0-beta.26/go.mod h1:fCa7OJZ/9DRTnOKmxvT6pn+LPWUptQAmHF/SBJUGEcg= -github.com/scottlepp/go-duck v0.0.21 h1:bFg5/8ULOo62vmvIjEOy1EOf7Q86cpzq82BDN5RakVE= -github.com/scottlepp/go-duck v0.0.21/go.mod h1:m6V1VGZ4hdgvCj6+BmNMFo0taqiWhMx3CeL3uKHmP2E= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/segmentio/asm v1.1.3/go.mod h1:Ld3L4ZXGNcSLRg4JBsZ3//1+f/TjYl0Mzen/DQy1EJg= @@ -4185,7 +4167,6 @@ google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxH google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto 
v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79/go.mod h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U= google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= diff --git a/go.work b/go.work index 2f858b9675ead..ab8a9c48e949e 100644 --- a/go.work +++ b/go.work @@ -1,4 +1,4 @@ -go 1.22.4 +go 1.22.7 use ( . diff --git a/go.work.sum b/go.work.sum index 3e08401db302c..ea085d010fb40 100644 --- a/go.work.sum +++ b/go.work.sum @@ -290,6 +290,7 @@ github.com/CloudyKit/jet/v6 v6.2.0 h1:EpcZ6SR9n28BUGtNJSvlBqf90IpjeFr36Tizxhn/oM github.com/CloudyKit/jet/v6 v6.2.0/go.mod h1:d3ypHeIRNo2+XyqnGA8s+aphtcVpjP5hPwP/Lzo7Ro4= github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4= github.com/GoogleCloudPlatform/cloudsql-proxy v1.29.0 h1:YNu23BtH0PKF+fg3ykSorCp6jSTjcEtfnYLzbmcjVRA= +github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU= github.com/Joker/jade v1.1.3 h1:Qbeh12Vq6BxURXT1qZBRHsDxeURB8ztcL6f3EXSGeHk= github.com/Joker/jade v1.1.3/go.mod h1:T+2WLyt7VH6Lp0TRxQrUYEs64nRc83wkMQrfeIQKduM= github.com/KimMachineGun/automemlimit v0.6.0 h1:p/BXkH+K40Hax+PuWWPQ478hPjsp9h1CPDhLlA3Z37E= @@ -333,6 +334,8 @@ github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+Dx github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c= github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA= github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg= +github.com/apache/arrow/go/arrow 
v0.0.0-20211112161151-bc219186db40 h1:q4dksr6ICHXqG5hm0ZW5IHyeEJXoIJSOZeBLmWPNeIQ= +github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40/go.mod h1:Q7yQnSMnLvcXlZ8RV+jwz/6y1rQTqbX6C82SndT52Zs= github.com/apache/arrow/go/v10 v10.0.1 h1:n9dERvixoC/1JjDmBcs9FPaEryoANa2sCgVFo6ez9cI= github.com/apache/arrow/go/v11 v11.0.0 h1:hqauxvFQxww+0mEU/2XHG6LT7eZternCZq+A5Yly2uM= github.com/apache/arrow/go/v12 v12.0.0 h1:xtZE63VWl7qLdB0JObIXvvhGjoVNrQ9ciIHG2OK5cmc= @@ -340,6 +343,8 @@ github.com/apache/arrow/go/v12 v12.0.1 h1:JsR2+hzYYjgSUkBSaahpqCetqZMr76djX80fF/ github.com/apache/arrow/go/v13 v13.0.0 h1:kELrvDQuKZo8csdWYqBQfyi431x6Zs/YJTEgUuSVcWk= github.com/apache/arrow/go/v13 v13.0.0/go.mod h1:W69eByFNO0ZR30q1/7Sr9d83zcVZmF2MiP3fFYAWJOc= github.com/apache/arrow/go/v14 v14.0.2 h1:N8OkaJEOfI3mEZt07BIkvo4sC6XDbL+48MBPWO5IONw= +github.com/apache/thrift v0.18.1 h1:lNhK/1nqjbwbiOPDBPFJVKxgDEGSepKuTh6OLiXW8kg= +github.com/apache/thrift v0.18.1/go.mod h1:rdQn/dCcDKEWjjylUeueum4vQEjG2v8v2PqriUnbr+I= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 h1:ZSTrOEhiM5J5RFxEaFvMZVEAM1KvT1YzbEOwB2EAGjA= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e h1:QEF07wC0T1rKkctt1RINW/+RMTVmiwxETico2l3gxJA= @@ -714,7 +719,6 @@ github.com/matryer/moq v0.3.1/go.mod h1:RJ75ZZZD71hejp39j4crZLsEDszGk6iH4v4YsWFK github.com/matryer/moq v0.3.3 h1:pScMH9VyrdT4S93yiLpVyU8rCDqGQr24uOyBxmktG5Q= github.com/matryer/moq v0.3.3/go.mod h1:RJ75ZZZD71hejp39j4crZLsEDszGk6iH4v4YsWFKH4s= github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O4hW7qJOT7vyQPKrWmj26uf5wMc/IiIs= -github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= github.com/maxatome/go-testdeep v1.12.0 
h1:Ql7Go8Tg0C1D/uMMX59LAoYK7LffeJQ6X2T04nTH68g= github.com/mediocregopher/radix/v3 v3.8.1/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8= diff --git a/lerna.json b/lerna.json index 5dbbb0d9639c2..8aed455904539 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", "npmClient": "yarn", - "version": "11.1.5" + "version": "11.1.8" } diff --git a/package.json b/package.json index 5d7bbe1fb099b..f37a1e5d43df7 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "license": "AGPL-3.0-only", "private": true, "name": "grafana", - "version": "11.1.5", + "version": "11.1.8", "repository": "github:grafana/grafana", "scripts": { "build": "NODE_ENV=production nx exec --verbose -- webpack --config scripts/webpack/webpack.prod.js", diff --git a/packages/grafana-data/package.json b/packages/grafana-data/package.json index a7bf109c4b485..5ac998f19ac8c 100644 --- a/packages/grafana-data/package.json +++ b/packages/grafana-data/package.json @@ -2,7 +2,7 @@ "author": "Grafana Labs", "license": "Apache-2.0", "name": "@grafana/data", - "version": "11.1.5", + "version": "11.1.8", "description": "Grafana Data Library", "keywords": [ "typescript" @@ -36,7 +36,7 @@ }, "dependencies": { "@braintree/sanitize-url": "7.0.1", - "@grafana/schema": "11.1.5", + "@grafana/schema": "11.1.8", "@types/d3-interpolate": "^3.0.0", "@types/string-hash": "1.1.3", "d3-interpolate": "3.0.1", diff --git a/packages/grafana-e2e-selectors/package.json b/packages/grafana-e2e-selectors/package.json index 6eb17aa047e35..5df97037a3445 100644 --- a/packages/grafana-e2e-selectors/package.json +++ b/packages/grafana-e2e-selectors/package.json @@ -2,7 +2,7 @@ "author": "Grafana Labs", "license": "Apache-2.0", "name": "@grafana/e2e-selectors", - "version": "11.1.5", + "version": "11.1.8", "description": "Grafana End-to-End Test Selectors Library", "keywords": [ "cli", diff --git a/packages/grafana-eslint-rules/package.json 
b/packages/grafana-eslint-rules/package.json index 6d71193fdd27f..35843f5b0b2c5 100644 --- a/packages/grafana-eslint-rules/package.json +++ b/packages/grafana-eslint-rules/package.json @@ -1,7 +1,7 @@ { "name": "@grafana/eslint-plugin", "description": "ESLint rules for use within the Grafana repo. Not suitable (or supported) for external use.", - "version": "11.1.5", + "version": "11.1.8", "main": "./index.cjs", "author": "Grafana Labs", "license": "Apache-2.0", diff --git a/packages/grafana-flamegraph/package.json b/packages/grafana-flamegraph/package.json index eb8216b7a9c90..61aa97d1f74cd 100644 --- a/packages/grafana-flamegraph/package.json +++ b/packages/grafana-flamegraph/package.json @@ -2,7 +2,7 @@ "author": "Grafana Labs", "license": "Apache-2.0", "name": "@grafana/flamegraph", - "version": "11.1.5", + "version": "11.1.8", "description": "Grafana flamegraph visualization component", "keywords": [ "grafana", @@ -44,8 +44,8 @@ ], "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/data": "11.1.8", + "@grafana/ui": "11.1.8", "@leeoniya/ufuzzy": "1.0.14", "d3": "^7.8.5", "lodash": "4.17.21", diff --git a/packages/grafana-icons/package.json b/packages/grafana-icons/package.json index 275d665517d1b..d77f6f6a2830d 100644 --- a/packages/grafana-icons/package.json +++ b/packages/grafana-icons/package.json @@ -1,6 +1,6 @@ { "name": "@grafana/saga-icons", - "version": "11.1.5", + "version": "11.1.8", "private": true, "description": "Icons for Grafana", "author": "Grafana Labs", diff --git a/packages/grafana-o11y-ds-frontend/package.json b/packages/grafana-o11y-ds-frontend/package.json index c30cc8d042943..bda62a73a38b5 100644 --- a/packages/grafana-o11y-ds-frontend/package.json +++ b/packages/grafana-o11y-ds-frontend/package.json @@ -3,7 +3,7 @@ "license": "AGPL-3.0-only", "name": "@grafana/o11y-ds-frontend", "private": true, - "version": "11.1.5", + "version": "11.1.8", "description": "Library to manage 
traces in Grafana.", "sideEffects": false, "repository": { @@ -18,12 +18,12 @@ }, "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", - "@grafana/e2e-selectors": "11.1.5", + "@grafana/data": "11.1.8", + "@grafana/e2e-selectors": "11.1.8", "@grafana/experimental": "1.7.11", - "@grafana/runtime": "11.1.5", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "react-select": "5.8.0", "react-use": "17.5.0", "rxjs": "7.8.1", diff --git a/packages/grafana-plugin-configs/package.json b/packages/grafana-plugin-configs/package.json index df7e78ae73363..7dc6eb79ea3a2 100644 --- a/packages/grafana-plugin-configs/package.json +++ b/packages/grafana-plugin-configs/package.json @@ -2,7 +2,7 @@ "name": "@grafana/plugin-configs", "description": "Shared dependencies and files for core plugins", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "tslib": "2.6.3" }, diff --git a/packages/grafana-prometheus/package.json b/packages/grafana-prometheus/package.json index cb9a4dd3eca0d..9103997ec3b30 100644 --- a/packages/grafana-prometheus/package.json +++ b/packages/grafana-prometheus/package.json @@ -2,7 +2,7 @@ "author": "Grafana Labs", "license": "AGPL-3.0-only", "name": "@grafana/prometheus", - "version": "11.1.5", + "version": "11.1.8", "description": "Grafana Prometheus Library", "keywords": [ "typescript" @@ -38,12 +38,12 @@ "dependencies": { "@emotion/css": "11.11.2", "@floating-ui/react": "0.26.16", - "@grafana/data": "11.1.5", + "@grafana/data": "11.1.8", "@grafana/experimental": "1.7.11", "@grafana/faro-web-sdk": "1.7.3", - "@grafana/runtime": "11.1.5", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "@leeoniya/ufuzzy": "1.0.14", "@lezer/common": "1.2.1", "@lezer/highlight": "1.2.0", @@ -76,7 +76,7 @@ }, "devDependencies": { 
"@emotion/eslint-plugin": "11.11.0", - "@grafana/e2e-selectors": "11.1.5", + "@grafana/e2e-selectors": "11.1.8", "@grafana/tsconfig": "^1.3.0-rc1", "@rollup/plugin-image": "3.0.3", "@rollup/plugin-node-resolve": "15.2.3", diff --git a/packages/grafana-runtime/package.json b/packages/grafana-runtime/package.json index 52eb40929baa2..cdf901d6bb592 100644 --- a/packages/grafana-runtime/package.json +++ b/packages/grafana-runtime/package.json @@ -2,7 +2,7 @@ "author": "Grafana Labs", "license": "Apache-2.0", "name": "@grafana/runtime", - "version": "11.1.5", + "version": "11.1.8", "description": "Grafana Runtime Library", "keywords": [ "grafana", @@ -37,11 +37,11 @@ "postpack": "mv package.json.bak package.json" }, "dependencies": { - "@grafana/data": "11.1.5", - "@grafana/e2e-selectors": "11.1.5", + "@grafana/data": "11.1.8", + "@grafana/e2e-selectors": "11.1.8", "@grafana/faro-web-sdk": "^1.3.6", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "history": "4.10.1", "lodash": "4.17.21", "rxjs": "7.8.1", diff --git a/packages/grafana-schema/package.json b/packages/grafana-schema/package.json index 5c47acf178196..5ad5422fb3e99 100644 --- a/packages/grafana-schema/package.json +++ b/packages/grafana-schema/package.json @@ -2,7 +2,7 @@ "author": "Grafana Labs", "license": "Apache-2.0", "name": "@grafana/schema", - "version": "11.1.5", + "version": "11.1.8", "description": "Grafana Schema Library", "keywords": [ "typescript" diff --git a/packages/grafana-schema/src/raw/composable/annotationslist/panelcfg/x/AnnotationsListPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/annotationslist/panelcfg/x/AnnotationsListPanelCfg_types.gen.ts index e9bfd97740009..57654f13546d9 100644 --- a/packages/grafana-schema/src/raw/composable/annotationslist/panelcfg/x/AnnotationsListPanelCfg_types.gen.ts +++ 
b/packages/grafana-schema/src/raw/composable/annotationslist/panelcfg/x/AnnotationsListPanelCfg_types.gen.ts @@ -8,7 +8,7 @@ // // Run 'make gen-cue' from repository root to regenerate. -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options { limit: number; diff --git a/packages/grafana-schema/src/raw/composable/barchart/panelcfg/x/BarChartPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/barchart/panelcfg/x/BarChartPanelCfg_types.gen.ts index 5d25b88d4e164..15deba19c495d 100644 --- a/packages/grafana-schema/src/raw/composable/barchart/panelcfg/x/BarChartPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/barchart/panelcfg/x/BarChartPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options extends common.OptionsWithLegend, common.OptionsWithTooltip, common.OptionsWithTextFormatting { /** diff --git a/packages/grafana-schema/src/raw/composable/bargauge/panelcfg/x/BarGaugePanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/bargauge/panelcfg/x/BarGaugePanelCfg_types.gen.ts index 2437490dfe4f9..67154f78445bd 100644 --- a/packages/grafana-schema/src/raw/composable/bargauge/panelcfg/x/BarGaugePanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/bargauge/panelcfg/x/BarGaugePanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options extends common.SingleStatBaseOptions { displayMode: common.BarGaugeDisplayMode; diff --git a/packages/grafana-schema/src/raw/composable/candlestick/panelcfg/x/CandlestickPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/candlestick/panelcfg/x/CandlestickPanelCfg_types.gen.ts index 77a99a6cc476a..5966c0986e549 100644 --- 
a/packages/grafana-schema/src/raw/composable/candlestick/panelcfg/x/CandlestickPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/candlestick/panelcfg/x/CandlestickPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export enum VizDisplayMode { Candles = 'candles', diff --git a/packages/grafana-schema/src/raw/composable/canvas/panelcfg/x/CanvasPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/canvas/panelcfg/x/CanvasPanelCfg_types.gen.ts index 662251872a106..b67cd0645ce9e 100644 --- a/packages/grafana-schema/src/raw/composable/canvas/panelcfg/x/CanvasPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/canvas/panelcfg/x/CanvasPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as ui from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export enum HorizontalConstraint { Center = 'center', diff --git a/packages/grafana-schema/src/raw/composable/cloudwatch/dataquery/x/CloudWatchDataQuery_types.gen.ts b/packages/grafana-schema/src/raw/composable/cloudwatch/dataquery/x/CloudWatchDataQuery_types.gen.ts index d64f4c1816327..aa5a28ca623fa 100644 --- a/packages/grafana-schema/src/raw/composable/cloudwatch/dataquery/x/CloudWatchDataQuery_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/cloudwatch/dataquery/x/CloudWatchDataQuery_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface MetricStat { /** diff --git a/packages/grafana-schema/src/raw/composable/dashboardlist/panelcfg/x/DashboardListPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/dashboardlist/panelcfg/x/DashboardListPanelCfg_types.gen.ts index 0b66747499681..d1c35eee8de9d 100644 --- 
a/packages/grafana-schema/src/raw/composable/dashboardlist/panelcfg/x/DashboardListPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/dashboardlist/panelcfg/x/DashboardListPanelCfg_types.gen.ts @@ -8,7 +8,7 @@ // // Run 'make gen-cue' from repository root to regenerate. -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options { /** diff --git a/packages/grafana-schema/src/raw/composable/datagrid/panelcfg/x/DatagridPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/datagrid/panelcfg/x/DatagridPanelCfg_types.gen.ts index 6e4a6cce7ed64..80a1cf03b67f2 100644 --- a/packages/grafana-schema/src/raw/composable/datagrid/panelcfg/x/DatagridPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/datagrid/panelcfg/x/DatagridPanelCfg_types.gen.ts @@ -8,7 +8,7 @@ // // Run 'make gen-cue' from repository root to regenerate. -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options { selectedSeries: number; diff --git a/packages/grafana-schema/src/raw/composable/debug/panelcfg/x/DebugPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/debug/panelcfg/x/DebugPanelCfg_types.gen.ts index 79b9293fcdf3d..b003afd11ca80 100644 --- a/packages/grafana-schema/src/raw/composable/debug/panelcfg/x/DebugPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/debug/panelcfg/x/DebugPanelCfg_types.gen.ts @@ -8,7 +8,7 @@ // // Run 'make gen-cue' from repository root to regenerate. 
-export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export type UpdateConfig = { render: boolean, diff --git a/packages/grafana-schema/src/raw/composable/elasticsearch/dataquery/x/ElasticsearchDataQuery_types.gen.ts b/packages/grafana-schema/src/raw/composable/elasticsearch/dataquery/x/ElasticsearchDataQuery_types.gen.ts index 657006f28b5c3..03e9dae3259bb 100644 --- a/packages/grafana-schema/src/raw/composable/elasticsearch/dataquery/x/ElasticsearchDataQuery_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/elasticsearch/dataquery/x/ElasticsearchDataQuery_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export type BucketAggregation = (DateHistogram | Histogram | Terms | Filters | GeoHashGrid | Nested); diff --git a/packages/grafana-schema/src/raw/composable/gauge/panelcfg/x/GaugePanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/gauge/panelcfg/x/GaugePanelCfg_types.gen.ts index 0b999a7642ded..9fb115e88e957 100644 --- a/packages/grafana-schema/src/raw/composable/gauge/panelcfg/x/GaugePanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/gauge/panelcfg/x/GaugePanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options extends common.SingleStatBaseOptions { minVizHeight: number; diff --git a/packages/grafana-schema/src/raw/composable/geomap/panelcfg/x/GeomapPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/geomap/panelcfg/x/GeomapPanelCfg_types.gen.ts index f71c1b82eae0d..75fbe88730a42 100644 --- a/packages/grafana-schema/src/raw/composable/geomap/panelcfg/x/GeomapPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/geomap/panelcfg/x/GeomapPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as ui from '@grafana/schema'; -export const 
pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options { basemap: ui.MapLayerOptions; diff --git a/packages/grafana-schema/src/raw/composable/heatmap/panelcfg/x/HeatmapPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/heatmap/panelcfg/x/HeatmapPanelCfg_types.gen.ts index b98ffd67ca85e..ae18fce81a989 100644 --- a/packages/grafana-schema/src/raw/composable/heatmap/panelcfg/x/HeatmapPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/heatmap/panelcfg/x/HeatmapPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as ui from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; /** * Controls the color mode of the heatmap diff --git a/packages/grafana-schema/src/raw/composable/histogram/panelcfg/x/HistogramPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/histogram/panelcfg/x/HistogramPanelCfg_types.gen.ts index faaba0f5d15a5..bb1eabb4c33bb 100644 --- a/packages/grafana-schema/src/raw/composable/histogram/panelcfg/x/HistogramPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/histogram/panelcfg/x/HistogramPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options extends common.OptionsWithLegend, common.OptionsWithTooltip { /** diff --git a/packages/grafana-schema/src/raw/composable/logs/panelcfg/x/LogsPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/logs/panelcfg/x/LogsPanelCfg_types.gen.ts index 7b6bb474f738e..a6d59b0bb8d2c 100644 --- a/packages/grafana-schema/src/raw/composable/logs/panelcfg/x/LogsPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/logs/panelcfg/x/LogsPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options { 
dedupStrategy: common.LogsDedupStrategy; diff --git a/packages/grafana-schema/src/raw/composable/loki/dataquery/x/LokiDataQuery_types.gen.ts b/packages/grafana-schema/src/raw/composable/loki/dataquery/x/LokiDataQuery_types.gen.ts index f20e875f050b4..0351fe4eaff58 100644 --- a/packages/grafana-schema/src/raw/composable/loki/dataquery/x/LokiDataQuery_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/loki/dataquery/x/LokiDataQuery_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export enum QueryEditorMode { Builder = 'builder', diff --git a/packages/grafana-schema/src/raw/composable/news/panelcfg/x/NewsPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/news/panelcfg/x/NewsPanelCfg_types.gen.ts index c1de1c1374c1b..769d5ea54e1bd 100644 --- a/packages/grafana-schema/src/raw/composable/news/panelcfg/x/NewsPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/news/panelcfg/x/NewsPanelCfg_types.gen.ts @@ -8,7 +8,7 @@ // // Run 'make gen-cue' from repository root to regenerate. -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options { /** diff --git a/packages/grafana-schema/src/raw/composable/nodegraph/panelcfg/x/NodeGraphPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/nodegraph/panelcfg/x/NodeGraphPanelCfg_types.gen.ts index bea4ceab6c862..edb7039c2ec47 100644 --- a/packages/grafana-schema/src/raw/composable/nodegraph/panelcfg/x/NodeGraphPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/nodegraph/panelcfg/x/NodeGraphPanelCfg_types.gen.ts @@ -8,7 +8,7 @@ // // Run 'make gen-cue' from repository root to regenerate. 
-export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface ArcOption { /** diff --git a/packages/grafana-schema/src/raw/composable/piechart/panelcfg/x/PieChartPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/piechart/panelcfg/x/PieChartPanelCfg_types.gen.ts index 880b27ea41276..a99b9627fc157 100644 --- a/packages/grafana-schema/src/raw/composable/piechart/panelcfg/x/PieChartPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/piechart/panelcfg/x/PieChartPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; /** * Select the pie chart display style. diff --git a/packages/grafana-schema/src/raw/composable/stat/panelcfg/x/StatPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/stat/panelcfg/x/StatPanelCfg_types.gen.ts index 99242f1c6ffda..5e81c345f2774 100644 --- a/packages/grafana-schema/src/raw/composable/stat/panelcfg/x/StatPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/stat/panelcfg/x/StatPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options extends common.SingleStatBaseOptions { colorMode: common.BigValueColorMode; diff --git a/packages/grafana-schema/src/raw/composable/statetimeline/panelcfg/x/StateTimelinePanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/statetimeline/panelcfg/x/StateTimelinePanelCfg_types.gen.ts index 20824e7ca01d4..2856da95d8ce6 100644 --- a/packages/grafana-schema/src/raw/composable/statetimeline/panelcfg/x/StateTimelinePanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/statetimeline/panelcfg/x/StateTimelinePanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as ui from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = 
"11.1.8"; export interface Options extends ui.OptionsWithLegend, ui.OptionsWithTooltip, ui.OptionsWithTimezones { /** diff --git a/packages/grafana-schema/src/raw/composable/statushistory/panelcfg/x/StatusHistoryPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/statushistory/panelcfg/x/StatusHistoryPanelCfg_types.gen.ts index e886b1f42987a..6971dc543b569 100644 --- a/packages/grafana-schema/src/raw/composable/statushistory/panelcfg/x/StatusHistoryPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/statushistory/panelcfg/x/StatusHistoryPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as ui from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options extends ui.OptionsWithLegend, ui.OptionsWithTooltip, ui.OptionsWithTimezones { /** diff --git a/packages/grafana-schema/src/raw/composable/table/panelcfg/x/TablePanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/table/panelcfg/x/TablePanelCfg_types.gen.ts index 6ef3214f72575..3a1948d89d0bc 100644 --- a/packages/grafana-schema/src/raw/composable/table/panelcfg/x/TablePanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/table/panelcfg/x/TablePanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as ui from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options { /** diff --git a/packages/grafana-schema/src/raw/composable/text/panelcfg/x/TextPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/text/panelcfg/x/TextPanelCfg_types.gen.ts index c1b6e8b75a4aa..c83426195b3a9 100644 --- a/packages/grafana-schema/src/raw/composable/text/panelcfg/x/TextPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/text/panelcfg/x/TextPanelCfg_types.gen.ts @@ -8,7 +8,7 @@ // // Run 'make gen-cue' from repository root to regenerate. 
-export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export enum TextMode { Code = 'code', diff --git a/packages/grafana-schema/src/raw/composable/timeseries/panelcfg/x/TimeSeriesPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/timeseries/panelcfg/x/TimeSeriesPanelCfg_types.gen.ts index 16e75a4c65094..269a1c88a4e39 100644 --- a/packages/grafana-schema/src/raw/composable/timeseries/panelcfg/x/TimeSeriesPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/timeseries/panelcfg/x/TimeSeriesPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; export interface Options extends common.OptionsWithTimezones { legend: common.VizLegendOptions; diff --git a/packages/grafana-schema/src/raw/composable/trend/panelcfg/x/TrendPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/trend/panelcfg/x/TrendPanelCfg_types.gen.ts index 78ed8a18a9bda..a06cfa39850ba 100644 --- a/packages/grafana-schema/src/raw/composable/trend/panelcfg/x/TrendPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/trend/panelcfg/x/TrendPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; /** * Identical to timeseries... 
except it does not have timezone settings diff --git a/packages/grafana-schema/src/raw/composable/xychart/panelcfg/x/XYChartPanelCfg_types.gen.ts b/packages/grafana-schema/src/raw/composable/xychart/panelcfg/x/XYChartPanelCfg_types.gen.ts index ea486408243d6..a0e3695bd729b 100644 --- a/packages/grafana-schema/src/raw/composable/xychart/panelcfg/x/XYChartPanelCfg_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/xychart/panelcfg/x/XYChartPanelCfg_types.gen.ts @@ -10,7 +10,7 @@ import * as common from '@grafana/schema'; -export const pluginVersion = "11.1.5"; +export const pluginVersion = "11.1.8"; /** * Auto is "table" in the UI diff --git a/packages/grafana-sql/package.json b/packages/grafana-sql/package.json index 65dfdac4bb7a5..7337ef355313d 100644 --- a/packages/grafana-sql/package.json +++ b/packages/grafana-sql/package.json @@ -3,7 +3,7 @@ "license": "AGPL-3.0-only", "private": true, "name": "@grafana/sql", - "version": "11.1.5", + "version": "11.1.8", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git", @@ -15,11 +15,11 @@ }, "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", - "@grafana/e2e-selectors": "11.1.5", + "@grafana/data": "11.1.8", + "@grafana/e2e-selectors": "11.1.8", "@grafana/experimental": "1.7.11", - "@grafana/runtime": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/ui": "11.1.8", "@react-awesome-query-builder/ui": "6.5.2", "immutable": "4.3.6", "lodash": "4.17.21", diff --git a/packages/grafana-ui/package.json b/packages/grafana-ui/package.json index 80c99598c465e..08649f896401e 100644 --- a/packages/grafana-ui/package.json +++ b/packages/grafana-ui/package.json @@ -2,7 +2,7 @@ "author": "Grafana Labs", "license": "Apache-2.0", "name": "@grafana/ui", - "version": "11.1.5", + "version": "11.1.8", "description": "Grafana Components Library", "keywords": [ "grafana", @@ -50,10 +50,10 @@ "@emotion/css": "11.11.2", "@emotion/react": "11.11.4", 
"@floating-ui/react": "0.26.16", - "@grafana/data": "11.1.5", - "@grafana/e2e-selectors": "11.1.5", + "@grafana/data": "11.1.8", + "@grafana/e2e-selectors": "11.1.8", "@grafana/faro-web-sdk": "^1.3.6", - "@grafana/schema": "11.1.5", + "@grafana/schema": "11.1.8", "@leeoniya/ufuzzy": "1.0.14", "@monaco-editor/react": "4.6.0", "@popperjs/core": "2.11.8", diff --git a/pkg/api/annotations.go b/pkg/api/annotations.go index 31e11eff73d29..9cd026b18a05f 100644 --- a/pkg/api/annotations.go +++ b/pkg/api/annotations.go @@ -21,6 +21,8 @@ import ( "github.com/grafana/grafana/pkg/web" ) +const defaultAnnotationsLimit = 100 + // swagger:route GET /annotations annotations getAnnotations // // Find Annotations. @@ -47,6 +49,9 @@ func (hs *HTTPServer) GetAnnotations(c *contextmodel.ReqContext) response.Respon MatchAny: c.QueryBool("matchAny"), SignedInUser: c.SignedInUser, } + if query.Limit == 0 { + query.Limit = defaultAnnotationsLimit + } // When dashboard UID present in the request, we ignore dashboard ID if query.DashboardUID != "" { diff --git a/pkg/api/plugin_checks.go b/pkg/api/plugin_checks.go index 9a7deb876309a..dc1225cbb1f9b 100644 --- a/pkg/api/plugin_checks.go +++ b/pkg/api/plugin_checks.go @@ -28,7 +28,13 @@ func checkAppEnabled(pluginStore pluginstore.Store, pluginSettings pluginsetting }) if err != nil { if errors.Is(err, pluginsettings.ErrPluginSettingNotFound) { - c.JsonApiErr(http.StatusNotFound, "Plugin not found", nil) + // If the plugin is auto enabled, we don't want to return an error because auto enabling allows us + // to enable plugins that are not explicitly configured. 
+ if p.AutoEnabled { + return + } + + c.JsonApiErr(http.StatusNotFound, "Plugin setting not found", nil) return } c.JsonApiErr(http.StatusInternalServerError, "Failed to get plugin settings", err) diff --git a/pkg/api/plugin_checks_test.go b/pkg/api/plugin_checks_test.go index fffa04453f307..1f92a1cd172b7 100644 --- a/pkg/api/plugin_checks_test.go +++ b/pkg/api/plugin_checks_test.go @@ -41,6 +41,11 @@ func TestHTTPServer_CheckEnabled(t *testing.T) { pluginID: "grafana-test-app_disabled", expectedCode: 404, }, + { + name: "should not set an error code if the plugin is auto enabled, without a saved plugin setting", + pluginID: "grafana-test-app_autoEnabled", + expectedCode: 0, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -50,6 +55,7 @@ func TestHTTPServer_CheckEnabled(t *testing.T) { {JSONData: plugins.JSONData{ID: "mysql"}}, {JSONData: plugins.JSONData{Type: plugins.TypeApp, ID: "grafana-test-app"}}, {JSONData: plugins.JSONData{Type: plugins.TypeApp, ID: "grafana-test-app_disabled"}}, + {JSONData: plugins.JSONData{Type: plugins.TypeApp, ID: "grafana-test-app_autoEnabled", AutoEnabled: true}}, }, } hs.PluginSettings = &pluginsettings.FakePluginSettings{Plugins: map[string]*pluginsettings.DTO{ diff --git a/pkg/apimachinery/go.mod b/pkg/apimachinery/go.mod index 538ea4a283742..8d5cb94b6f661 100644 --- a/pkg/apimachinery/go.mod +++ b/pkg/apimachinery/go.mod @@ -1,6 +1,6 @@ module github.com/grafana/grafana/pkg/apimachinery -go 1.21.10 +go 1.22.7 require ( k8s.io/apimachinery v0.29.3 diff --git a/pkg/apiserver/go.mod b/pkg/apiserver/go.mod index f88cfae01856f..e2e7691975fdf 100644 --- a/pkg/apiserver/go.mod +++ b/pkg/apiserver/go.mod @@ -1,6 +1,6 @@ module github.com/grafana/grafana/pkg/apiserver -go 1.21.10 +go 1.22.7 require ( github.com/bwmarrin/snowflake v0.3.0 diff --git a/pkg/build/actions/bump-version/main.go b/pkg/build/actions/bump-version/main.go index 2f419af558c26..daec3cdb33069 100644 --- a/pkg/build/actions/bump-version/main.go 
+++ b/pkg/build/actions/bump-version/main.go @@ -72,6 +72,7 @@ func WithUpdatedVersion(d *dagger.Client, src *dagger.Directory, nodeVersion, ve WithExec([]string{"npm", "version", version, "--no-git-tag-version"}). WithExec([]string{"yarn", "run", "lerna", "version", version, "--no-push", "--no-git-tag-version", "--force-publish", "--exact", "--yes"}). WithExec([]string{"yarn", "install"}). + WithExec([]string{"yarn", "prettier:write"}). Directory("/src"). WithoutDirectory("node_modules") } diff --git a/pkg/build/cmd.go b/pkg/build/cmd.go index fa98316e9c267..7b775ad0f4417 100644 --- a/pkg/build/cmd.go +++ b/pkg/build/cmd.go @@ -11,6 +11,8 @@ import ( "strconv" "strings" "time" + + "github.com/urfave/cli/v2" ) const ( @@ -30,6 +32,12 @@ func logError(message string, err error) int { return 1 } +func RunCmdCLI(c *cli.Context) error { + os.Exit(RunCmd()) + + return nil +} + // RunCmd runs the build command and returns the exit code func RunCmd() int { opts := BuildOptsFromFlags() diff --git a/pkg/build/cmd/argcount_wrapper.go b/pkg/build/cmd/argcount_wrapper.go index 690695cd35086..394283e163d14 100644 --- a/pkg/build/cmd/argcount_wrapper.go +++ b/pkg/build/cmd/argcount_wrapper.go @@ -2,20 +2,6 @@ package main import "github.com/urfave/cli/v2" -// ArgCountWrapper will cause the action to fail if there were not exactly `num` args provided. -func ArgCountWrapper(num int, action cli.ActionFunc) cli.ActionFunc { - return func(ctx *cli.Context) error { - if ctx.NArg() != num { - if err := cli.ShowSubcommandHelp(ctx); err != nil { - return cli.Exit(err.Error(), 1) - } - return cli.Exit("", 1) - } - - return action(ctx) - } -} - // ArgCountWrapper will cause the action to fail if there were more than `num` args provided. 
func MaxArgCountWrapper(max int, action cli.ActionFunc) cli.ActionFunc { return func(ctx *cli.Context) error { diff --git a/pkg/build/cmd/buildbackend.go b/pkg/build/cmd/buildbackend.go deleted file mode 100644 index 40c4f90388ad8..0000000000000 --- a/pkg/build/cmd/buildbackend.go +++ /dev/null @@ -1,68 +0,0 @@ -package main - -import ( - "fmt" - "log" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/compilers" - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/grafana" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -func BuildBackend(ctx *cli.Context) error { - metadata, err := config.GenerateMetadata(ctx) - if err != nil { - return err - } - version := metadata.GrafanaVersion - - var ( - edition = config.Edition(ctx.String("edition")) - cfg = config.Config{ - NumWorkers: ctx.Int("jobs"), - } - ) - - buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode) - if err != nil { - return fmt.Errorf("could not get version / package info for mode '%s': %w", metadata.ReleaseMode.Mode, err) - } - - const grafanaDir = "." 
- - log.Printf("Building Grafana back-end, version %q, %s edition, variants [%v]", - version, edition, buildConfig.Variants) - - p := syncutil.NewWorkerPool(cfg.NumWorkers) - defer p.Close() - - if err := compilers.Install(); err != nil { - return cli.Exit(err.Error(), 1) - } - - g, _ := errutil.GroupWithContext(ctx.Context) - for _, variant := range buildConfig.Variants { - variant := variant - - opts := grafana.BuildVariantOpts{ - Variant: variant, - Edition: edition, - Version: version, - GrafanaDir: grafanaDir, - } - - p.Schedule(g.Wrap(func() error { - return grafana.BuildVariant(ctx.Context, opts) - })) - } - if err := g.Wait(); err != nil { - return cli.Exit(err.Error(), 1) - } - - log.Println("Successfully built back-end binaries!") - return nil -} diff --git a/pkg/build/cmd/builddocker.go b/pkg/build/cmd/builddocker.go deleted file mode 100644 index f623057ee6089..0000000000000 --- a/pkg/build/cmd/builddocker.go +++ /dev/null @@ -1,51 +0,0 @@ -package main - -import ( - "log" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/docker" - "github.com/grafana/grafana/pkg/build/gcloud" -) - -func BuildDocker(c *cli.Context) error { - if err := docker.Init(); err != nil { - return err - } - - metadata, err := config.GenerateMetadata(c) - if err != nil { - return err - } - - useUbuntu := c.Bool("ubuntu") - buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode) - if err != nil { - return err - } - - shouldSave := buildConfig.Docker.ShouldSave - if shouldSave { - if err := gcloud.ActivateServiceAccount(); err != nil { - return err - } - } - - edition := config.Edition(c.String("edition")) - - version := metadata.GrafanaVersion - - log.Printf("Building Docker images, version %s, %s edition, Ubuntu based: %v...", version, edition, - useUbuntu) - - for _, arch := range buildConfig.Docker.Architectures { - if _, err := docker.BuildImage(version, arch, ".", useUbuntu, shouldSave, 
edition, metadata.ReleaseMode.Mode); err != nil { - return cli.Exit(err.Error(), 1) - } - } - - log.Println("Successfully built Docker images!") - return nil -} diff --git a/pkg/build/cmd/buildfrontend.go b/pkg/build/cmd/buildfrontend.go deleted file mode 100644 index ef0fbc9a5fd42..0000000000000 --- a/pkg/build/cmd/buildfrontend.go +++ /dev/null @@ -1,39 +0,0 @@ -package main - -import ( - "log" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/frontend" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -func BuildFrontend(c *cli.Context) error { - metadata, err := config.GenerateMetadata(c) - if err != nil { - return err - } - - cfg, mode, err := frontend.GetConfig(c, metadata) - if err != nil { - return err - } - - p := syncutil.NewWorkerPool(cfg.NumWorkers) - defer p.Close() - - g, _ := errutil.GroupWithContext(c.Context) - if err := frontend.Build(mode, frontend.GrafanaDir, p, g); err != nil { - return err - } - if err := g.Wait(); err != nil { - return err - } - - log.Println("Successfully built Grafana front-end!") - - return nil -} diff --git a/pkg/build/cmd/buildfrontendpackages.go b/pkg/build/cmd/buildfrontendpackages.go deleted file mode 100644 index b1fef3dfceed1..0000000000000 --- a/pkg/build/cmd/buildfrontendpackages.go +++ /dev/null @@ -1,38 +0,0 @@ -package main - -import ( - "log" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/frontend" - "github.com/grafana/grafana/pkg/build/syncutil" - "github.com/urfave/cli/v2" -) - -func BuildFrontendPackages(c *cli.Context) error { - metadata, err := config.GenerateMetadata(c) - if err != nil { - return err - } - - cfg, mode, err := frontend.GetConfig(c, metadata) - if err != nil { - return err - } - - p := syncutil.NewWorkerPool(cfg.NumWorkers) - defer p.Close() - - g, _ := 
errutil.GroupWithContext(c.Context) - if err := frontend.BuildFrontendPackages(cfg.PackageVersion, mode, frontend.GrafanaDir, p, g); err != nil { - return cli.Exit(err.Error(), 1) - } - if err := g.Wait(); err != nil { - return cli.Exit(err.Error(), 1) - } - - log.Println("Successfully built Grafana front-end packages!") - - return nil -} diff --git a/pkg/build/cmd/buildinternalplugins.go b/pkg/build/cmd/buildinternalplugins.go deleted file mode 100644 index 15a1397158dd9..0000000000000 --- a/pkg/build/cmd/buildinternalplugins.go +++ /dev/null @@ -1,53 +0,0 @@ -package main - -import ( - "context" - "log" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/plugins" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -func BuildInternalPlugins(c *cli.Context) error { - cfg := config.Config{ - NumWorkers: c.Int("jobs"), - } - - const grafanaDir = "." - metadata, err := config.GenerateMetadata(c) - if err != nil { - return err - } - buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode) - if err != nil { - return err - } - - log.Println("Building internal Grafana plug-ins...") - - ctx := context.Background() - - p := syncutil.NewWorkerPool(cfg.NumWorkers) - defer p.Close() - - var g *errutil.Group - g, ctx = errutil.GroupWithContext(ctx) - if err := plugins.Build(ctx, grafanaDir, p, g, buildConfig); err != nil { - return cli.Exit(err.Error(), 1) - } - if err := g.Wait(); err != nil { - return cli.Exit(err.Error(), 1) - } - - if err := plugins.Download(ctx, grafanaDir, p); err != nil { - return cli.Exit(err.Error(), 1) - } - - log.Println("Successfully built Grafana plug-ins!") - - return nil -} diff --git a/pkg/build/cmd/enterprisecheck.go b/pkg/build/cmd/enterprisecheck.go deleted file mode 100644 index 3fb13f6c89397..0000000000000 --- a/pkg/build/cmd/enterprisecheck.go +++ /dev/null @@ -1,133 +0,0 @@ -package main - -import ( - "fmt" 
- "log" - "os" - "strconv" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/env" - "github.com/grafana/grafana/pkg/build/git" -) - -// checkOpts are options used to create a new GitHub check for the enterprise downstream test. -type checkOpts struct { - SHA string - URL string - Branch string - PR int -} - -func getCheckOpts(args []string) (*checkOpts, error) { - branch, ok := env.Lookup("DRONE_SOURCE_BRANCH", args) - if !ok { - return nil, cli.Exit("Unable to retrieve build source branch", 1) - } - - var ( - rgx = git.PRCheckRegexp() - matches = rgx.FindStringSubmatch(branch) - ) - - sha, ok := env.Lookup("SOURCE_COMMIT", args) - if !ok { - if matches == nil || len(matches) <= 1 { - return nil, cli.Exit("Unable to retrieve source commit", 1) - } - sha = matches[2] - } - - url, ok := env.Lookup("DRONE_BUILD_LINK", args) - if !ok { - return nil, cli.Exit(`missing environment variable "DRONE_BUILD_LINK"`, 1) - } - - prStr, ok := env.Lookup("OSS_PULL_REQUEST", args) - if !ok { - if matches == nil || len(matches) <= 1 { - return nil, cli.Exit("Unable to retrieve PR number", 1) - } - - prStr = matches[1] - } - - pr, err := strconv.Atoi(prStr) - if err != nil { - return nil, err - } - - return &checkOpts{ - Branch: branch, - PR: pr, - SHA: sha, - URL: url, - }, nil -} - -// EnterpriseCheckBegin creates the GitHub check and signals the beginning of the downstream build / test process -func EnterpriseCheckBegin(c *cli.Context) error { - var ( - ctx = c.Context - client = git.NewGitHubClient(ctx, c.String("github-token")) - ) - - opts, err := getCheckOpts(os.Environ()) - if err != nil { - return err - } - - if _, err = git.CreateEnterpriseStatus(ctx, client.Repositories, opts.SHA, opts.URL, "pending"); err != nil { - return err - } - - return nil -} - -func EnterpriseCheckSuccess(c *cli.Context) error { - return completeEnterpriseCheck(c, true) -} - -func EnterpriseCheckFail(c *cli.Context) error { - return completeEnterpriseCheck(c, false) -} - 
-func completeEnterpriseCheck(c *cli.Context, success bool) error { - var ( - ctx = c.Context - client = git.NewGitHubClient(ctx, c.String("github-token")) - ) - - // Update the pull request labels - opts, err := getCheckOpts(os.Environ()) - if err != nil { - return err - } - - status := "failure" - if success { - status = "success" - } - - // Update the GitHub check... - if _, err := git.CreateEnterpriseStatus(ctx, client.Repositories, opts.SHA, opts.URL, status); err != nil { - return err - } - - // Delete branch if needed - log.Printf("Checking branch '%s' against '%s'", git.PRCheckRegexp().String(), opts.Branch) - if git.PRCheckRegexp().MatchString(opts.Branch) { - log.Println("Deleting branch", opts.Branch) - if err := git.DeleteEnterpriseBranch(ctx, client.Git, opts.Branch); err != nil { - return fmt.Errorf("error deleting enterprise branch: %w", err) - } - } - - label := "enterprise-failed" - if success { - label = "enterprise-ok" - } - - return git.AddLabelToPR(ctx, client.Issues, opts.PR, label) -} diff --git a/pkg/build/cmd/enterprisecheck_test.go b/pkg/build/cmd/enterprisecheck_test.go deleted file mode 100644 index 0eeb5bd57412c..0000000000000 --- a/pkg/build/cmd/enterprisecheck_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package main - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestGetCheckOpts(t *testing.T) { - t.Run("it should return the checkOpts if the correct environment variables are set", func(t *testing.T) { - args := []string{ - "SOURCE_COMMIT=1234", - "DRONE_SOURCE_BRANCH=test", - "DRONE_BUILD_LINK=http://example.com", - "OSS_PULL_REQUEST=1", - } - - opts, err := getCheckOpts(args) - require.NoError(t, err) - require.Equal(t, opts.SHA, "1234") - require.Equal(t, opts.URL, "http://example.com") - }) - t.Run("it should return an error if SOURCE_COMMIT is not set", func(t *testing.T) { - args := []string{ - "DRONE_BUILD_LINK=http://example.com", - "DRONE_SOURCE_BRANCH=test", - "DRONE_BUILD_LINK=http://example.com", - 
"OSS_PULL_REQUEST=1", - } - - opts, err := getCheckOpts(args) - require.Nil(t, opts) - require.Error(t, err) - }) - t.Run("it should return an error if DRONE_BUILD_LINK is not set", func(t *testing.T) { - args := []string{ - "SOURCE_COMMIT=1234", - "DRONE_SOURCE_BRANCH=test", - "OSS_PULL_REQUEST=1", - } - - opts, err := getCheckOpts(args) - require.Nil(t, opts) - require.Error(t, err) - }) - t.Run("it should return an error if OSS_PULL_REQUEST is not set", func(t *testing.T) { - args := []string{ - "SOURCE_COMMIT=1234", - "DRONE_SOURCE_BRANCH=test", - "DRONE_BUILD_LINK=http://example.com", - } - - opts, err := getCheckOpts(args) - require.Nil(t, opts) - require.Error(t, err) - }) - t.Run("it should return an error if OSS_PULL_REQUEST is not an integer", func(t *testing.T) { - args := []string{ - "SOURCE_COMMIT=1234", - "DRONE_SOURCE_BRANCH=test", - "DRONE_BUILD_LINK=http://example.com", - "OSS_PULL_REQUEST=http://example.com", - } - - opts, err := getCheckOpts(args) - require.Nil(t, opts) - require.Error(t, err) - }) -} diff --git a/pkg/build/cmd/exportversion.go b/pkg/build/cmd/exportversion.go deleted file mode 100644 index c4c4744ebef2f..0000000000000 --- a/pkg/build/cmd/exportversion.go +++ /dev/null @@ -1,32 +0,0 @@ -package main - -import ( - "os" - "path/filepath" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/config" -) - -func ExportVersion(c *cli.Context) error { - metadata, err := config.GenerateMetadata(c) - if err != nil { - return err - } - - const distDir = "dist" - if err := os.RemoveAll(distDir); err != nil { - return err - } - if err := os.Mkdir(distDir, 0750); err != nil { - return err - } - - // nolint:gosec - if err := os.WriteFile(filepath.Join(distDir, "grafana.version"), []byte(metadata.GrafanaVersion), 0664); err != nil { - return err - } - - return nil -} diff --git a/pkg/build/cmd/fetchimages.go b/pkg/build/cmd/fetchimages.go index b131cc9a73db2..26260aedb19b3 100644 --- a/pkg/build/cmd/fetchimages.go +++ 
b/pkg/build/cmd/fetchimages.go @@ -4,7 +4,7 @@ import ( "fmt" "log" "os/exec" - "strings" + "path/filepath" "github.com/urfave/cli/v2" @@ -18,6 +18,25 @@ const ( ubuntu = "ubuntu" ) +// GetImageFiles returns the list of image (.img, but should be .tar because they are tar archives) files that are +// created in the 'tag' process and stored in the prerelease bucket, waiting to be released. +func GetImageFiles(grafana string, version string, architectures []config.Architecture) []string { + bases := []string{alpine, ubuntu} + images := []string{} + for _, base := range bases { + for _, arch := range architectures { + image := fmt.Sprintf("%s-%s-%s.img", grafana, version, arch) + if base == "ubuntu" { + image = fmt.Sprintf("%s-%s-ubuntu-%s.img", grafana, version, arch) + } + + images = append(images, image) + } + } + + return images +} + func FetchImages(c *cli.Context) error { if c.NArg() > 0 { if err := cli.ShowSubcommandHelp(c); err != nil { @@ -44,74 +63,65 @@ func FetchImages(c *cli.Context) error { Tag: metadata.GrafanaVersion, } - edition := fmt.Sprintf("-%s", cfg.Edition) - - err = gcloud.ActivateServiceAccount() - if err != nil { - return err + grafana := "grafana" + if cfg.Edition == "enterprise" { + grafana = "grafana-enterprise" } - - var basesStr []string - for _, base := range cfg.Distribution { - switch base { - case alpine: - basesStr = append(basesStr, "") - case ubuntu: - basesStr = append(basesStr, "-ubuntu") - default: - return fmt.Errorf("unrecognized base %q", base) - } + if cfg.Edition == "enterprise2" { + grafana = "grafana-enterprise2" + } + if cfg.Edition == "grafana" || cfg.Edition == "oss" { + grafana = "grafana-oss" } - err = downloadFromGCS(cfg, basesStr, edition) - if err != nil { + baseURL := fmt.Sprintf("gs://%s/%s/", cfg.Bucket, cfg.Tag) + images := GetImageFiles(grafana, cfg.Tag, cfg.Archs) + + log.Printf("Fetching images [%v]", images) + + if err := gcloud.ActivateServiceAccount(); err != nil { return err } - - err = loadImages(cfg, 
basesStr, edition) - if err != nil { + if err := DownloadImages(baseURL, images, "."); err != nil { + return err + } + if err := LoadImages(images, "."); err != nil { return err } return nil } -func loadImages(cfg docker.Config, basesStr []string, edition string) error { - log.Println("Loading fetched image files to local docker registry...") - log.Printf("Number of images to be loaded: %d\n", len(basesStr)*len(cfg.Archs)) - for _, base := range basesStr { - for _, arch := range cfg.Archs { - imageFilename := fmt.Sprintf("grafana%s-%s%s-%s.img", edition, cfg.Tag, base, arch) - log.Printf("image file name: %s\n", imageFilename) - //nolint:gosec - cmd := exec.Command("docker", "load", "-i", imageFilename) - cmd.Dir = "." - out, err := cmd.CombinedOutput() - if err != nil { - log.Printf("out: %s\n", out) - return fmt.Errorf("error loading image: %q", err) - } - log.Printf("Successfully loaded %s!\n %s\n", fmt.Sprintf("grafana%s-%s%s-%s", edition, cfg.Tag, base, arch), out) +// LoadImages uses the `docker load -i` command to load the image tar file into the docker daemon so that it can be +// tagged and pushed. +func LoadImages(images []string, source string) error { + p := filepath.Clean(source) + for _, image := range images { + image := filepath.Join(p, image) + log.Println("Loading image", image) + //nolint:gosec + cmd := exec.Command("docker", "load", "-i", image) + cmd.Dir = "." 
+ out, err := cmd.CombinedOutput() + if err != nil { + log.Printf("out: %s\n", out) + return fmt.Errorf("error loading image: %q", err) } + log.Println("Loaded image", image) } log.Println("Images successfully loaded!") return nil } -func downloadFromGCS(cfg docker.Config, basesStr []string, edition string) error { - log.Printf("Downloading Docker images from GCS bucket: %s\n", cfg.Bucket) - - for _, base := range basesStr { - for _, arch := range cfg.Archs { - src := fmt.Sprintf("gs://%s/%s/grafana%s-%s%s-%s.img", cfg.Bucket, cfg.Tag, edition, cfg.Tag, base, arch) - args := strings.Split(fmt.Sprintf("-m cp -r %s .", src), " ") - //nolint:gosec - cmd := exec.Command("gsutil", args...) - out, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("failed to download: %w\n%s", err, out) - } +func DownloadImages(baseURL string, images []string, destination string) error { + for _, image := range images { + p := baseURL + image + log.Println("Downloading image", p) + //nolint:gosec + cmd := exec.Command("gsutil", "-m", "cp", "-r", p, destination) + out, err := cmd.CombinedOutput() + if err != nil { + return fmt.Errorf("failed to download: %w\n%s", err, out) } } - log.Printf("Successfully fetched image files from %s bucket!\n", cfg.Bucket) return nil } diff --git a/pkg/build/cmd/fetchimages_test.go b/pkg/build/cmd/fetchimages_test.go new file mode 100644 index 0000000000000..9452b011a662b --- /dev/null +++ b/pkg/build/cmd/fetchimages_test.go @@ -0,0 +1,48 @@ +package main + +import ( + "testing" + + "github.com/grafana/grafana/pkg/build/config" + "github.com/stretchr/testify/require" +) + +func TestGetImageFiles(t *testing.T) { + var ( + architectures = []config.Architecture{ + config.ArchAMD64, + config.ArchARM64, + config.ArchARMv7, + } + ) + + t.Run("1.2.3", func(t *testing.T) { + expect := []string{ + "grafana-oss-1.2.3-amd64.img", + "grafana-oss-1.2.3-arm64.img", + "grafana-oss-1.2.3-armv7.img", + "grafana-oss-1.2.3-ubuntu-amd64.img", + 
"grafana-oss-1.2.3-ubuntu-arm64.img", + "grafana-oss-1.2.3-ubuntu-armv7.img", + } + + res := GetImageFiles("grafana-oss", "1.2.3", architectures) + + require.Equal(t, expect, res) + }) + + t.Run("1.2.3+example-01", func(t *testing.T) { + expect := []string{ + "grafana-oss-1.2.3+example-01-amd64.img", + "grafana-oss-1.2.3+example-01-arm64.img", + "grafana-oss-1.2.3+example-01-armv7.img", + "grafana-oss-1.2.3+example-01-ubuntu-amd64.img", + "grafana-oss-1.2.3+example-01-ubuntu-arm64.img", + "grafana-oss-1.2.3+example-01-ubuntu-armv7.img", + } + + res := GetImageFiles("grafana-oss", "1.2.3+example-01", architectures) + + require.Equal(t, expect, res) + }) +} diff --git a/pkg/build/cmd/flags.go b/pkg/build/cmd/flags.go index c9020fe2403cb..7ab2eb6f9938f 100644 --- a/pkg/build/cmd/flags.go +++ b/pkg/build/cmd/flags.go @@ -16,37 +16,15 @@ var ( Usage: "The edition of Grafana to build (oss or enterprise)", Value: "oss", } - variantsFlag = cli.StringFlag{ - Name: "variants", - Usage: "Comma-separated list of variants to build", - } triesFlag = cli.IntFlag{ Name: "tries", Usage: "Specify number of tries before failing", Value: 1, } - noInstallDepsFlag = cli.BoolFlag{ - Name: "no-install-deps", - Usage: "Don't install dependencies", - } - signingAdminFlag = cli.BoolFlag{ - Name: "signing-admin", - Usage: "Use manifest signing admin API endpoint?", - } - signFlag = cli.BoolFlag{ - Name: "sign", - Usage: "Enable plug-in signing (you must set GRAFANA_API_KEY)", - } dryRunFlag = cli.BoolFlag{ Name: "dry-run", Usage: "Only simulate actions", } - gitHubTokenFlag = cli.StringFlag{ - Name: "github-token", - Value: "", - EnvVars: []string{"GITHUB_TOKEN"}, - Usage: "GitHub token", - } tagFlag = cli.StringFlag{ Name: "tag", Usage: "Grafana version tag", diff --git a/pkg/build/cmd/grafanacom.go b/pkg/build/cmd/grafanacom.go index 9d44b9298c982..ab53217f7a46c 100644 --- a/pkg/build/cmd/grafanacom.go +++ b/pkg/build/cmd/grafanacom.go @@ -19,6 +19,7 @@ import ( 
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/gcloud" "github.com/grafana/grafana/pkg/build/gcloud/storage" + "github.com/grafana/grafana/pkg/build/gcom" "github.com/grafana/grafana/pkg/build/packaging" ) @@ -125,6 +126,51 @@ func getReleaseURLs() (string, string, error) { return pconf.Grafana.WhatsNewURL, pconf.Grafana.ReleaseNotesURL, nil } +func Builds(baseURL *url.URL, grafana, version string, packages []packaging.BuildArtifact) ([]GCOMPackage, error) { + builds := make([]GCOMPackage, len(packages)) + for i, v := range packages { + var ( + os = v.Distro + arch = v.Arch + ) + + if v.Distro == "windows" { + os = "win" + if v.Ext == "msi" { + os = "win-installer" + } + } + + if v.Distro == "rhel" { + if arch == "aarch64" { + arch = "arm64" + } + if arch == "x86_64" { + arch = "amd64" + } + } + + if v.Distro == "deb" { + if arch == "armhf" { + arch = "armv7" + if v.RaspberryPi { + log.Println(v.Distro, arch, "raspberrypi == true") + arch = "armv6" + } + } + } + + u := gcom.GetURL(baseURL, version, grafana, v.Distro, v.Arch, v.Ext, v.Musl, v.RaspberryPi) + builds[i] = GCOMPackage{ + OS: os, + URL: u.String(), + Arch: arch, + } + } + + return builds, nil +} + // publishPackages publishes packages to grafana.com. 
func publishPackages(cfg packaging.PublishConfig) error { log.Printf("Publishing Grafana packages, version %s, %s edition, %s mode, dryRun: %v, simulating: %v...\n", @@ -133,14 +179,17 @@ func publishPackages(cfg packaging.PublishConfig) error { versionStr := fmt.Sprintf("v%s", cfg.Version) log.Printf("Creating release %s at grafana.com...\n", versionStr) - var sfx string - var pth string + var ( + pth string + grafana = "grafana" + ) + switch cfg.Edition { case config.EditionOSS: pth = "oss" case config.EditionEnterprise: + grafana = "grafana-enterprise" pth = "enterprise" - sfx = packaging.EnterpriseSfx default: return fmt.Errorf("unrecognized edition %q", cfg.Edition) } @@ -152,28 +201,19 @@ func publishPackages(cfg packaging.PublishConfig) error { pth = path.Join(pth, packaging.ReleaseFolder) } - product := fmt.Sprintf("grafana%s", sfx) - pth = path.Join(pth, product) - baseArchiveURL := fmt.Sprintf("https://dl.grafana.com/%s", pth) - - var builds []buildRepr - for _, ba := range packaging.ArtifactConfigs { - u := ba.GetURL(baseArchiveURL, cfg) - - sha256, err := getSHA256(u) - if err != nil { - return err - } + pth = path.Join(pth) + baseArchiveURL := &url.URL{ + Scheme: "https", + Host: "dl.grafana.com", + Path: pth, + } - builds = append(builds, buildRepr{ - OS: ba.Os, - URL: u, - SHA256: string(sha256), - Arch: ba.Arch, - }) + builds, err := Builds(baseArchiveURL, grafana, cfg.Version, packaging.ArtifactConfigs) + if err != nil { + return err } - r := releaseRepr{ + r := Release{ Version: cfg.Version, ReleaseDate: time.Now().UTC(), Builds: builds, @@ -195,6 +235,15 @@ func publishPackages(cfg packaging.PublishConfig) error { return err } + for i, v := range r.Builds { + sha, err := getSHA256(v.URL) + if err != nil { + return err + } + + r.Builds[i].SHA256 = string(sha) + } + for _, b := range r.Builds { if err := postRequest(cfg, fmt.Sprintf("versions/%s/packages", cfg.Version), b, fmt.Sprintf("create build %s %s", b.OS, b.Arch)); err != nil { @@ -211,6 
+260,7 @@ func publishPackages(cfg packaging.PublishConfig) error { func getSHA256(u string) ([]byte, error) { shaURL := fmt.Sprintf("%s.sha256", u) + // nolint:gosec resp, err := http.Get(shaURL) if err != nil { @@ -232,7 +282,7 @@ func getSHA256(u string) ([]byte, error) { return sha256, nil } -func postRequest(cfg packaging.PublishConfig, pth string, obj any, descr string) error { +func postRequest(cfg packaging.PublishConfig, pth string, body any, descr string) error { var sfx string switch cfg.Edition { case config.EditionOSS: @@ -243,7 +293,7 @@ func postRequest(cfg packaging.PublishConfig, pth string, obj any, descr string) } product := fmt.Sprintf("grafana%s", sfx) - jsonB, err := json.Marshal(obj) + jsonB, err := json.Marshal(body) if err != nil { return fmt.Errorf("failed to JSON encode release: %w", err) } @@ -303,20 +353,20 @@ func constructURL(product string, pth string) (string, error) { return u.String(), err } -type buildRepr struct { +type GCOMPackage struct { OS string `json:"os"` URL string `json:"url"` SHA256 string `json:"sha256"` Arch string `json:"arch"` } -type releaseRepr struct { - Version string `json:"version"` - ReleaseDate time.Time `json:"releaseDate"` - Stable bool `json:"stable"` - Beta bool `json:"beta"` - Nightly bool `json:"nightly"` - WhatsNewURL string `json:"whatsNewUrl"` - ReleaseNotesURL string `json:"releaseNotesUrl"` - Builds []buildRepr `json:"-"` +type Release struct { + Version string `json:"version"` + ReleaseDate time.Time `json:"releaseDate"` + Stable bool `json:"stable"` + Beta bool `json:"beta"` + Nightly bool `json:"nightly"` + WhatsNewURL string `json:"whatsNewUrl"` + ReleaseNotesURL string `json:"releaseNotesUrl"` + Builds []GCOMPackage `json:"-"` } diff --git a/pkg/build/cmd/grafanacom_test.go b/pkg/build/cmd/grafanacom_test.go index bf90874b1c924..9f4fbd7c98193 100644 --- a/pkg/build/cmd/grafanacom_test.go +++ b/pkg/build/cmd/grafanacom_test.go @@ -1,7 +1,14 @@ package main import ( + "fmt" + "net/url" + 
"path" "testing" + + "github.com/grafana/grafana/pkg/build/packaging" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func Test_constructURL(t *testing.T) { @@ -33,3 +40,221 @@ func Test_constructURL(t *testing.T) { }) } } + +func TestBuilds(t *testing.T) { + baseURL := &url.URL{ + Scheme: "https", + Host: "dl.example.com", + Path: path.Join("oss", "release"), + } + + version := "1.2.3" + grafana := "grafana" + packages := []packaging.BuildArtifact{ + { + Distro: "deb", + Arch: "arm64", + Ext: "deb", + }, + { + Distro: "rhel", + Arch: "aarch64", + Ext: "rpm", + }, + { + Distro: "linux", + Arch: "arm64", + Ext: "tar.gz", + }, + { + Distro: "deb", + Arch: "armhf", + Ext: "deb", + RaspberryPi: true, + }, + { + Distro: "deb", + Arch: "armhf", + Ext: "deb", + }, + { + Distro: "linux", + Arch: "armv7", + Ext: "tar.gz", + }, + { + Distro: "windows", + Arch: "amd64", + Ext: "zip", + }, + { + Distro: "windows", + Arch: "amd64", + Ext: "msi", + }, + } + + expect := []GCOMPackage{ + { + URL: "https://dl.example.com/oss/release/grafana_1.2.3_arm64.deb", + OS: "deb", + Arch: "arm64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3-1.aarch64.rpm", + OS: "rhel", + Arch: "arm64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3.linux-arm64.tar.gz", + OS: "linux", + Arch: "arm64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-rpi_1.2.3_armhf.deb", + OS: "deb", + Arch: "armv6", + }, + { + URL: "https://dl.example.com/oss/release/grafana_1.2.3_armhf.deb", + OS: "deb", + Arch: "armv7", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3.linux-armv7.tar.gz", + OS: "linux", + Arch: "armv7", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3.windows-amd64.zip", + OS: "win", + Arch: "amd64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3.windows-amd64.msi", + OS: "win-installer", + Arch: "amd64", + }, + } + + builds, err := Builds(baseURL, grafana, version, 
packages) + require.NoError(t, err) + require.Equal(t, len(expect), len(builds)) + + for i := range builds { + t.Run(fmt.Sprintf("[%d/%d] %s", i+1, len(builds), expect[i].URL), func(t *testing.T) { + assert.Equal(t, expect[i].URL, builds[i].URL) + assert.Equal(t, expect[i].OS, builds[i].OS) + assert.Equal(t, expect[i].Arch, builds[i].Arch) + }) + } +} + +func TestBuildsWithPlus(t *testing.T) { + baseURL := &url.URL{ + Scheme: "https", + Host: "dl.example.com", + Path: path.Join("oss", "release"), + } + + version := "1.2.3+example-01" + grafana := "grafana" + packages := []packaging.BuildArtifact{ + { + Distro: "deb", + Arch: "arm64", + Ext: "deb", + }, + { + Distro: "rhel", + Arch: "aarch64", + Ext: "rpm", + }, + { + Distro: "linux", + Arch: "arm64", + Ext: "tar.gz", + }, + { + Distro: "deb", + Arch: "armhf", + Ext: "deb", + RaspberryPi: true, + }, + { + Distro: "deb", + Arch: "armhf", + Ext: "deb", + }, + { + Distro: "linux", + Arch: "armv7", + Ext: "tar.gz", + }, + { + Distro: "windows", + Arch: "amd64", + Ext: "zip", + }, + { + Distro: "windows", + Arch: "amd64", + Ext: "msi", + }, + } + + expect := []GCOMPackage{ + { + URL: "https://dl.example.com/oss/release/grafana_1.2.3+example~01_arm64.deb", + OS: "deb", + Arch: "arm64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3+example~01-1.aarch64.rpm", + OS: "rhel", + Arch: "arm64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.linux-arm64.tar.gz", + OS: "linux", + Arch: "arm64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-rpi_1.2.3+example~01_armhf.deb", + OS: "deb", + Arch: "armv6", + }, + { + URL: "https://dl.example.com/oss/release/grafana_1.2.3+example~01_armhf.deb", + OS: "deb", + Arch: "armv7", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.linux-armv7.tar.gz", + OS: "linux", + Arch: "armv7", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.windows-amd64.zip", + OS: "win", + Arch: 
"amd64", + }, + { + URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.windows-amd64.msi", + OS: "win-installer", + Arch: "amd64", + }, + } + + builds, err := Builds(baseURL, grafana, version, packages) + require.NoError(t, err) + require.Equal(t, len(expect), len(builds)) + + for i := range builds { + t.Run(fmt.Sprintf("[%d/%d] %s", i+1, len(builds), expect[i].URL), func(t *testing.T) { + assert.Equal(t, expect[i].URL, builds[i].URL) + assert.Equal(t, expect[i].OS, builds[i].OS) + assert.Equal(t, expect[i].Arch, builds[i].Arch) + }) + } +} diff --git a/pkg/build/cmd/main.go b/pkg/build/cmd/main.go index dff9db8f8b098..d207abe020b74 100644 --- a/pkg/build/cmd/main.go +++ b/pkg/build/cmd/main.go @@ -3,11 +3,9 @@ package main import ( "log" "os" - "strings" + "github.com/grafana/grafana/pkg/build" "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/docker" ) var additionalCommands []*cli.Command = make([]*cli.Command, 0, 5) @@ -21,28 +19,8 @@ func main() { app := cli.NewApp() app.Commands = cli.Commands{ { - Name: "build-backend", - Usage: "Build one or more variants of back-end binaries", - ArgsUsage: "[version]", - Action: MaxArgCountWrapper(1, BuildBackend), - Flags: []cli.Flag{ - &jobsFlag, - &variantsFlag, - &editionFlag, - &buildIDFlag, - }, - }, - { - Name: "build-frontend-packages", - Usage: "Build front-end packages", - ArgsUsage: "[version]", - Action: BuildFrontendPackages, - Flags: []cli.Flag{ - &jobsFlag, - &editionFlag, - &buildIDFlag, - &noInstallDepsFlag, - }, + Name: "build", + Action: build.RunCmdCLI, }, { Name: "e2e-tests", @@ -71,44 +49,11 @@ func main() { }, }, }, - { - Name: "build-frontend", - Usage: "Build front-end artifacts", - ArgsUsage: "[version]", - Action: MaxArgCountWrapper(1, BuildFrontend), - Flags: []cli.Flag{ - &jobsFlag, - &editionFlag, - &buildIDFlag, - }, - }, { Name: "whatsnew-checker", Usage: "Checks whatsNewUrl in package.json for differences between the tag and the docs version", Action: 
WhatsNewChecker, }, - { - Name: "build-docker", - Usage: "Build Grafana Docker images", - Action: MaxArgCountWrapper(1, BuildDocker), - Flags: []cli.Flag{ - &jobsFlag, - &editionFlag, - &cli.BoolFlag{ - Name: "ubuntu", - Usage: "Use Ubuntu base image", - }, - &cli.BoolFlag{ - Name: "shouldSave", - Usage: "Should save docker image to tarball", - }, - &cli.StringFlag{ - Name: "archs", - Value: strings.Join(docker.AllArchs, ","), - Usage: "Comma separated architectures to build", - }, - }, - }, { Name: "upload-cdn", Usage: "Upload public/* to a cdn bucket", @@ -117,23 +62,6 @@ func main() { &editionFlag, }, }, - { - Name: "shellcheck", - Usage: "Run shellcheck on shell scripts", - Action: Shellcheck, - }, - { - Name: "build-plugins", - Usage: "Build internal plug-ins", - Action: MaxArgCountWrapper(1, BuildInternalPlugins), - Flags: []cli.Flag{ - &jobsFlag, - &editionFlag, - &signingAdminFlag, - &signFlag, - &noInstallDepsFlag, - }, - }, { Name: "publish-metrics", Usage: "Publish a set of metrics from stdin", @@ -145,30 +73,6 @@ func main() { Usage: "Verify Drone configuration", Action: VerifyDrone, }, - { - Name: "verify-starlark", - Usage: "Verify Starlark configuration", - ArgsUsage: "", - Action: VerifyStarlark, - }, - { - Name: "export-version", - Usage: "Exports version in dist/grafana.version", - Action: ExportVersion, - }, - { - Name: "package", - Usage: "Package one or more Grafana variants", - ArgsUsage: "[version]", - Action: MaxArgCountWrapper(1, Package), - Flags: []cli.Flag{ - &jobsFlag, - &variantsFlag, - &editionFlag, - &buildIDFlag, - &signFlag, - }, - }, { Name: "store-storybook", Usage: "Stores storybook to GCS buckets", @@ -279,18 +183,6 @@ func main() { &editionFlag, }, }, - { - Name: "publish-enterprise2", - Usage: "Handle Grafana Enterprise2 Docker images", - ArgsUsage: "[version]", - Action: Enterprise2, - Flags: []cli.Flag{ - &cli.StringFlag{ - Name: "dockerhub-repo", - Usage: "DockerHub repo to push images", - }, - }, - }, }, }, { @@ -399,36 
+291,6 @@ func main() { }, }, }, - { - Name: "enterprise-check", - Usage: "Commands for testing against Grafana Enterprise", - Subcommands: cli.Commands{ - { - Name: "begin", - Usage: "Creates the GitHub check in a pull request and begins the tests", - Action: EnterpriseCheckBegin, - Flags: []cli.Flag{ - &gitHubTokenFlag, - }, - }, - { - Name: "success", - Usage: "Updates the GitHub check in a pull request to show a successful build and updates the pull request labels", - Action: EnterpriseCheckSuccess, - Flags: []cli.Flag{ - &gitHubTokenFlag, - }, - }, - { - Name: "fail", - Usage: "Updates the GitHub check in a pull request to show a failed build and updates the pull request labels", - Action: EnterpriseCheckFail, - Flags: []cli.Flag{ - &gitHubTokenFlag, - }, - }, - }, - }, } app.Commands = append(app.Commands, additionalCommands...) diff --git a/pkg/build/cmd/npm.go b/pkg/build/cmd/npm.go index ba35679f57a2b..4660b22b63620 100644 --- a/pkg/build/cmd/npm.go +++ b/pkg/build/cmd/npm.go @@ -2,6 +2,7 @@ package main import ( "fmt" + "log" "os" "strings" @@ -23,6 +24,11 @@ func NpmRetrieveAction(c *cli.Context) error { return fmt.Errorf("no tag version specified, exitting") } + if strings.Contains(tag, "security") { + log.Printf("skipping npm publish because version '%s' has 'security'", tag) + return nil + } + prereleaseBucket := strings.TrimSpace(os.Getenv("PRERELEASE_BUCKET")) if prereleaseBucket == "" { return cli.Exit("the environment variable PRERELEASE_BUCKET must be set", 1) @@ -48,6 +54,11 @@ func NpmStoreAction(c *cli.Context) error { return fmt.Errorf("no tag version specified, exiting") } + if strings.Contains(tag, "security") { + log.Printf("skipping npm publish because version '%s' has 'security'", tag) + return nil + } + prereleaseBucket := strings.TrimSpace(os.Getenv("PRERELEASE_BUCKET")) if prereleaseBucket == "" { return cli.Exit("the environment variable PRERELEASE_BUCKET must be set", 1) @@ -73,6 +84,11 @@ func NpmReleaseAction(c *cli.Context) error 
{ return fmt.Errorf("no tag version specified, exitting") } + if strings.Contains(tag, "security") { + log.Printf("skipping npm publish because version '%s' has 'security'", tag) + return nil + } + err := npm.PublishNpmPackages(c.Context, tag) if err != nil { return err diff --git a/pkg/build/cmd/package.go b/pkg/build/cmd/package.go deleted file mode 100644 index 6b961512ec194..0000000000000 --- a/pkg/build/cmd/package.go +++ /dev/null @@ -1,80 +0,0 @@ -package main - -import ( - "context" - "log" - "strings" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/gpg" - "github.com/grafana/grafana/pkg/build/packaging" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -func Package(c *cli.Context) error { - metadata, err := config.GenerateMetadata(c) - if err != nil { - return err - } - - edition := config.Edition(c.String("edition")) - - releaseMode, err := metadata.GetReleaseMode() - if err != nil { - return cli.Exit(err.Error(), 1) - } - - releaseModeConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode) - if err != nil { - return cli.Exit(err.Error(), 1) - } - - cfg := config.Config{ - NumWorkers: c.Int("jobs"), - SignPackages: c.Bool("sign"), - } - - ctx := context.Background() - - variants := []config.Variant{} - variantStrs := strings.Split(c.String("variants"), ",") - if c.String("variants") != "" { - for _, varStr := range variantStrs { - if varStr == "" { - continue - } - variants = append(variants, config.Variant(varStr)) - } - } else { - variants = releaseModeConfig.Variants - } - - if len(variants) == 0 { - variants = config.AllVariants - } - - log.Printf("Packaging Grafana version %q, version mode %s, %s edition, variants %s", metadata.GrafanaVersion, releaseMode.Mode, - edition, strings.Join(variantStrs, ",")) - - if cfg.SignPackages { - if err := gpg.LoadGPGKeys(&cfg); err != nil { - return cli.Exit(err, 1) - } - defer gpg.RemoveGPGFiles(cfg) - if err := gpg.Import(cfg); 
err != nil { - return cli.Exit(err, 1) - } - } - - p := syncutil.NewWorkerPool(cfg.NumWorkers) - defer p.Close() - - if err := packaging.PackageGrafana(ctx, metadata.GrafanaVersion, ".", cfg, edition, variants, releaseModeConfig.PluginSignature.Sign, p); err != nil { - return cli.Exit(err, 1) - } - - log.Println("Successfully packaged Grafana!") - return nil -} diff --git a/pkg/build/cmd/publishimages_enterprise2.go b/pkg/build/cmd/publishimages_enterprise2.go deleted file mode 100644 index 94cd4b57c7d37..0000000000000 --- a/pkg/build/cmd/publishimages_enterprise2.go +++ /dev/null @@ -1,101 +0,0 @@ -package main - -import ( - "fmt" - "log" - "os" - "os/exec" - - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/docker" - "github.com/grafana/grafana/pkg/build/gcloud" -) - -func Enterprise2(c *cli.Context) error { - if c.NArg() > 0 { - if err := cli.ShowSubcommandHelp(c); err != nil { - return cli.Exit(err.Error(), 1) - } - return cli.Exit("", 1) - } - - if err := gcloud.ActivateServiceAccount(); err != nil { - return fmt.Errorf("couldn't activate service account, err: %w", err) - } - - metadata, err := config.GenerateMetadata(c) - if err != nil { - return err - } - - buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode) - if err != nil { - return err - } - - cfg := docker.Config{ - Archs: buildConfig.Docker.Architectures, - Distribution: buildConfig.Docker.Distribution, - DockerHubRepo: c.String("dockerhub-repo"), - Tag: metadata.GrafanaVersion, - } - - err = dockerLoginEnterprise2() - if err != nil { - return err - } - - var distributionStr []string - for _, distribution := range cfg.Distribution { - switch distribution { - case alpine: - distributionStr = append(distributionStr, "") - case ubuntu: - distributionStr = append(distributionStr, "-ubuntu") - default: - return fmt.Errorf("unrecognized distribution %q", distribution) - } - } - - for _, distribution := range distributionStr 
{ - var imageFileNames []string - for _, arch := range cfg.Archs { - imageFilename := fmt.Sprintf("%s:%s%s-%s", cfg.DockerHubRepo, cfg.Tag, distribution, arch) - err := docker.PushImage(imageFilename) - if err != nil { - return err - } - imageFileNames = append(imageFileNames, imageFilename) - } - manifest := fmt.Sprintf("%s:%s%s", cfg.DockerHubRepo, cfg.Tag, distribution) - args := []string{"manifest", "create", manifest} - args = append(args, imageFileNames...) - - //nolint:gosec - cmd := exec.Command("docker", args...) - cmd.Env = append(os.Environ(), "DOCKER_CLI_EXPERIMENTAL=enabled") - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to create Docker manifest: %w\n%s", err, output) - } - - err = docker.PushManifest(manifest) - if err != nil { - return err - } - } - - return nil -} - -func dockerLoginEnterprise2() error { - log.Println("Docker login...") - cmd := exec.Command("gcloud", "auth", "configure-docker") - if out, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("error logging in to DockerHub: %s %q", out, err) - } - - log.Println("Successful login!") - return nil -} diff --git a/pkg/build/cmd/shellcheck.go b/pkg/build/cmd/shellcheck.go deleted file mode 100644 index e18b1d8525898..0000000000000 --- a/pkg/build/cmd/shellcheck.go +++ /dev/null @@ -1,42 +0,0 @@ -package main - -import ( - "fmt" - "log" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/urfave/cli/v2" -) - -func Shellcheck(c *cli.Context) error { - log.Println("Running shellcheck...") - - fpaths := []string{} - if err := filepath.Walk("scripts", func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - - if strings.HasSuffix(path, ".sh") { - fpaths = append(fpaths, path) - } - - return nil - }); err != nil { - return fmt.Errorf("couldn't traverse scripts/: %w", err) - } - - log.Printf("Running shellcheck on %s", strings.Join(fpaths, ",")) - args := append([]string{"-e", "SC1071", "-e", "SC2162"}, 
fpaths...) - //nolint:gosec - cmd := exec.Command("shellcheck", args...) - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("shellcheck failed: %s", output) - } - - log.Println("Successfully ran shellcheck!") - return nil -} diff --git a/pkg/build/cmd/uploadpackages.go b/pkg/build/cmd/uploadpackages.go index da0dd6a607adf..5801192c62c8f 100644 --- a/pkg/build/cmd/uploadpackages.go +++ b/pkg/build/cmd/uploadpackages.go @@ -7,6 +7,7 @@ import ( "os" "os/exec" "path/filepath" + "regexp" "strings" "github.com/urfave/cli/v2" @@ -14,9 +15,28 @@ import ( "github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/droneutil" "github.com/grafana/grafana/pkg/build/gcloud" - "github.com/grafana/grafana/pkg/build/packaging" ) +// PackageRegexp returns a regexp for matching packages corresponding to a certain Grafana edition. +func PackageRegexp(edition config.Edition) *regexp.Regexp { + var sfx string + switch edition { + case config.EditionOSS: + case config.EditionEnterprise: + sfx = "-enterprise" + case config.EditionEnterprise2: + sfx = "-enterprise2" + default: + panic(fmt.Sprintf("unrecognized edition %q", edition)) + } + rePkg, err := regexp.Compile(fmt.Sprintf(`^grafana%s(?:-rpi)?[-_][^-_]+.*$`, sfx)) + if err != nil { + panic(fmt.Sprintf("Failed to compile regexp: %s", err)) + } + + return rePkg +} + const releaseFolder = "release" const mainFolder = "main" const releaseBranchFolder = "prerelease" @@ -181,7 +201,7 @@ func uploadPackages(cfg uploadConfig) error { return fmt.Errorf("failed to list packages: %w", err) } fpaths := []string{} - rePkg := packaging.PackageRegexp(cfg.edition) + rePkg := PackageRegexp(cfg.edition) for _, fpath := range matches { fname := filepath.Base(fpath) if strings.Contains(fname, "latest") || !rePkg.MatchString(fname) { diff --git a/pkg/build/cmd/verifystarlark.go b/pkg/build/cmd/verifystarlark.go deleted file mode 100644 index ff33a77a3afc5..0000000000000 --- 
a/pkg/build/cmd/verifystarlark.go +++ /dev/null @@ -1,142 +0,0 @@ -package main - -import ( - "context" - "errors" - "fmt" - "io/fs" - "os/exec" - "path/filepath" - "strings" - - "github.com/urfave/cli/v2" -) - -func mapSlice[I any, O any](a []I, f func(I) O) []O { - o := make([]O, len(a)) - for i, e := range a { - o[i] = f(e) - } - return o -} - -// VerifyStarlark is the CLI Action for verifying Starlark files in a workspace. -// It expects a single context argument which is the path to the workspace. -// The actual verification procedure can return multiple errors which are -// joined together to be one holistic error for the action. -func VerifyStarlark(c *cli.Context) error { - if c.NArg() != 1 { - var message string - if c.NArg() == 0 { - message = "ERROR: missing required argument " - } - if c.NArg() > 1 { - message = "ERROR: too many arguments" - } - - if err := cli.ShowSubcommandHelp(c); err != nil { - return err - } - - return cli.Exit(message, 1) - } - - workspace := c.Args().Get(0) - verificationErrs, executionErr := verifyStarlark(c.Context, workspace, buildifierLintCommand) - if executionErr != nil { - return executionErr - } - - if len(verificationErrs) == 0 { - return nil - } - - noun := "file" - if len(verificationErrs) > 1 { - noun += "s" - } - - return fmt.Errorf("verification failed for %d %s:\n%s", - len(verificationErrs), - noun, - strings.Join( - mapSlice(verificationErrs, func(e error) string { return e.Error() }), - "\n", - )) -} - -type commandFunc = func(path string) (command string, args []string) - -func buildifierLintCommand(path string) (string, []string) { - return "buildifier", []string{"-lint", "warn", "-mode", "check", path} -} - -// verifyStarlark walks all directories starting at provided workspace path and -// verifies any Starlark files it finds. -// Starlark files are assumed to end with the .star extension. -// The verification relies on linting frovided by the 'buildifier' binary which -// must be in the PATH. 
-// A slice of verification errors are returned, one for each file that failed verification. -// If any execution of the `buildifier` command fails, this is returned separately. -// commandFn is executed on every Starlark file to determine the command and arguments to be executed. -// The caller is trusted and it is the callers responsibility to ensure that the resulting command is safe to execute. -func verifyStarlark(ctx context.Context, workspace string, commandFn commandFunc) ([]error, error) { - var verificationErrs []error - - // All errors from filepath.WalkDir are filtered by the fs.WalkDirFunc. - // Lstat or ReadDir errors are reported as verificationErrors. - // If any execution of the `buildifier` command fails or if the context is cancelled, - // it is reported as an error and any verification of subsequent files is skipped. - err := filepath.WalkDir(workspace, func(path string, d fs.DirEntry, err error) error { - // Skip verification of the file or files within the directory if there is an error - // returned by Lstat or ReadDir. - if err != nil { - verificationErrs = append(verificationErrs, err) - return nil - } - - if d.IsDir() { - return nil - } - - if filepath.Ext(path) == ".star" { - command, args := commandFn(path) - // The caller is trusted. - //nolint:gosec - cmd := exec.CommandContext(ctx, command, args...) - cmd.Dir = workspace - - _, err = cmd.Output() - if err == nil { // No error, early return. - return nil - } - - // The error returned from cmd.Output() is never wrapped. 
- //nolint:errorlint - if err, ok := err.(*exec.ExitError); ok { - switch err.ExitCode() { - // Case comments are informed by the output of `buildifier --help` - case 1: // syntax errors in input - verificationErrs = append(verificationErrs, errors.New(string(err.Stderr))) - return nil - case 2: // usage errors: invoked incorrectly - return fmt.Errorf("command %q: %s", cmd, err.Stderr) - case 3: // unexpected runtime errors: file I/O problems or internal bugs - return fmt.Errorf("command %q: %s", cmd, err.Stderr) - case 4: // check mode failed (reformat is needed) - verificationErrs = append(verificationErrs, errors.New(string(err.Stderr))) - return nil - default: - return fmt.Errorf("command %q: %s", cmd, err.Stderr) - } - } - - // Error was not an exit error from the command. - return fmt.Errorf("command %q: %v", cmd, err) - } - - return nil - }) - - return verificationErrs, err -} diff --git a/pkg/build/cmd/verifystarlark_test.go b/pkg/build/cmd/verifystarlark_test.go deleted file mode 100644 index 1121ca5bd646f..0000000000000 --- a/pkg/build/cmd/verifystarlark_test.go +++ /dev/null @@ -1,137 +0,0 @@ -//go:build requires_buildifier - -package main - -import ( - "context" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestVerifyStarlark(t *testing.T) { - t.Run("execution errors", func(t *testing.T) { - t.Run("invalid usage", func(t *testing.T) { - ctx := context.Background() - workspace := t.TempDir() - err := os.WriteFile(filepath.Join(workspace, "ignored.star"), []byte{}, os.ModePerm) - if err != nil { - t.Fatalf(err.Error()) - } - - _, executionErr := verifyStarlark(ctx, workspace, func(string) (string, []string) { return "buildifier", []string{"--invalid"} }) - if executionErr == nil { - t.Fatalf("Expected execution error but got none") - } - }) - - t.Run("context cancellation", func(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - workspace := t.TempDir() - err := 
os.WriteFile(filepath.Join(workspace, "ignored.star"), []byte{}, os.ModePerm) - if err != nil { - t.Fatalf(err.Error()) - } - err = os.WriteFile(filepath.Join(workspace, "other-ignored.star"), []byte{}, os.ModePerm) - if err != nil { - t.Fatalf(err.Error()) - } - cancel() - - _, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand) - if executionErr == nil { - t.Fatalf("Expected execution error but got none") - } - }) - }) - - t.Run("verification errors", func(t *testing.T) { - t.Run("a single file with lint", func(t *testing.T) { - ctx := context.Background() - workspace := t.TempDir() - - invalidContent := []byte(`load("scripts/drone/other.star", "function") - -function()`) - err := os.WriteFile(filepath.Join(workspace, "has-lint.star"), invalidContent, os.ModePerm) - if err != nil { - t.Fatalf(err.Error()) - } - - verificationErrs, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand) - if executionErr != nil { - t.Fatalf("Unexpected execution error: %v", executionErr) - } - if len(verificationErrs) == 0 { - t.Fatalf(`"has-lint.star" requires linting but the verifyStarlark function provided no linting error`) - } - if len(verificationErrs) > 1 { - t.Fatalf(`verifyStarlark returned multiple errors for the "has-lint.star" file but only one was expected: %v`, verificationErrs) - } - if !strings.Contains(verificationErrs[0].Error(), "has-lint.star:1: module-docstring: The file has no module docstring.") { - t.Fatalf(`"has-lint.star" is missing a module docstring but the verifyStarlark function linting error did not mention this, instead we got: %v`, verificationErrs[0]) - } - }) - - t.Run("no files with lint", func(t *testing.T) { - ctx := context.Background() - workspace := t.TempDir() - - content := []byte(`""" -This module does nothing. 
-""" - -load("scripts/drone/other.star", "function") - -function() -`) - require.NoError(t, os.WriteFile(filepath.Join(workspace, "no-lint.star"), content, os.ModePerm)) - - verificationErrs, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand) - if executionErr != nil { - t.Fatalf("Unexpected execution error: %v", executionErr) - } - if len(verificationErrs) != 0 { - t.Log(`"no-lint.star" has no lint but the verifyStarlark function provided at least one error`) - for _, err := range verificationErrs { - t.Log(err) - } - t.FailNow() - } - }) - - t.Run("multiple files with lint", func(t *testing.T) { - ctx := context.Background() - workspace := t.TempDir() - - invalidContent := []byte(`load("scripts/drone/other.star", "function") - -function()`) - require.NoError(t, os.WriteFile(filepath.Join(workspace, "has-lint.star"), invalidContent, os.ModePerm)) - require.NoError(t, os.WriteFile(filepath.Join(workspace, "has-lint2.star"), invalidContent, os.ModePerm)) - - verificationErrs, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand) - if executionErr != nil { - t.Fatalf("Unexpected execution error: %v", executionErr) - } - if len(verificationErrs) == 0 { - t.Fatalf(`Two files require linting but the verifyStarlark function provided no linting error`) - } - if len(verificationErrs) == 1 { - t.Fatalf(`Two files require linting but the verifyStarlark function provided only one linting error: %v`, verificationErrs[0]) - } - if len(verificationErrs) > 2 { - t.Fatalf(`verifyStarlark returned more errors than expected: %v`, verificationErrs) - } - if !strings.Contains(verificationErrs[0].Error(), "has-lint.star:1: module-docstring: The file has no module docstring.") { - t.Errorf(`"has-lint.star" is missing a module docstring but the verifyStarlark function linting error did not mention this, instead we got: %v`, verificationErrs[0]) - } - if !strings.Contains(verificationErrs[1].Error(), "has-lint2.star:1: module-docstring: The file has no 
module docstring.") { - t.Fatalf(`"has-lint2.star" is missing a module docstring but the verifyStarlark function linting error did not mention this, instead we got: %v`, verificationErrs[0]) - } - }) - }) -} diff --git a/pkg/build/compilers/install.go b/pkg/build/compilers/install.go deleted file mode 100644 index 9a45ba48c0bcb..0000000000000 --- a/pkg/build/compilers/install.go +++ /dev/null @@ -1,50 +0,0 @@ -package compilers - -import ( - "fmt" - "os" - "os/exec" - "path/filepath" -) - -const ( - ArmV6 = "/opt/rpi-tools/arm-bcm2708/arm-linux-gnueabihf/bin/arm-linux-gnueabihf-gcc" - Armv7 = "arm-linux-gnueabihf-gcc" - Armv7Musl = "/tmp/arm-linux-musleabihf-cross/bin/arm-linux-musleabihf-gcc" - Arm64 = "aarch64-linux-gnu-gcc" - Arm64Musl = "/tmp/aarch64-linux-musl-cross/bin/aarch64-linux-musl-gcc" - Osx64 = "/tmp/osxcross/target/bin/o64-clang" - Win64 = "x86_64-w64-mingw32-gcc" - LinuxX64 = "/tmp/x86_64-centos6-linux-gnu/bin/x86_64-centos6-linux-gnu-gcc" - LinuxX64Musl = "/tmp/x86_64-linux-musl-cross/bin/x86_64-linux-musl-gcc" -) - -func Install() error { - // From the os.TempDir documentation: - // On Unix systems, it returns $TMPDIR if non-empty, - // else /tmp. On Windows, it uses GetTempPath, - // returning the first non-empty value from %TMP%, %TEMP%, %USERPROFILE%, - // or the Windows directory. On Plan 9, it returns /tmp. - tmp := os.TempDir() - - var ( - centosArchive = "x86_64-centos6-linux-gnu.tar.xz" - osxArchive = "osxcross.tar.xz" - ) - - for _, fname := range []string{centosArchive, osxArchive} { - path := filepath.Join(tmp, fname) - if _, err := os.Stat(path); err != nil { - return fmt.Errorf("stat error: %w", err) - } - // Ignore gosec G204 as this function is only used in the build process. 
- //nolint:gosec - cmd := exec.Command("tar", "xfJ", fname) - cmd.Dir = tmp - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to unpack %q: %q, %w", fname, output, err) - } - } - - return nil -} diff --git a/pkg/build/config/revision.go b/pkg/build/config/revision.go deleted file mode 100644 index 70be21cdf18a7..0000000000000 --- a/pkg/build/config/revision.go +++ /dev/null @@ -1,69 +0,0 @@ -package config - -import ( - "context" - "fmt" - "log" - "strconv" - "time" - - "github.com/grafana/grafana/pkg/build/executil" -) - -type Revision struct { - Timestamp int64 - SHA256 string - EnterpriseCommit string - Branch string -} - -func GrafanaTimestamp(ctx context.Context, dir string) (int64, error) { - out, err := executil.OutputAt(ctx, dir, "git", "show", "-s", "--format=%ct") - if err != nil { - return time.Now().Unix(), nil - } - - stamp, err := strconv.ParseInt(out, 10, 64) - if err != nil { - return 0, fmt.Errorf("failed to parse output from git show: %q", out) - } - - return stamp, nil -} - -// GrafanaRevision uses git commands to get information about the checked out Grafana code located at 'grafanaDir'. -// This could maybe be a more generic "Describe" function in the "git" package. 
-func GrafanaRevision(ctx context.Context, grafanaDir string) (Revision, error) { - stamp, err := GrafanaTimestamp(ctx, grafanaDir) - if err != nil { - return Revision{}, err - } - - sha, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--short", "HEAD") - if err != nil { - return Revision{}, err - } - - enterpriseCommit, err := executil.OutputAt(ctx, grafanaDir, "git", "-C", "../grafana-enterprise", "rev-parse", "--short", "HEAD") - if err != nil { - enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "..", "rev-parse", "--short", "HEAD") - if err != nil { - enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "/tmp/grafana-enterprise", "rev-parse", "--short", "HEAD") - if err != nil { - log.Println("Could not get enterprise commit. Error:", err) - } - } - } - - branch, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--abbrev-ref", "HEAD") - if err != nil { - return Revision{}, err - } - - return Revision{ - SHA256: sha, - EnterpriseCommit: enterpriseCommit, - Branch: branch, - Timestamp: stamp, - }, nil -} diff --git a/pkg/build/cryptoutil/md5.go b/pkg/build/cryptoutil/md5.go deleted file mode 100644 index fa4b4fcc5ec23..0000000000000 --- a/pkg/build/cryptoutil/md5.go +++ /dev/null @@ -1,35 +0,0 @@ -package cryptoutil - -import ( - "crypto/md5" - "fmt" - "io" - "log" - "os" -) - -func MD5File(fpath string) error { - // Ignore gosec G304 as this function is only used in the build process. 
- //nolint:gosec - fd, err := os.Open(fpath) - if err != nil { - return err - } - defer func() { - if err := fd.Close(); err != nil { - log.Printf("error closing file at '%s': %s", fpath, err.Error()) - } - }() - - h := md5.New() // nolint:gosec - if _, err = io.Copy(h, fd); err != nil { - return err - } - - // nolint:gosec - if err := os.WriteFile(fpath+".md5", []byte(fmt.Sprintf("%x\n", h.Sum(nil))), 0664); err != nil { - return err - } - - return nil -} diff --git a/pkg/build/docker/build.go b/pkg/build/docker/build.go deleted file mode 100644 index 8731ab7a247ca..0000000000000 --- a/pkg/build/docker/build.go +++ /dev/null @@ -1,182 +0,0 @@ -package docker - -import ( - "crypto/sha256" - "encoding/hex" - "fmt" - "io" - "log" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/grafana/grafana/pkg/build/config" -) - -// verifyArchive verifies the integrity of an archive file. -func verifyArchive(archive string) error { - log.Printf("Verifying checksum of %q", archive) - - //nolint:gosec - shaB, err := os.ReadFile(archive + ".sha256") - if err != nil { - return err - } - - exp := strings.TrimSpace(string(shaB)) - - //nolint:gosec - f, err := os.Open(archive) - if err != nil { - return err - } - - defer func() { - if err := f.Close(); err != nil { - log.Println("error closing file:", err) - } - }() - - h := sha256.New() - _, err = io.Copy(h, f) - if err != nil { - return err - } - - chksum := hex.EncodeToString(h.Sum(nil)) - if chksum != exp { - return fmt.Errorf("archive checksum is different than expected: %q", archive) - } - - log.Printf("Archive %q has expected checksum: %s", archive, exp) - - return nil -} - -// BuildImage builds a Docker image. -// The image tag is returned. 
-func BuildImage(version string, arch config.Architecture, grafanaDir string, useUbuntu, shouldSave bool, edition config.Edition, mode config.VersionMode) ([]string, error) { - var baseArch string - - switch arch { - case "amd64": - case "armv7": - baseArch = "arm32v7/" - case "arm64": - baseArch = "arm64v8/" - default: - return []string{}, fmt.Errorf("unrecognized architecture %q", arch) - } - - libc := "-musl" - baseImage := fmt.Sprintf("%salpine:3.18.5", baseArch) - tagSuffix := "" - if useUbuntu { - libc = "" - baseImage = fmt.Sprintf("%subuntu:22.04", baseArch) - tagSuffix = "-ubuntu" - } - - var editionStr string - var dockerRepo string - var additionalDockerRepo string - var tags []string - var imageFileBase string - var dockerEnterprise2Repo string - if repo, ok := os.LookupEnv("DOCKER_ENTERPRISE2_REPO"); ok { - dockerEnterprise2Repo = repo - } - - switch edition { - case config.EditionOSS: - dockerRepo = "grafana/grafana-image-tags" - additionalDockerRepo = "grafana/grafana-oss-image-tags" - imageFileBase = "grafana-oss" - case config.EditionEnterprise: - dockerRepo = "grafana/grafana-enterprise-image-tags" - imageFileBase = "grafana-enterprise" - editionStr = "-enterprise" - case config.EditionEnterprise2: - dockerRepo = dockerEnterprise2Repo - imageFileBase = "grafana-enterprise2" - editionStr = "-enterprise2" - default: - return []string{}, fmt.Errorf("unrecognized edition %s", edition) - } - - buildDir := filepath.Join(grafanaDir, "packaging/docker") - // For example: grafana-8.5.0-52819pre.linux-amd64-musl.tar.gz - archive := fmt.Sprintf("grafana%s-%s.linux-%s%s.tar.gz", editionStr, version, arch, libc) - if err := verifyArchive(filepath.Join(buildDir, archive)); err != nil { - return []string{}, err - } - - tag := fmt.Sprintf("%s:%s%s-%s", dockerRepo, version, tagSuffix, arch) - tags = append(tags, tag) - - args := []string{ - "build", - "-q", - "--build-arg", fmt.Sprintf("BASE_IMAGE=%s", baseImage), - "--build-arg", fmt.Sprintf("GRAFANA_TGZ=%s", 
archive), - "--build-arg", "GO_SRC=tgz-builder", - "--build-arg", "JS_SRC=tgz-builder", - "--build-arg", "RUN_SH=./run.sh", - "--tag", tag, - "--no-cache", - "--file", "../../Dockerfile", - ".", - "--label", fmt.Sprintf("mode=%s", string(mode)), - } - - //nolint:gosec - cmd := exec.Command("docker", args...) - cmd.Dir = buildDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Env = append(os.Environ(), "DOCKER_CLI_EXPERIMENTAL=enabled", "DOCKER_BUILDKIT=1") - log.Printf("Running Docker: DOCKER_CLI_EXPERIMENTAL=enabled DOCKER_BUILDKIT=1 %s", cmd) - if err := cmd.Run(); err != nil { - return []string{}, fmt.Errorf("building Docker image failed: %w", err) - } - if shouldSave { - imageFile := fmt.Sprintf("%s-%s%s-%s.img", imageFileBase, version, tagSuffix, arch) - //nolint:gosec - cmd = exec.Command("docker", "save", tag, "-o", imageFile) - cmd.Dir = buildDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - log.Printf("Running Docker: %s", cmd) - if err := cmd.Run(); err != nil { - return []string{}, fmt.Errorf("saving Docker image failed: %w", err) - } - gcsURL := fmt.Sprintf("gs://grafana-prerelease/artifacts/docker/%s/%s", version, imageFile) - //nolint:gosec - cmd = exec.Command("gsutil", "-q", "cp", imageFile, gcsURL) - cmd.Dir = buildDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - log.Printf("Running gsutil: %s", cmd) - if err := cmd.Run(); err != nil { - return []string{}, fmt.Errorf("storing Docker image failed: %w", err) - } - log.Printf("Docker image %s stored to grafana-prerelease GCS bucket", imageFile) - } - if additionalDockerRepo != "" { - additionalTag := fmt.Sprintf("%s:%s%s-%s", additionalDockerRepo, version, tagSuffix, arch) - - //nolint:gosec - cmd = exec.Command("docker", "tag", tag, additionalTag) - cmd.Dir = buildDir - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - log.Printf("Running Docker: %s", cmd) - if err := cmd.Run(); err != nil { - return []string{}, fmt.Errorf("tagging Docker image failed: %w", err) - } - tags = 
append(tags, additionalTag) - } - - return tags, nil -} diff --git a/pkg/build/docker/init.go b/pkg/build/docker/init.go deleted file mode 100644 index 94fe236aa52ea..0000000000000 --- a/pkg/build/docker/init.go +++ /dev/null @@ -1,34 +0,0 @@ -package docker - -import ( - "fmt" - "log" - "os" - "os/exec" -) - -// AllArchs is a list of all supported Docker image architectures. -var AllArchs = []string{"amd64", "arm64"} - -// emulatorImage is the docker image used as the cross-platform emulator -var emulatorImage = "tonistiigi/binfmt:qemu-v7.0.0" - -// Init initializes the OS for Docker image building. -func Init() error { - // Necessary for cross-platform builds - if err := os.Setenv("DOCKER_BUILDKIT", "1"); err != nil { - log.Println("error setting DOCKER_BUILDKIT environment variable:", err) - } - - // Enable execution of Docker images for other architectures - //nolint:gosec - cmd := exec.Command("docker", "run", "--privileged", "--rm", - emulatorImage, "--install", "all") - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("failed to enable execution of cross-platform Docker images: %w\n%s", err, output) - } - log.Println("emulators have been installed successfully!") - - return nil -} diff --git a/pkg/build/docker/push.go b/pkg/build/docker/push.go deleted file mode 100644 index da87ef2cb169a..0000000000000 --- a/pkg/build/docker/push.go +++ /dev/null @@ -1,62 +0,0 @@ -package docker - -import ( - "fmt" - "log" - "os" - "os/exec" - "time" -) - -const ( - tries = 3 - sleepTime = 30 -) - -func PushImage(newImage string) error { - var err error - for i := 0; i < tries; i++ { - log.Printf("push attempt #%d...", i+1) - var out []byte - cmd := exec.Command("docker", "push", newImage) - cmd.Dir = "." 
- out, err = cmd.CombinedOutput() - if err != nil { - log.Printf("output: %s", out) - log.Printf("sleep for %d, before retrying...", sleepTime) - time.Sleep(sleepTime * time.Second) - } else { - log.Printf("Successfully pushed %s!", newImage) - break - } - } - if err != nil { - return fmt.Errorf("error pushing images to DockerHub: %q", err) - } - return nil -} - -func PushManifest(manifest string) error { - log.Printf("Pushing Docker manifest %s...", manifest) - - var err error - for i := 0; i < tries; i++ { - log.Printf("push attempt #%d...", i+1) - var out []byte - cmd := exec.Command("docker", "manifest", "push", manifest) - cmd.Env = append(os.Environ(), "DOCKER_CLI_EXPERIMENTAL=enabled") - out, err = cmd.CombinedOutput() - if err != nil { - log.Printf("output: %s", out) - log.Printf("sleep for %d, before retrying...", sleepTime) - time.Sleep(sleepTime * time.Second) - } else { - log.Printf("Successful manifest push! %s", string(out)) - break - } - } - if err != nil { - return fmt.Errorf("failed to push manifest, err: %w", err) - } - return nil -} diff --git a/pkg/build/env/lookup.go b/pkg/build/env/lookup.go deleted file mode 100644 index 993b7259e146b..0000000000000 --- a/pkg/build/env/lookup.go +++ /dev/null @@ -1,18 +0,0 @@ -package env - -import ( - "strings" -) - -// Lookup is the equivalent of os.LookupEnv, only you are able to provide the list of environment variables. 
-// To use this as os.LookupEnv would be used, simply call -// `env.Lookup("ENVIRONMENT_VARIABLE", os.Environ())` -func Lookup(name string, vars []string) (string, bool) { - for _, v := range vars { - if strings.HasPrefix(v, name) { - return strings.TrimPrefix(v, name+"="), true - } - } - - return "", false -} diff --git a/pkg/build/env/lookup_test.go b/pkg/build/env/lookup_test.go deleted file mode 100644 index ccd44b11e8c93..0000000000000 --- a/pkg/build/env/lookup_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package env_test - -import ( - "testing" - - "github.com/stretchr/testify/require" - - "github.com/grafana/grafana/pkg/build/env" -) - -func TestLookup(t *testing.T) { - values := []string{"ENV_1=a", "ENV_2=b", "ENV_3=c", "ENV_4_TEST="} - - { - v, ok := env.Lookup("ENV_1", values) - require.Equal(t, v, "a") - require.True(t, ok) - } - - { - v, ok := env.Lookup("ENV_2", values) - require.Equal(t, v, "b") - require.True(t, ok) - } - - { - v, ok := env.Lookup("ENV_3", values) - require.Equal(t, v, "c") - require.True(t, ok) - } - - { - v, ok := env.Lookup("ENV_4_TEST", values) - require.Equal(t, v, "") - require.True(t, ok) - } - - { - v, ok := env.Lookup("NOT_THERE", values) - require.Equal(t, v, "") - require.False(t, ok) - } -} diff --git a/pkg/build/errutil/group.go b/pkg/build/errutil/group.go deleted file mode 100644 index 0844616e57941..0000000000000 --- a/pkg/build/errutil/group.go +++ /dev/null @@ -1,61 +0,0 @@ -package errutil - -import ( - "context" - "log" - "sync" -) - -type Group struct { - cancel func() - wg sync.WaitGroup - errOnce sync.Once - err error -} - -func GroupWithContext(ctx context.Context) (*Group, context.Context) { - ctx, cancel := context.WithCancel(ctx) - return &Group{cancel: cancel}, ctx -} - -// Wait waits for any wrapped goroutines to finish and returns any error having occurred in one of them. 
-func (g *Group) Wait() error { - log.Println("Waiting on Group") - g.wg.Wait() - if g.cancel != nil { - log.Println("Group canceling its context after waiting") - g.cancel() - } - return g.err -} - -// Cancel cancels the associated context. -func (g *Group) Cancel() { - log.Println("Group's Cancel method being called") - g.cancel() -} - -// Wrap wraps a function to be executed in a goroutine. -func (g *Group) Wrap(f func() error) func() { - g.wg.Add(1) - return func() { - defer g.wg.Done() - - if err := f(); err != nil { - g.errOnce.Do(func() { - log.Printf("An error occurred in Group: %s", err) - g.err = err - if g.cancel != nil { - log.Println("Group canceling its context due to error") - g.cancel() - } - }) - } - } -} - -// Go wraps the provided function and executes it in a goroutine. -func (g *Group) Go(f func() error) { - wrapped := g.Wrap(f) - go wrapped() -} diff --git a/pkg/build/executil/exec.go b/pkg/build/executil/exec.go deleted file mode 100644 index e46f568a9976a..0000000000000 --- a/pkg/build/executil/exec.go +++ /dev/null @@ -1,46 +0,0 @@ -package executil - -import ( - "context" - "fmt" - "os/exec" - "strings" -) - -func RunAt(ctx context.Context, dir, cmd string, args ...string) error { - // Ignore gosec G204 as this function is only used in the build process. - //nolint:gosec - c := exec.CommandContext(ctx, cmd, args...) - c.Dir = dir - - b, err := c.CombinedOutput() - - if err != nil { - return fmt.Errorf("%w. '%s %v': %s", err, cmd, args, string(b)) - } - - return nil -} - -func Run(ctx context.Context, cmd string, args ...string) error { - return RunAt(ctx, ".", cmd, args...) -} - -func OutputAt(ctx context.Context, dir, cmd string, args ...string) (string, error) { - // Ignore gosec G204 as this function is only used in the build process. - //nolint:gosec - c := exec.CommandContext(ctx, cmd, args...) 
- c.Dir = dir - - b, err := c.CombinedOutput() - - if err != nil { - return "", err - } - - return strings.TrimSpace(string(b)), nil -} - -func Output(ctx context.Context, cmd string, args ...string) (string, error) { - return OutputAt(ctx, ".", cmd, args...) -} diff --git a/pkg/build/frontend/build.go b/pkg/build/frontend/build.go deleted file mode 100644 index c4259c25e5794..0000000000000 --- a/pkg/build/frontend/build.go +++ /dev/null @@ -1,56 +0,0 @@ -package frontend - -import ( - "fmt" - "log" - "os" - "os/exec" - "path/filepath" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/lerna" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -func BuildFrontendPackages(version string, edition config.Edition, grafanaDir string, p syncutil.WorkerPool, g *errutil.Group) error { - p.Schedule(g.Wrap(func() error { - if err := lerna.BuildFrontendPackages(version, edition, grafanaDir); err != nil { - return fmt.Errorf("failed to build %s frontend packages: %v", edition, err) - } - - log.Printf("Finished building %s frontend packages", string(edition)) - return nil - })) - - return nil -} - -// Build builds the Grafana front-end -func Build(edition config.Edition, grafanaDir string, p syncutil.WorkerPool, g *errutil.Group) error { - log.Printf("Building %s frontend in %q", edition, grafanaDir) - grafanaDir, err := filepath.Abs(grafanaDir) - if err != nil { - return err - } - - for _, dpath := range []string{"tmp", "public_gen", "public/build"} { - dpath = filepath.Join(grafanaDir, dpath) - if err := os.RemoveAll(dpath); err != nil { - return fmt.Errorf("failed to remove %q: %w", dpath, err) - } - } - - p.Schedule(g.Wrap(func() error { - cmd := exec.Command("yarn", "run", "build") - cmd.Dir = grafanaDir - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to build %s frontend with webpack: %s", edition, output) - } - - log.Printf("Finished building %s 
frontend", edition) - return nil - })) - - return nil -} diff --git a/pkg/build/frontend/config.go b/pkg/build/frontend/config.go deleted file mode 100644 index 318168de1d489..0000000000000 --- a/pkg/build/frontend/config.go +++ /dev/null @@ -1,42 +0,0 @@ -package frontend - -import ( - "fmt" - - "github.com/blang/semver/v4" - "github.com/grafana/grafana/pkg/build/config" - "github.com/urfave/cli/v2" -) - -const GrafanaDir = "." - -func GetConfig(c *cli.Context, metadata config.Metadata) (config.Config, config.Edition, error) { - cfg := config.Config{ - NumWorkers: c.Int("jobs"), - GitHubToken: c.String("github-token"), - } - - mode := config.Edition(c.String("edition")) - - if metadata.ReleaseMode.Mode == config.TagMode && !metadata.ReleaseMode.IsTest { - packageJSONVersion, err := config.GetPackageJSONVersion(GrafanaDir) - if err != nil { - return config.Config{}, "", err - } - semverGrafanaVersion, err := semver.Parse(metadata.GrafanaVersion) - if err != nil { - return config.Config{}, "", err - } - semverPackageJSONVersion, err := semver.Parse(packageJSONVersion) - if err != nil { - return config.Config{}, "", err - } - // Check if the semver digits of the tag are not equal - if semverGrafanaVersion.FinalizeVersion() != semverPackageJSONVersion.FinalizeVersion() { - return config.Config{}, "", cli.Exit(fmt.Errorf("package.json version and input tag version differ %s != %s.\nPlease update package.json", packageJSONVersion, metadata.GrafanaVersion), 1) - } - } - - cfg.PackageVersion = metadata.GrafanaVersion - return cfg, mode, nil -} diff --git a/pkg/build/frontend/config_test.go b/pkg/build/frontend/config_test.go deleted file mode 100644 index 770df3be7a6b7..0000000000000 --- a/pkg/build/frontend/config_test.go +++ /dev/null @@ -1,118 +0,0 @@ -package frontend - -import ( - "encoding/json" - "flag" - "os" - "testing" - - "github.com/stretchr/testify/require" - "github.com/urfave/cli/v2" - - "github.com/grafana/grafana/pkg/build/config" -) - -const ( - jobs = 
"jobs" - githubToken = "github-token" - buildID = "build-id" -) - -type packageJson struct { - Version string `json:"version"` -} - -type flagObj struct { - name string - value string -} - -var app = cli.NewApp() - -func TestGetConfig(t *testing.T) { - tests := []struct { - ctx *cli.Context - name string - packageJsonVersion string - metadata config.Metadata - wantErr bool - }{ - { - ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil), - name: "package.json matches tag", - packageJsonVersion: "10.0.0", - metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}}, - wantErr: false, - }, - { - ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil), - name: "custom tag, package.json doesn't match", - packageJsonVersion: "10.0.0", - metadata: config.Metadata{GrafanaVersion: "10.0.0-abcd123pre", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}}, - wantErr: false, - }, - { - ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil), - name: "package.json doesn't match tag", - packageJsonVersion: "10.1.0", - metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}}, - wantErr: true, - }, - { - ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil), - name: "test tag event, check should be skipped", - packageJsonVersion: "10.1.0", - metadata: config.Metadata{GrafanaVersion: "10.1.0-test", ReleaseMode: config.ReleaseMode{Mode: config.TagMode, IsTest: true}}, - wantErr: false, - }, - { - ctx: cli.NewContext(app, setFlags(t, 
flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}, flagObj{name: buildID, value: "12345"}), nil), - name: "non-tag event", - packageJsonVersion: "10.1.0-pre", - metadata: config.Metadata{GrafanaVersion: "10.1.0-12345pre", ReleaseMode: config.ReleaseMode{Mode: config.PullRequestMode}}, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := createTempPackageJson(t, tt.packageJsonVersion) - require.NoError(t, err) - - got, _, err := GetConfig(tt.ctx, tt.metadata) - if !tt.wantErr { - require.Equal(t, got.PackageVersion, tt.metadata.GrafanaVersion) - } - - if tt.wantErr { - require.Equal(t, got.PackageVersion, "") - require.Error(t, err) - } - }) - } -} - -func setFlags(t *testing.T, flagSet *flag.FlagSet, flags ...flagObj) *flag.FlagSet { - t.Helper() - for _, f := range flags { - if f.name != "" { - flagSet.StringVar(&f.name, f.name, f.value, "") - } - } - return flagSet -} - -func createTempPackageJson(t *testing.T, version string) error { - t.Helper() - - data := packageJson{Version: version} - file, _ := json.MarshalIndent(data, "", " ") - - err := os.WriteFile("package.json", file, 0644) - require.NoError(t, err) - - t.Cleanup(func() { - err := os.RemoveAll("package.json") - require.NoError(t, err) - }) - return nil -} diff --git a/pkg/build/fsutil/copy_recursive.go b/pkg/build/fsutil/copy_recursive.go deleted file mode 100644 index daf60a5f5e978..0000000000000 --- a/pkg/build/fsutil/copy_recursive.go +++ /dev/null @@ -1,65 +0,0 @@ -package fsutil - -import ( - "fmt" - "os" - "path/filepath" -) - -// CopyRecursive copies files and directories recursively. 
-func CopyRecursive(src, dst string) error { - sfi, err := os.Stat(src) - if err != nil { - return err - } - if !sfi.IsDir() { - return CopyFile(src, dst) - } - - if _, err := os.Stat(dst); os.IsNotExist(err) { - if err := os.MkdirAll(dst, sfi.Mode()); err != nil { - return fmt.Errorf("failed to create directory %q: %s", dst, err) - } - } - - entries, err := os.ReadDir(src) - if err != nil { - return err - } - for _, entry := range entries { - srcPath := filepath.Join(src, entry.Name()) - dstPath := filepath.Join(dst, entry.Name()) - - srcFi, err := os.Stat(srcPath) - if err != nil { - return err - } - - switch srcFi.Mode() & os.ModeType { - case os.ModeDir: - if err := CopyRecursive(srcPath, dstPath); err != nil { - return err - } - case os.ModeSymlink: - link, err := os.Readlink(srcPath) - if err != nil { - return err - } - if err := os.Symlink(link, dstPath); err != nil { - return err - } - default: - if err := CopyFile(srcPath, dstPath); err != nil { - return err - } - } - - if srcFi.Mode()&os.ModeSymlink != 0 { - if err := os.Chmod(dstPath, srcFi.Mode()); err != nil { - return err - } - } - } - - return nil -} diff --git a/pkg/build/fsutil/createtemp.go b/pkg/build/fsutil/createtemp.go deleted file mode 100644 index 21720a9a3d80a..0000000000000 --- a/pkg/build/fsutil/createtemp.go +++ /dev/null @@ -1,43 +0,0 @@ -package fsutil - -import ( - "fmt" - "os" -) - -// CreateTempFile generates a temp filepath, based on the provided suffix. -// A typical generated path looks like /var/folders/abcd/abcdefg/A/1137975807. -func CreateTempFile(sfx string) (string, error) { - var suffix string - if sfx != "" { - suffix = fmt.Sprintf("*-%s", sfx) - } else { - suffix = sfx - } - f, err := os.CreateTemp("", suffix) - if err != nil { - return "", err - } - if err := f.Close(); err != nil { - return "", err - } - - return f.Name(), nil -} - -// CreateTempDir generates a temp directory, based on the provided suffix. 
-// A typical generated path looks like /var/folders/abcd/abcdefg/A/1137975807/. -func CreateTempDir(sfx string) (string, error) { - var suffix string - if sfx != "" { - suffix = fmt.Sprintf("*-%s", sfx) - } else { - suffix = sfx - } - dir, err := os.MkdirTemp("", suffix) - if err != nil { - return "", err - } - - return dir, nil -} diff --git a/pkg/build/fsutil/createtemp_test.go b/pkg/build/fsutil/createtemp_test.go deleted file mode 100644 index 640585f4ea5a7..0000000000000 --- a/pkg/build/fsutil/createtemp_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package fsutil - -import ( - "strings" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestCreateTempFile(t *testing.T) { - t.Run("empty suffix, expects pattern like: /var/folders/abcd/abcdefg/A/1137975807", func(t *testing.T) { - filePath, err := CreateTempFile("") - require.NoError(t, err) - - pathParts := strings.Split(filePath, "/") - require.Greater(t, len(pathParts), 1) - require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 1) - }) - - t.Run("non-empty suffix, expects /var/folders/abcd/abcdefg/A/1137975807-foobar", func(t *testing.T) { - filePath, err := CreateTempFile("foobar") - require.NoError(t, err) - - pathParts := strings.Split(filePath, "/") - require.Greater(t, len(pathParts), 1) - require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 2) - }) -} - -func TestCreateTempDir(t *testing.T) { - t.Run("empty suffix, expects pattern like: /var/folders/abcd/abcdefg/A/1137975807/", func(t *testing.T) { - filePath, err := CreateTempFile("") - require.NoError(t, err) - - pathParts := strings.Split(filePath, "/") - require.Greater(t, len(pathParts), 1) - require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 1) - }) - - t.Run("non-empty suffix, expects /var/folders/abcd/abcdefg/A/1137975807-foobar/", func(t *testing.T) { - filePath, err := CreateTempFile("foobar") - require.NoError(t, err) - - pathParts := strings.Split(filePath, "/") - require.Greater(t, len(pathParts), 
1) - require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 2) - }) -} diff --git a/pkg/build/gcom/url.go b/pkg/build/gcom/url.go new file mode 100644 index 0000000000000..e85c103979605 --- /dev/null +++ b/pkg/build/gcom/url.go @@ -0,0 +1,72 @@ +package gcom + +import ( + "fmt" + "net/url" + "path" + "strings" + + "github.com/grafana/grafana/pkg/build/versions" +) + +func PackageName(grafana, distro, arch, version, ext string, musl bool, raspberryPi bool) string { + v := versions.ParseSemver(version) + + if raspberryPi { + grafana += "-rpi" + } + + versionString := strings.Join([]string{v.Major, v.Minor, v.Patch}, ".") + fmt.Println("Version string:", versionString) + if distro == "deb" { + if v.BuildMetadata != "" { + versionString += "+" + strings.ReplaceAll(v.BuildMetadata, "-", "~") + } + + if v.Prerelease != "" { + versionString += "~" + v.Prerelease + } + + return strings.Join([]string{grafana, versionString, arch}, "_") + "." + ext + } + + if distro == "rhel" { + if v.BuildMetadata != "" { + versionString += "+" + strings.ReplaceAll(v.BuildMetadata, "-", "~") + } + + if v.Prerelease != "" { + versionString += "~" + v.Prerelease + } + + versionString += "-1" + + // Notable difference between our deb naming and our RPM naming: the file ends with `.arch.ext`, not + // `_arch.ext`. + return strings.Join([]string{grafana, versionString}, "-") + "." + arch + "." 
+ ext + } + + if v.Prerelease != "" { + versionString += "-" + v.Prerelease + } + + if v.BuildMetadata != "" { + versionString += "+" + v.BuildMetadata + } + + if musl { + arch += "-musl" + } + + // grafana-enterprise-1.2.3+example-01.linux-amd64.tar.gz + return fmt.Sprintf("%s-%s.%s-%s.%s", grafana, versionString, distro, arch, ext) +} + +func GetURL(baseURL *url.URL, version, grafana, distro, arch, ext string, musl, raspberryPi bool) *url.URL { + packageName := PackageName(grafana, distro, arch, version, ext, musl, raspberryPi) + return &url.URL{ + Host: baseURL.Host, + Scheme: baseURL.Scheme, + Path: path.Join(baseURL.Path, packageName), + } +} diff --git a/pkg/build/gcom/url_test.go b/pkg/build/gcom/url_test.go new file mode 100644 index 0000000000000..680a4326fd1b9 --- /dev/null +++ b/pkg/build/gcom/url_test.go @@ -0,0 +1,367 @@ +package gcom_test + +import ( + "fmt" + "testing" + + "github.com/grafana/grafana/pkg/build/gcom" + "github.com/stretchr/testify/require" +) + +func TestPackageName(t *testing.T) { + type args struct { + Distro string + Arch string + Version string + Ext string + Musl bool + RaspberryPi bool + + Expect string + } + + cases := []args{ + { + RaspberryPi: true, + Distro: "deb", + Arch: "armhf", + Version: "1.2.3", + Ext: "deb", + Expect: "grafana-rpi_1.2.3_armhf.deb", + }, + { + Distro: "deb", + Arch: "arm64", + Version: "1.2.3", + Ext: "deb", + Expect: "grafana_1.2.3_arm64.deb", + }, + { + Distro: "rhel", + Arch: "aarch64", + Version: "1.2.3", + Ext: "rpm", + Expect: "grafana-1.2.3-1.aarch64.rpm", + }, + { + Distro: "rhel", + Arch: "aarch64", + Ext: "rpm.sha256", + Version: "1.2.3", + Expect: "grafana-1.2.3-1.aarch64.rpm.sha256", + }, + { + Distro: "rhel", + Ext: "rpm", + Version: "1.2.3", + Arch: "x86_64", + Expect: "grafana-1.2.3-1.x86_64.rpm", + }, + { + Distro: "rhel", + Ext: "rpm.sha256", + Version: "1.2.3", + Arch: "x86_64", + Expect: "grafana-1.2.3-1.x86_64.rpm.sha256", + }, + { + Distro: "darwin", + Ext: "tar.gz", + Version: 
"1.2.3", + Arch: "amd64", + Expect: "grafana-1.2.3.darwin-amd64.tar.gz", + }, + { + Distro: "darwin", + Ext: "tar.gz.sha256", + Version: "1.2.3", + Arch: "amd64", + Expect: "grafana-1.2.3.darwin-amd64.tar.gz.sha256", + }, + { + Distro: "darwin", + Ext: "tar.gz", + Version: "1.2.3", + Arch: "arm64", + Expect: "grafana-1.2.3.darwin-arm64-musl.tar.gz", + Musl: true, + }, + { + Distro: "darwin", + Ext: "tar.gz.sha256", + Version: "1.2.3", + Arch: "arm64", + Expect: "grafana-1.2.3.darwin-arm64-musl.tar.gz.sha256", + Musl: true, + }, + { + Distro: "darwin", + Ext: "tar.gz", + Version: "1.2.3", + Arch: "arm64", + Expect: "grafana-1.2.3.darwin-arm64.tar.gz", + }, + { + Distro: "darwin", + Ext: "tar.gz.sha256", + Version: "1.2.3", + Arch: "arm64", + Expect: "grafana-1.2.3.darwin-arm64.tar.gz.sha256", + }, + { + Distro: "linux", + Ext: "tar.gz", + Version: "1.2.3", + Arch: "amd64", + Expect: "grafana-1.2.3.linux-amd64-musl.tar.gz", + Musl: true, + }, + { + Distro: "linux", + Ext: "tar.gz.sha256", + Version: "1.2.3", + Arch: "amd64", + Expect: "grafana-1.2.3.linux-amd64-musl.tar.gz.sha256", + Musl: true, + }, + { + Distro: "linux", + Ext: "tar.gz", + Version: "1.2.3", + Arch: "amd64", + Expect: "grafana-1.2.3.linux-amd64.tar.gz", + }, + { + Distro: "linux", + Ext: "tar.gz.sha256", + Version: "1.2.3", + Arch: "amd64", + Expect: "grafana-1.2.3.linux-amd64.tar.gz.sha256", + }, + { + Distro: "linux", + Ext: "tar.gz", + Version: "1.2.3", + Arch: "arm64", + Expect: "grafana-1.2.3.linux-arm64-musl.tar.gz", + Musl: true, + }, + { + Distro: "linux", + Ext: "tar.gz.sha256", + Version: "1.2.3", + Arch: "arm64", + Expect: "grafana-1.2.3.linux-arm64-musl.tar.gz.sha256", + Musl: true, + }, + { + Distro: "linux", + Ext: "tar.gz", + Version: "1.2.3", + Arch: "arm64", + Expect: "grafana-1.2.3.linux-arm64.tar.gz", + }, + { + Ext: "tar.gz.sha256", + Version: "1.2.3", + Distro: "linux", + Arch: "arm64", + Expect: "grafana-1.2.3.linux-arm64.tar.gz.sha256", + }, + { + Ext: "tar.gz", + Version: 
"1.2.3", + Distro: "linux", + Arch: "armv6", + Expect: "grafana-1.2.3.linux-armv6.tar.gz", + }, + { + Ext: "tar.gz.sha256", + Version: "1.2.3", + Distro: "linux", + Arch: "armv6", + Expect: "grafana-1.2.3.linux-armv6.tar.gz.sha256", + }, + { + Ext: "tar.gz", + Version: "1.2.3", + Distro: "linux", + Arch: "armv7", + Expect: "grafana-1.2.3.linux-armv7-musl.tar.gz", + Musl: true, + }, + { + Ext: "tar.gz.sha256", + Version: "1.2.3", + Distro: "linux", + Arch: "armv7", + Expect: "grafana-1.2.3.linux-armv7-musl.tar.gz.sha256", + Musl: true, + }, + { + Ext: "tar.gz", + Version: "1.2.3", + Distro: "linux", + Arch: "armv7", + Expect: "grafana-1.2.3.linux-armv7.tar.gz", + }, + { + Ext: "tar.gz.sha256", + Version: "1.2.3", + Distro: "linux", + Arch: "armv7", + Expect: "grafana-1.2.3.linux-armv7.tar.gz.sha256", + }, + { + Version: "1.2.3", + Arch: "amd64", + Ext: "exe", + Distro: "windows", + Expect: "grafana-1.2.3.windows-amd64.exe", + }, + { + Version: "1.2.3", + Arch: "amd64", + Distro: "windows", + Ext: "exe.sha256", + Expect: "grafana-1.2.3.windows-amd64.exe.sha256", + }, + { + Version: "1.2.3", + Arch: "amd64", + Distro: "windows", + Ext: "msi", + Expect: "grafana-1.2.3.windows-amd64.msi", + }, + { + Version: "1.2.3", + Arch: "amd64", + Distro: "windows", + Ext: "msi.sha256", + Expect: "grafana-1.2.3.windows-amd64.msi.sha256", + }, + { + Ext: "tar.gz", + Version: "1.2.3", + Distro: "windows", + Expect: "grafana-1.2.3.windows-amd64.tar.gz", + Arch: "amd64", + }, + { + Version: "1.2.3", + Distro: "windows", + Arch: "amd64", + Ext: "tar.gz.sha256", + Expect: "grafana-1.2.3.windows-amd64.tar.gz.sha256", + }, + { + Version: "1.2.3", + Distro: "windows", + Expect: "grafana-1.2.3.windows-amd64.zip", + Ext: "zip", + Arch: "amd64", + }, + { + Version: "1.2.3", + Distro: "windows", + Expect: "grafana-1.2.3.windows-amd64.zip.sha256", + Ext: "zip.sha256", + Arch: "amd64", + }, + { + Ext: "tar.gz", + Version: "1.2.3", + Distro: "windows", + Arch: "arm64", + Expect: 
"grafana-1.2.3.windows-arm64-musl.tar.gz", + Musl: true, + }, + { + Version: "1.2.3", + Ext: "tar.gz.sha256", + Distro: "windows", + Arch: "arm64", + Expect: "grafana-1.2.3.windows-arm64-musl.tar.gz.sha256", + Musl: true, + }, + { + Ext: "tar.gz", + Version: "1.2.3", + Distro: "windows", + Arch: "arm64", + Expect: "grafana-1.2.3.windows-arm64.tar.gz", + }, + { + Version: "1.2.3", + Ext: "tar.gz.sha256", + Distro: "windows", + Arch: "arm64", + Expect: "grafana-1.2.3.windows-arm64.tar.gz.sha256", + }, + { + RaspberryPi: true, + Version: "1.2.3", + Ext: "deb", + Arch: "armhf", + Distro: "deb", + Expect: "grafana-rpi_1.2.3_armhf.deb", + }, + { + RaspberryPi: true, + Version: "1.2.3", + Ext: "deb.sha256", + Distro: "deb", + Arch: "armhf", + Expect: "grafana-rpi_1.2.3_armhf.deb.sha256", + }, + { + Version: "1.2.3", + Ext: "deb", + Distro: "deb", + Expect: "grafana_1.2.3_amd64.deb", + Arch: "amd64", + }, + { + Version: "1.2.3", + Ext: "deb.sha256", + Distro: "deb", + Expect: "grafana_1.2.3_amd64.deb.sha256", + Arch: "amd64", + }, + { + Version: "1.2.3", + Ext: "deb", + Arch: "arm64", + Distro: "deb", + Expect: "grafana_1.2.3_arm64.deb", + }, + { + Version: "1.2.3", + Ext: "deb.sha256", + Arch: "arm64", + Distro: "deb", + Expect: "grafana_1.2.3_arm64.deb.sha256", + }, + { + Version: "1.2.3", + Ext: "deb", + Distro: "deb", + Arch: "armhf", + Expect: "grafana_1.2.3_armhf.deb", + }, + { + Version: "1.2.3", + Ext: "deb.sha256", + Arch: "armhf", + Distro: "deb", + Expect: "grafana_1.2.3_armhf.deb.sha256", + }, + } + + for i, v := range cases { + t.Run(fmt.Sprintf("[%d / %d] %s", i+1, len(cases), v.Expect), func(t *testing.T) { + n := gcom.PackageName("grafana", v.Distro, v.Arch, v.Version, v.Ext, v.Musl, v.RaspberryPi) + require.Equal(t, v.Expect, n) + }) + } +} diff --git a/pkg/build/git/git.go b/pkg/build/git/git.go index 9fbcc346e9272..2977cf137758f 100644 --- a/pkg/build/git/git.go +++ b/pkg/build/git/git.go @@ -4,13 +4,9 @@ import ( "context" "errors" "fmt" - "net/http" 
"regexp" "github.com/google/go-github/v45/github" - "golang.org/x/oauth2" - - "github.com/grafana/grafana/pkg/build/stringutil" ) const ( @@ -47,19 +43,6 @@ type StatusesService interface { CreateStatus(ctx context.Context, owner, repo, ref string, status *github.RepoStatus) (*github.RepoStatus, *github.Response, error) } -// NewGitHubClient creates a new Client using the provided GitHub token if not empty. -func NewGitHubClient(ctx context.Context, token string) *github.Client { - var tc *http.Client - if token != "" { - ts := oauth2.StaticTokenSource(&oauth2.Token{ - AccessToken: token, - }) - tc = oauth2.NewClient(ctx, ts) - } - - return github.NewClient(tc) -} - func PRCheckRegexp() *regexp.Regexp { reBranch, err := regexp.Compile(`^prc-([0-9]+)-([A-Za-z0-9]+)\/(.+)$`) if err != nil { @@ -68,76 +51,3 @@ func PRCheckRegexp() *regexp.Regexp { return reBranch } - -func AddLabelToPR(ctx context.Context, client LabelsService, prID int, newLabel string) error { - // Check existing labels - labels, _, err := client.ListLabelsByIssue(ctx, RepoOwner, OSSRepo, prID, nil) - if err != nil { - return err - } - - duplicate := false - for _, label := range labels { - if *label.Name == newLabel { - duplicate = true - continue - } - - // Delete existing "enterprise-xx" labels - if stringutil.Contains(EnterpriseCheckLabels, *label.Name) { - _, err := client.RemoveLabelForIssue(ctx, RepoOwner, OSSRepo, prID, *label.Name) - if err != nil { - return err - } - } - } - - if duplicate { - return nil - } - - _, _, err = client.AddLabelsToIssue(ctx, RepoOwner, OSSRepo, prID, []string{newLabel}) - if err != nil { - return err - } - - return nil -} - -func DeleteEnterpriseBranch(ctx context.Context, client GitService, branchName string) error { - ref := "heads/" + branchName - if _, err := client.DeleteRef(ctx, RepoOwner, EnterpriseRepo, ref); err != nil { - return err - } - - return nil -} - -// CreateEnterpriseStatus sets the status on a commit for the enterprise build check. 
-func CreateEnterpriseStatus(ctx context.Context, client StatusesService, sha, link, status string) (*github.RepoStatus, error) { - check, _, err := client.CreateStatus(ctx, RepoOwner, OSSRepo, sha, &github.RepoStatus{ - Context: github.String(EnterpriseCheckName), - Description: github.String(EnterpriseCheckDescription), - TargetURL: github.String(link), - State: github.String(status), - }) - - if err != nil { - return nil, err - } - - return check, nil -} - -func CreateEnterpriseBuildFailedComment(ctx context.Context, client CommentService, link string, prID int) error { - body := fmt.Sprintf("Drone build failed: %s", link) - - _, _, err := client.CreateComment(ctx, RepoOwner, OSSRepo, prID, &github.IssueComment{ - Body: &body, - }) - if err != nil { - return err - } - - return nil -} diff --git a/pkg/build/git/git_checks_test.go b/pkg/build/git/git_checks_test.go deleted file mode 100644 index c9bf0c98d0b03..0000000000000 --- a/pkg/build/git/git_checks_test.go +++ /dev/null @@ -1,56 +0,0 @@ -package git_test - -import ( - "context" - "errors" - "testing" - - "github.com/google/go-github/v45/github" - "github.com/stretchr/testify/require" - - "github.com/grafana/grafana/pkg/build/git" -) - -type TestChecksService struct { - CreateCheckRunError error -} - -func (s *TestChecksService) CreateStatus(ctx context.Context, owner, repo, ref string, status *github.RepoStatus) (*github.RepoStatus, *github.Response, error) { - if s.CreateCheckRunError != nil { - return nil, nil, s.CreateCheckRunError - } - - return &github.RepoStatus{ - ID: github.Int64(1), - URL: status.URL, - }, nil, nil -} - -func TestCreateEnterpriseRepoStatus(t *testing.T) { - t.Run("It should create a repo status", func(t *testing.T) { - var ( - ctx = context.Background() - client = &TestChecksService{} - link = "http://example.com" - sha = "1234" - ) - - _, err := git.CreateEnterpriseStatus(ctx, client, link, sha, "success") - - require.NoError(t, err) - }) - t.Run("It should return an error if 
GitHub fails to create the status", func(t *testing.T) { - var ( - ctx = context.Background() - createCheckError = errors.New("create check run error") - client = &TestChecksService{ - CreateCheckRunError: createCheckError, - } - link = "http://example.com" - sha = "1234" - ) - - _, err := git.CreateEnterpriseStatus(ctx, client, link, sha, "success") - require.ErrorIs(t, err, createCheckError) - }) -} diff --git a/pkg/build/git/git_issues_test.go b/pkg/build/git/git_issues_test.go deleted file mode 100644 index dab9fddde7731..0000000000000 --- a/pkg/build/git/git_issues_test.go +++ /dev/null @@ -1,135 +0,0 @@ -package git_test - -import ( - "context" - "errors" - "testing" - - "github.com/google/go-github/v45/github" - "github.com/stretchr/testify/require" - - "github.com/grafana/grafana/pkg/build/git" -) - -type TestLabelsService struct { - Labels []*github.Label - ListLabelsError error - RemoveLabelError error - AddLabelsError error -} - -func (s *TestLabelsService) ListLabelsByIssue(ctx context.Context, owner string, repo string, number int, opts *github.ListOptions) ([]*github.Label, *github.Response, error) { - if s.ListLabelsError != nil { - return nil, nil, s.ListLabelsError - } - - labels := s.Labels - if labels == nil { - labels = []*github.Label{} - } - - return labels, nil, nil -} - -func (s *TestLabelsService) RemoveLabelForIssue(ctx context.Context, owner string, repo string, number int, label string) (*github.Response, error) { - if s.RemoveLabelError != nil { - return nil, s.RemoveLabelError - } - - return &github.Response{}, nil -} - -func (s *TestLabelsService) AddLabelsToIssue(ctx context.Context, owner string, repo string, number int, labels []string) ([]*github.Label, *github.Response, error) { - if s.AddLabelsError != nil { - return nil, nil, s.AddLabelsError - } - - l := make([]*github.Label, len(labels)) - for i, v := range labels { - l[i] = &github.Label{ - Name: github.String(v), - } - } - - return l, nil, nil -} - -func TestAddLabelToPR(t 
*testing.T) { - t.Run("It should add a label to a pull request", func(t *testing.T) { - var ( - ctx = context.Background() - client = &TestLabelsService{} - pr = 20 - label = "test-label" - ) - - require.NoError(t, git.AddLabelToPR(ctx, client, pr, label)) - }) - t.Run("It should not return an error if the label already exists", func(t *testing.T) { - var ( - ctx = context.Background() - client = &TestLabelsService{ - Labels: []*github.Label{ - { - Name: github.String("test-label"), - }, - }, - } - pr = 20 - label = "test-label" - ) - - require.NoError(t, git.AddLabelToPR(ctx, client, pr, label)) - }) - - t.Run("It should return an error if GitHub returns an error when listing labels", func(t *testing.T) { - var ( - ctx = context.Background() - listLabelsError = errors.New("list labels error") - client = &TestLabelsService{ - ListLabelsError: listLabelsError, - Labels: []*github.Label{}, - } - pr = 20 - label = "test-label" - ) - - require.ErrorIs(t, git.AddLabelToPR(ctx, client, pr, label), listLabelsError) - }) - - t.Run("It should not return an error if there are existing enterprise-check labels.", func(t *testing.T) { - var ( - ctx = context.Background() - client = &TestLabelsService{ - Labels: []*github.Label{ - { - Name: github.String("enterprise-failed"), - }, - }, - } - pr = 20 - label = "test-label" - ) - - require.NoError(t, git.AddLabelToPR(ctx, client, pr, label)) - }) - - t.Run("It should return an error if GitHub returns an error when removing existing enterprise-check labels", func(t *testing.T) { - var ( - ctx = context.Background() - removeLabelError = errors.New("remove label error") - client = &TestLabelsService{ - RemoveLabelError: removeLabelError, - Labels: []*github.Label{ - { - Name: github.String("enterprise-failed"), - }, - }, - } - pr = 20 - label = "test-label" - ) - - require.ErrorIs(t, git.AddLabelToPR(ctx, client, pr, label), removeLabelError) - }) -} diff --git a/pkg/build/git/git_test.go b/pkg/build/git/git_test.go deleted file mode 
100644 index 3b47ce08dd07e..0000000000000 --- a/pkg/build/git/git_test.go +++ /dev/null @@ -1,57 +0,0 @@ -package git_test - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/grafana/grafana/pkg/build/git" -) - -func TestPRCheckRegexp(t *testing.T) { - type match struct { - String string - Commit string - Branch string - PR string - } - - var ( - shouldMatch = []match{ - { - String: "prc-1-a1b2c3d4/branch-name", - Branch: "branch-name", - Commit: "a1b2c3d4", - PR: "1", - }, - { - String: "prc-111-a1b2c3d4/branch/name", - Branch: "branch/name", - Commit: "a1b2c3d4", - PR: "111", - }, - { - String: "prc-102930122-a1b2c3d4/branch-name", - Branch: "branch-name", - Commit: "a1b2c3d4", - PR: "102930122", - }, - } - - shouldNotMatch = []string{"prc-a/branch", "km/test", "test", "prc", "prc/test", "price"} - ) - - regex := git.PRCheckRegexp() - - for _, v := range shouldMatch { - assert.Truef(t, regex.MatchString(v.String), "regex '%s' should match %s", regex.String(), v) - m := regex.FindStringSubmatch(v.String) - assert.Equal(t, m[1], v.PR) - assert.Equal(t, m[2], v.Commit) - assert.Equal(t, m[3], v.Branch) - } - - for _, v := range shouldNotMatch { - assert.False(t, regex.MatchString(v), "regex '%s' should not match %s", regex.String(), v) - } -} diff --git a/pkg/build/go.mod b/pkg/build/go.mod index b48156e9035b4..f0d0a5899bffb 100644 --- a/pkg/build/go.mod +++ b/pkg/build/go.mod @@ -1,6 +1,6 @@ module github.com/grafana/grafana/pkg/build -go 1.22.4 +go 1.22.7 // Override docker/docker to avoid: // go: github.com/drone-runners/drone-runner-docker@v1.8.2 requires diff --git a/pkg/build/golangutils/build.go b/pkg/build/golangutils/build.go deleted file mode 100644 index 14da56a776515..0000000000000 --- a/pkg/build/golangutils/build.go +++ /dev/null @@ -1,124 +0,0 @@ -package golangutils - -import ( - "context" - "fmt" - "io" - "os/exec" - "strings" - - "github.com/grafana/grafana/pkg/build/config" -) - -type BuildOpts struct { - // Package 
refers to the path to the `main` package containing `func main` - Package string - - // Output is used as the -o argument in the go build command - Output string - - // Workdir should define some place in the module where the package path resolves. - // Go commands need to be ran inside a the Go module directory. - Workdir string - - GoOS config.OS - GoArch config.Architecture - GoArm string - Go386 string - CC string - LibC string - - CGoEnabled bool - CGoCFlags string - - // LdFlags are joined by a space character and provided to the -ldflags argument. - // A valid element here would be `-X 'main.version=1.0.0'`. - LdFlags []string - - Stdout io.ReadWriter - Stderr io.ReadWriter - Stdin io.ReadWriter - - // ExtraEnv allows consumers to provide extra env args that are not defined above. - // A single element should be formatted using like so: {NAME}={VALUE}. Example: GOOS=linux. - ExtraEnv []string - - // ExtraArgs allows consumers to provide extra arguments that are not defined above. - // Flag names and values should be two separate elements. - // These flags will be appended to the command arguments before the package path in "go build". - ExtraArgs []string -} - -// Env constructs a list of key/value pairs for setting a build command's environment. -// Should we consider using something to unmarshal the struct to env? -func (opts BuildOpts) Env() []string { - env := []string{} - if opts.CGoEnabled { - env = append(env, "CGO_ENABLED=1") - } - - if opts.GoOS != "" { - env = append(env, fmt.Sprintf("GOOS=%s", opts.GoOS)) - } - - if opts.GoArch != "" { - env = append(env, fmt.Sprintf("GOARCH=%s", opts.GoArch)) - } - - if opts.CC != "" { - env = append(env, fmt.Sprintf("CC=%s", opts.CC)) - } - - if opts.CGoCFlags != "" { - env = append(env, fmt.Sprintf("CGO_CFLAGS=%s", opts.CGoCFlags)) - } - - if opts.GoArm != "" { - env = append(env, fmt.Sprintf("GOARM=%s", opts.GoArm)) - } - - if opts.ExtraEnv != nil { - return append(opts.ExtraEnv, env...) 
- } - - return env -} - -// Args constructs a list of flags and values for use with the exec.Command type when running "go build". -func (opts BuildOpts) Args() []string { - args := []string{} - - if opts.LdFlags != nil { - args = append(args, "-ldflags", strings.Join(opts.LdFlags, " ")) - } - - if opts.Output != "" { - args = append(args, "-o", opts.Output) - } - - if opts.ExtraArgs != nil { - args = append(args, opts.ExtraArgs...) - } - - args = append(args, opts.Package) - - return args -} - -// Build runs the go build process in the current shell given the opts. -// This function will panic if no Stdout/Stderr/Stdin is provided in the opts. -func RunBuild(ctx context.Context, opts BuildOpts) error { - env := opts.Env() - args := append([]string{"build"}, opts.Args()...) - // Ignore gosec G304 as this function is only used in the build process. - //nolint:gosec - cmd := exec.CommandContext(ctx, "go", args...) - cmd.Env = env - - cmd.Stdout = opts.Stdout - cmd.Stderr = opts.Stderr - cmd.Stdin = opts.Stdin - cmd.Dir = opts.Workdir - - return cmd.Run() -} diff --git a/pkg/build/golangutils/doc.go b/pkg/build/golangutils/doc.go deleted file mode 100644 index 2cb33af05e107..0000000000000 --- a/pkg/build/golangutils/doc.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package golangutils holds utility functions, wrappers, and types for building Go binaries for Grafana. -package golangutils diff --git a/pkg/build/gpg/gpg.go b/pkg/build/gpg/gpg.go deleted file mode 100644 index 59f9672201492..0000000000000 --- a/pkg/build/gpg/gpg.go +++ /dev/null @@ -1,73 +0,0 @@ -package gpg - -import ( - "encoding/base64" - "fmt" - "log" - "os" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/fsutil" -) - -// LoadGPGKeys loads GPG key pair and password from the environment and writes them to corresponding files. -// -// The passed config's GPG fields also get updated. Make sure to call RemoveGPGFiles at application exit. 
-func LoadGPGKeys(cfg *config.Config) error { - var err error - cfg.GPGPrivateKey, err = fsutil.CreateTempFile("priv.key") - if err != nil { - return err - } - cfg.GPGPublicKey, err = fsutil.CreateTempFile("pub.key") - if err != nil { - return err - } - cfg.GPGPassPath, err = fsutil.CreateTempFile("") - if err != nil { - return err - } - - gpgPrivKey := os.Getenv("GPG_PRIV_KEY") - if gpgPrivKey == "" { - return fmt.Errorf("$GPG_PRIV_KEY must be defined") - } - gpgPubKey := os.Getenv("GPG_PUB_KEY") - if gpgPubKey == "" { - return fmt.Errorf("$GPG_PUB_KEY must be defined") - } - gpgPass := os.Getenv("GPG_KEY_PASSWORD") - if gpgPass == "" { - return fmt.Errorf("$GPG_KEY_PASSWORD must be defined") - } - - gpgPrivKeyB, err := base64.StdEncoding.DecodeString(gpgPrivKey) - if err != nil { - return fmt.Errorf("couldn't decode $GPG_PRIV_KEY: %w", err) - } - gpgPubKeyB, err := base64.StdEncoding.DecodeString(gpgPubKey) - if err != nil { - return fmt.Errorf("couldn't decode $GPG_PUB_KEY: %w", err) - } - - if err := os.WriteFile(cfg.GPGPrivateKey, append(gpgPrivKeyB, '\n'), 0400); err != nil { - return fmt.Errorf("failed to write GPG private key file: %w", err) - } - if err := os.WriteFile(cfg.GPGPublicKey, append(gpgPubKeyB, '\n'), 0400); err != nil { - return fmt.Errorf("failed to write GPG public key file: %w", err) - } - if err := os.WriteFile(cfg.GPGPassPath, []byte(gpgPass+"\n"), 0400); err != nil { - return fmt.Errorf("failed to write GPG password file: %w", err) - } - - return nil -} - -// RemoveGPGFiles removes configured GPG files. 
-func RemoveGPGFiles(cfg config.Config) { - for _, fpath := range []string{cfg.GPGPrivateKey, cfg.GPGPublicKey, cfg.GPGPassPath} { - if err := os.Remove(fpath); err != nil { - log.Printf("failed to remove %q", fpath) - } - } -} diff --git a/pkg/build/gpg/import.go b/pkg/build/gpg/import.go deleted file mode 100644 index 4542a64d8c951..0000000000000 --- a/pkg/build/gpg/import.go +++ /dev/null @@ -1,73 +0,0 @@ -package gpg - -import ( - "fmt" - "log" - "os" - "os/exec" - "path/filepath" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/fsutil" -) - -// writeRpmMacros writes ~/.rpmmacros. -func writeRpmMacros(homeDir, gpgPassPath string) error { - fpath := filepath.Join(homeDir, ".rpmmacros") - content := fmt.Sprintf(`%%_signature gpg -%%_gpg_path %s/.gnupg -%%_gpg_name Grafana -%%_gpgbin /usr/bin/gpg -%%__gpg_sign_cmd %%{__gpg} gpg --batch --yes --pinentry-mode loopback --no-armor --passphrase-file %s --no-secmem-warning -u "%%{_gpg_name}" -sbo %%{__signature_filename} %%{__plaintext_filename} -`, homeDir, gpgPassPath) - //nolint:gosec - if err := os.WriteFile(fpath, []byte(content), 0600); err != nil { - return fmt.Errorf("failed to write %q: %w", fpath, err) - } - - return nil -} - -// Import imports the GPG package signing key. -// ~/.rpmmacros also gets written. 
-func Import(cfg config.Config) error { - exists, err := fsutil.Exists(cfg.GPGPrivateKey) - if err != nil { - return err - } - if !exists { - return fmt.Errorf("GPG private key file doesn't exist: %q", cfg.GPGPrivateKey) - } - - log.Printf("Importing GPG key %q...", cfg.GPGPrivateKey) - // nolint:gosec - cmd := exec.Command("gpg", "--batch", "--yes", "--no-tty", "--allow-secret-key-import", "--import", - cfg.GPGPrivateKey) - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to import private key: %s", output) - } - - homeDir, err := os.UserHomeDir() - if err != nil { - return err - } - - if err := writeRpmMacros(homeDir, cfg.GPGPassPath); err != nil { - return err - } - - pubKeysPath := filepath.Join(homeDir, ".rpmdb", "pubkeys") - if err := os.MkdirAll(pubKeysPath, 0700); err != nil { - return fmt.Errorf("failed to make %s: %w", pubKeysPath, err) - } - gpgPub, err := os.ReadFile(cfg.GPGPublicKey) - if err != nil { - return err - } - //nolint:gosec - if err := os.WriteFile(filepath.Join(homeDir, ".rpmdb", "pubkeys", "grafana.key"), gpgPub, 0400); err != nil { - return fmt.Errorf("failed to write pub key to ~/.rpmdb: %w", err) - } - - return nil -} diff --git a/pkg/build/grafana/build.go b/pkg/build/grafana/build.go deleted file mode 100644 index 586b2d22839b0..0000000000000 --- a/pkg/build/grafana/build.go +++ /dev/null @@ -1,129 +0,0 @@ -package grafana - -import ( - "context" - "fmt" - "log" - "os" - "path/filepath" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/cryptoutil" - "github.com/grafana/grafana/pkg/build/golangutils" -) - -var binaries = []string{"grafana", "grafana-server", "grafana-cli"} - -const ( - SuffixEnterprise2 = "-enterprise2" -) - -const ( - ExtensionExe = ".exe" -) - -func GrafanaLDFlags(version string, r config.Revision) []string { - cmd := []string{ - "-w", - fmt.Sprintf("-X main.version=%s", version), - fmt.Sprintf("-X main.commit=%s", r.SHA256), - fmt.Sprintf("-X 
main.buildstamp=%d", r.Timestamp), - fmt.Sprintf("-X main.buildBranch=%s", r.Branch), - } - - if r.EnterpriseCommit != "" { - cmd = append(cmd, fmt.Sprintf("-X main.enterpriseCommit=%s", r.EnterpriseCommit)) - } - - return cmd -} - -// BinaryFolder returns the path to where the Grafana binary is build given the provided arguments. -func BinaryFolder(edition config.Edition, args BuildArgs) string { - sfx := "" - if edition == config.EditionEnterprise2 { - sfx = SuffixEnterprise2 - } - - arch := string(args.GoArch) - if args.GoArch == config.ArchARM { - arch = string(args.GoArch) + "v" + args.GoArm - } - - format := fmt.Sprintf("%s-%s", args.GoOS, arch) - if args.LibC != "" { - format += fmt.Sprintf("-%s", args.LibC) - } - format += sfx - - if args.GoOS == config.OSWindows { - format += ExtensionExe - } - - return format -} - -func GrafanaDescriptor(opts golangutils.BuildOpts) string { - libcPart := "" - if opts.LibC != "" { - libcPart = fmt.Sprintf("/%s", opts.LibC) - } - arch := string(opts.GoArch) - if opts.GoArch == config.ArchARM { - arch = string(opts.GoArch) + "v" + opts.GoArm - } - - return fmt.Sprintf("%s/%s%s", opts.GoOS, arch, libcPart) -} - -// BuildGrafanaBinary builds a certain binary according to certain parameters. 
-func BuildGrafanaBinary(ctx context.Context, name, version string, args BuildArgs, edition config.Edition) error { - opts := args.BuildOpts - opts.ExtraEnv = os.Environ() - - revision, err := config.GrafanaRevision(ctx, opts.Workdir) - if err != nil { - return err - } - - folder := BinaryFolder(edition, args) - - if opts.GoOS == config.OSWindows { - name += ExtensionExe - } - - binary := filepath.Join(opts.Workdir, "bin", folder, name) - opts.Output = binary - - if err := os.RemoveAll(binary); err != nil { - return fmt.Errorf("failed to remove %q: %w", binary, err) - } - - if err := os.RemoveAll(binary + ".md5"); err != nil { - return fmt.Errorf("failed to remove %q: %w", binary+".md5", err) - } - - descriptor := GrafanaDescriptor(opts) - - log.Printf("Building %q for %s", binary, descriptor) - - opts.LdFlags = append(args.LdFlags, GrafanaLDFlags(version, revision)...) - - if edition == config.EditionEnterprise2 { - opts.ExtraArgs = []string{"-tags=pro"} - } - - log.Printf("Running command 'go %s'", opts.Args()) - - if err := golangutils.RunBuild(ctx, opts); err != nil { - return err - } - - // Create an MD5 checksum of the binary, to be included in the archive for - // automatic upgrades. - if err := cryptoutil.MD5File(binary); err != nil { - return err - } - - return nil -} diff --git a/pkg/build/grafana/variant.go b/pkg/build/grafana/variant.go index 6ccd4abb8a4c2..89654722cf28e 100644 --- a/pkg/build/grafana/variant.go +++ b/pkg/build/grafana/variant.go @@ -1,160 +1 @@ package grafana - -import ( - "bytes" - "context" - "fmt" - "path/filepath" - - "github.com/grafana/grafana/pkg/build/compilers" - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/golangutils" -) - -// BuildArgs represent the build parameters that define the "go build" behavior of a single variant. -// These arguments are applied as environment variables and arguments to the "go build" command. 
-type BuildArgs struct { - golangutils.BuildOpts - DebArch config.Architecture - RPMArch config.Architecture -} - -type BuildVariantOpts struct { - Variant config.Variant - Edition config.Edition - - Version string - GrafanaDir string -} - -// BuildVariant builds a certain variant of the grafana-server and grafana-cli binaries sequentially. -func BuildVariant(ctx context.Context, opts BuildVariantOpts) error { - grafanaDir, err := filepath.Abs(opts.GrafanaDir) - if err != nil { - return err - } - - var ( - args = VariantBuildArgs(opts.Variant) - ) - - for _, binary := range binaries { - // Note that for Golang cmd paths we must use the relative path and the Linux file separators (/) even for Windows users. - var ( - pkg = fmt.Sprintf("./pkg/cmd/%s", binary) - stdout = bytes.NewBuffer(nil) - stderr = bytes.NewBuffer(nil) - ) - - args.Workdir = grafanaDir - args.Stdout = stdout - args.Stderr = stderr - args.Package = pkg - - if err := BuildGrafanaBinary(ctx, binary, opts.Version, args, opts.Edition); err != nil { - return fmt.Errorf("failed to build %s for %s: %w\nstdout: %s\nstderr: %s", pkg, opts.Variant, err, stdout.String(), stderr.String()) - } - } - - return nil -} - -var ldFlagsStatic = []string{"-linkmode=external", "-extldflags=-static"} - -var variantArgs = map[config.Variant]BuildArgs{ - config.VariantArmV6: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSLinux, - CGoEnabled: true, - GoArch: config.ArchARM, - GoArm: "6", - CC: compilers.ArmV6, - }, - DebArch: config.ArchARMHF, - }, - config.VariantArmV7: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSLinux, - CGoEnabled: true, - GoArch: config.ArchARM, - GoArm: "7", - CC: compilers.Armv7, - }, - DebArch: config.ArchARMHF, - RPMArch: config.ArchARMHFP, - }, - config.VariantArmV7Musl: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSLinux, - CGoEnabled: true, - GoArch: config.ArchARM, - GoArm: "7", - LibC: config.LibCMusl, - CC: compilers.Armv7Musl, - LdFlags: ldFlagsStatic, - }, - }, - 
config.VariantArm64: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSLinux, - CGoEnabled: true, - GoArch: config.ArchARM64, - CC: compilers.Arm64, - }, - DebArch: config.ArchARM64, - RPMArch: "aarch64", - }, - config.VariantArm64Musl: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSLinux, - GoArch: config.ArchARM64, - CGoEnabled: true, - CC: compilers.Arm64Musl, - LibC: config.LibCMusl, - LdFlags: ldFlagsStatic, - }, - }, - config.VariantDarwinAmd64: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSDarwin, - CGoEnabled: true, - GoArch: config.ArchAMD64, - CC: compilers.Osx64, - }, - }, - config.VariantWindowsAmd64: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSWindows, - GoArch: config.ArchAMD64, - CC: compilers.Win64, - CGoEnabled: true, - CGoCFlags: "-D_WIN32_WINNT=0x0601", - }, - }, - config.VariantLinuxAmd64: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSLinux, - GoArch: config.ArchAMD64, - CC: compilers.LinuxX64, - }, - DebArch: config.ArchAMD64, - RPMArch: config.ArchAMD64, - }, - config.VariantLinuxAmd64Musl: { - BuildOpts: golangutils.BuildOpts{ - GoOS: config.OSLinux, - GoArch: config.ArchAMD64, - CC: compilers.LinuxX64Musl, - LibC: config.LibCMusl, - LdFlags: ldFlagsStatic, - }, - }, -} - -func VariantBuildArgs(v config.Variant) BuildArgs { - if val, ok := variantArgs[v]; ok { - return val - } - - return BuildArgs{} -} diff --git a/pkg/build/lerna/lerna.go b/pkg/build/lerna/lerna.go index 3bd67a87b44d5..048c0dd374d87 100644 --- a/pkg/build/lerna/lerna.go +++ b/pkg/build/lerna/lerna.go @@ -2,63 +2,13 @@ package lerna import ( "context" - "encoding/json" "fmt" "os" "os/exec" - "path/filepath" - "strings" - "github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/fsutil" ) -// BuildFrontendPackages will bump the version for the package to the latest canary build -// and build the packages so they are ready for being published, used for generating docs etc. 
-func BuildFrontendPackages(version string, mode config.Edition, grafanaDir string) error { - err := bumpLernaVersion(version, grafanaDir) - if err != nil { - return err - } - cmd := exec.Command("yarn", "run", "packages:build") - cmd.Dir = grafanaDir - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to build %s frontend packages: %s", mode, output) - } - - return nil -} - -func bumpLernaVersion(version string, grafanaDir string) error { - //nolint:gosec - cmd := exec.Command("yarn", "run", "lerna", "version", version, "--exact", "--no-git-tag-version", "--no-push", "--force-publish", "-y") - cmd.Dir = grafanaDir - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to bump version for frontend packages: %s\n%s", err, output) - } - - return nil -} - -func GetLernaVersion(grafanaDir string) (string, error) { - lernaJSONPath := filepath.Join(grafanaDir, "lerna.json") - //nolint:gosec - lernaJSONB, err := os.ReadFile(lernaJSONPath) - if err != nil { - return "", fmt.Errorf("failed to read %q: %w", lernaJSONPath, err) - } - pkgObj := map[string]any{} - if err := json.Unmarshal(lernaJSONB, &pkgObj); err != nil { - return "", fmt.Errorf("failed decoding %q: %w", lernaJSONPath, err) - } - - version := pkgObj["version"].(string) - if version == "" { - return "", fmt.Errorf("failed to read version from %q", lernaJSONPath) - } - return strings.TrimSpace(version), nil -} - func PackFrontendPackages(ctx context.Context, tag, grafanaDir, artifactsDir string) error { exists, err := fsutil.Exists(artifactsDir) if err != nil { diff --git a/pkg/build/packaging/artifacts.go b/pkg/build/packaging/artifacts.go index 36c1e3258e5dd..b1740b68c5f3d 100644 --- a/pkg/build/packaging/artifacts.go +++ b/pkg/build/packaging/artifacts.go @@ -1,9 +1,6 @@ package packaging import ( - "fmt" - "strings" - "github.com/grafana/grafana/pkg/build/config" ) @@ -12,11 +9,18 @@ const MainFolder = "main" const EnterpriseSfx = "-enterprise" const 
CacheSettings = "Cache-Control:public, max-age=" -type buildArtifact struct { - Os string - Arch string - urlPostfix string - packagePostfix string +type BuildArtifact struct { + // Distro can be "windows", "darwin", "deb", "rhel", or "linux" + Distro string + Arch string + // Ext is the file extension without the "." + Ext string + Musl bool + RaspberryPi bool + + // URL can be set optionally by another process + // Note: check other repos before determining this to be dead code + URL string } type PublishConfig struct { @@ -32,110 +36,101 @@ type PublishConfig struct { SimulateRelease bool } -const rhelOS = "rhel" -const debOS = "deb" - -func (t buildArtifact) GetURL(baseArchiveURL string, cfg PublishConfig) string { - rev := "" - prefix := "-" - if t.Os == debOS { - prefix = "_" - } else if t.Os == rhelOS { - rev = "-1" - } - - version := cfg.Version - verComponents := strings.Split(version, "-") - if len(verComponents) > 2 { - panic(fmt.Sprintf("Version string contains more than one hyphen: %q", version)) - } - - switch t.Os { - case debOS, rhelOS: - if len(verComponents) > 1 { - // With Debian and RPM packages, it's customary to prefix any pre-release component with a ~, since this - // is considered of lower lexical value than the empty character, and this way pre-release versions are - // considered to be of a lower version than the final version (which lacks this suffix). 
- version = fmt.Sprintf("%s~%s", verComponents[0], verComponents[1]) - } - } +var LinuxArtifacts = []BuildArtifact{ + { + Distro: "linux", + Arch: "arm64", + Ext: "tar.gz", + }, + { + Distro: "deb", + Arch: "amd64", + Ext: "deb", + }, + { + Distro: "rhel", + Arch: "x86_64", + Ext: "rpm", + }, + { + Distro: "linux", + Arch: "amd64", + Ext: "tar.gz", + }, +} - // https://dl.grafana.com/oss/main/grafana_8.5.0~54094pre_armhf.deb: 404 Not Found - url := fmt.Sprintf("%s%s%s%s%s%s", baseArchiveURL, t.packagePostfix, prefix, version, rev, t.urlPostfix) - return url +var DarwinArtifacts = []BuildArtifact{ + { + Distro: "darwin", + Arch: "amd64", + Ext: "tar.gz", + }, } -var ArtifactConfigs = []buildArtifact{ +var WindowsArtifacts = []BuildArtifact{ + { + Distro: "windows", + Arch: "amd64", + Ext: "zip", + }, { - Os: debOS, - Arch: "arm64", - urlPostfix: "_arm64.deb", + Distro: "windows", + Arch: "amd64", + Ext: "msi", }, +} + +var ARMArtifacts = []BuildArtifact{ { - Os: rhelOS, - Arch: "arm64", - urlPostfix: ".aarch64.rpm", + Distro: "deb", + Arch: "arm64", + Ext: "deb", }, { - Os: "linux", - Arch: "arm64", - urlPostfix: ".linux-arm64.tar.gz", + Distro: "rhel", + Arch: "aarch64", + Ext: "rpm", }, - // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved - // { - // Os: debOS, - // Arch: "armv7", - // urlPostfix: "_armhf.deb", - // }, - // { - // Os: debOS, - // Arch: "armv6", - // packagePostfix: "-rpi", - // urlPostfix: "_armhf.deb", - // }, - // { - // Os: rhelOS, - // Arch: "armv7", - // urlPostfix: ".armhfp.rpm", - // }, - // { - // Os: "linux", - // Arch: "armv6", - // urlPostfix: ".linux-armv6.tar.gz", - // }, - // { - // Os: "linux", - // Arch: "armv7", - // urlPostfix: ".linux-armv7.tar.gz", - // }, { - Os: "darwin", - Arch: "amd64", - urlPostfix: ".darwin-amd64.tar.gz", + Distro: "deb", + Arch: "armhf", + Ext: "deb", + RaspberryPi: false, }, { - Os: "deb", - Arch: "amd64", - urlPostfix: "_amd64.deb", + Distro: "deb", + Arch: 
"armhf", + RaspberryPi: true, + Ext: "deb", }, { - Os: rhelOS, - Arch: "amd64", - urlPostfix: ".x86_64.rpm", + Distro: "linux", + Arch: "armv6", + Ext: "tar.gz", }, { - Os: "linux", - Arch: "amd64", - urlPostfix: ".linux-amd64.tar.gz", + Distro: "linux", + Arch: "armv7", + Ext: "tar.gz", }, { - Os: "win", - Arch: "amd64", - urlPostfix: ".windows-amd64.zip", + Distro: "linux", + Arch: "arm64", + Ext: "tar.gz", }, { - Os: "win-installer", - Arch: "amd64", - urlPostfix: ".windows-amd64.msi", + Distro: "linux", + Arch: "amd64", + Ext: "tar.gz", }, } + +func join(a []BuildArtifact, b ...[]BuildArtifact) []BuildArtifact { + for i := range b { + a = append(a, b[i]...) + } + + return a +} + +var ArtifactConfigs = join(LinuxArtifacts, DarwinArtifacts, WindowsArtifacts, ARMArtifacts) diff --git a/pkg/build/packaging/docs.go b/pkg/build/packaging/docs.go deleted file mode 100644 index a723cb3d346c0..0000000000000 --- a/pkg/build/packaging/docs.go +++ /dev/null @@ -1,2 +0,0 @@ -// Package packaging holds functions and types for creating the tar.gz, deb, and rpm packages of Grafana. 
-package packaging diff --git a/pkg/build/packaging/errors.go b/pkg/build/packaging/errors.go deleted file mode 100644 index c20b9edbfae36..0000000000000 --- a/pkg/build/packaging/errors.go +++ /dev/null @@ -1 +0,0 @@ -package packaging diff --git a/pkg/build/packaging/grafana.go b/pkg/build/packaging/grafana.go deleted file mode 100644 index 7022f1bf4697a..0000000000000 --- a/pkg/build/packaging/grafana.go +++ /dev/null @@ -1,1133 +0,0 @@ -package packaging - -import ( - "archive/tar" - "archive/zip" - "compress/gzip" - "context" - "crypto/sha256" - "encoding/json" - "errors" - "fmt" - "io" - "log" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/fsutil" - "github.com/grafana/grafana/pkg/build/grafana" - "github.com/grafana/grafana/pkg/build/plugins" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -var ( - ErrorNoBinaries = errors.New("no binaries found") - ErrorNoDebArch = errors.New("deb architecture not defined") - ErrorNoRPMArch = errors.New("rpm architecture not defined") -) - -const ( - maxAttempts = 3 - enterpriseSfx = "-enterprise" - enterprise2Sfx = "-enterprise2" - DefaultDebDBBucket = "grafana-aptly-db" - DefaultDebRepoBucket = "grafana-repo" - DefaultRPMRepoBucket = "grafana-repo" - DefaultTTLSeconds = "300" -) - -// PackageRegexp returns a regexp for matching packages corresponding to a certain Grafana edition. 
-func PackageRegexp(edition config.Edition) *regexp.Regexp { - var sfx string - switch edition { - case config.EditionOSS: - case config.EditionEnterprise: - sfx = "-enterprise" - case config.EditionEnterprise2: - sfx = "-enterprise2" - default: - panic(fmt.Sprintf("unrecognized edition %q", edition)) - } - rePkg, err := regexp.Compile(fmt.Sprintf(`^grafana%s(?:-rpi)?[-_][^-_]+.*$`, sfx)) - if err != nil { - panic(fmt.Sprintf("Failed to compile regexp: %s", err)) - } - - return rePkg -} - -// PackageGrafana packages Grafana for various variants. -func PackageGrafana( - ctx context.Context, - version string, - grafanaDir string, - cfg config.Config, - edition config.Edition, - variants []config.Variant, - shouldSign bool, - p syncutil.WorkerPool, -) error { - if err := packageGrafana(ctx, edition, version, grafanaDir, variants, shouldSign, p); err != nil { - return err - } - - if cfg.SignPackages { - if err := signRPMPackages(edition, cfg, grafanaDir); err != nil { - return err - } - } - - if err := checksumPackages(grafanaDir, edition); err != nil { - return err - } - - return nil -} - -func packageGrafana( - ctx context.Context, - edition config.Edition, - version string, - grafanaDir string, - variants []config.Variant, - shouldSign bool, - p syncutil.WorkerPool, -) error { - distDir := filepath.Join(grafanaDir, "dist") - exists, err := fsutil.Exists(distDir) - if err != nil { - return err - } - if !exists { - log.Printf("directory %s doesn't exist - creating...", distDir) - //nolint - if err := os.MkdirAll(distDir, 0o755); err != nil { - return fmt.Errorf("couldn't create dist: %w", err) - } - } - - var m pluginsManifest - manifestPath := filepath.Join(grafanaDir, "plugins-bundled", "external.json") - //nolint:gosec - manifestB, err := os.ReadFile(manifestPath) - if err != nil { - return fmt.Errorf("failed to open plugins manifest %q: %w", manifestPath, err) - } - if err := json.Unmarshal(manifestB, &m); err != nil { - return err - } - - g, ctx := 
errutil.GroupWithContext(ctx) - for _, v := range variants { - packageVariant(ctx, v, edition, version, grafanaDir, shouldSign, g, p, m) - } - if err := g.Wait(); err != nil { - return err - } - - return nil -} - -// packageVariant packages Grafana for a certain variant. -func packageVariant( - ctx context.Context, - v config.Variant, - edition config.Edition, - version string, - grafanaDir string, - shouldSign bool, - g *errutil.Group, - p syncutil.WorkerPool, - m pluginsManifest, -) { - p.Schedule(g.Wrap(func() error { - // We've experienced spurious packaging failures, so retry on failure. - i := 0 - for { - if err := realPackageVariant(ctx, v, edition, version, grafanaDir, m, shouldSign); err != nil { - i++ - if i < maxAttempts { - log.Printf("Packaging for variant %s, %s edition failed: %s, trying again", v, edition, err) - continue - } - - return err - } - - break - } - - return nil - })) -} - -// signRPMPackages signs the RPM packages. -func signRPMPackages(edition config.Edition, cfg config.Config, grafanaDir string) error { - log.Printf("Signing %s RPM packages...", edition) - var sfx string - switch edition { - case config.EditionOSS: - case config.EditionEnterprise: - sfx = enterpriseSfx - case config.EditionEnterprise2: - sfx = enterprise2Sfx - default: - panic(fmt.Sprintf("Unrecognized edition %s", edition)) - } - rpms, err := filepath.Glob(filepath.Join(grafanaDir, "dist", fmt.Sprintf("grafana%s-*.rpm", sfx))) - if err != nil { - return err - } - - if len(rpms) > 0 { - rpmArgs := append([]string{"--addsign"}, rpms...) - log.Printf("Invoking rpm with args: %+v", rpmArgs) - //nolint:gosec - cmd := exec.Command("rpm", rpmArgs...) 
- if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to sign RPM packages: %s", output) - } - if err := os.Remove(cfg.GPGPassPath); err != nil { - return fmt.Errorf("failed to remove %q: %w", cfg.GPGPassPath, err) - } - - log.Printf("Verifying %s RPM packages...", edition) - // The output changed between rpm versions - reOutput := regexp.MustCompile("(?:digests signatures OK)|(?:pgp.+OK)") - for _, p := range rpms { - //nolint:gosec - cmd := exec.Command("rpm", "-K", p) - output, err := cmd.CombinedOutput() - if err != nil { - return fmt.Errorf("failed to verify RPM signature: %w", err) - } - - if !reOutput.Match(output) { - return fmt.Errorf("RPM package %q not verified: %s", p, output) - } - } - } - - return nil -} - -// checksumPackages generates package checksums with SHA-256. -func checksumPackages(grafanaDir string, edition config.Edition) error { - log.Printf("Checksumming %s packages...", edition) - distDir := filepath.Join(grafanaDir, "dist") - rePkg := PackageRegexp(edition) - if err := filepath.Walk(distDir, func(fpath string, info os.FileInfo, err error) error { - if err != nil { - var pathErr *os.PathError - if errors.As(err, &pathErr) { - log.Printf("path error in walk function for file %q: %s", pathErr.Path, pathErr.Err.Error()) - return nil - } - return fmt.Errorf("walking through dist folder failed: %w", err) - } - - if info.IsDir() { - return nil - } - - fname := filepath.Base(fpath) - if strings.HasSuffix(fname, ".sha256") || strings.HasSuffix(fname, ".version") || !rePkg.MatchString(fname) { - log.Printf("Ignoring non-package %q", fpath) - return nil - } - - return shaFile(fpath) - }); err != nil { - return fmt.Errorf("checksumming packages in %q failed: %w", distDir, err) - } - - log.Printf("Successfully checksummed %s packages", edition) - return nil -} - -func shaFile(fpath string) error { - //nolint:gosec - fd, err := os.Open(fpath) - if err != nil { - return fmt.Errorf("failed to open %q: %w", fpath, err) - } - 
defer func() { - if err := fd.Close(); err != nil { - log.Println(err) - } - }() - - h := sha256.New() - _, err = io.Copy(h, fd) - if err != nil { - return fmt.Errorf("failed to read %q: %w", fpath, err) - } - - //nolint:gosec - out, err := os.Create(fpath + ".sha256") - if err != nil { - return fmt.Errorf("failed to create %q: %w", fpath+".sha256", err) - } - defer func() { - if err := out.Close(); err != nil { - log.Println("failed to close file", out.Name()) - } - }() - - if _, err = fmt.Fprintf(out, "%x\n", h.Sum(nil)); err != nil { - return fmt.Errorf("failed to write %q: %w", out.Name(), err) - } - - return nil -} - -// createPackage creates a Linux package. -func createPackage(srcDir string, options linuxPackageOptions) error { - binary := "grafana" - cliBinary := "grafana-cli" - serverBinary := "grafana-server" - - packageRoot, err := os.MkdirTemp("", "grafana-linux-pack") - if err != nil { - return fmt.Errorf("failed to create temporary directory: %w", err) - } - defer func() { - if err := os.RemoveAll(packageRoot); err != nil { - log.Println(err) - } - }() - - for _, dname := range []string{ - options.homeDir, - options.configDir, - "etc/init.d", - options.etcDefaultPath, - "usr/lib/systemd/system", - "usr/sbin", - } { - dpath := filepath.Join(packageRoot, dname) - //nolint - if err := os.MkdirAll(dpath, 0o755); err != nil { - return fmt.Errorf("failed to make directory %q: %w", dpath, err) - } - } - - if err := fsutil.CopyFile(filepath.Join(options.wrapperFilePath, binary), - filepath.Join(packageRoot, "usr", "sbin", binary)); err != nil { - return err - } - if err := fsutil.CopyFile(filepath.Join(options.wrapperFilePath, cliBinary), - filepath.Join(packageRoot, "usr", "sbin", cliBinary)); err != nil { - return err - } - if err := fsutil.CopyFile(filepath.Join(options.wrapperFilePath, serverBinary), - filepath.Join(packageRoot, "usr", "sbin", serverBinary)); err != nil { - return err - } - if err := fsutil.CopyFile(options.initdScriptSrc, 
filepath.Join(packageRoot, options.initdScriptFilePath)); err != nil { - return err - } - if err := fsutil.CopyFile(options.defaultFileSrc, filepath.Join(packageRoot, options.etcDefaultFilePath)); err != nil { - return err - } - if err := fsutil.CopyFile(options.systemdFileSrc, filepath.Join(packageRoot, options.systemdServiceFilePath)); err != nil { - return err - } - if err := fsutil.CopyRecursive(srcDir, filepath.Join(packageRoot, options.homeDir)); err != nil { - return err - } - - if err := executeFPM(options, packageRoot, srcDir); err != nil { - return err - } - - return nil -} -func executeFPM(options linuxPackageOptions, packageRoot, srcDir string) error { - name := "grafana" - vendor := "Grafana" - if options.edition == config.EditionEnterprise || options.edition == config.EditionEnterprise2 { - vendor += " Enterprise" - if options.edition == config.EditionEnterprise2 { - name += enterprise2Sfx - } else if options.edition == config.EditionEnterprise { - name += enterpriseSfx - } - } - - if options.goArch == config.ArchARM && options.goArm == "6" { - name += "-rpi" - } - - pkgVersion := packageVersion(options) - args := []string{ - "-s", "dir", - "--description", "Grafana", - "-C", packageRoot, - "--url", "https://grafana.com", - "--maintainer", "contact@grafana.com", - "--config-files", options.initdScriptFilePath, - "--config-files", options.etcDefaultFilePath, - "--config-files", options.systemdServiceFilePath, - "--after-install", options.postinstSrc, - "--version", pkgVersion, - "-p", "dist/", - "--name", name, - "--vendor", vendor, - "-a", string(options.packageArch), - } - if options.prermSrc != "" { - args = append(args, "--before-remove", options.prermSrc) - } - if options.edition == config.EditionEnterprise || options.edition == config.EditionEnterprise2 || options.goArch == config.ArchARMv6 { - args = append(args, "--conflicts", "grafana") - } - if options.edition == config.EditionOSS { - args = append(args, "--license", "\"AGPLv3\"") - } - 
switch options.packageType { - case packageTypeRpm: - args = append(args, "-t", "rpm", "--rpm-posttrans", "packaging/rpm/control/posttrans") - args = append(args, "--rpm-digest", "sha256") - case packageTypeDeb: - args = append(args, "-t", "deb", "--deb-no-default-config-files") - default: - panic(fmt.Sprintf("Unrecognized package type %d", options.packageType)) - } - for _, dep := range options.depends { - args = append(args, "--depends", dep) - } - args = append(args, ".") - - distDir := filepath.Join(options.grafanaDir, "dist") - log.Printf("Generating package in %q (source directory %q)", distDir, srcDir) - - cmdStr := "fpm" - for _, arg := range args { - if strings.Contains(arg, " ") { - arg = fmt.Sprintf("'%s'", arg) - } - cmdStr += fmt.Sprintf(" %s", arg) - } - log.Printf("Creating %s package: %s...", options.packageType, cmdStr) - const rvmPath = "/etc/profile.d/rvm.sh" - exists, err := fsutil.Exists(rvmPath) - if err != nil { - return err - } - if exists { - cmdStr = fmt.Sprintf("source %q && %s", rvmPath, cmdStr) - log.Printf("Sourcing %q before running fpm", rvmPath) - } - //nolint:gosec - cmd := exec.Command("/bin/bash", "-c", cmdStr) - cmd.Dir = options.grafanaDir - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("failed to run fpm: %s", output) - } - - return nil -} - -// copyPubDir copies public/ from grafanaDir to tmpDir. -func copyPubDir(grafanaDir, tmpDir string) error { - srcPubDir := filepath.Join(grafanaDir, "public") - tgtPubDir := filepath.Join(tmpDir, "public") - log.Printf("Copying %q to %q...", srcPubDir, tgtPubDir) - if err := fsutil.CopyRecursive(srcPubDir, tgtPubDir); err != nil { - return fmt.Errorf("failed to copy %q to %q: %w", srcPubDir, tgtPubDir, err) - } - - return nil -} - -// copyBinaries copies binaries from grafanaDir into tmpDir. 
-func copyBinaries(grafanaDir, tmpDir string, args grafana.BuildArgs, edition config.Edition) error { - tgtDir := filepath.Join(tmpDir, "bin") - //nolint - if err := os.MkdirAll(tgtDir, 0o755); err != nil { - return fmt.Errorf("failed to make directory %q: %w", tgtDir, err) - } - - binDir := filepath.Join(grafanaDir, "bin", grafana.BinaryFolder(edition, args)) - - files, err := os.ReadDir(binDir) - if err != nil { - return fmt.Errorf("failed to list files in %q: %w", binDir, err) - } - - if len(files) == 0 { - return fmt.Errorf("%w in %s", ErrorNoBinaries, binDir) - } - - for _, file := range files { - srcPath := filepath.Join(binDir, file.Name()) - tgtPath := filepath.Join(tgtDir, file.Name()) - - if err := fsutil.CopyFile(srcPath, tgtPath); err != nil { - return err - } - } - - return nil -} - -// copyConfFiles copies configuration files from grafanaDir into tmpDir. -func copyConfFiles(grafanaDir, tmpDir string) error { - //nolint:gosec - if err := os.MkdirAll(filepath.Join(tmpDir, "conf"), 0o755); err != nil { - return fmt.Errorf("failed to create dir %q: %w", filepath.Join(tmpDir, "conf"), err) - } - - confDir := filepath.Join(grafanaDir, "conf") - infos, err := os.ReadDir(confDir) - if err != nil { - return fmt.Errorf("failed to list files in %q: %w", confDir, err) - } - for _, info := range infos { - fpath := filepath.Join(confDir, info.Name()) - if err := fsutil.CopyRecursive(fpath, filepath.Join(tmpDir, "conf", info.Name())); err != nil { - return err - } - } - - return nil -} - -// copyPlugins copies plugins from grafanaDir into tmpDir. 
-func copyPlugins(ctx context.Context, v config.Variant, grafanaDir, tmpDir string, m pluginsManifest, shouldSign bool) error { - log.Printf("Copying plugins for package variant %s...", v) - - variant2Sfx := map[config.Variant]string{ - config.VariantLinuxAmd64: "linux_amd64", - config.VariantDarwinAmd64: "darwin_amd64", - config.VariantWindowsAmd64: "windows_amd64.exe", - } - - tgtDir := filepath.Join(tmpDir, "plugins-bundled") - exists, err := fsutil.Exists(tgtDir) - if err != nil { - return err - } - if !exists { - //nolint:gosec - if err := os.MkdirAll(tgtDir, 0o755); err != nil { - return err - } - } - pluginsDir := filepath.Join(grafanaDir, "plugins-bundled") - - // External plugins. - for _, pm := range m.Plugins { - srcDir := filepath.Join(pluginsDir, fmt.Sprintf("%s-%s", pm.Name, pm.Version)) - dstDir := filepath.Join(tgtDir, fmt.Sprintf("%s-%s", pm.Name, pm.Version)) - log.Printf("Copying external plugin %q to %q...", srcDir, dstDir) - - //nolint:gosec - jsonB, err := os.ReadFile(filepath.Join(srcDir, "plugin.json")) - if err != nil { - return fmt.Errorf("failed to read %q: %w", filepath.Join(srcDir, "plugin.json"), err) - } - var plugJSON map[string]any - if err := json.Unmarshal(jsonB, &plugJSON); err != nil { - return err - } - - plugExe, ok := plugJSON["executable"].(string) - var wantExe string - if ok && strings.TrimSpace(plugExe) != "" { - sfx := variant2Sfx[v] - if sfx == "" { - log.Printf("External plugin %s-%s doesn't have an executable for variant %s - ignoring", - pm.Name, pm.Version, v) - continue - } - - wantExe = fmt.Sprintf("%s_%s", plugExe, sfx) - log.Printf("The external plugin should contain an executable %q", wantExe) - exists, err := fsutil.Exists(filepath.Join(srcDir, wantExe)) - if err != nil { - return err - } - if !exists { - log.Printf("External plugin %s-%s doesn't have an executable of the right format: %q - ignoring", - pm.Name, pm.Version, wantExe) - continue - } - } - - if err := filepath.Walk(srcDir, func(pth string, info 
os.FileInfo, err error) error { - if err != nil { - return err - } - - log.Printf("Handling %q", pth) - - relPath := strings.TrimPrefix(pth, srcDir) - relPath = strings.TrimPrefix(relPath, "/") - dstPath := filepath.Join(dstDir, relPath) - - if info.IsDir() { - log.Printf("Making directory %q", dstPath) - //nolint:gosec - return os.MkdirAll(dstPath, info.Mode()) - } - - if wantExe != "" { - m, err := regexp.MatchString(fmt.Sprintf(`^%s_[^/]+$`, plugExe), relPath) - if err != nil { - return err - } - if m && relPath != wantExe { - // Ignore other executable variants - log.Printf("Ignoring executable variant %q", pth) - return nil - } - } - - log.Printf("Copying %q to %q", pth, dstPath) - return fsutil.CopyFile(pth, dstPath) - }); err != nil { - return fmt.Errorf("failed to copy external plugin %q to %q: %w", srcDir, dstDir, err) - } - - if shouldSign { - if err := plugins.BuildManifest(ctx, dstDir, true); err != nil { - return fmt.Errorf("failed to generate signed manifest for external plugin %q: %w", dstDir, err) - } - } - } - - return copyInternalPlugins(pluginsDir, tmpDir) -} - -func copyInternalPlugins(pluginsDir, tmpDir string) error { - tgtDir := filepath.Join(tmpDir, "plugins-bundled", "internal") - srcDir := filepath.Join(pluginsDir, "dist") - - exists, err := fsutil.Exists(tgtDir) - if err != nil { - return err - } - if !exists { - //nolint:gosec - if err := os.MkdirAll(tgtDir, 0o755); err != nil { - return err - } - } - - // Copy over internal plugins. 
- fis, err := os.ReadDir(srcDir) - if err != nil { - return fmt.Errorf("failed to list internal plugins in %q: %w", srcDir, err) - } - for _, fi := range fis { - srcPath := filepath.Join(srcDir, fi.Name()) - if !fi.IsDir() { - log.Printf("Ignoring non-directory %q", srcPath) - continue - } - - dstPath := filepath.Join(tgtDir, fi.Name()) - log.Printf("Copying internal plugin %q to %q...", srcPath, dstPath) - if err := fsutil.CopyRecursive(srcPath, dstPath); err != nil { - return fmt.Errorf("failed to copy %q to %q: %w", srcPath, dstPath, err) - } - } - - return nil -} - -func realPackageVariant(ctx context.Context, v config.Variant, edition config.Edition, version, grafanaDir string, - m pluginsManifest, shouldSign bool) error { - log.Printf("Packaging Grafana %s for %s...", edition, v) - - enableDeb := false - enableRpm := false - switch v { - case config.VariantLinuxAmd64: - enableDeb = true - enableRpm = true - case config.VariantArmV6: - enableDeb = true - case config.VariantArmV7: - enableDeb = true - enableRpm = true - case config.VariantArm64: - enableDeb = true - enableRpm = true - default: - } - - tmpDir, err := os.MkdirTemp("", "") - if err != nil { - return fmt.Errorf("failed to create temporary directory: %w", err) - } - defer func() { - if err := os.RemoveAll(tmpDir); err != nil { - log.Println(err) - } - }() - - args := grafana.VariantBuildArgs(v) - - if err := copyPubDir(grafanaDir, tmpDir); err != nil { - return err - } - if err := copyBinaries(grafanaDir, tmpDir, args, edition); err != nil { - return err - } - if err := copyConfFiles(grafanaDir, tmpDir); err != nil { - return err - } - if err := copyPlugins(ctx, v, grafanaDir, tmpDir, m, shouldSign); err != nil { - return err - } - - if v == config.VariantWindowsAmd64 { - toolsDir := filepath.Join(tmpDir, "tools") - //nolint:gosec - if err := os.MkdirAll(toolsDir, 0o755); err != nil { - return fmt.Errorf("failed to create tools dir %q: %w", toolsDir, err) - } - - if err := 
fsutil.CopyFile("/usr/local/go/lib/time/zoneinfo.zip", - filepath.Join(tmpDir, "tools", "zoneinfo.zip")); err != nil { - return err - } - } - - if err := os.WriteFile(filepath.Join(tmpDir, "VERSION"), []byte(version), 0664); err != nil { - return fmt.Errorf("failed to write %s/VERSION: %w", tmpDir, err) - } - - if err := createArchive(tmpDir, edition, v, version, grafanaDir); err != nil { - return err - } - - if enableDeb { - if args.DebArch == "" { - return fmt.Errorf("%w for %s", ErrorNoDebArch, v) - } - - if err := createPackage(tmpDir, linuxPackageOptions{ - edition: edition, - version: version, - grafanaDir: grafanaDir, - goArch: args.GoArch, - goArm: args.GoArm, - packageType: packageTypeDeb, - packageArch: args.DebArch, - homeDir: "/usr/share/grafana", - homeBinDir: "/usr/share/grafana/bin", - binPath: "/usr/sbin", - configDir: "/etc/grafana", - etcDefaultPath: "/etc/default", - etcDefaultFilePath: "/etc/default/grafana-server", - initdScriptFilePath: "/etc/init.d/grafana-server", - systemdServiceFilePath: "/usr/lib/systemd/system/grafana-server.service", - postinstSrc: filepath.Join(grafanaDir, "packaging", "deb", "control", "postinst"), - prermSrc: filepath.Join(grafanaDir, "packaging", "deb", "control", "prerm"), - initdScriptSrc: filepath.Join(grafanaDir, "packaging", "deb", "init.d", "grafana-server"), - defaultFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "default", "grafana-server"), - systemdFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "systemd", "grafana-server.service"), - wrapperFilePath: filepath.Join(grafanaDir, "packaging", "wrappers"), - depends: []string{"adduser", "libfontconfig1"}, - }); err != nil { - return err - } - } - - if !enableRpm { - return nil - } - - if args.RPMArch == "" { - return fmt.Errorf("%w for %s", ErrorNoRPMArch, v) - } - - if err := createPackage(tmpDir, linuxPackageOptions{ - edition: edition, - version: version, - grafanaDir: grafanaDir, - goArch: args.GoArch, - packageType: packageTypeRpm, - 
packageArch: args.RPMArch, - homeDir: "/usr/share/grafana", - homeBinDir: "/usr/share/grafana/bin", - binPath: "/usr/sbin", - configDir: "/etc/grafana", - etcDefaultPath: "/etc/sysconfig", - etcDefaultFilePath: "/etc/sysconfig/grafana-server", - initdScriptFilePath: "/etc/init.d/grafana-server", - systemdServiceFilePath: "/usr/lib/systemd/system/grafana-server.service", - postinstSrc: filepath.Join(grafanaDir, "packaging", "rpm", "control", "postinst"), - initdScriptSrc: filepath.Join(grafanaDir, "packaging", "rpm", "init.d", "grafana-server"), - defaultFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "sysconfig", "grafana-server"), - systemdFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "systemd", "grafana-server.service"), - wrapperFilePath: filepath.Join(grafanaDir, "packaging", "wrappers"), - depends: []string{"/sbin/service", "fontconfig", "freetype"}, - }); err != nil { - return err - } - - return nil -} - -// pluginManifest has details of an external plugin package. -type pluginManifest struct { - Name string `json:"name"` - Version string `json:"version"` - Checksum string `json:"checksum"` -} - -// pluginsManifest represents a manifest of Grafana's external plugins. -type pluginsManifest struct { - Plugins []pluginManifest `json:"plugins"` -} - -// packageVersion converts a Grafana version into the corresponding package version. 
-func packageVersion(options linuxPackageOptions) string { - verComponents := strings.Split(options.version, "-") - if len(verComponents) > 2 { - panic(fmt.Sprintf("Version string contains more than one hyphen: %q", options.version)) - } - - switch options.packageType { - case packageTypeDeb, packageTypeRpm: - if len(verComponents) > 1 { - // With Debian and RPM packages, it's customary to prefix any pre-release component with a ~, since this - // is considered of lower lexical value than the empty character, and this way pre-release versions are - // considered to be of a lower version than the final version (which lacks this suffix). - return fmt.Sprintf("%s~%s", verComponents[0], verComponents[1]) - } - - return options.version - default: - panic(fmt.Sprintf("Unrecognized package type %s", options.packageType)) - } -} - -type packageType int - -func (pt packageType) String() string { - switch pt { - case packageTypeDeb: - return "Debian" - case packageTypeRpm: - return "RPM" - default: - panic(fmt.Sprintf("Unrecognized package type %d", pt)) - } -} - -const ( - packageTypeDeb packageType = iota - packageTypeRpm -) - -type linuxPackageOptions struct { - edition config.Edition - packageType packageType - version string - grafanaDir string - goArch config.Architecture - goArm string - packageArch config.Architecture - homeDir string - homeBinDir string - binPath string - configDir string - etcDefaultPath string - etcDefaultFilePath string - initdScriptFilePath string - systemdServiceFilePath string - postinstSrc string - prermSrc string - initdScriptSrc string - defaultFileSrc string - systemdFileSrc string - wrapperFilePath string - - depends []string -} - -// createArchive makes a distribution archive. 
-func createArchive(srcDir string, edition config.Edition, v config.Variant, version, grafanaDir string) error { - distDir := filepath.Join(grafanaDir, "dist") - exists, err := fsutil.Exists(distDir) - if err != nil { - return err - } - if !exists { - log.Printf("directory %s doesn't exist - creating...", distDir) - //nolint:gosec - if err := os.MkdirAll(distDir, 0o755); err != nil { - return fmt.Errorf("couldn't create dist: %w", err) - } - } - sfx := "" - if edition == config.EditionEnterprise2 { - sfx = enterprise2Sfx - } else if edition == config.EditionEnterprise { - sfx = enterpriseSfx - } - if v != config.VariantWindowsAmd64 { - return createTarball(srcDir, version, string(v), sfx, grafanaDir) - } - - return createZip(srcDir, version, string(v), sfx, grafanaDir) -} - -func createZip(srcDir, version, variantStr, sfx, grafanaDir string) error { - fpath := filepath.Join(grafanaDir, "dist", fmt.Sprintf("grafana%s-%s.%s.zip", sfx, version, variantStr)) - //nolint:gosec - tgt, err := os.Create(fpath) - if err != nil { - return fmt.Errorf("failed to create %q: %w", fpath, err) - } - defer func() { - if err := tgt.Close(); err != nil && !errors.Is(err, os.ErrClosed) { - log.Println(err) - } - }() - - //nolint:gosec - if err := os.Chmod(fpath, 0664); err != nil { - return fmt.Errorf("failed to set permissions on %q: %w", fpath, err) - } - zipWriter := zip.NewWriter(tgt) - defer func() { - if err := zipWriter.Close(); err != nil { - log.Println(err) - } - }() - - for _, fname := range []string{"LICENSE", "README.md", "NOTICE.md"} { - fpath := filepath.Join(grafanaDir, fname) - fi, err := os.Lstat(fpath) - if err != nil { - return fmt.Errorf("couldn't stat %q: %w", fpath, err) - } - hdr, err := zip.FileInfoHeader(fi) - if err != nil { - return fmt.Errorf("failed to open zip header: %w", err) - } - // Enable compression, as it's disabled by default - hdr.Method = zip.Deflate - hdr.Name = fmt.Sprintf("grafana-%s/%s", version, fname) - w, err := 
zipWriter.CreateHeader(hdr) - if err != nil { - return fmt.Errorf("failed writing zip header: %w", err) - } - //nolint:gosec - src, err := os.Open(fpath) - if err != nil { - return fmt.Errorf("failed to open %q: %w", fname, err) - } - if _, err := io.Copy(w, src); err != nil { - if err := src.Close(); err != nil { - log.Println(err) - } - return fmt.Errorf("failed writing zip entry: %w", err) - } - if err := src.Close(); err != nil { - log.Println(err) - } - } - if err := filepath.Walk(srcDir, func(fpath string, fi os.FileInfo, err error) error { - if err != nil { - return err - } - if fpath == srcDir { - return nil - } - - hdr, err := zip.FileInfoHeader(fi) - if err != nil { - return fmt.Errorf("failed to open zip header: %s", err) - } - // Enable compression, as it's disabled by default - hdr.Method = zip.Deflate - hdr.Name = fmt.Sprintf("grafana-%s/%s", version, strings.TrimPrefix(fpath, fmt.Sprintf("%s/", srcDir))) - if fi.IsDir() { - // A trailing slash means it's a directory - if hdr.Name[len(hdr.Name)-1] != '/' { - hdr.Name += "/" - } - } - w, err := zipWriter.CreateHeader(hdr) - if err != nil { - return fmt.Errorf("failed writing zip header: %s", err) - } - if fi.IsDir() { - return nil - } - - //nolint:gosec - src, err := os.Open(fpath) - if err != nil { - return fmt.Errorf("failed to open %q: %w", fpath, err) - } - if _, err := io.Copy(w, src); err != nil { - if err := src.Close(); err != nil { - log.Println(err) - } - return fmt.Errorf("failed writing zip entry: %w", err) - } - if err := src.Close(); err != nil { - log.Println(err) - } - return nil - }); err != nil { - return fmt.Errorf("failed to traverse directory %q: %w", srcDir, err) - } - - if err := zipWriter.Close(); err != nil { - return fmt.Errorf("failed writing %q: %w", fpath, err) - } - if err := tgt.Close(); err != nil { - return fmt.Errorf("failed writing %q: %w", fpath, err) - } - - log.Printf("Successfully created %q", fpath) - return nil -} - -// nolint -func createTarball(srcDir, 
version, variantStr, sfx, grafanaDir string) error { - fpath := filepath.Join(grafanaDir, "dist", fmt.Sprintf("grafana%s-%s.%s.tar.gz", sfx, version, variantStr)) - //nolint:gosec - tgt, err := os.Create(fpath) - if err != nil { - return fmt.Errorf("failed to create %q: %w", fpath, err) - } - defer func() { - if err := tgt.Close(); err != nil && !errors.Is(err, os.ErrClosed) { - log.Println(err) - } - }() - - //nolint:gosec - if err := os.Chmod(fpath, 0664); err != nil { - return fmt.Errorf("failed to set permissions on %q: %w", fpath, err) - } - gzWriter, err := gzip.NewWriterLevel(tgt, gzip.BestCompression) - if err != nil { - return fmt.Errorf("failed to create gzip writer: %w", err) - } - defer func() { - if err := gzWriter.Close(); err != nil { - log.Println(err) - } - }() - tarWriter := tar.NewWriter(gzWriter) - defer func() { - if err := tarWriter.Close(); err != nil { - log.Println(err) - } - }() - - for _, fname := range []string{"LICENSE", "README.md", "NOTICE.md"} { - fpath := filepath.Join(grafanaDir, fname) - fi, err := os.Lstat(fpath) - if err != nil { - return fmt.Errorf("couldn't stat %q: %w", fpath, err) - } - hdr, err := tar.FileInfoHeader(fi, "") - if err != nil { - return fmt.Errorf("failed getting tar header: %w", err) - } - hdr.Name = fmt.Sprintf("grafana-%s/%s", version, fname) - if err := tarWriter.WriteHeader(hdr); err != nil { - return fmt.Errorf("failed writing tar header: %w", err) - } - //nolint:gosec - src, err := os.Open(fpath) - if err != nil { - return fmt.Errorf("failed to open %q: %w", fname, err) - } - if _, err := io.Copy(tarWriter, src); err != nil { - if err := src.Close(); err != nil { - log.Println(err) - } - return fmt.Errorf("failed writing tar entry: %w", err) - } - if err := src.Close(); err != nil { - log.Println(err) - } - } - if err := filepath.Walk(srcDir, func(fpath string, fi os.FileInfo, err error) error { - if err != nil { - return err - } - if fpath == srcDir { - return nil - } - - linkTgt := "" - if 
fi.Mode()&os.ModeSymlink != 0 { - log.Printf("reading link '%s'", fpath) - linkTgt, err = os.Readlink(fpath) - if err != nil { - return err - } - linkTgt = fmt.Sprintf("grafana-%s/%s", version, linkTgt) - } - - hdr, err := tar.FileInfoHeader(fi, linkTgt) - if err != nil { - return fmt.Errorf("failed getting tar header: %w", err) - } - hdr.Name = fmt.Sprintf("grafana-%s/%s", version, strings.TrimPrefix(fpath, fmt.Sprintf("%s/", srcDir))) - if err := tarWriter.WriteHeader(hdr); err != nil { - return fmt.Errorf("failed writing tar header: %w", err) - } - if fi.IsDir() { - return nil - } - - //nolint:gosec - src, err := os.Open(fpath) - if err != nil { - return fmt.Errorf("failed to open %q: %w", fpath, err) - } - if _, err := io.Copy(tarWriter, src); err != nil { - if err := src.Close(); err != nil { - log.Println(err) - } - return fmt.Errorf("failed writing tar entry: %w", err) - } - if err := src.Close(); err != nil { - log.Println(err) - } - - return nil - }); err != nil { - return fmt.Errorf("failed to traverse directory %q: %w", srcDir, err) - } - - if err := tarWriter.Close(); err != nil { - return fmt.Errorf("failed writing %q: %w", fpath, err) - } - if err := gzWriter.Close(); err != nil { - return fmt.Errorf("failed writing %q: %w", fpath, err) - } - if err := tgt.Close(); err != nil { - return fmt.Errorf("failed writing %q: %w", fpath, err) - } - - st, err := os.Stat(fpath) - if err != nil { - return err - } - perms := st.Mode() & os.ModePerm - log.Printf("Successfully created package %q (permissions: %o)", fpath, perms) - - return nil -} diff --git a/pkg/build/packaging/grafana_test.go b/pkg/build/packaging/grafana_test.go deleted file mode 100644 index 7f45a63271830..0000000000000 --- a/pkg/build/packaging/grafana_test.go +++ /dev/null @@ -1,23 +0,0 @@ -package packaging_test - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/packaging" -) - -func 
TestPackageRegexp(t *testing.T) { - t.Run("It should match enterprise2 packages", func(t *testing.T) { - rgx := packaging.PackageRegexp(config.EditionEnterprise2) - matches := []string{ - "grafana-enterprise2-1.2.3-4567pre.linux-amd64.tar.gz", - "grafana-enterprise2-1.2.3-4567pre.linux-amd64.tar.gz.sha256", - } - for _, v := range matches { - assert.Truef(t, rgx.MatchString(v), "'%s' should match regex '%s'", v, rgx.String()) - } - }) -} diff --git a/pkg/build/plugins/build.go b/pkg/build/plugins/build.go deleted file mode 100644 index da9a23cd5d15f..0000000000000 --- a/pkg/build/plugins/build.go +++ /dev/null @@ -1,66 +0,0 @@ -package plugins - -import ( - "context" - "fmt" - "log" - "os" - "os/exec" - "path/filepath" - - "github.com/grafana/grafana/pkg/build/config" - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/fsutil" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -type PluginSigningMode = int - -// BuildPlugins builds internal plugins. -// The built plugins are placed in plugins-bundled/dist/. 
-func Build(ctx context.Context, grafanaDir string, p syncutil.WorkerPool, g *errutil.Group, verMode *config.BuildConfig) error { - log.Printf("Building plugins in %q...", grafanaDir) - - root := filepath.Join(grafanaDir, "plugins-bundled", "internal") - fis, err := os.ReadDir(root) - if err != nil { - return err - } - - for i := range fis { - fi := fis[i] - if !fi.IsDir() { - continue - } - - dpath := filepath.Join(root, fi.Name()) - - p.Schedule(g.Wrap(func() error { - log.Printf("Building plugin %q...", dpath) - - cmd := exec.Command("yarn", "build") - cmd.Dir = dpath - if output, err := cmd.CombinedOutput(); err != nil { - return fmt.Errorf("yarn build failed: %s", output) - } - - dstPath := filepath.Join("plugins-bundled", "dist", fi.Name()) - if err := fsutil.CopyRecursive(filepath.Join(dpath, "dist"), dstPath); err != nil { - return err - } - if !verMode.PluginSignature.Sign { - return nil - } - - return BuildManifest(ctx, dstPath, verMode.PluginSignature.AdminSign) - })) - } - - if err := g.Wait(); err != nil { - return err - } - - log.Printf("Built all plug-ins successfully!") - - return nil -} diff --git a/pkg/build/plugins/download.go b/pkg/build/plugins/download.go deleted file mode 100644 index ed8f00bf911e6..0000000000000 --- a/pkg/build/plugins/download.go +++ /dev/null @@ -1,118 +0,0 @@ -package plugins - -import ( - "context" - "crypto/sha256" - "encoding/hex" - "encoding/json" - "fmt" - "io" - "log" - "net/http" - "os" - "path/filepath" - - "github.com/grafana/grafana/pkg/build/errutil" - "github.com/grafana/grafana/pkg/build/syncutil" -) - -// logCloseError executes the closeFunc; if it returns an error, it is logged by the log package. -func logCloseError(closeFunc func() error) { - if err := closeFunc(); err != nil { - log.Println(err) - } -} - -// logCloseError executes the closeFunc; if it returns an error, it is logged by the log package. 
-func logError(err error) { - if err != nil { - log.Println(err) - } -} - -// pluginManifest has details of an external plugin package. -type pluginManifest struct { - Name string `json:"name"` - Version string `json:"version"` - Checksum string `json:"checksum"` -} - -// pluginsManifest represents a manifest of Grafana's external plugins. -type pluginsManifest struct { - Plugins []pluginManifest `json:"plugins"` -} - -// downloadPlugins downloads Grafana plugins that should be bundled into packages. -// -// The plugin archives are downloaded into /plugins-bundled. -func Download(ctx context.Context, grafanaDir string, p syncutil.WorkerPool) error { - g, _ := errutil.GroupWithContext(ctx) - - log.Println("Downloading external plugins...") - - var m pluginsManifest - manifestPath := filepath.Join(grafanaDir, "plugins-bundled", "external.json") - //nolint:gosec - manifestB, err := os.ReadFile(manifestPath) - if err != nil { - return fmt.Errorf("failed to open plugins manifest %q: %w", manifestPath, err) - } - if err := json.Unmarshal(manifestB, &m); err != nil { - return err - } - - for i := range m.Plugins { - pm := m.Plugins[i] - p.Schedule(g.Wrap(func() error { - tgt := filepath.Join(grafanaDir, "plugins-bundled", fmt.Sprintf("%s-%s.zip", pm.Name, pm.Version)) - //nolint:gosec - out, err := os.Create(tgt) - if err != nil { - return err - } - defer logCloseError(out.Close) - - u := fmt.Sprintf("http://storage.googleapis.com/plugins-ci/plugins/%s/%s-%s.zip", pm.Name, pm.Name, - pm.Version) - log.Printf("Downloading plugin %q to %q...", u, tgt) - // nolint:gosec - resp, err := http.Get(u) - if err != nil { - return fmt.Errorf("downloading %q failed: %w", u, err) - } - defer logError(resp.Body.Close()) - - if resp.StatusCode != http.StatusOK { - return fmt.Errorf("failed to download %q, status code %d", u, resp.StatusCode) - } - - if _, err := io.Copy(out, resp.Body); err != nil { - return fmt.Errorf("downloading %q failed: %w", u, err) - } - if err := out.Close(); 
err != nil { - return fmt.Errorf("downloading %q failed: %w", u, err) - } - - //nolint:gosec - fd, err := os.Open(tgt) - if err != nil { - return err - } - defer logCloseError(fd.Close) - - h := sha256.New() - if _, err := io.Copy(h, fd); err != nil { - return err - } - - chksum := hex.EncodeToString(h.Sum(nil)) - if chksum != pm.Checksum { - return fmt.Errorf("plugin %q has bad checksum: %s (expected %s)", u, chksum, pm.Checksum) - } - - return Unzip(tgt, filepath.Join(grafanaDir, "plugins-bundled")) - })) - } - - return g.Wait() -} diff --git a/pkg/build/plugins/manifest.go b/pkg/build/plugins/manifest.go deleted file mode 100644 index 359e8ad77bc0a..0000000000000 --- a/pkg/build/plugins/manifest.go +++ /dev/null @@ -1,204 +0,0 @@ -package plugins - -import ( - "bytes" - "context" - "crypto/sha256" - "encoding/json" - "fmt" - "io" - "log" - "net/http" - "os" - "path/filepath" -) - -type manifest struct { - Plugin string `json:"plugin"` - Version string `json:"version"` - Files map[string]string `json:"files"` -} - -func getManifest(dpath string, chksums map[string]string) (manifest, error) { - m := manifest{} - - type pluginInfo struct { - Version string `json:"version"` - } - - type plugin struct { - ID string `json:"id"` - Info pluginInfo `json:"info"` - } - - //nolint:gosec - f, err := os.Open(filepath.Join(dpath, "plugin.json")) - if err != nil { - return m, err - } - decoder := json.NewDecoder(f) - var p plugin - if err := decoder.Decode(&p); err != nil { - return m, err - } - - if p.ID == "" { - return m, fmt.Errorf("plugin.json doesn't define id") - } - if p.Info.Version == "" { - return m, fmt.Errorf("plugin.json doesn't define info.version") - } - - return manifest{ - Plugin: p.ID, - Version: p.Info.Version, - Files: chksums, - }, nil -} - -// BuildManifest requests a plugin's signed manifest file fromt he Grafana API. -// If signingAdmin is true, the manifest signing admin endpoint (without plugin ID) will be used, and requires -// an admin API key. 
-func BuildManifest(ctx context.Context, dpath string, signingAdmin bool) error { - log.Printf("Building manifest for plug-in at %q", dpath) - - apiKey := os.Getenv("GRAFANA_API_KEY") - if apiKey == "" { - return fmt.Errorf("GRAFANA_API_KEY must be set") - } - - manifestPath := filepath.Join(dpath, "MANIFEST.txt") - chksums, err := getChksums(dpath, manifestPath) - if err != nil { - return err - } - m, err := getManifest(dpath, chksums) - if err != nil { - return err - } - - b := bytes.NewBuffer(nil) - encoder := json.NewEncoder(b) - if err := encoder.Encode(&m); err != nil { - return err - } - jsonB := b.Bytes() - u := "https://grafana.com/api/plugins/ci/sign" - if !signingAdmin { - u = fmt.Sprintf("https://grafana.com/api/plugins/%s/ci/sign", m.Plugin) - } - log.Printf("Requesting signed manifest from Grafana API...") - req, err := http.NewRequestWithContext(ctx, "POST", u, bytes.NewReader(jsonB)) - if err != nil { - return err - } - req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", apiKey)) - req.Header.Add("Content-Type", "application/json") - resp, err := http.DefaultClient.Do(req) - if err != nil { - return fmt.Errorf("failed to get signed manifest from Grafana API: %w", err) - } - defer func() { - if err := resp.Body.Close(); err != nil { - log.Println("failed to close response body, err: %w", err) - } - }() - if resp.StatusCode != 200 { - msg, err := io.ReadAll(resp.Body) - if err != nil { - log.Printf("Failed to read response body: %s", err) - msg = []byte("") - } - return fmt.Errorf("request for signed manifest failed with status code %d: %s", resp.StatusCode, string(msg)) - } - - log.Printf("Successfully signed manifest via Grafana API, writing to %q", manifestPath) - //nolint:gosec - f, err := os.Create(manifestPath) - if err != nil { - return fmt.Errorf("failed to create %s: %w", manifestPath, err) - } - defer func() { - if err := f.Close(); err != nil { - log.Println("failed to close file, err: %w", err) - } - }() - if _, err := io.Copy(f, 
resp.Body); err != nil { - return fmt.Errorf("failed to write %s: %w", manifestPath, err) - } - if err := f.Close(); err != nil { - return fmt.Errorf("failed to write %s: %w", manifestPath, err) - } - - return nil -} - -func getChksums(dpath, manifestPath string) (map[string]string, error) { - manifestPath = filepath.Clean(manifestPath) - - chksums := map[string]string{} - if err := filepath.Walk(dpath, func(path string, fi os.FileInfo, err error) error { - if err != nil { - return err - } - - if fi.IsDir() { - return nil - } - - path = filepath.Clean(path) - - // Handle symbolic links - if fi.Mode()&os.ModeSymlink == os.ModeSymlink { - finalPath, err := filepath.EvalSymlinks(path) - if err != nil { - return err - } - - log.Printf("Handling symlink %q, pointing to %q", path, finalPath) - - info, err := os.Stat(finalPath) - if err != nil { - return err - } - if info.IsDir() { - return nil - } - - if _, err := filepath.Rel(dpath, finalPath); err != nil { - return fmt.Errorf("symbolic link %q targets a file outside of the plugin directory: %q", path, finalPath) - } - - if finalPath == manifestPath { - return nil - } - } - - if path == manifestPath { - return nil - } - - h := sha256.New() - //nolint:gosec - f, err := os.Open(path) - if err != nil { - return err - } - defer logCloseError(f.Close) - if _, err := io.Copy(h, f); err != nil { - return err - } - - relPath, err := filepath.Rel(dpath, path) - if err != nil { - return err - } - chksums[relPath] = fmt.Sprintf("%x", h.Sum(nil)) - - return nil - }); err != nil { - return nil, err - } - - return chksums, nil -} diff --git a/pkg/build/plugins/zip.go b/pkg/build/plugins/zip.go deleted file mode 100644 index 73f8e8d82f6b3..0000000000000 --- a/pkg/build/plugins/zip.go +++ /dev/null @@ -1,64 +0,0 @@ -package plugins - -import ( - "archive/zip" - "io" - "log" - "os" - "path/filepath" -) - -// Unzip unzips a plugin. 
-func Unzip(fpath, tgtDir string) error { - log.Printf("Unzipping plugin %q into %q...", fpath, tgtDir) - - r, err := zip.OpenReader(fpath) - if err != nil { - return err - } - defer logCloseError(r.Close) - - // Closure to address file descriptors issue with all the deferred .Close() methods - extractAndWriteFile := func(f *zip.File) error { - log.Printf("Extracting zip member %q...", f.Name) - - rc, err := f.Open() - if err != nil { - return err - } - defer logCloseError(rc.Close) - - //nolint:gosec - dstPath := filepath.Join(tgtDir, f.Name) - - if f.FileInfo().IsDir() { - return os.MkdirAll(dstPath, f.Mode()) - } - - if err := os.MkdirAll(filepath.Dir(dstPath), f.Mode()); err != nil { - return err - } - - //nolint:gosec - fd, err := os.OpenFile(dstPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) - if err != nil { - return err - } - defer logCloseError(fd.Close) - - // nolint:gosec - if _, err := io.Copy(fd, rc); err != nil { - return err - } - - return fd.Close() - } - - for _, f := range r.File { - if err := extractAndWriteFile(f); err != nil { - return err - } - } - - return nil -} diff --git a/pkg/build/stringutil/contains.go b/pkg/build/stringutil/contains.go deleted file mode 100644 index b53efe707594d..0000000000000 --- a/pkg/build/stringutil/contains.go +++ /dev/null @@ -1,10 +0,0 @@ -package stringutil - -func Contains(arr []string, s string) bool { - for _, e := range arr { - if e == s { - return true - } - } - return false -} diff --git a/pkg/build/syncutil/pool.go b/pkg/build/syncutil/pool.go deleted file mode 100644 index 6034059d2bf8b..0000000000000 --- a/pkg/build/syncutil/pool.go +++ /dev/null @@ -1,43 +0,0 @@ -package syncutil - -import ( - "log" - "runtime" -) - -func worker(jobs chan func()) { - for j := range jobs { - j() - } -} - -// WorkerPool represents a concurrent worker pool. -type WorkerPool struct { - NumWorkers int - jobs chan func() -} - -// NewWorkerPool constructs a new WorkerPool. 
-func NewWorkerPool(numWorkers int) WorkerPool { - if numWorkers <= 0 { - numWorkers = runtime.NumCPU() - } - log.Printf("Creating worker pool with %d workers", numWorkers) - jobs := make(chan func(), 100) - for i := 0; i < numWorkers; i++ { - go worker(jobs) - } - return WorkerPool{ - NumWorkers: numWorkers, - jobs: jobs, - } -} - -// Schedule schedules a job to be executed by a worker in the pool. -func (p WorkerPool) Schedule(job func()) { - p.jobs <- job -} - -func (p WorkerPool) Close() { - close(p.jobs) -} diff --git a/pkg/build/validation/validation.go b/pkg/build/validation/validation.go index 1795126111441..bc01ed189b7a2 100644 --- a/pkg/build/validation/validation.go +++ b/pkg/build/validation/validation.go @@ -1,9 +1,5 @@ package validation -import ( - "context" -) - type ArtifactType int const ( @@ -15,13 +11,3 @@ type Artifact struct { Type ArtifactType URL string } - -// ReleaseArtifacts generates a list of release artifacts -func ReleaseArtifacts(version string) ([]Artifact, error) { - return nil, nil -} - -// VerifyRelease tests that a that, given the information, a release will completed wholly and successfully. 
-func VerifyRelease(ctx context.Context, version string) (bool, error) { - return false, nil -} diff --git a/pkg/build/versions/parse.go b/pkg/build/versions/parse.go new file mode 100644 index 0000000000000..0a38af4329cfa --- /dev/null +++ b/pkg/build/versions/parse.go @@ -0,0 +1,31 @@ +package versions + +import "regexp" + +var semverRegex = regexp.MustCompile(`^(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`) + +type Semver struct { + Major string + Minor string + Patch string + Prerelease string + BuildMetadata string +} + +func ParseSemver(version string) Semver { + matches := semverRegex.FindStringSubmatch(version) + results := make(map[string]string) + for i, name := range semverRegex.SubexpNames() { + if i != 0 && name != "" { + results[name] = matches[i] + } + } + + return Semver{ + Major: results["major"], + Minor: results["minor"], + Patch: results["patch"], + Prerelease: results["prerelease"], + BuildMetadata: results["buildmetadata"], + } +} diff --git a/pkg/build/versions/version.go b/pkg/build/versions/version.go index 5197559b95af0..240c750bf6825 100644 --- a/pkg/build/versions/version.go +++ b/pkg/build/versions/version.go @@ -13,15 +13,17 @@ import ( ) var ( - reGrafanaTag = regexp.MustCompile(`^v(\d+\.\d+\.\d+$)`) - reGrafanaTagPreview = regexp.MustCompile(`^v(\d+\.\d+\.\d+-preview)`) - reGrafanaTagCustom = regexp.MustCompile(`^v(\d+\.\d+\.\d+-\w+)`) + reGrafanaTag = regexp.MustCompile(`^v(\d+\.\d+\.\d+$)`) + reGrafanaTagPreview = regexp.MustCompile(`^v(\d+\.\d+\.\d+-preview)`) + reGrafanaTagCustom = regexp.MustCompile(`^v(\d+\.\d+\.\d+-\w+)`) + reGrafanaTagSecurity = regexp.MustCompile(`^v(\d+\.\d+\.\d+\+\w+\-\d+)`) ) const ( - Latest = "latest" - Next = "next" - Test = "test" + Latest = "latest" + Next = "next" + Test = "test" + Security = "security" ) type Version struct { @@ -152,6 +154,11 @@ func 
GetVersion(tag string) (*Version, error) { Version: reGrafanaTagCustom.FindStringSubmatch(tag)[1], Channel: Test, } + case reGrafanaTagSecurity.MatchString(tag): + version = Version{ + Version: reGrafanaTagSecurity.FindStringSubmatch(tag)[1], + Channel: Security, + } default: return nil, fmt.Errorf("%s not a supported Grafana version, exitting", tag) } diff --git a/pkg/build/wire/go.mod b/pkg/build/wire/go.mod index 5305670e8e02b..12c2659eef49e 100644 --- a/pkg/build/wire/go.mod +++ b/pkg/build/wire/go.mod @@ -1,6 +1,6 @@ module github.com/grafana/grafana/pkg/build/wire -go 1.22.4 +go 1.22.7 require ( github.com/google/go-cmp v0.6.0 diff --git a/pkg/expr/sql/db.go b/pkg/expr/sql/db.go new file mode 100644 index 0000000000000..1e7aca0c1b619 --- /dev/null +++ b/pkg/expr/sql/db.go @@ -0,0 +1,26 @@ +package sql + +import ( + "errors" + + "github.com/grafana/grafana-plugin-sdk-go/data" +) + +type DB struct { +} + +func (db *DB) TablesList(rawSQL string) ([]string, error) { + return nil, errors.New("not implemented") +} + +func (db *DB) RunCommands(commands []string) (string, error) { + return "", errors.New("not implemented") +} + +func (db *DB) QueryFramesInto(name string, query string, frames []*data.Frame, f *data.Frame) error { + return errors.New("not implemented") +} + +func NewInMemoryDB() *DB { + return &DB{} +} diff --git a/pkg/expr/sql/parser.go b/pkg/expr/sql/parser.go index 809bbe96f9b33..a5ca1fb6f5bbc 100644 --- a/pkg/expr/sql/parser.go +++ b/pkg/expr/sql/parser.go @@ -8,7 +8,6 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/jeremywohl/flatten" - "github.com/scottlepp/go-duck/duck" ) const ( @@ -21,7 +20,7 @@ var logger = log.New("sql_expr") // TablesList returns a list of tables for the sql statement func TablesList(rawSQL string) ([]string, error) { - duckDB := duck.NewInMemoryDB() + duckDB := NewInMemoryDB() rawSQL = strings.Replace(rawSQL, "'", "''", -1) cmd := fmt.Sprintf("SELECT json_serialize_sql('%s')", rawSQL) ret, err := 
duckDB.RunCommands([]string{cmd}) diff --git a/pkg/expr/sql_command.go b/pkg/expr/sql_command.go index ce86d3258c667..41e966e02e9e2 100644 --- a/pkg/expr/sql_command.go +++ b/pkg/expr/sql_command.go @@ -7,7 +7,6 @@ import ( "time" "github.com/grafana/grafana-plugin-sdk-go/data" - "github.com/scottlepp/go-duck/duck" "github.com/grafana/grafana/pkg/expr/mathexp" "github.com/grafana/grafana/pkg/expr/sql" @@ -94,7 +93,7 @@ func (gr *SQLCommand) Execute(ctx context.Context, now time.Time, vars mathexp.V rsp := mathexp.Results{} - duckDB := duck.NewInMemoryDB() + duckDB := sql.NewInMemoryDB() var frame = &data.Frame{} logger.Debug("Executing query", "query", gr.query, "frames", len(allFrames)) diff --git a/pkg/promlib/go.mod b/pkg/promlib/go.mod index 3c1e68768a23d..1411f26ad8780 100644 --- a/pkg/promlib/go.mod +++ b/pkg/promlib/go.mod @@ -1,6 +1,6 @@ module github.com/grafana/grafana/pkg/promlib -go 1.21.10 +go 1.22.7 require ( github.com/grafana/grafana-plugin-sdk-go v0.234.0 diff --git a/pkg/services/accesscontrol/resourcepermissions/store.go b/pkg/services/accesscontrol/resourcepermissions/store.go index bd8a76666718a..7e008c08e8314 100644 --- a/pkg/services/accesscontrol/resourcepermissions/store.go +++ b/pkg/services/accesscontrol/resourcepermissions/store.go @@ -671,7 +671,7 @@ func (s *store) createPermissions(sess *db.Session, roleID int64, cmd SetResourc /* Add ACTION SET of managed permissions to in-memory store */ - if s.shouldStoreActionSet(permission) { + if s.shouldStoreActionSet(resource, permission) { actionSetName := GetActionSetName(resource, permission) p := managedPermission(actionSetName, resource, resourceID, resourceAttribute) p.RoleID = roleID @@ -683,13 +683,13 @@ func (s *store) createPermissions(sess *db.Session, roleID int64, cmd SetResourc // If there are no missing actions for the resource (in case of access level downgrade or resource removal), we don't need to insert any actions // we still want to add the action set (when permission != 
"") - if len(missingActions) == 0 && !s.shouldStoreActionSet(permission) { + if len(missingActions) == 0 && !s.shouldStoreActionSet(resource, permission) { return nil } // if we have actionset feature enabled and are only working with action sets // skip adding the missing actions to the permissions table - if !(s.shouldStoreActionSet(permission) && s.cfg.OnlyStoreAccessActionSets) { + if !(s.shouldStoreActionSet(resource, permission) && s.cfg.OnlyStoreAccessActionSets) { for action := range missingActions { p := managedPermission(action, resource, resourceID, resourceAttribute) p.RoleID = roleID @@ -706,8 +706,12 @@ func (s *store) createPermissions(sess *db.Session, roleID int64, cmd SetResourc return nil } -func (s *store) shouldStoreActionSet(permission string) bool { - return (s.features.IsEnabled(context.TODO(), featuremgmt.FlagAccessActionSets) && permission != "") +func (s *store) shouldStoreActionSet(resource, permission string) bool { + if permission == "" { + return false + } + actionSetName := GetActionSetName(resource, permission) + return isFolderOrDashboardAction(actionSetName) } func deletePermissions(sess *db.Session, ids []int64) error { diff --git a/pkg/services/annotations/accesscontrol/accesscontrol.go b/pkg/services/annotations/accesscontrol/accesscontrol.go index d0d22703414cf..5462b49d0fe2a 100644 --- a/pkg/services/annotations/accesscontrol/accesscontrol.go +++ b/pkg/services/annotations/accesscontrol/accesscontrol.go @@ -39,7 +39,7 @@ func NewAuthService(db db.DB, features featuremgmt.FeatureToggles) *AuthService } // Authorize checks if the user has permission to read annotations, then returns a struct containing dashboards and scope types that the user has access to. 
-func (authz *AuthService) Authorize(ctx context.Context, orgID int64, query *annotations.ItemQuery) (*AccessResources, error) { +func (authz *AuthService) Authorize(ctx context.Context, query annotations.ItemQuery) (*AccessResources, error) { user := query.SignedInUser if user == nil || user.IsNil() { return nil, ErrReadForbidden.Errorf("missing user") @@ -60,14 +60,14 @@ func (authz *AuthService) Authorize(ctx context.Context, orgID int64, query *ann var err error if canAccessDashAnnotations { if query.AnnotationID != 0 { - annotationDashboardID, err := authz.getAnnotationDashboard(ctx, query, orgID) + annotationDashboardID, err := authz.getAnnotationDashboard(ctx, query) if err != nil { return nil, ErrAccessControlInternal.Errorf("failed to fetch annotations: %w", err) } query.DashboardID = annotationDashboardID } - visibleDashboards, err = authz.dashboardsWithVisibleAnnotations(ctx, query, orgID) + visibleDashboards, err = authz.dashboardsWithVisibleAnnotations(ctx, query) if err != nil { return nil, ErrAccessControlInternal.Errorf("failed to fetch dashboards: %w", err) } @@ -80,7 +80,7 @@ func (authz *AuthService) Authorize(ctx context.Context, orgID int64, query *ann }, nil } -func (authz *AuthService) getAnnotationDashboard(ctx context.Context, query *annotations.ItemQuery, orgID int64) (int64, error) { +func (authz *AuthService) getAnnotationDashboard(ctx context.Context, query annotations.ItemQuery) (int64, error) { var items []annotations.Item params := make([]any, 0) err := authz.db.WithDbSession(ctx, func(sess *db.Session) error { @@ -92,7 +92,7 @@ func (authz *AuthService) getAnnotationDashboard(ctx context.Context, query *ann FROM annotation as a WHERE a.org_id = ? AND a.id = ? 
` - params = append(params, orgID, query.AnnotationID) + params = append(params, query.OrgID, query.AnnotationID) return sess.SQL(sql, params...).Find(&items) }) @@ -106,7 +106,7 @@ func (authz *AuthService) getAnnotationDashboard(ctx context.Context, query *ann return items[0].DashboardID, nil } -func (authz *AuthService) dashboardsWithVisibleAnnotations(ctx context.Context, query *annotations.ItemQuery, orgID int64) (map[string]int64, error) { +func (authz *AuthService) dashboardsWithVisibleAnnotations(ctx context.Context, query annotations.ItemQuery) (map[string]int64, error) { recursiveQueriesSupported, err := authz.db.RecursiveQueriesAreSupported() if err != nil { return nil, err @@ -119,7 +119,7 @@ func (authz *AuthService) dashboardsWithVisibleAnnotations(ctx context.Context, filters := []any{ permissions.NewAccessControlDashboardPermissionFilter(query.SignedInUser, dashboardaccess.PERMISSION_VIEW, filterType, authz.features, recursiveQueriesSupported), - searchstore.OrgFilter{OrgId: orgID}, + searchstore.OrgFilter{OrgId: query.OrgID}, } if query.DashboardUID != "" { @@ -134,32 +134,25 @@ func (authz *AuthService) dashboardsWithVisibleAnnotations(ctx context.Context, } sb := &searchstore.Builder{Dialect: authz.db.GetDialect(), Filters: filters, Features: authz.features} - - visibleDashboards := make(map[string]int64) - - var page int64 = 1 + // This is a limit for a batch size, not for the end query result. 
var limit int64 = 1000 - for { - var res []dashboardProjection - sql, params := sb.ToSQL(limit, page) - - err = authz.db.WithDbSession(ctx, func(sess *db.Session) error { - return sess.SQL(sql, params...).Find(&res) - }) - if err != nil { - return nil, err - } + if query.Page == 0 { + query.Page = 1 + } + sql, params := sb.ToSQL(limit, query.Page) - for _, p := range res { - visibleDashboards[p.UID] = p.ID - } + visibleDashboards := make(map[string]int64) + var res []dashboardProjection - // if the result is less than the limit, we have reached the end - if len(res) < int(limit) { - break - } + err = authz.db.WithDbSession(ctx, func(sess *db.Session) error { + return sess.SQL(sql, params...).Find(&res) + }) + if err != nil { + return nil, err + } - page++ + for _, p := range res { + visibleDashboards[p.UID] = p.ID } return visibleDashboards, nil diff --git a/pkg/services/annotations/accesscontrol/accesscontrol_test.go b/pkg/services/annotations/accesscontrol/accesscontrol_test.go index ad845b8996785..f3d82853b7cad 100644 --- a/pkg/services/annotations/accesscontrol/accesscontrol_test.go +++ b/pkg/services/annotations/accesscontrol/accesscontrol_test.go @@ -175,8 +175,8 @@ func TestIntegrationAuthorize(t *testing.T) { authz := NewAuthService(sql, featuremgmt.WithFeatures(tc.featureToggle)) - query := &annotations.ItemQuery{SignedInUser: u} - resources, err := authz.Authorize(context.Background(), 1, query) + query := annotations.ItemQuery{SignedInUser: u, OrgID: 1} + resources, err := authz.Authorize(context.Background(), query) require.NoError(t, err) if tc.expectedResources.Dashboards != nil { diff --git a/pkg/services/annotations/accesscontrol/models.go b/pkg/services/annotations/accesscontrol/models.go index 7b74de929d8e0..8a2c7ed01e5c8 100644 --- a/pkg/services/annotations/accesscontrol/models.go +++ b/pkg/services/annotations/accesscontrol/models.go @@ -8,6 +8,8 @@ type AccessResources struct { CanAccessDashAnnotations bool // CanAccessOrgAnnotations true if 
the user is allowed to access organization annotations CanAccessOrgAnnotations bool + // Skip filtering + SkipAccessControlFilter bool } type dashboardProjection struct { diff --git a/pkg/services/annotations/annotationsimpl/annotations.go b/pkg/services/annotations/annotationsimpl/annotations.go index 3cf88288d270b..34c142df8dcd3 100644 --- a/pkg/services/annotations/annotationsimpl/annotations.go +++ b/pkg/services/annotations/annotationsimpl/annotations.go @@ -68,12 +68,50 @@ func (r *RepositoryImpl) Update(ctx context.Context, item *annotations.Item) err } func (r *RepositoryImpl) Find(ctx context.Context, query *annotations.ItemQuery) ([]*annotations.ItemDTO, error) { - resources, err := r.authZ.Authorize(ctx, query.OrgID, query) - if err != nil { - return make([]*annotations.ItemDTO, 0), err + if query.Limit == 0 { + query.Limit = 100 } - return r.reader.Get(ctx, query, resources) + // Search without dashboard UID filter is expensive, so check without access control first + if query.DashboardID == 0 && query.DashboardUID == "" { + // Return early if no annotations found, it's not necessary to perform expensive access control filtering + res, err := r.reader.Get(ctx, *query, &accesscontrol.AccessResources{ + SkipAccessControlFilter: true, + }) + if err != nil || len(res) == 0 { + return []*annotations.ItemDTO{}, err + } + // If number of resources is less than limit, it makes sense to set query limit to this + // value, otherwise query will be iterating over all user's dashboards since original + // query limit is never reached. 
+ query.Limit = int64(len(res)) + } + + results := make([]*annotations.ItemDTO, 0, query.Limit) + query.Page = 1 + + // Iterate over available annotations until query limit is reached + // or all available dashboards are checked + for len(results) < int(query.Limit) { + resources, err := r.authZ.Authorize(ctx, *query) + if err != nil { + return nil, err + } + + res, err := r.reader.Get(ctx, *query, resources) + if err != nil { + return nil, err + } + + results = append(results, res...) + query.Page++ + // All user's dashboards are fetched + if len(resources.Dashboards) < int(query.Limit) { + break + } + } + + return results, nil } func (r *RepositoryImpl) Delete(ctx context.Context, params *annotations.DeleteParams) error { @@ -81,5 +119,5 @@ func (r *RepositoryImpl) Delete(ctx context.Context, params *annotations.DeleteP } func (r *RepositoryImpl) FindTags(ctx context.Context, query *annotations.TagsQuery) (annotations.FindTagsResult, error) { - return r.reader.GetTags(ctx, query) + return r.reader.GetTags(ctx, *query) } diff --git a/pkg/services/annotations/annotationsimpl/composite_store.go b/pkg/services/annotations/annotationsimpl/composite_store.go index 3bcf0724b52d5..5f0b1a213b4c1 100644 --- a/pkg/services/annotations/annotationsimpl/composite_store.go +++ b/pkg/services/annotations/annotationsimpl/composite_store.go @@ -31,7 +31,7 @@ func (c *CompositeStore) Type() string { } // Get returns annotations from all stores, and combines the results. 
-func (c *CompositeStore) Get(ctx context.Context, query *annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { +func (c *CompositeStore) Get(ctx context.Context, query annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { itemCh := make(chan []*annotations.ItemDTO, len(c.readers)) err := concurrency.ForEachJob(ctx, len(c.readers), len(c.readers), func(ctx context.Context, i int) (err error) { @@ -56,7 +56,7 @@ func (c *CompositeStore) Get(ctx context.Context, query *annotations.ItemQuery, } // GetTags returns tags from all stores, and combines the results. -func (c *CompositeStore) GetTags(ctx context.Context, query *annotations.TagsQuery) (annotations.FindTagsResult, error) { +func (c *CompositeStore) GetTags(ctx context.Context, query annotations.TagsQuery) (annotations.FindTagsResult, error) { resCh := make(chan annotations.FindTagsResult, len(c.readers)) err := concurrency.ForEachJob(ctx, len(c.readers), len(c.readers), func(ctx context.Context, i int) (err error) { diff --git a/pkg/services/annotations/annotationsimpl/composite_store_test.go b/pkg/services/annotations/annotationsimpl/composite_store_test.go index b5aa1d22d8c21..c916fc3eaa075 100644 --- a/pkg/services/annotations/annotationsimpl/composite_store_test.go +++ b/pkg/services/annotations/annotationsimpl/composite_store_test.go @@ -7,10 +7,11 @@ import ( "testing" "time" + "github.com/stretchr/testify/require" + "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/services/annotations" "github.com/grafana/grafana/pkg/services/annotations/accesscontrol" - "github.com/stretchr/testify/require" ) var ( @@ -21,7 +22,7 @@ var ( func TestCompositeStore(t *testing.T) { t.Run("should handle panic", func(t *testing.T) { r1 := newFakeReader() - getPanic := func(context.Context, *annotations.ItemQuery, *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { + getPanic := 
func(context.Context, annotations.ItemQuery, *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { panic("ohno") } r2 := newFakeReader(withGetFn(getPanic)) @@ -30,7 +31,7 @@ func TestCompositeStore(t *testing.T) { []readStore{r1, r2}, } - _, err := store.Get(context.Background(), nil, nil) + _, err := store.Get(context.Background(), annotations.ItemQuery{}, nil) require.Error(t, err) require.Contains(t, err.Error(), "concurrent job panic") }) @@ -51,11 +52,11 @@ func TestCompositeStore(t *testing.T) { err error }{ { - f: func() (any, error) { return store.Get(context.Background(), nil, nil) }, + f: func() (any, error) { return store.Get(context.Background(), annotations.ItemQuery{}, nil) }, err: errGet, }, { - f: func() (any, error) { return store.GetTags(context.Background(), nil) }, + f: func() (any, error) { return store.GetTags(context.Background(), annotations.TagsQuery{}) }, err: errGetTags, }, } @@ -93,7 +94,7 @@ func TestCompositeStore(t *testing.T) { {TimeEnd: 1, Time: 1}, } - items, _ := store.Get(context.Background(), nil, nil) + items, _ := store.Get(context.Background(), annotations.ItemQuery{}, nil) require.Equal(t, expected, items) }) @@ -122,16 +123,40 @@ func TestCompositeStore(t *testing.T) { {Tag: "key2:val2"}, } - res, _ := store.GetTags(context.Background(), nil) + res, _ := store.GetTags(context.Background(), annotations.TagsQuery{}) require.Equal(t, expected, res.Tags) }) + + // Check if reader is not modifying query since it might cause a race condition in case of composite store + t.Run("should not modify query", func(t *testing.T) { + getFn1 := func(ctx context.Context, query annotations.ItemQuery, resources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { + query.From = 1 + return []*annotations.ItemDTO{}, nil + } + getFn2 := func(ctx context.Context, query annotations.ItemQuery, resources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { + return []*annotations.ItemDTO{}, nil + } + + r1 := 
newFakeReader(withGetFn(getFn1)) + r2 := newFakeReader(withGetFn(getFn2)) + + store := &CompositeStore{ + log.NewNopLogger(), + []readStore{r1, r2}, + } + + query := annotations.ItemQuery{} + _, err := store.Get(context.Background(), query, nil) + require.NoError(t, err) + require.Equal(t, int64(0), query.From) + }) } type fakeReader struct { items []*annotations.ItemDTO tagRes annotations.FindTagsResult - getFn func(context.Context, *annotations.ItemQuery, *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) - getTagFn func(context.Context, *annotations.TagsQuery) (annotations.FindTagsResult, error) + getFn func(context.Context, annotations.ItemQuery, *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) + getTagFn func(context.Context, annotations.TagsQuery) (annotations.FindTagsResult, error) wait time.Duration err error } @@ -140,7 +165,7 @@ func (f *fakeReader) Type() string { return "fake" } -func (f *fakeReader) Get(ctx context.Context, query *annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { +func (f *fakeReader) Get(ctx context.Context, query annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { if f.getFn != nil { return f.getFn(ctx, query, accessResources) } @@ -157,7 +182,7 @@ func (f *fakeReader) Get(ctx context.Context, query *annotations.ItemQuery, acce return f.items, nil } -func (f *fakeReader) GetTags(ctx context.Context, query *annotations.TagsQuery) (annotations.FindTagsResult, error) { +func (f *fakeReader) GetTags(ctx context.Context, query annotations.TagsQuery) (annotations.FindTagsResult, error) { if f.getTagFn != nil { return f.getTagFn(ctx, query) } @@ -198,7 +223,7 @@ func withTags(tags []*annotations.TagsDTO) func(*fakeReader) { } } -func withGetFn(fn func(context.Context, *annotations.ItemQuery, *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error)) func(*fakeReader) { +func withGetFn(fn 
func(context.Context, annotations.ItemQuery, *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error)) func(*fakeReader) { return func(f *fakeReader) { f.getFn = fn } diff --git a/pkg/services/annotations/annotationsimpl/loki/historian_store.go b/pkg/services/annotations/annotationsimpl/loki/historian_store.go index 1c4cf9da98cd1..c37572524a2ce 100644 --- a/pkg/services/annotations/annotationsimpl/loki/historian_store.go +++ b/pkg/services/annotations/annotationsimpl/loki/historian_store.go @@ -8,23 +8,21 @@ import ( "sort" "time" - "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/services/annotations/accesscontrol" - "github.com/grafana/grafana/pkg/services/featuremgmt" - "github.com/grafana/grafana/pkg/services/ngalert" + "github.com/prometheus/client_golang/prometheus" "golang.org/x/exp/constraints" + "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/db" "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/services/annotations" + "github.com/grafana/grafana/pkg/services/annotations/accesscontrol" + "github.com/grafana/grafana/pkg/services/featuremgmt" + "github.com/grafana/grafana/pkg/services/ngalert" ngmetrics "github.com/grafana/grafana/pkg/services/ngalert/metrics" ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models" "github.com/grafana/grafana/pkg/services/ngalert/state" "github.com/grafana/grafana/pkg/services/ngalert/state/historian" historymodel "github.com/grafana/grafana/pkg/services/ngalert/state/historian/model" - - "github.com/prometheus/client_golang/prometheus" - - "github.com/grafana/grafana/pkg/services/annotations" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util/errutil" ) @@ -73,7 +71,7 @@ func (r *LokiHistorianStore) Type() string { return "loki" } -func (r *LokiHistorianStore) Get(ctx context.Context, query *annotations.ItemQuery, accessResources *accesscontrol.AccessResources) 
([]*annotations.ItemDTO, error) { +func (r *LokiHistorianStore) Get(ctx context.Context, query annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { if query.Type == "annotation" { return make([]*annotations.ItemDTO, 0), nil } @@ -96,7 +94,7 @@ func (r *LokiHistorianStore) Get(ctx context.Context, query *annotations.ItemQue } } - logQL, err := historian.BuildLogQuery(buildHistoryQuery(query, accessResources.Dashboards, rule.UID)) + logQL, err := historian.BuildLogQuery(buildHistoryQuery(&query, accessResources.Dashboards, rule.UID)) if err != nil { return make([]*annotations.ItemDTO, 0), ErrLokiStoreInternal.Errorf("failed to build loki query: %w", err) } @@ -178,7 +176,7 @@ func (r *LokiHistorianStore) annotationsFromStream(stream historian.Stream, ac a return items } -func (r *LokiHistorianStore) GetTags(ctx context.Context, query *annotations.TagsQuery) (annotations.FindTagsResult, error) { +func (r *LokiHistorianStore) GetTags(ctx context.Context, query annotations.TagsQuery) (annotations.FindTagsResult, error) { return annotations.FindTagsResult{Tags: []*annotations.TagsDTO{}}, nil } diff --git a/pkg/services/annotations/annotationsimpl/loki/historian_store_test.go b/pkg/services/annotations/annotationsimpl/loki/historian_store_test.go index 5202952cda95f..78c450148301e 100644 --- a/pkg/services/annotations/annotationsimpl/loki/historian_store_test.go +++ b/pkg/services/annotations/annotationsimpl/loki/historian_store_test.go @@ -11,6 +11,7 @@ import ( "time" "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/db" @@ -29,8 +30,6 @@ import ( historymodel "github.com/grafana/grafana/pkg/services/ngalert/state/historian/model" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tests/testsuite" - - "github.com/stretchr/testify/require" ) func TestMain(m *testing.M) 
{ @@ -94,7 +93,7 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) { } res, err := store.Get( context.Background(), - &query, + query, &annotation_ac.AccessResources{ Dashboards: map[string]int64{ dashboard1.UID: dashboard1.ID, @@ -120,7 +119,7 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) { } res, err := store.Get( context.Background(), - &query, + query, &annotation_ac.AccessResources{ Dashboards: map[string]int64{ dashboard1.UID: dashboard1.ID, @@ -144,7 +143,7 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) { } res, err := store.Get( context.Background(), - &query, + query, &annotation_ac.AccessResources{ Dashboards: map[string]int64{ dashboard1.UID: dashboard1.ID, @@ -170,7 +169,7 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) { } res, err := store.Get( context.Background(), - &query, + query, &annotation_ac.AccessResources{ Dashboards: map[string]int64{ dashboard1.UID: dashboard1.ID, @@ -200,7 +199,7 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) { } res, err := store.Get( context.Background(), - &query, + query, &annotation_ac.AccessResources{ Dashboards: map[string]int64{ dashboard1.UID: dashboard1.ID, @@ -229,7 +228,7 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) { } res, err := store.Get( context.Background(), - &query, + query, &annotation_ac.AccessResources{ Dashboards: map[string]int64{ dashboard1.UID: dashboard1.ID, @@ -263,7 +262,7 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) { } res, err := store.Get( context.Background(), - &query, + query, &annotation_ac.AccessResources{ Dashboards: map[string]int64{ dashboard1.UID: dashboard1.ID, diff --git a/pkg/services/annotations/annotationsimpl/store.go b/pkg/services/annotations/annotationsimpl/store.go index b1c3643a9c35d..9210a2c8b96ac 100644 --- a/pkg/services/annotations/annotationsimpl/store.go +++ b/pkg/services/annotations/annotationsimpl/store.go @@ -20,8 +20,8 @@ type commonStore interface { type readStore 
interface { commonStore - Get(ctx context.Context, query *annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) - GetTags(ctx context.Context, query *annotations.TagsQuery) (annotations.FindTagsResult, error) + Get(ctx context.Context, query annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) + GetTags(ctx context.Context, query annotations.TagsQuery) (annotations.FindTagsResult, error) } type writeStore interface { diff --git a/pkg/services/annotations/annotationsimpl/xorm_store.go b/pkg/services/annotations/annotationsimpl/xorm_store.go index 6ae9ddc3ce1bd..31192c250a7e3 100644 --- a/pkg/services/annotations/annotationsimpl/xorm_store.go +++ b/pkg/services/annotations/annotationsimpl/xorm_store.go @@ -245,7 +245,7 @@ func tagSet[T any](fn func(T) int64, list []T) map[int64]struct{} { return set } -func (r *xormRepositoryImpl) Get(ctx context.Context, query *annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { +func (r *xormRepositoryImpl) Get(ctx context.Context, query annotations.ItemQuery, accessResources *accesscontrol.AccessResources) ([]*annotations.ItemDTO, error) { var sql bytes.Buffer params := make([]interface{}, 0) items := make([]*annotations.ItemDTO, 0) @@ -351,14 +351,16 @@ func (r *xormRepositoryImpl) Get(ctx context.Context, query *annotations.ItemQue if err != nil { return err } - sql.WriteString(fmt.Sprintf(" AND (%s)", acFilter)) - - if query.Limit == 0 { - query.Limit = 100 + if acFilter != "" { + sql.WriteString(fmt.Sprintf(" AND (%s)", acFilter)) } // order of ORDER BY arguments match the order of a sql index for performance - sql.WriteString(" ORDER BY a.org_id, a.epoch_end DESC, a.epoch DESC" + r.db.GetDialect().Limit(query.Limit) + " ) dt on dt.id = annotation.id") + orderBy := " ORDER BY a.org_id, a.epoch_end DESC, a.epoch DESC" + if query.Limit > 0 { + orderBy += 
r.db.GetDialect().Limit(query.Limit) + } + sql.WriteString(orderBy + " ) dt on dt.id = annotation.id") if err := sess.SQL(sql.String(), params...).Find(&items); err != nil { items = nil @@ -372,6 +374,10 @@ func (r *xormRepositoryImpl) Get(ctx context.Context, query *annotations.ItemQue } func (r *xormRepositoryImpl) getAccessControlFilter(user identity.Requester, accessResources *accesscontrol.AccessResources) (string, error) { + if accessResources.SkipAccessControlFilter { + return "", nil + } + var filters []string if accessResources.CanAccessOrgAnnotations { @@ -440,7 +446,7 @@ func (r *xormRepositoryImpl) Delete(ctx context.Context, params *annotations.Del }) } -func (r *xormRepositoryImpl) GetTags(ctx context.Context, query *annotations.TagsQuery) (annotations.FindTagsResult, error) { +func (r *xormRepositoryImpl) GetTags(ctx context.Context, query annotations.TagsQuery) (annotations.FindTagsResult, error) { var items []*annotations.Tag err := r.db.WithDbSession(ctx, func(dbSession *db.Session) error { if query.Limit == 0 { diff --git a/pkg/services/annotations/annotationsimpl/xorm_store_test.go b/pkg/services/annotations/annotationsimpl/xorm_store_test.go index ae06428f8350f..d72e61e3b1306 100644 --- a/pkg/services/annotations/annotationsimpl/xorm_store_test.go +++ b/pkg/services/annotations/annotationsimpl/xorm_store_test.go @@ -133,7 +133,7 @@ func TestIntegrationAnnotations(t *testing.T) { assert.Greater(t, organizationAnnotation2.ID, int64(0)) t.Run("Can query for annotation by dashboard id", func(t *testing.T) { - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, DashboardID: dashboard.ID, From: 0, @@ -182,7 +182,7 @@ func TestIntegrationAnnotations(t *testing.T) { err := store.AddMany(context.Background(), items) require.NoError(t, err) - query := &annotations.ItemQuery{OrgID: 100, SignedInUser: testUser} + query := annotations.ItemQuery{OrgID: 100, 
SignedInUser: testUser} accRes := &annotation_ac.AccessResources{CanAccessOrgAnnotations: true} inserted, err := store.Get(context.Background(), query, accRes) require.NoError(t, err) @@ -209,7 +209,7 @@ func TestIntegrationAnnotations(t *testing.T) { err := store.AddMany(context.Background(), items) require.NoError(t, err) - query := &annotations.ItemQuery{OrgID: 101, SignedInUser: testUser} + query := annotations.ItemQuery{OrgID: 101, SignedInUser: testUser} accRes := &annotation_ac.AccessResources{CanAccessOrgAnnotations: true} inserted, err := store.Get(context.Background(), query, accRes) require.NoError(t, err) @@ -217,7 +217,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Can query for annotation by id", func(t *testing.T) { - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, AnnotationID: annotation2.ID, SignedInUser: testUser, @@ -237,7 +237,7 @@ func TestIntegrationAnnotations(t *testing.T) { Dashboards: map[string]int64{"foo": 1}, CanAccessDashAnnotations: true, } - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 12, @@ -253,7 +253,7 @@ func TestIntegrationAnnotations(t *testing.T) { Dashboards: map[string]int64{"foo": 1}, CanAccessDashAnnotations: true, } - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 1, @@ -270,7 +270,7 @@ func TestIntegrationAnnotations(t *testing.T) { Dashboards: map[string]int64{"foo": 1}, CanAccessDashAnnotations: true, } - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 1, @@ -287,7 +287,7 @@ func TestIntegrationAnnotations(t *testing.T) { 
Dashboards: map[string]int64{"foo": 1}, CanAccessDashAnnotations: true, } - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 1, @@ -301,7 +301,7 @@ func TestIntegrationAnnotations(t *testing.T) { t.Run("Should find two annotations using partial match", func(t *testing.T) { accRes := &annotation_ac.AccessResources{CanAccessOrgAnnotations: true} - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, From: 1, To: 25, @@ -318,7 +318,7 @@ func TestIntegrationAnnotations(t *testing.T) { Dashboards: map[string]int64{"foo": 1}, CanAccessDashAnnotations: true, } - items, err := store.Get(context.Background(), &annotations.ItemQuery{ + items, err := store.Get(context.Background(), annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 1, @@ -331,7 +331,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Can update annotation and remove all tags", func(t *testing.T) { - query := &annotations.ItemQuery{ + query := annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 0, @@ -366,7 +366,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Can update annotation with new tags", func(t *testing.T) { - query := &annotations.ItemQuery{ + query := annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 0, @@ -399,7 +399,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Can update annotation with additional tags", func(t *testing.T) { - query := &annotations.ItemQuery{ + query := annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 0, @@ -432,7 +432,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Can update annotations with data", func(t *testing.T) { - query := &annotations.ItemQuery{ + query := annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 0, @@ -468,7 +468,7 @@ func TestIntegrationAnnotations(t *testing.T) 
{ }) t.Run("Can delete annotation", func(t *testing.T) { - query := &annotations.ItemQuery{ + query := annotations.ItemQuery{ OrgID: 1, DashboardID: 1, From: 0, @@ -512,7 +512,7 @@ func TestIntegrationAnnotations(t *testing.T) { CanAccessDashAnnotations: true, } - query := &annotations.ItemQuery{ + query := annotations.ItemQuery{ OrgID: 1, AnnotationID: annotation3.ID, SignedInUser: testUser, @@ -531,7 +531,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Should find tags by key", func(t *testing.T) { - result, err := store.GetTags(context.Background(), &annotations.TagsQuery{ + result, err := store.GetTags(context.Background(), annotations.TagsQuery{ OrgID: 1, Tag: "server", }) @@ -542,7 +542,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Should find tags by value", func(t *testing.T) { - result, err := store.GetTags(context.Background(), &annotations.TagsQuery{ + result, err := store.GetTags(context.Background(), annotations.TagsQuery{ OrgID: 1, Tag: "outage", }) @@ -555,7 +555,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Should not find tags in other org", func(t *testing.T) { - result, err := store.GetTags(context.Background(), &annotations.TagsQuery{ + result, err := store.GetTags(context.Background(), annotations.TagsQuery{ OrgID: 0, Tag: "server-1", }) @@ -564,7 +564,7 @@ func TestIntegrationAnnotations(t *testing.T) { }) t.Run("Should not find tags that do not exist", func(t *testing.T) { - result, err := store.GetTags(context.Background(), &annotations.TagsQuery{ + result, err := store.GetTags(context.Background(), annotations.TagsQuery{ OrgID: 0, Tag: "unknown:tag", }) @@ -650,7 +650,7 @@ func benchmarkFindTags(b *testing.B, numAnnotations int) { b.ResetTimer() for i := 0; i < b.N; i++ { - result, err := store.GetTags(context.Background(), &annotations.TagsQuery{ + result, err := store.GetTags(context.Background(), annotations.TagsQuery{ OrgID: 1, Tag: "outage", }) diff --git 
a/pkg/services/annotations/models.go b/pkg/services/annotations/models.go index 2b1325b4f3c93..71c307123e055 100644 --- a/pkg/services/annotations/models.go +++ b/pkg/services/annotations/models.go @@ -21,6 +21,7 @@ type ItemQuery struct { SignedInUser identity.Requester Limit int64 `json:"limit"` + Page int64 } // TagsQuery is the query for a tags search. diff --git a/pkg/services/anonymous/anonimpl/client.go b/pkg/services/anonymous/anonimpl/client.go index ac20182b00ff9..9ef62c4819dfc 100644 --- a/pkg/services/anonymous/anonimpl/client.go +++ b/pkg/services/anonymous/anonimpl/client.go @@ -17,8 +17,9 @@ import ( ) var ( - errInvalidOrg = errutil.Unauthorized("anonymous.invalid-org") - errInvalidID = errutil.Unauthorized("anonymous.invalid-id") + errInvalidOrg = errutil.Unauthorized("anonymous.invalid-org") + errInvalidID = errutil.Unauthorized("anonymous.invalid-id") + errDeviceLimit = errutil.Unauthorized("anonymous.device-limit-reached", errutil.WithPublicMessage("Anonymous device limit reached. Contact Administrator")) ) var _ authn.ContextAwareClient = new(Anonymous) @@ -51,7 +52,7 @@ func (a *Anonymous) Authenticate(ctx context.Context, r *authn.Request) (*authn. 
if err := a.anonDeviceService.TagDevice(ctx, httpReqCopy, anonymous.AnonDeviceUI); err != nil { if errors.Is(err, anonstore.ErrDeviceLimitReached) { - return nil, err + return nil, errDeviceLimit.Errorf("limit reached for anonymous devices: %w", err) } a.log.Warn("Failed to tag anonymous session", "error", err) diff --git a/pkg/services/anonymous/anonimpl/impl.go b/pkg/services/anonymous/anonimpl/impl.go index b34048be40760..c0105441d62a1 100644 --- a/pkg/services/anonymous/anonimpl/impl.go +++ b/pkg/services/anonymous/anonimpl/impl.go @@ -2,6 +2,7 @@ package anonimpl import ( "context" + "errors" "net/http" "time" @@ -79,20 +80,29 @@ func (a *AnonDeviceService) usageStatFn(ctx context.Context) (map[string]any, er }, nil } -func (a *AnonDeviceService) tagDeviceUI(ctx context.Context, httpReq *http.Request, device *anonstore.Device) error { +func (a *AnonDeviceService) tagDeviceUI(ctx context.Context, device *anonstore.Device) error { key := device.CacheKey() - if _, ok := a.localCache.Get(key); ok { + if val, ok := a.localCache.Get(key); ok { + if boolVal, ok := val.(bool); ok && !boolVal { + return anonstore.ErrDeviceLimitReached + } return nil } - a.localCache.SetDefault(key, struct{}{}) + a.localCache.SetDefault(key, true) if a.cfg.Env == setting.Dev { a.log.Debug("Tagging device for UI", "deviceID", device.DeviceID, "device", device, "key", key) } if err := a.anonStore.CreateOrUpdateDevice(ctx, device); err != nil { + if errors.Is(err, anonstore.ErrDeviceLimitReached) { + a.localCache.SetDefault(key, false) + return err + } + // invalidate cache if there is an error + a.localCache.Delete(key) return err } @@ -142,7 +152,7 @@ func (a *AnonDeviceService) TagDevice(ctx context.Context, httpReq *http.Request UpdatedAt: time.Now(), } - err = a.tagDeviceUI(ctx, httpReq, taggedDevice) + err = a.tagDeviceUI(ctx, taggedDevice) if err != nil { a.log.Debug("Failed to tag device for UI", "error", err) return err diff --git a/pkg/services/anonymous/anonimpl/impl_test.go 
b/pkg/services/anonymous/anonimpl/impl_test.go index a84e913f3b1ae..b193d22edb6a7 100644 --- a/pkg/services/anonymous/anonimpl/impl_test.go +++ b/pkg/services/anonymous/anonimpl/impl_test.go @@ -26,6 +26,10 @@ func TestMain(m *testing.M) { } func TestIntegrationDeviceService_tag(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } + type tagReq struct { httpReq *http.Request kind anonymous.DeviceKind @@ -152,6 +156,9 @@ func TestIntegrationDeviceService_tag(t *testing.T) { // Ensure that the local cache prevents request from being tagged func TestIntegrationAnonDeviceService_localCacheSafety(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } store := db.InitTestDB(t) anonService := ProvideAnonymousDeviceService(&usagestats.UsageStatsMock{}, &authntest.FakeService{}, store, setting.NewCfg(), orgtest.NewOrgServiceFake(), nil, actest.FakeAccessControl{}, &routing.RouteRegisterImpl{}) @@ -184,6 +191,10 @@ func TestIntegrationAnonDeviceService_localCacheSafety(t *testing.T) { } func TestIntegrationDeviceService_SearchDevice(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } + fixedTime := time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC) // Fixed timestamp for testing testCases := []struct { @@ -271,3 +282,88 @@ func TestIntegrationDeviceService_SearchDevice(t *testing.T) { }) } } + +func TestIntegrationAnonDeviceService_DeviceLimitWithCache(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } + // Setup test environment + store := db.InitTestDB(t) + cfg := setting.NewCfg() + cfg.AnonymousDeviceLimit = 1 // Set device limit to 1 for testing + anonService := ProvideAnonymousDeviceService( + &usagestats.UsageStatsMock{}, + &authntest.FakeService{}, + store, + cfg, + orgtest.NewOrgServiceFake(), + nil, + actest.FakeAccessControl{}, + &routing.RouteRegisterImpl{}, + ) + + // Define test cases + testCases := []struct { + name string + httpReq *http.Request + 
expectedErr error + }{ + { + name: "first request should succeed", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.1"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device1"}, + }, + }, + expectedErr: nil, + }, + { + name: "second request should fail due to device limit", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.2"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device2"}, + }, + }, + expectedErr: anonstore.ErrDeviceLimitReached, + }, + { + name: "repeat request should hit cache and succeed", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.1"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device1"}, + }, + }, + expectedErr: nil, + }, + { + name: "third request should hit cache and fail due to device limit", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.2"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device2"}, + }, + }, + expectedErr: anonstore.ErrDeviceLimitReached, + }, + } + + // Run test cases + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := anonService.TagDevice(context.Background(), tc.httpReq, anonymous.AnonDeviceUI) + if tc.expectedErr != nil { + require.Error(t, err) + assert.Equal(t, tc.expectedErr, err) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/pkg/services/authn/authnimpl/sync/org_sync.go b/pkg/services/authn/authnimpl/sync/org_sync.go index 38429a07704cb..53117a1c86199 100644 --- a/pkg/services/authn/authnimpl/sync/org_sync.go +++ b/pkg/services/authn/authnimpl/sync/org_sync.go @@ -82,18 +82,25 @@ func (s *OrgSync) SyncOrgRolesHook(ctx context.Context, id *authn.Identity, _ *a orgIDs := make([]int64, 0, len(id.OrgRoles)) // add any new org roles for orgId, 
orgRole := range id.OrgRoles { - orgIDs = append(orgIDs, orgId) if _, exists := handledOrgIds[orgId]; exists { + orgIDs = append(orgIDs, orgId) continue } // add role cmd := &org.AddOrgUserCommand{UserID: userID, Role: orgRole, OrgID: orgId} err := s.orgService.AddOrgUser(ctx, cmd) - if err != nil && !errors.Is(err, org.ErrOrgNotFound) { + + if errors.Is(err, org.ErrOrgNotFound) { + continue + } + + if err != nil { ctxLogger.Error("Failed to update active org for user", "error", err) return err } + + orgIDs = append(orgIDs, orgId) } // delete any removed org roles diff --git a/pkg/services/authn/authnimpl/sync/org_sync_test.go b/pkg/services/authn/authnimpl/sync/org_sync_test.go index f8b774696d9e3..a2383c2b05af5 100644 --- a/pkg/services/authn/authnimpl/sync/org_sync_test.go +++ b/pkg/services/authn/authnimpl/sync/org_sync_test.go @@ -22,7 +22,8 @@ import ( ) func TestOrgSync_SyncOrgRolesHook(t *testing.T) { - orgService := &orgtest.FakeOrgService{ExpectedUserOrgDTO: []*org.UserOrgDTO{ + orgService := &orgtest.MockService{} + orgService.On("GetUserOrgList", mock.Anything, mock.Anything).Return([]*org.UserOrgDTO{ { OrgID: 1, Role: org.RoleEditor, @@ -31,14 +32,16 @@ func TestOrgSync_SyncOrgRolesHook(t *testing.T) { OrgID: 3, Role: org.RoleViewer, }, - }, - ExpectedOrgListResponse: orgtest.OrgListResponse{ - { - OrgID: 3, - Response: nil, - }, - }, - } + }, nil) + orgService.On("RemoveOrgUser", mock.Anything, mock.MatchedBy(func(cmd *org.RemoveOrgUserCommand) bool { + return cmd.OrgID == 3 && cmd.UserID == 1 + })).Return(nil) + orgService.On("UpdateOrgUser", mock.Anything, mock.MatchedBy(func(cmd *org.UpdateOrgUserCommand) bool { + return cmd.OrgID == 1 && cmd.UserID == 1 && cmd.Role == org.RoleAdmin + })).Return(nil) + orgService.On("AddOrgUser", mock.Anything, mock.MatchedBy(func(cmd *org.AddOrgUserCommand) bool { + return cmd.OrgID == 2 && cmd.UserID == 1 && cmd.Role == org.RoleEditor + })).Return(org.ErrOrgNotFound) acService := &actest.FakeService{} userService 
:= &usertest.FakeUserService{ExpectedUser: &user.User{ ID: 1, @@ -65,7 +68,7 @@ func TestOrgSync_SyncOrgRolesHook(t *testing.T) { wantID *authn.Identity }{ { - name: "add user to multiple orgs", + name: "add user to multiple orgs, should not set the user's default orgID to an org that does not exist", fields: fields{ userService: userService, orgService: orgService, @@ -96,7 +99,7 @@ func TestOrgSync_SyncOrgRolesHook(t *testing.T) { Name: "test", Email: "test", OrgRoles: map[int64]roletype.RoleType{1: org.RoleAdmin, 2: org.RoleEditor}, - OrgID: 1, //set using org + OrgID: 1, // set using org IsGrafanaAdmin: ptrBool(false), ClientParams: authn.ClientParams{ SyncOrgRoles: true, diff --git a/pkg/services/correlations/accesscontrol.go b/pkg/services/correlations/accesscontrol.go index fa68b14747e19..4034e495da06a 100644 --- a/pkg/services/correlations/accesscontrol.go +++ b/pkg/services/correlations/accesscontrol.go @@ -2,10 +2,9 @@ package correlations import ( "github.com/grafana/grafana/pkg/services/accesscontrol" - "github.com/grafana/grafana/pkg/services/datasources" ) var ( // ConfigurationPageAccess is used to protect the "Configure > correlations" tab access - ConfigurationPageAccess = accesscontrol.EvalPermission(datasources.ActionRead) + ConfigurationPageAccess = accesscontrol.EvalPermission(accesscontrol.ActionDatasourcesExplore) ) diff --git a/pkg/services/live/live.go b/pkg/services/live/live.go index 975613835addd..cca7a1360ca60 100644 --- a/pkg/services/live/live.go +++ b/pkg/services/live/live.go @@ -97,6 +97,11 @@ func ProvideService(plugCtxProvider *plugincontext.Provider, cfg *setting.Cfg, r }, usageStatsService: usageStatsService, orgService: orgService, + keyPrefix: "gf_live", + } + + if cfg.LiveHAPrefix != "" { + g.keyPrefix = cfg.LiveHAPrefix + ".gf_live" } logger.Debug("GrafanaLive initialization", "ha", g.IsHA()) @@ -152,7 +157,7 @@ func ProvideService(plugCtxProvider *plugincontext.Provider, cfg *setting.Cfg, r managedStreamRunner = 
managedstream.NewRunner( g.Publish, channelLocalPublisher, - managedstream.NewRedisFrameCache(redisClient), + managedstream.NewRedisFrameCache(redisClient, g.keyPrefix), ) } else { managedStreamRunner = managedstream.NewRunner( @@ -342,7 +347,7 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error { } broker, err := centrifuge.NewRedisBroker(node, centrifuge.RedisBrokerConfig{ - Prefix: "gf_live", + Prefix: g.keyPrefix, Shards: redisShards, }) if err != nil { @@ -351,7 +356,7 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error { node.SetBroker(broker) presenceManager, err := centrifuge.NewRedisPresenceManager(node, centrifuge.RedisPresenceManagerConfig{ - Prefix: "gf_live", + Prefix: g.keyPrefix, Shards: redisShards, }) if err != nil { @@ -380,6 +385,8 @@ type GrafanaLive struct { queryDataService query.Service orgService org.Service + keyPrefix string + node *centrifuge.Node surveyCaller *survey.Caller diff --git a/pkg/services/live/managedstream/cache_redis.go b/pkg/services/live/managedstream/cache_redis.go index b547a11cd20ff..34b16991671a0 100644 --- a/pkg/services/live/managedstream/cache_redis.go +++ b/pkg/services/live/managedstream/cache_redis.go @@ -18,11 +18,13 @@ type RedisFrameCache struct { mu sync.RWMutex redisClient *redis.Client frames map[int64]map[string]data.FrameJSONCache + keyPrefix string } // NewRedisFrameCache ... 
-func NewRedisFrameCache(redisClient *redis.Client) *RedisFrameCache { +func NewRedisFrameCache(redisClient *redis.Client, keyPrefix string) *RedisFrameCache { return &RedisFrameCache{ + keyPrefix: keyPrefix, frames: map[int64]map[string]data.FrameJSONCache{}, redisClient: redisClient, } @@ -43,7 +45,7 @@ func (c *RedisFrameCache) GetActiveChannels(orgID int64) (map[string]json.RawMes } func (c *RedisFrameCache) GetFrame(ctx context.Context, orgID int64, channel string) (json.RawMessage, bool, error) { - key := getCacheKey(orgchannel.PrependOrgID(orgID, channel)) + key := c.getCacheKey(orgchannel.PrependOrgID(orgID, channel)) cmd := c.redisClient.HGetAll(ctx, key) result, err := cmd.Result() if err != nil { @@ -69,7 +71,7 @@ func (c *RedisFrameCache) Update(ctx context.Context, orgID int64, channel strin stringSchema := string(jsonFrame.Bytes(data.IncludeSchemaOnly)) - key := getCacheKey(orgchannel.PrependOrgID(orgID, channel)) + key := c.getCacheKey(orgchannel.PrependOrgID(orgID, channel)) pipe := c.redisClient.TxPipeline() defer func() { _ = pipe.Close() }() @@ -107,6 +109,6 @@ func (c *RedisFrameCache) Update(ctx context.Context, orgID int64, channel strin return true, nil } -func getCacheKey(channelID string) string { - return "gf_live.managed_stream." + channelID +func (c *RedisFrameCache) getCacheKey(channelID string) string { + return c.keyPrefix + ".managed_stream." 
+ channelID } diff --git a/pkg/services/live/managedstream/cache_redis_test.go b/pkg/services/live/managedstream/cache_redis_test.go index d7c3f9fddbb5c..905684cb46c48 100644 --- a/pkg/services/live/managedstream/cache_redis_test.go +++ b/pkg/services/live/managedstream/cache_redis_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/go-redis/redis/v8" + "github.com/google/uuid" "github.com/stretchr/testify/require" ) @@ -30,7 +31,25 @@ func TestIntegrationRedisCacheStorage(t *testing.T) { Addr: addr, DB: db, }) - c := NewRedisFrameCache(redisClient) + prefix := uuid.New().String() + + t.Cleanup(redisCleanup(t, redisClient, prefix)) + + c := NewRedisFrameCache(redisClient, prefix) require.NotNil(t, c) testFrameCache(t, c) } + +func redisCleanup(t *testing.T, redisClient *redis.Client, prefix string) func() { + return func() { + keys, err := redisClient.Keys(redisClient.Context(), prefix+"*").Result() + if err != nil { + require.NoError(t, err) + } + + for _, key := range keys { + _, err := redisClient.Del(redisClient.Context(), key).Result() + require.NoError(t, err) + } + } +} diff --git a/pkg/services/ngalert/api/authorization.go b/pkg/services/ngalert/api/authorization.go index dd03da7ca4c6e..2b3a0a45b4ebc 100644 --- a/pkg/services/ngalert/api/authorization.go +++ b/pkg/services/ngalert/api/authorization.go @@ -112,7 +112,7 @@ func (api *API) authorize(method, path string) web.Handler { case http.MethodGet + "/api/ruler/{DatasourceUID}/api/v1/rules": eval = ac.EvalPermission(ac.ActionAlertingRuleExternalRead, datasources.ScopeProvider.GetResourceScopeUID(ac.Parameter(":DatasourceUID"))) case http.MethodPost + "/api/ruler/{DatasourceUID}/api/v1/rules/{Namespace}": - eval = ac.EvalPermission(ac.ActionAlertingInstancesExternalWrite, datasources.ScopeProvider.GetResourceScopeUID(ac.Parameter(":DatasourceUID"))) + eval = ac.EvalPermission(ac.ActionAlertingRuleExternalWrite, datasources.ScopeProvider.GetResourceScopeUID(ac.Parameter(":DatasourceUID"))) // Lotex 
Prometheus-compatible Paths case http.MethodGet + "/api/prometheus/{DatasourceUID}/api/v1/rules": diff --git a/pkg/services/serviceaccounts/manager/service.go b/pkg/services/serviceaccounts/manager/service.go index b225cc6f0a19d..5fbae6c88efe6 100644 --- a/pkg/services/serviceaccounts/manager/service.go +++ b/pkg/services/serviceaccounts/manager/service.go @@ -26,7 +26,9 @@ const ( ) type ServiceAccountsService struct { - acService accesscontrol.Service + acService accesscontrol.Service + permissions accesscontrol.ServiceAccountPermissionsService + store store log log.Logger backgroundLog log.Logger @@ -44,7 +46,8 @@ func ProvideServiceAccountsService( kvStore kvstore.KVStore, userService user.Service, orgService org.Service, - accesscontrolService accesscontrol.Service, + acService accesscontrol.Service, + permissions accesscontrol.ServiceAccountPermissionsService, ) (*ServiceAccountsService, error) { serviceAccountsStore := database.ProvideServiceAccountsStore( cfg, @@ -55,13 +58,14 @@ func ProvideServiceAccountsService( orgService, ) s := &ServiceAccountsService{ - acService: accesscontrolService, + acService: acService, + permissions: permissions, store: serviceAccountsStore, log: log.New("serviceaccounts"), backgroundLog: log.New("serviceaccounts.background"), } - if err := RegisterRoles(accesscontrolService); err != nil { + if err := RegisterRoles(acService); err != nil { s.log.Error("Failed to register roles", "error", err) } @@ -179,7 +183,10 @@ func (sa *ServiceAccountsService) DeleteServiceAccount(ctx context.Context, orgI if err := sa.store.DeleteServiceAccount(ctx, orgID, serviceAccountID); err != nil { return err } - return sa.acService.DeleteUserPermissions(ctx, orgID, serviceAccountID) + if err := sa.acService.DeleteUserPermissions(ctx, orgID, serviceAccountID); err != nil { + return err + } + return sa.permissions.DeleteResourcePermissions(ctx, orgID, fmt.Sprintf("%d", serviceAccountID)) } func (sa *ServiceAccountsService) EnableServiceAccount(ctx 
context.Context, orgID, serviceAccountID int64, enable bool) error { diff --git a/pkg/services/serviceaccounts/manager/service_test.go b/pkg/services/serviceaccounts/manager/service_test.go index c7175f480c87b..019178c8eebf4 100644 --- a/pkg/services/serviceaccounts/manager/service_test.go +++ b/pkg/services/serviceaccounts/manager/service_test.go @@ -119,7 +119,8 @@ func (f *SecretsCheckerFake) CheckTokens(ctx context.Context) error { func TestProvideServiceAccount_DeleteServiceAccount(t *testing.T) { storeMock := newServiceAccountStoreFake() acSvc := actest.FakeService{} - svc := ServiceAccountsService{acSvc, storeMock, log.New("test"), log.New("background.test"), &SecretsCheckerFake{}, false, 0} + pSvc := &actest.FakePermissionsService{} + svc := ServiceAccountsService{acSvc, pSvc, storeMock, log.NewNopLogger(), log.NewNopLogger(), &SecretsCheckerFake{}, false, 0} testOrgId := 1 t.Run("should create service account", func(t *testing.T) { diff --git a/pkg/services/serviceaccounts/manager/stats_test.go b/pkg/services/serviceaccounts/manager/stats_test.go index e85e9adb12912..43698a1849457 100644 --- a/pkg/services/serviceaccounts/manager/stats_test.go +++ b/pkg/services/serviceaccounts/manager/stats_test.go @@ -14,8 +14,9 @@ import ( func Test_UsageStats(t *testing.T) { acSvc := actest.FakeService{} + pSvc := actest.FakePermissionsService{} storeMock := newServiceAccountStoreFake() - svc := ServiceAccountsService{acSvc, storeMock, log.New("test"), log.New("background-test"), &SecretsCheckerFake{}, true, 5} + svc := ServiceAccountsService{acSvc, &pSvc, storeMock, log.NewNopLogger(), log.NewNopLogger(), &SecretsCheckerFake{}, true, 5} err := svc.DeleteServiceAccount(context.Background(), 1, 1) require.NoError(t, err) diff --git a/pkg/services/sqlstore/migrations/accesscontrol/orphaned.go b/pkg/services/sqlstore/migrations/accesscontrol/orphaned.go new file mode 100644 index 0000000000000..e96f966c86296 --- /dev/null +++ 
b/pkg/services/sqlstore/migrations/accesscontrol/orphaned.go @@ -0,0 +1,102 @@ +package accesscontrol + +import ( + "fmt" + "strconv" + "strings" + + "xorm.io/xorm" + + "github.com/grafana/grafana/pkg/services/sqlstore/migrator" +) + +const ( + orphanedServiceAccountsPermissions = "delete orphaned service account permissions" +) + +func AddOrphanedMigrations(mg *migrator.Migrator) { + mg.AddMigration(orphanedServiceAccountsPermissions, &orphanedServiceAccountPermissions{}) +} + +var _ migrator.CodeMigration = new(alertingScopeRemovalMigrator) + +type orphanedServiceAccountPermissions struct { + migrator.MigrationBase +} + +func (m *orphanedServiceAccountPermissions) SQL(dialect migrator.Dialect) string { + return CodeMigrationSQL +} + +func (m *orphanedServiceAccountPermissions) Exec(sess *xorm.Session, mg *migrator.Migrator) error { + var idents []string + + // find all permissions that are scopes directly to a service account + err := sess.SQL("SELECT DISTINCT p.identifier FROM permission AS p WHERE p.kind = 'serviceaccounts' AND NOT p.identifier = '*'").Find(&idents) + if err != nil { + return fmt.Errorf("failed to fetch permissinos scoped to service accounts: %w", err) + } + + ids := make([]int64, 0, len(idents)) + for _, id := range idents { + id, err := strconv.ParseInt(id, 10, 64) + if err == nil { + ids = append(ids, id) + } + } + + if len(ids) == 0 { + return nil + } + + return batch(len(ids), batchSize, func(start, end int) error { + return m.exec(sess, mg, ids[start:end]) + }) +} + +func (m *orphanedServiceAccountPermissions) exec(sess *xorm.Session, mg *migrator.Migrator, ids []int64) error { + // get all service accounts from batch + raw := "SELECT u.id FROM " + mg.Dialect.Quote("user") + " AS u WHERE u.is_service_account AND u.id IN(?" 
+ strings.Repeat(",?", len(ids)-1) + ")" + args := make([]any, 0, len(ids)) + for _, id := range ids { + args = append(args, id) + } + + var existingIDs []int64 + err := sess.SQL(raw, args...).Find(&existingIDs) + if err != nil { + return fmt.Errorf("failed to fetch existing service accounts: %w", err) + } + + existing := make(map[int64]struct{}, len(existingIDs)) + for _, id := range existingIDs { + existing[id] = struct{}{} + } + + // filter out orphaned permissions + var orphaned []string + for _, id := range ids { + if _, ok := existing[id]; !ok { + orphaned = append(orphaned, strconv.FormatInt(id, 10)) + } + } + + if len(orphaned) == 0 { + return nil + } + + // delete all orphaned permissions + rawDelete := "DELETE FROM permission AS p WHERE p.kind = 'serviceaccounts' AND p.identifier IN(?" + strings.Repeat(",?", len(orphaned)-1) + ")" + deleteArgs := make([]any, 0, len(orphaned)+1) + deleteArgs = append(deleteArgs, rawDelete) + for _, id := range orphaned { + deleteArgs = append(deleteArgs, id) + } + + _, err = sess.Exec(deleteArgs...) 
+ if err != nil { + return fmt.Errorf("failed to delete orphaned service accounts: %w", err) + } + + return nil +} diff --git a/pkg/services/sqlstore/migrations/migrations.go b/pkg/services/sqlstore/migrations/migrations.go index ae88297da9623..6daea8bb5ba9a 100644 --- a/pkg/services/sqlstore/migrations/migrations.go +++ b/pkg/services/sqlstore/migrations/migrations.go @@ -123,6 +123,8 @@ func (oss *OSSMigrations) AddMigration(mg *Migrator) { accesscontrol.AddManagedFolderAlertingSilencesActionsMigrator(mg) ualert.AddRecordingRuleColumns(mg) + + accesscontrol.AddOrphanedMigrations(mg) } func addStarMigrations(mg *Migrator) { diff --git a/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go b/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go index 1d60a4f7f2858..d51cfacd5a65d 100644 --- a/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go +++ b/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go @@ -35,32 +35,42 @@ func (p *ServiceAccountsSameLoginCrossOrgs) Exec(sess *xorm.Session, mg *migrato var err error switch p.dialect.DriverName() { case migrator.Postgres: - _, err = p.sess.Exec(`UPDATE "user" - SET login = 'sa-' || org_id::text || '-' || - CASE - WHEN login LIKE 'sa-%' THEN SUBSTRING(login FROM 4) - ELSE login - END - WHERE login IS NOT NULL AND is_service_account = true;`, - ) + _, err = p.sess.Exec(` + UPDATE "user" + SET login = 'sa-' || org_id::text || '-' || + CASE + WHEN login LIKE 'sa-%' THEN SUBSTRING(login FROM 4) + ELSE login + END + WHERE login IS NOT NULL + AND is_service_account = true + AND login NOT LIKE 'sa-' || org_id::text || '-%'; + `) case migrator.MySQL: - _, err = p.sess.Exec(`UPDATE user - SET login = CONCAT('sa-', CAST(org_id AS CHAR), '-', - CASE - WHEN login LIKE 'sa-%' THEN SUBSTRING(login, 4) - ELSE login - END) - WHERE login IS NOT NULL AND is_service_account = 1;`, - ) + _, err = 
p.sess.Exec(` + UPDATE user + SET login = CONCAT('sa-', CAST(org_id AS CHAR), '-', + CASE + WHEN login LIKE 'sa-%' THEN SUBSTRING(login, 4) + ELSE login + END + ) + WHERE login IS NOT NULL + AND is_service_account = 1 + AND login NOT LIKE CONCAT('sa-', org_id, '-%'); + `) case migrator.SQLite: - _, err = p.sess.Exec(`Update ` + p.dialect.Quote("user") + ` - SET login = 'sa-' || CAST(org_id AS TEXT) || '-' || - CASE - WHEN SUBSTR(login, 1, 3) = 'sa-' THEN SUBSTR(login, 4) - ELSE login - END - WHERE login IS NOT NULL AND is_service_account = 1;`, - ) + _, err = p.sess.Exec(` + UPDATE ` + p.dialect.Quote("user") + ` + SET login = 'sa-' || CAST(org_id AS TEXT) || '-' || + CASE + WHEN SUBSTR(login, 1, 3) = 'sa-' THEN SUBSTR(login, 4) + ELSE login + END + WHERE login IS NOT NULL + AND is_service_account = 1 + AND login NOT LIKE 'sa-' || CAST(org_id AS TEXT) || '-%'; + `) default: return fmt.Errorf("dialect not supported: %s", p.dialect) } diff --git a/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go b/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go index 043c98522453a..17330453cce43 100644 --- a/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go +++ b/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go @@ -15,6 +15,9 @@ import ( ) func TestIntegrationServiceAccountMigration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } // Run initial migration to have a working DB x := setupTestDB(t) @@ -211,6 +214,43 @@ func TestIntegrationServiceAccountMigration(t *testing.T) { }, }, }, + { + desc: "avoid reapply of migration", + serviceAccounts: []*user.User{ + { + ID: 11, + UID: "u11", + Name: "sa-1-extsvc-bug", + Login: "sa-1-extsvc-bug", + Email: "sa-1-extsvc-bug@org.com", + OrgID: 1, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + { + ID: 12, + UID: "u12", + Name: "sa-2-extsvc-bug2", + Login: "sa-2-extsvc-bug2", + Email: 
"sa-2-extsvc-bug2@org.com", + OrgID: 2, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + }, + wantServiceAccounts: []*user.User{ + { + ID: 11, + Login: "sa-1-extsvc-bug", + }, + { + ID: 12, + Login: "sa-2-extsvc-bug2", + }, + }, + }, } for _, tc := range testCases { diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 96ded83193b5b..0293039c46606 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -421,6 +421,8 @@ type Cfg struct { // LiveHAEngine is a type of engine to use to achieve HA with Grafana Live. // Zero value means in-memory single node setup. LiveHAEngine string + // LiveHAPRefix is a prefix for HA engine keys. + LiveHAPrefix string // LiveHAEngineAddress is a connection address for Live HA engine. LiveHAEngineAddress string LiveHAEnginePassword string @@ -1983,6 +1985,7 @@ func (cfg *Cfg) readLiveSettings(iniFile *ini.File) error { default: return fmt.Errorf("unsupported live HA engine type: %s", cfg.LiveHAEngine) } + cfg.LiveHAPrefix = section.Key("ha_prefix").MustString("") cfg.LiveHAEngineAddress = section.Key("ha_engine_address").MustString("127.0.0.1:6379") cfg.LiveHAEnginePassword = section.Key("ha_engine_password").MustString("") diff --git a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go index 86c9f4cf3b0b3..27d52ea5730ea 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go +++ b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go @@ -166,6 +166,7 @@ func buildLogAnalyticsQuery(query backend.DataQuery, dsInfo types.DatasourceInfo timeColumn := "" azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics basicLogsQuery := false + basicLogsEnabled := false resultFormat := ParseResultFormat(azureLogAnalyticsTarget.ResultFormat, dataquery.AzureQueryTypeAzureLogAnalytics) @@ -177,8 +178,12 @@ func buildLogAnalyticsQuery(query backend.DataQuery, dsInfo 
types.DatasourceInfo resources, resourceOrWorkspace := retrieveResources(azureLogAnalyticsTarget) appInsightsQuery = appInsightsRegExp.Match([]byte(resourceOrWorkspace)) + if value, ok := dsInfo.JSONData["basicLogsEnabled"].(bool); ok { + basicLogsEnabled = value + } + if basicLogsQueryFlag { - if meetsBasicLogsCriteria, meetsBasicLogsCriteriaErr := meetsBasicLogsCriteria(resources, fromAlert); meetsBasicLogsCriteriaErr != nil { + if meetsBasicLogsCriteria, meetsBasicLogsCriteriaErr := meetsBasicLogsCriteria(resources, fromAlert, basicLogsEnabled); meetsBasicLogsCriteriaErr != nil { return nil, meetsBasicLogsCriteriaErr } else { basicLogsQuery = meetsBasicLogsCriteria @@ -386,7 +391,7 @@ func addDataLinksToFields(query *AzureLogAnalyticsQuery, azurePortalBaseUrl stri } func addTraceDataLinksToFields(query *AzureLogAnalyticsQuery, azurePortalBaseUrl string, frame *data.Frame, dsInfo types.DatasourceInfo) error { - tracesUrl, err := getTracesQueryUrl(query.Resources, azurePortalBaseUrl) + tracesUrl, err := getTracesQueryUrl(azurePortalBaseUrl) if err != nil { return err } @@ -545,20 +550,12 @@ func getQueryUrl(query string, resources []string, azurePortalUrl string, timeRa return portalUrl, nil } -func getTracesQueryUrl(resources []string, azurePortalUrl string) (string, error) { +func getTracesQueryUrl(azurePortalUrl string) (string, error) { portalUrl := azurePortalUrl portalUrl += "/#view/AppInsightsExtension/DetailsV2Blade/ComponentId~/" - resource := struct { - ResourceId string `json:"ResourceId"` - }{ - resources[0], - } - resourceMarshalled, err := json.Marshal(resource) - if err != nil { - return "", fmt.Errorf("failed to marshal application insights resource: %s", err) - } - portalUrl += url.PathEscape(string(resourceMarshalled)) + resource := "%7B%22ResourceId%22:%22${__data.fields.resource:percentencode}%22%7D" + portalUrl += resource portalUrl += "/DataModel~/" // We're making use of data link variables to select the necessary fields in the frontend diff 
--git a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go index a02ea2046eb6a..8cc975da67bbd 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go +++ b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go @@ -83,15 +83,6 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { t.Errorf("failed to create fake client") } - dsInfo := types.DatasourceInfo{ - Services: map[string]types.DatasourceService{ - "Azure Monitor": {URL: svr.URL, HTTPClient: client}, - }, - JSONData: map[string]any{ - "azureLogAnalyticsSameAs": false, - }, - } - appInsightsRegExp, err := regexp.Compile("(?i)providers/microsoft.insights/components") if err != nil { t.Error("failed to compile reg: %w", err) @@ -100,6 +91,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { tests := []struct { name string fromAlert bool + basicLogsEnabled bool queryModel backend.DataQuery azureLogAnalyticsQuery *AzureLogAnalyticsQuery Err require.ErrorAssertionFunc @@ -332,8 +324,9 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Err: require.NoError, }, { - name: "Basic Logs query", - fromAlert: false, + name: "Basic Logs query", + fromAlert: false, + basicLogsEnabled: true, queryModel: backend.DataQuery{ JSON: []byte(fmt.Sprintf(`{ "queryType": "Azure Log Analytics", @@ -377,8 +370,9 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Err: require.NoError, }, { - name: "Basic Logs query with multiple resources", - fromAlert: false, + name: "Basic Logs query with multiple resources", + fromAlert: false, + basicLogsEnabled: true, queryModel: backend.DataQuery{ JSON: []byte(fmt.Sprintf(`{ "queryType": "Azure Log Analytics", @@ -399,8 +393,9 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Err: require.Error, }, { - name: "Basic Logs query with non LA workspace resources", - fromAlert: false, + name: "Basic Logs query with non LA workspace resources", + fromAlert: 
false, + basicLogsEnabled: true, queryModel: backend.DataQuery{ JSON: []byte(fmt.Sprintf(`{ "queryType": "Azure Log Analytics", @@ -421,8 +416,9 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Err: require.Error, }, { - name: "Basic Logs query from alerts", - fromAlert: true, + name: "Basic Logs query from alerts", + fromAlert: true, + basicLogsEnabled: true, queryModel: backend.DataQuery{ JSON: []byte(fmt.Sprintf(`{ "queryType": "Azure Log Analytics", @@ -442,6 +438,30 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { azureLogAnalyticsQuery: nil, Err: require.Error, }, + { + name: "Basic Logs query fails if basicLogsEnabled is set to false", + fromAlert: true, + basicLogsEnabled: false, + queryModel: backend.DataQuery{ + JSON: []byte(fmt.Sprintf(`{ + "queryType": "Azure Log Analytics", + "azureLogAnalytics": { + "resources": ["/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"], + "query": "Perf", + "resultFormat": "%s", + "dashboardTime": true, + "timeColumn": "TimeGenerated", + "basicLogsQuery": true + } + }`, dataquery.ResultFormatTimeSeries)), + RefID: "A", + TimeRange: timeRange, + QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + }, + azureLogAnalyticsQuery: nil, + Err: require.Error, + }, + { name: "Detects App Insights resource queries", fromAlert: false, @@ -524,6 +544,15 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + dsInfo := types.DatasourceInfo{ + Services: map[string]types.DatasourceService{ + "Azure Monitor": {URL: svr.URL, HTTPClient: client}, + }, + JSONData: map[string]any{ + "azureLogAnalyticsSameAs": false, + "basicLogsEnabled": tt.basicLogsEnabled, // Use the value from the current test case + }, + } query, err := buildLogAnalyticsQuery(tt.queryModel, dsInfo, appInsightsRegExp, tt.fromAlert) tt.Err(t, err) if diff := cmp.Diff(tt.azureLogAnalyticsQuery, query); diff != "" { diff --git 
a/pkg/tsdb/azuremonitor/loganalytics/traces.go b/pkg/tsdb/azuremonitor/loganalytics/traces.go index fbc8e96179913..f944d301081b5 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/traces.go +++ b/pkg/tsdb/azuremonitor/loganalytics/traces.go @@ -114,8 +114,8 @@ func buildTracesQuery(operationId string, parentSpanID *string, traceTypes []str `| extend serviceName = cloud_RoleName` + `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` propertiesQuery := fmt.Sprintf(`| extend tags = %s`, propertiesFunc) - projectClause := `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + projectClause := `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc` return baseQuery + whereClause + parentWhereClause + propertiesStaticQuery + errorProperty + propertiesQuery + filtersClause + projectClause } diff --git a/pkg/tsdb/azuremonitor/loganalytics/traces_test.go b/pkg/tsdb/azuremonitor/loganalytics/traces_test.go index da069c41db461..3e365061c6d3c 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/traces_test.go +++ b/pkg/tsdb/azuremonitor/loganalytics/traces_test.go @@ -135,8 +135,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, 
operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -150,8 +150,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true trace` + `| where (operation_Id != '' and operation_Id == 'test-op-id') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'test-op-id')` + @@ -163,8 +163,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project 
startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"test-op-id\"", @@ -210,8 +210,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -225,8 +225,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project 
startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true trace` + `| where (operation_Id != '' and operation_Id == 'test-op-id') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'test-op-id')` + @@ -238,8 +238,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"test-op-id\"", @@ -282,8 +282,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -297,8 +297,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,traces` + `| where (operation_Id != '' and operation_Id == '${__data.fields.traceID}') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == '${__data.fields.traceID}')` + @@ -310,8 +310,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"${__data.fields.traceID}\"", @@ -357,8 +357,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -372,8 +372,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,traces` + `| where (operation_Id != '' and operation_Id == 'test-op-id') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'test-op-id')` + @@ -385,8 +385,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"test-op-id\"", @@ -435,8 +435,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId in ("test-app-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -451,8 +451,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId in ("test-app-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,traces` + `| where (operation_Id != '' and operation_Id == 'test-op-id') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'test-op-id')` + @@ -465,8 +465,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId in ("test-app-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"test-op-id\"", @@ -515,8 +515,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId !in ("test-app-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -531,8 +531,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId !in ("test-app-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,traces` + `| where (operation_Id != '' and operation_Id == 'test-op-id') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'test-op-id')` + @@ -545,8 +545,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId !in ("test-app-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"test-op-id\"", @@ -595,8 +595,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId !in ("test-app-id")| where clientId in ("test-client-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -611,8 +611,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId !in ("test-app-id")| where clientId in ("test-client-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,traces` + `| where (operation_Id != '' and operation_Id == 'test-op-id') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'test-op-id')` + @@ -625,8 +625,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + `| where appId !in ("test-app-id")| where clientId in ("test-client-id")` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"test-op-id\"", @@ -669,8 +669,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -684,8 +684,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true availabilityResults,customEvents,dependencies,exceptions,pageViews,requests` + `| where (operation_Id != '' and operation_Id == '${__data.fields.traceID}') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == '${__data.fields.traceID}')` + @@ -697,8 +697,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"${__data.fields.traceID}\"", @@ -744,8 +744,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -759,8 +759,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true availabilityResults,customEvents,dependencies,exceptions,pageViews,requests` + `| where (operation_Id != '' and operation_Id == 'test-op-id') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'test-op-id')` + @@ -772,8 +772,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union availabilityResults,\n" + "customEvents,\n" + "dependencies,\n" + "exceptions,\n" + "pageViews,\n" + "requests,\n" + "traces\n" + "| where operation_Id == \"test-op-id\"", @@ -861,8 +861,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1"}, TimeRange: timeRange, @@ -875,8 +875,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true 
availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').customEvents,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').dependencies,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').exceptions,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').pageViews,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').requests` + `| where (operation_Id != '' and operation_Id == 'op-id-multi') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'op-id-multi')` + @@ -887,8 +887,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, 
itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union *,\n" + "app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,\n" + @@ -937,8 +937,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, 
serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r2"}, TimeRange: timeRange, @@ -951,8 +951,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true 
availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').customEvents,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').dependencies,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').exceptions,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').pageViews,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').requests` + `| where (operation_Id != '' and operation_Id == '${__data.fields.traceID}') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == '${__data.fields.traceID}')` + @@ -963,8 +963,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| 
project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union *,\n" + "app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,\n" + @@ -1016,8 +1016,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, 
parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r2"}, TimeRange: timeRange, @@ -1030,8 +1030,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true 
availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').customEvents,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').dependencies,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').exceptions,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').pageViews,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').requests` + `| where (operation_Id != '' and operation_Id == 'op-id-multi') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'op-id-multi')` + @@ -1042,8 +1042,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, 
itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union *,\n" + "app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,\n" + @@ -1095,8 +1095,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, 
serviceTags, tags, itemId, resource` + `| order by startTime asc`, Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r2"}, TimeRange: timeRange, @@ -1109,8 +1109,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceParentExploreQuery: `set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true 
availabilityResults,customEvents,dependencies,exceptions,pageViews,requests,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').customEvents,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').dependencies,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').exceptions,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').pageViews,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').requests,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r3').availabilityResults,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r3').customEvents,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r3').dependencies,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r3').exceptions,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r3').pageViews,app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r3').requests` + `| where (operation_Id != '' and operation_Id == 'op-id-non-overlapping') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == 'op-id-non-overlapping')` + @@ -1121,8 +1121,8 @@ func TestBuildAppInsightsQuery(t *testing.T) { `| extend serviceName = cloud_RoleName| extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName)` + `| extend error = todynamic(iff(itemType == "exception", "true", "false"))` + `| extend tags = 
bag_merge(bag_pack_columns(appId,appName,application_Version,assembly,client_Browser,client_City,client_CountryOrRegion,client_IP,client_Model,client_OS,client_StateOrProvince,client_Type,data,details,duration,error,handledAt,iKey,id,innermostAssembly,innermostMessage,innermostMethod,innermostType,itemCount,itemId,itemType,location,message,method,name,operation_Id,operation_Name,operation_ParentId,operation_SyntheticSource,outerAssembly,outerMessage,outerMethod,outerType,performanceBucket,problemId,resultCode,sdkVersion,session_Id,severityLevel,size,source,success,target,timestamp,type,url,user_AccountId,user_AuthenticatedId,user_Id), customDimensions, customMeasurements)` + - `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp` + - `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId` + + `| project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp, resource = _ResourceId` + + `| project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId, resource` + `| order by startTime asc`, TraceLogsExploreQuery: "union *,\n" + "app('/subscriptions/test-sub/resourcegroups/test-rg/providers/microsoft.insights/components/r2').availabilityResults,\n" + diff --git a/pkg/tsdb/azuremonitor/loganalytics/utils.go b/pkg/tsdb/azuremonitor/loganalytics/utils.go index 5f1a91f24335a..a56c0d7df21f0 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/utils.go +++ b/pkg/tsdb/azuremonitor/loganalytics/utils.go @@ -40,10 +40,11 @@ func AddConfigLinks(frame data.Frame, dl string, title *string) data.Frame { } // Check whether a query should be handled as basic logs query -// 2. resource selected is a workspace -// 3. query is not an alerts query -// 4. 
number of selected resources is exactly one -func meetsBasicLogsCriteria(resources []string, fromAlert bool) (bool, error) { +// 1. resource selected is a workspace +// 2. query is not an alerts query +// 3. number of selected resources is exactly one +// 4. the ds toggle is set to true +func meetsBasicLogsCriteria(resources []string, fromAlert bool, basicLogsEnabled bool) (bool, error) { if fromAlert { return false, fmt.Errorf("basic Logs queries cannot be used for alerts") } @@ -52,7 +53,11 @@ func meetsBasicLogsCriteria(resources []string, fromAlert bool) (bool, error) { } if !strings.Contains(strings.ToLower(resources[0]), "microsoft.operationalinsights/workspaces") { - return false, fmt.Errorf("basic Logs queries may only be run against Log Analytics workspaces") + return false, fmt.Errorf("basic logs queries may only be run against Log Analytics workspaces") + } + + if !basicLogsEnabled { + return false, fmt.Errorf("basic Logs queries are disabled for this data source") } return true, nil diff --git a/pkg/tsdb/cloud-monitoring/time_series_filter.go b/pkg/tsdb/cloud-monitoring/time_series_filter.go index 8725c7223df98..7642191e0484d 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_filter.go +++ b/pkg/tsdb/cloud-monitoring/time_series_filter.go @@ -37,6 +37,9 @@ func parseTimeSeriesResponse(queryRes *backend.DataResponse, "groupBys": groupBys, }, } + // Ensure the time field is named correctly + timeField := frame.Fields[0] + timeField.Name = data.TimeSeriesTimeFieldName var err error frames, err = appendFrames(frames, series, 0, defaultMetricName, seriesLabels, frame, query) diff --git a/pkg/tsdb/cloud-monitoring/time_series_filter_test.go b/pkg/tsdb/cloud-monitoring/time_series_filter_test.go index 031d40a03471a..c44949e6c5154 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_filter_test.go +++ b/pkg/tsdb/cloud-monitoring/time_series_filter_test.go @@ -11,7 +11,7 @@ import ( "time" "github.com/grafana/grafana-plugin-sdk-go/backend" - sdkdata 
"github.com/grafana/grafana-plugin-sdk-go/data" + gdata "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery" "github.com/stretchr/testify/assert" @@ -429,7 +429,7 @@ func TestTimeSeriesFilter(t *testing.T) { frames := res.Frames custom, ok := frames[0].Meta.Custom.(map[string]any) require.True(t, ok) - labels, ok := custom["labels"].(sdkdata.Labels) + labels, ok := custom["labels"].(gdata.Labels) require.True(t, ok) assert.Equal(t, "114250375703598695", labels["resource.label.instance_id"]) }) @@ -459,12 +459,12 @@ func TestTimeSeriesFilter(t *testing.T) { require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query", service.logger)) require.NotNil(t, res.Frames[0].Meta) - assert.Equal(t, sdkdata.FrameMeta{ + assert.Equal(t, gdata.FrameMeta{ ExecutedQueryString: "test_query", Custom: map[string]any{ "groupBys": []string{"test_group_by"}, "alignmentPeriod": "", - "labels": sdkdata.Labels{ + "labels": gdata.Labels{ "resource.label.project_id": "grafana-prod", "resource.type": "https_lb_rule", }, @@ -482,12 +482,12 @@ func TestTimeSeriesFilter(t *testing.T) { require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query", service.logger)) require.NotNil(t, res.Frames[0].Meta) - assert.Equal(t, sdkdata.FrameMeta{ + assert.Equal(t, gdata.FrameMeta{ ExecutedQueryString: "test_query", Custom: map[string]any{ "groupBys": []string{"test_group_by"}, "alignmentPeriod": "", - "labels": sdkdata.Labels{ + "labels": gdata.Labels{ "resource.label.project_id": "grafana-demo", "resource.type": "global", }, @@ -505,12 +505,12 @@ func TestTimeSeriesFilter(t *testing.T) { require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, 
data, "test_query", service.logger)) require.NotNil(t, res.Frames[0].Meta) - assert.Equal(t, sdkdata.FrameMeta{ + assert.Equal(t, gdata.FrameMeta{ ExecutedQueryString: "test_query", Custom: map[string]any{ "groupBys": []string{"test_group_by"}, "alignmentPeriod": "", - "labels": sdkdata.Labels{ + "labels": gdata.Labels{ "resource.label.project_id": "grafana-prod", "resource.type": "https_lb_rule", }, @@ -544,6 +544,22 @@ func TestTimeSeriesFilter(t *testing.T) { assert.Contains(t, value, `zone=monitoring.regex.full_match("us-central1-a~")`) }) }) + + t.Run("time field is appropriately named", func(t *testing.T) { + res := &backend.DataResponse{} + data, err := loadTestFile("./test-data/4-series-response-distribution-explicit.json") + require.NoError(t, err) + query := &cloudMonitoringTimeSeriesList{ + parameters: &dataquery.TimeSeriesList{ + ProjectName: "test-proj", + }, + aliasBy: "", + } + err = query.parseResponse(res, data, "", service.logger) + require.NoError(t, err) + frames := res.Frames + assert.Equal(t, gdata.TimeSeriesTimeFieldName, frames[0].Fields[0].Name) + }) } func loadTestFile(path string) (cloudMonitoringResponse, error) { diff --git a/pkg/tsdb/cloud-monitoring/time_series_query.go b/pkg/tsdb/cloud-monitoring/time_series_query.go index 4a7f29f72069c..5c25900af7c46 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_query.go +++ b/pkg/tsdb/cloud-monitoring/time_series_query.go @@ -75,6 +75,9 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b return err } } + // Ensure the time field is named correctly + timeField := frame.Fields[0] + timeField.Name = data.TimeSeriesTimeFieldName } if len(response.TimeSeriesData) > 0 { dl := timeSeriesQuery.buildDeepLink() diff --git a/pkg/tsdb/cloud-monitoring/time_series_query_test.go b/pkg/tsdb/cloud-monitoring/time_series_query_test.go index 8ddfc183366d3..4097630f43d93 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_query_test.go +++ 
b/pkg/tsdb/cloud-monitoring/time_series_query_test.go @@ -148,4 +148,26 @@ func TestTimeSeriesQuery(t *testing.T) { query := &cloudMonitoringTimeSeriesQuery{parameters: &dataquery.TimeSeriesQuery{GraphPeriod: strPtr("disabled")}} assert.Equal(t, query.appendGraphPeriod(&backend.QueryDataRequest{Queries: []backend.DataQuery{{}}}), "") }) + + t.Run("time field is appropriately named", func(t *testing.T) { + res := &backend.DataResponse{} + data, err := loadTestFile("./test-data/7-series-response-mql.json") + require.NoError(t, err) + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + query := &cloudMonitoringTimeSeriesQuery{ + parameters: &dataquery.TimeSeriesQuery{ + ProjectName: "test-proj", + Query: "test-query", + }, + aliasBy: "", + timeRange: backend.TimeRange{ + From: fromStart, + To: fromStart.Add(34 * time.Minute), + }, + } + err = query.parseResponse(res, data, "", service.logger) + require.NoError(t, err) + frames := res.Frames + assert.Equal(t, gdata.TimeSeriesTimeFieldName, frames[0].Fields[0].Name) + }) } diff --git a/public/app/features/alerting/unified/Silences.test.tsx b/public/app/features/alerting/unified/Silences.test.tsx index d3ce1e15afb1d..34641b3f5b93b 100644 --- a/public/app/features/alerting/unified/Silences.test.tsx +++ b/public/app/features/alerting/unified/Silences.test.tsx @@ -305,6 +305,14 @@ describe('Silence create/edit', () => { TEST_TIMEOUT ); + it('works when previewing alerts with spaces in label name', async () => { + renderSilences(`${baseUrlPath}?alertmanager=${GRAFANA_RULES_SOURCE_NAME}`); + + await enterSilenceLabel(0, 'label with spaces', MatcherOperator.equal, 'value with spaces'); + + expect((await screen.findAllByTestId('row'))[0]).toBeInTheDocument(); + }); + it('shows an error when existing silence cannot be found', async () => { renderSilences('/alerting/silence/foo-bar/edit'); diff --git a/public/app/features/alerting/unified/api/alertmanagerApi.ts 
b/public/app/features/alerting/unified/api/alertmanagerApi.ts index f519acd1259fd..2d29b950c9be5 100644 --- a/public/app/features/alerting/unified/api/alertmanagerApi.ts +++ b/public/app/features/alerting/unified/api/alertmanagerApi.ts @@ -58,7 +58,9 @@ export const alertmanagerApi = alertingApi.injectEndpoints({ // TODO Add support for active, silenced, inhibited, unprocessed filters const filterMatchers = filter?.matchers ?.filter((matcher) => matcher.name && matcher.value) - .map((matcher) => `${matcher.name}${matcherToOperator(matcher)}${wrapWithQuotes(matcher.value)}`); + .map( + (matcher) => `${wrapWithQuotes(matcher.name)}${matcherToOperator(matcher)}${wrapWithQuotes(matcher.value)}` + ); const { silenced, inhibited, unprocessed, active } = filter || {}; diff --git a/public/app/features/alerting/unified/components/rule-editor/QueryWrapper.tsx b/public/app/features/alerting/unified/components/rule-editor/QueryWrapper.tsx index c449e07b446c2..e5e6014e2eca7 100644 --- a/public/app/features/alerting/unified/components/rule-editor/QueryWrapper.tsx +++ b/public/app/features/alerting/unified/components/rule-editor/QueryWrapper.tsx @@ -14,6 +14,7 @@ import { } from '@grafana/data'; import { DataQuery } from '@grafana/schema'; import { GraphThresholdsStyleMode, Icon, InlineField, Input, Tooltip, useStyles2, Stack } from '@grafana/ui'; +import { logInfo } from 'app/features/alerting/unified/Analytics'; import { QueryEditorRow } from 'app/features/query/components/QueryEditorRow'; import { AlertQuery } from 'app/types/unified-alerting-dto'; @@ -81,6 +82,19 @@ export const QueryWrapper = ({ ...cloneDeep(query.model), }; + if (queryWithDefaults.datasource && queryWithDefaults.datasource?.uid !== query.datasourceUid) { + logInfo('rule query datasource and datasourceUid mismatch', { + queryModelDatasourceUid: queryWithDefaults.datasource?.uid || '', + queryDatasourceUid: query.datasourceUid, + datasourceType: query.model.datasource?.type || 'unknown type', + }); + // There 
are occasions when the rule query model datasource UID and the datasourceUid do not match + // It's unclear as to why this happens, but we need better visibility on why this happens, + // so we log when it does, and make the query model datasource UID match the datasource UID + // We already elsewhere work under the assumption that the datasource settings are fetched from the datasourceUid property + queryWithDefaults.datasource.uid = query.datasourceUid; + } + function SelectingDataSourceTooltip() { const styles = useStyles2(getStyles); return ( diff --git a/public/app/features/alerting/unified/components/silences/SilencedInstancesPreview.tsx b/public/app/features/alerting/unified/components/silences/SilencedInstancesPreview.tsx index f527bf76a3d15..f2452debfae4f 100644 --- a/public/app/features/alerting/unified/components/silences/SilencedInstancesPreview.tsx +++ b/public/app/features/alerting/unified/components/silences/SilencedInstancesPreview.tsx @@ -66,7 +66,7 @@ export const SilencedInstancesPreview = ({ amSourceName, matchers: inputMatchers if (isError) { return ( - Error occured when generating preview of affected alerts. Are your matchers valid? + Error occurred when generating preview of affected alerts. Are your matchers valid? 
); } diff --git a/public/app/features/alerting/unified/hooks/useCombinedRule.ts b/public/app/features/alerting/unified/hooks/useCombinedRule.ts index 705ee589e62e5..006c3ecefc9b1 100644 --- a/public/app/features/alerting/unified/hooks/useCombinedRule.ts +++ b/public/app/features/alerting/unified/hooks/useCombinedRule.ts @@ -204,12 +204,7 @@ export function useCombinedRule({ ruleIdentifier }: { ruleIdentifier: RuleIdenti const [ fetchRulerRuleGroup, - { - currentData: rulerRuleGroup, - isLoading: isLoadingRulerGroup, - error: rulerRuleGroupError, - isUninitialized: rulerRuleGroupUninitialized, - }, + { currentData: rulerRuleGroup, isLoading: isLoadingRulerGroup, error: rulerRuleGroupError }, ] = alertRuleApi.endpoints.rulerRuleGroup.useLazyQuery(); useEffect(() => { @@ -251,7 +246,7 @@ export function useCombinedRule({ ruleIdentifier }: { ruleIdentifier: RuleIdenti }, [ruleIdentifier, ruleSourceName, promRuleNs, rulerRuleGroup, ruleSource]); return { - loading: isLoadingDsFeatures || isLoadingPromRules || isLoadingRulerGroup || rulerRuleGroupUninitialized, + loading: isLoadingDsFeatures || isLoadingPromRules || isLoadingRulerGroup, error: ruleLocationError ?? promRuleNsError ?? 
rulerRuleGroupError, result: rule, }; diff --git a/public/app/features/alerting/unified/mocks/server/handlers/alertmanagers.ts b/public/app/features/alerting/unified/mocks/server/handlers/alertmanagers.ts index e789139f65bef..21805947c664c 100644 --- a/public/app/features/alerting/unified/mocks/server/handlers/alertmanagers.ts +++ b/public/app/features/alerting/unified/mocks/server/handlers/alertmanagers.ts @@ -10,8 +10,37 @@ export const grafanaAlertingConfigurationStatusHandler = ( response = defaultGrafanaAlertingConfigurationStatusResponse ) => http.get('/api/v1/ngalert', () => HttpResponse.json(response)); +const getInvalidMatcher = (matchers: string[]) => { + return matchers.find((matcher) => { + const split = matcher.split('='); + try { + // Try and parse as JSON, as this will fail if + // we've failed to wrap the label value in quotes + // (e.g. `foo space` can't be parsed, but `"foo space"` can) + JSON.parse(split[0]); + return false; + } catch (e) { + return true; + } + }); +}; + export const alertmanagerAlertsListHandler = () => - http.get<{ datasourceUid: string }>('/api/alertmanager/:datasourceUid/api/v2/alerts', ({ params }) => { + http.get<{ datasourceUid: string }>('/api/alertmanager/:datasourceUid/api/v2/alerts', ({ params, request }) => { + const matchers = new URL(request.url).searchParams.getAll('filter'); + + const invalidMatcher = getInvalidMatcher(matchers); + + if (invalidMatcher) { + return HttpResponse.json( + { + message: `bad matcher format: ${invalidMatcher}: unable to retrieve alerts`, + traceID: '', + }, + { status: 400 } + ); + } + if (params.datasourceUid === MOCK_DATASOURCE_UID_BROKEN_ALERTMANAGER) { return HttpResponse.json({ traceId: '' }, { status: 502 }); } diff --git a/public/app/features/explore/TraceView/components/utils/filter-spans.tsx b/public/app/features/explore/TraceView/components/utils/filter-spans.tsx index c141ca43ce76c..ca3482f63bd4f 100644 --- 
a/public/app/features/explore/TraceView/components/utils/filter-spans.tsx +++ b/public/app/features/explore/TraceView/components/utils/filter-spans.tsx @@ -76,7 +76,7 @@ export function getQueryMatches(query: string, spans: TraceSpan[] | TNil) { const isTextInKeyValues = (kvs: TraceKeyValuePair[]) => kvs ? kvs.some((kv) => { - return isTextInQuery(queryParts, kv.key) || isTextInQuery(queryParts, kv.value.toString()); + return isTextInQuery(queryParts, kv.key) || isTextInQuery(queryParts, getStringValue(kv.value)); }) : false; @@ -157,13 +157,17 @@ const checkKeyForMatch = (tagKey: string, key: string) => { }; const checkKeyAndValueForMatch = (tag: Tag, kv: TraceKeyValuePair) => { - return tag.key === kv.key.toString() && tag.value === kv.value.toString() ? true : false; + return tag.key === kv.key && tag.value === getStringValue(kv.value); }; const getReturnValue = (operator: string, found: boolean) => { return operator === '=' ? found : !found; }; +const getStringValue = (value: string | number | boolean | undefined) => { + return value ? 
value.toString() : ''; +}; + const getServiceNameMatches = (spans: TraceSpan[], searchProps: SearchProps) => { return spans.filter((span: TraceSpan) => { return searchProps.serviceNameOperator === '=' diff --git a/public/app/features/variables/pickers/OptionsPicker/reducer.test.ts b/public/app/features/variables/pickers/OptionsPicker/reducer.test.ts index 24bfeb094a915..2b6a26cd0ce25 100644 --- a/public/app/features/variables/pickers/OptionsPicker/reducer.test.ts +++ b/public/app/features/variables/pickers/OptionsPicker/reducer.test.ts @@ -860,6 +860,47 @@ describe('optionsPickerReducer', () => { }); }); + describe('when searching non-latin chars', () => { + it('should skip fuzzy matching and fall back to substring', () => { + const searchQuery = '水'; + + const options: VariableOption[] = 'A水'.split(' ').map((v) => ({ + selected: false, + text: v, + value: v, + })); + + const expect: VariableOption[] = [ + { + selected: false, + text: '> ' + searchQuery, + value: searchQuery, + }, + ].concat( + 'A水'.split(' ').map((v) => ({ + selected: false, + text: v, + value: v, + })) + ); + + const { initialState } = getVariableTestContext({ + queryValue: searchQuery, + }); + + reducerTester() + .givenReducer(optionsPickerReducer, cloneDeep(initialState)) + .whenActionIsDispatched(updateOptionsAndFilter(options)) + .thenStateShouldEqual({ + ...cloneDeep(initialState), + options: expect, + selectedValues: [], + queryValue: searchQuery, + highlightIndex: 1, + }); + }); + }); + describe('when large data for updateOptionsFromSearch is dispatched and variable has searchFilter', () => { it('then state should be correct', () => { const searchQuery = '__searchFilter'; diff --git a/public/app/features/variables/pickers/OptionsPicker/reducer.ts b/public/app/features/variables/pickers/OptionsPicker/reducer.ts index 090517d800b39..5525babc55b42 100644 --- a/public/app/features/variables/pickers/OptionsPicker/reducer.ts +++ b/public/app/features/variables/pickers/OptionsPicker/reducer.ts @@ 
-8,6 +8,9 @@ import { applyStateChanges } from '../../../../core/utils/applyStateChanges'; import { ALL_VARIABLE_VALUE } from '../../constants'; import { isMulti, isQuery } from '../../guard'; +// https://catonmat.net/my-favorite-regex :) +const REGEXP_NON_ASCII = /[^ -~]/gm; + export interface ToggleOption { option?: VariableOption; forceSelect: boolean; @@ -251,6 +254,8 @@ const optionsPickerSlice = createSlice({ if (needle === '') { opts = action.payload; + } else if (REGEXP_NON_ASCII.test(needle)) { + opts = action.payload.filter((o) => o.text.includes(needle)); } else { // with current API, not seeing a way to cache this on state using action.payload's uniqueness // since it's recreated and includes selected state on each item :( diff --git a/public/app/plugins/datasource/azuremonitor/azure_monitor/azure_monitor_datasource.ts b/public/app/plugins/datasource/azuremonitor/azure_monitor/azure_monitor_datasource.ts index 7b24a99eccd76..6b4eb4110d6d7 100644 --- a/public/app/plugins/datasource/azuremonitor/azure_monitor/azure_monitor_datasource.ts +++ b/public/app/plugins/datasource/azuremonitor/azure_monitor/azure_monitor_datasource.ts @@ -231,14 +231,15 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend) => { diff --git a/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.test.ts b/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.test.ts index fddceba791867..f6d0f53e92c40 100644 --- a/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.test.ts +++ b/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.test.ts @@ -142,6 +142,38 @@ describe('AzureMonitorUrlBuilder', () => { '/subscriptions/sub/resource-uri/resource/providers/microsoft.insights/metricNamespaces?api-version=2017-05-01-preview®ion=global' ); }); + + it('builds a getMetricNamesnamespace url with a specific region', () => { + const url = UrlBuilder.buildAzureMonitorGetMetricNamespacesUrl( + '', + 
'2017-05-01-preview', + { + resourceUri: '/subscriptions/sub/resource-uri/resource', + }, + false, + templateSrv, + 'testregion' + ); + expect(url).toBe( + '/subscriptions/sub/resource-uri/resource/providers/microsoft.insights/metricNamespaces?api-version=2017-05-01-preview&region=testregion' + ); + }); + + it('builds a getMetricNamesnamespace url with a specific region (overriding global)', () => { + const url = UrlBuilder.buildAzureMonitorGetMetricNamespacesUrl( + '', + '2017-05-01-preview', + { + resourceUri: '/subscriptions/sub/resource-uri/resource', + }, + true, + templateSrv, + 'testregion' + ); + expect(url).toBe( + '/subscriptions/sub/resource-uri/resource/providers/microsoft.insights/metricNamespaces?api-version=2017-05-01-preview&region=testregion' + ); + }); }); describe('when a resource uri and metric namespace is provided', () => { diff --git a/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.ts b/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.ts index 15230fb526ec8..295fcb85ea3c3 100644 --- a/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.ts +++ b/public/app/plugins/datasource/azuremonitor/azure_monitor/url_builder.ts @@ -51,7 +51,8 @@ export default class UrlBuilder { apiVersion: string, query: GetMetricNamespacesQuery, globalRegion: boolean, - templateSrv: TemplateSrv + templateSrv: TemplateSrv, + region?: string ) { let resourceUri: string; @@ -68,7 +69,7 @@ export default class UrlBuilder { } return `${baseUrl}${resourceUri}/providers/microsoft.insights/metricNamespaces?api-version=${apiVersion}${ - globalRegion ? '&region=global' : '' + region ? `&region=${region}` : globalRegion ? 
'&region=global' : '' }`; } diff --git a/public/app/plugins/datasource/azuremonitor/components/LogsQueryEditor/LogsQueryEditor.tsx b/public/app/plugins/datasource/azuremonitor/components/LogsQueryEditor/LogsQueryEditor.tsx index 77da2ba779364..83fddc94b0493 100644 --- a/public/app/plugins/datasource/azuremonitor/components/LogsQueryEditor/LogsQueryEditor.tsx +++ b/public/app/plugins/datasource/azuremonitor/components/LogsQueryEditor/LogsQueryEditor.tsx @@ -19,9 +19,7 @@ import QueryField from './QueryField'; import { TimeManagement } from './TimeManagement'; import { setBasicLogsQuery, setFormatAs, setKustoQuery } from './setQueryValue'; import useMigrations from './useMigrations'; -import { calculateTimeRange, shouldShowBasicLogsToggle } from './utils'; - -const MAX_DATA_RETENTION_DAYS = 8; // limit is only for basic logs +import { shouldShowBasicLogsToggle } from './utils'; interface LogsQueryEditorProps { query: AzureMonitorQuery; @@ -52,7 +50,6 @@ const LogsQueryEditor = ({ const [showBasicLogsToggle, setShowBasicLogsToggle] = useState( shouldShowBasicLogsToggle(query.azureLogAnalytics?.resources || [], basicLogsEnabled) ); - const [showDataRetentionWarning, setShowDataRetentionWarning] = useState(false); const [dataIngestedWarning, setDataIngestedWarning] = useState(null); const templateSrv = getTemplateSrv(); const from = templateSrv?.replace('$__from'); @@ -96,17 +93,6 @@ const LogsQueryEditor = ({ } }, [basicLogsEnabled, onChange, query, showBasicLogsToggle]); - useEffect(() => { - const timeRange = calculateTimeRange(parseInt(from, 10), parseInt(to, 10)); - // Basic logs data retention is fixed at 8 days - // need to add this check to make user aware of this limitation in case they have selected a longer time range - if (showBasicLogsToggle && query.azureLogAnalytics?.basicLogsQuery && timeRange > MAX_DATA_RETENTION_DAYS) { - setShowDataRetentionWarning(true); - } else { - setShowDataRetentionWarning(false); - } - }, [query.azureLogAnalytics?.basicLogsQuery, 
showBasicLogsToggle, from, to]); - useEffect(() => { const getBasicLogsUsage = async (query: AzureMonitorQuery) => { try { @@ -247,13 +233,6 @@ const LogsQueryEditor = ({ - {showDataRetentionWarning && ( - - - Data retention for Basic Logs is fixed at eight days. You will only see data within this timeframe. - - - )} ); }; diff --git a/public/app/plugins/datasource/azuremonitor/package.json b/public/app/plugins/datasource/azuremonitor/package.json index 1c09cb36d0954..d83290b3f4e21 100644 --- a/public/app/plugins/datasource/azuremonitor/package.json +++ b/public/app/plugins/datasource/azuremonitor/package.json @@ -2,14 +2,14 @@ "name": "@grafana-plugins/grafana-azure-monitor-datasource", "description": "Grafana data source for Azure Monitor", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", + "@grafana/data": "11.1.8", "@grafana/experimental": "1.7.11", - "@grafana/runtime": "11.1.5", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "@kusto/monaco-kusto": "^10.0.0", "fast-deep-equal": "^3.1.3", "i18next": "^23.0.0", @@ -25,8 +25,8 @@ "tslib": "2.6.3" }, "devDependencies": { - "@grafana/e2e-selectors": "11.1.5", - "@grafana/plugin-configs": "11.1.5", + "@grafana/e2e-selectors": "11.1.8", + "@grafana/plugin-configs": "11.1.8", "@testing-library/dom": "10.0.0", "@testing-library/react": "15.0.2", "@testing-library/user-event": "14.5.2", diff --git a/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.test.ts b/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.test.ts index de358322d4333..85cdc304c3723 100644 --- a/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.test.ts +++ b/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.test.ts @@ -225,14 +225,14 @@ describe('AzureMonitor 
resourcePickerData', () => { it('makes 1 call to ARG with the correct path and query arguments', async () => { const mockResponse = createARGResourcesResponse(); const { resourcePickerData, postResource } = createResourcePickerData([mockResponse]); - await resourcePickerData.getResourcesForResourceGroup('dev', 'logs'); + await resourcePickerData.getResourcesForResourceGroup('/subscription/sub1/resourceGroups/dev', 'logs'); expect(postResource).toBeCalledTimes(1); const firstCall = postResource.mock.calls[0]; const [path, postBody] = firstCall; expect(path).toEqual('resourcegraph/providers/Microsoft.ResourceGraph/resources?api-version=2021-03-01'); expect(postBody.query).toContain('resources'); - expect(postBody.query).toContain('where id hasprefix "dev"'); + expect(postBody.query).toContain('where id hasprefix "/subscription/sub1/resourceGroups/dev/"'); }); it('returns formatted resources', async () => { @@ -312,9 +312,33 @@ describe('AzureMonitor resourcePickerData', () => { }, ], }; - const { resourcePickerData, postResource } = createResourcePickerData([mockSubscriptionsResponse, mockResponse]); + const { resourcePickerData, postResource, mockDatasource } = createResourcePickerData([ + mockSubscriptionsResponse, + mockResponse, + ]); const formattedResults = await resourcePickerData.search('vmname', 'metrics'); - expect(postResource).toBeCalledTimes(2); + expect(postResource).toHaveBeenCalledTimes(2); + expect(mockDatasource.azureMonitorDatasource.getMetricNamespaces).toHaveBeenCalledWith( + { + resourceUri: '/subscriptions/1', + }, + false, + 'westeurope' + ); + expect(mockDatasource.azureMonitorDatasource.getMetricNamespaces).toHaveBeenCalledWith( + { + resourceUri: '/subscriptions/1', + }, + false, + 'eastus' + ); + expect(mockDatasource.azureMonitorDatasource.getMetricNamespaces).toHaveBeenCalledWith( + { + resourceUri: '/subscriptions/1', + }, + false, + 'japaneast' + ); const secondCall = postResource.mock.calls[1]; const [_, postBody] = secondCall; 
expect(postBody.query).not.toContain('union resourcecontainers'); diff --git a/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.ts b/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.ts index 92c051c97c963..80282cb71a79c 100644 --- a/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.ts +++ b/public/app/plugins/datasource/azuremonitor/resourcePicker/resourcePickerData.ts @@ -1,5 +1,3 @@ -import { uniq } from 'lodash'; - import { DataSourceInstanceSettings } from '@grafana/data'; import { DataSourceWithBackend, reportInteraction } from '@grafana/runtime'; @@ -93,7 +91,7 @@ export default class ResourcePickerData extends DataSourceWithBackend { + // We can use subscription ID for the filtering here as they're unique const query = ` resources | join kind=inner ( @@ -232,12 +231,15 @@ export default class ResourcePickerData extends DataSourceWithBackend { + // We use resource group URI for the filtering here because resource group names are not unique across subscriptions + // We also add a slash at the end of the resource group URI to ensure we do not pull resources from a resource group + // that has a similar naming prefix e.g. 
resourceGroup1 and resourceGroup10 const { data: response } = await this.makeResourceGraphRequest(` resources - | where id hasprefix "${resourceGroupId}" + | where id hasprefix "${resourceGroupUri}/" ${await this.filterByType(type)} `); @@ -359,28 +361,40 @@ export default class ResourcePickerData extends DataSourceWithBackend = new Set(); + // We make use of these three regions as they *should* contain every possible namespace + const regions = ['westeurope', 'eastus', 'japaneast']; + const getNamespacesForRegion = async (region: string) => { const namespaces = await this.azureMonitorDatasource.getMetricNamespaces( { - resourceUri: `/subscriptions/${subscription.id}`, + // We only need to run this request against the first available subscription + resourceUri: `/subscriptions/${subscriptions[0].id}`, }, - true + false, + region ); if (namespaces) { - const namespaceVals = namespaces.map((namespace) => `"${namespace.value.toLocaleLowerCase()}"`); - supportedMetricNamespaces = supportedMetricNamespaces.concat(namespaceVals); + for (const namespace of namespaces) { + supportedMetricNamespaces.add(`"${namespace.value.toLocaleLowerCase()}"`); + } } - } + }; - if (supportedMetricNamespaces.length === 0) { + const promises = regions.map((region) => getNamespacesForRegion(region)); + await Promise.all(promises); + + if (supportedMetricNamespaces.size === 0) { throw new Error( 'Unable to resolve a list of valid metric namespaces. Validate the datasource configuration is correct and required permissions have been granted for all subscriptions. Grafana requires at least the Reader role to be assigned.' 
); } - this.supportedMetricNamespaces = uniq( - supportedMetricNamespaces.concat(resourceTypes.map((namespace) => `"${namespace}"`)) - ).join(','); + + resourceTypes.forEach((namespace) => { + supportedMetricNamespaces.add(`"${namespace}"`); + }); + + this.supportedMetricNamespaces = Array.from(supportedMetricNamespaces).join(','); } parseRows(resources: Array): ResourceRow[] { diff --git a/public/app/plugins/datasource/cloud-monitoring/package.json b/public/app/plugins/datasource/cloud-monitoring/package.json index b91d1f07481b9..106c9caf76cf3 100644 --- a/public/app/plugins/datasource/cloud-monitoring/package.json +++ b/public/app/plugins/datasource/cloud-monitoring/package.json @@ -2,15 +2,15 @@ "name": "@grafana-plugins/stackdriver", "description": "Grafana data source for Google Cloud Monitoring", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", + "@grafana/data": "11.1.8", "@grafana/experimental": "1.7.11", "@grafana/google-sdk": "0.1.2", - "@grafana/runtime": "11.1.5", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "debounce-promise": "3.1.2", "fast-deep-equal": "^3.1.3", "i18next": "^23.0.0", @@ -26,8 +26,8 @@ "tslib": "2.6.3" }, "devDependencies": { - "@grafana/e2e-selectors": "11.1.5", - "@grafana/plugin-configs": "11.1.5", + "@grafana/e2e-selectors": "11.1.8", + "@grafana/plugin-configs": "11.1.8", "@testing-library/dom": "10.0.0", "@testing-library/react": "15.0.2", "@testing-library/user-event": "14.5.2", diff --git a/public/app/plugins/datasource/grafana-postgresql-datasource/package.json b/public/app/plugins/datasource/grafana-postgresql-datasource/package.json index 32068c606f74f..88d9dd331c86f 100644 --- a/public/app/plugins/datasource/grafana-postgresql-datasource/package.json +++ b/public/app/plugins/datasource/grafana-postgresql-datasource/package.json @@ 
-2,22 +2,22 @@ "name": "@grafana-plugins/grafana-postgresql-datasource", "description": "PostgreSQL data source plugin", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", + "@grafana/data": "11.1.8", "@grafana/experimental": "1.7.11", - "@grafana/runtime": "11.1.5", - "@grafana/sql": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/sql": "11.1.8", + "@grafana/ui": "11.1.8", "lodash": "4.17.21", "react": "18.2.0", "rxjs": "7.8.1", "tslib": "2.6.3" }, "devDependencies": { - "@grafana/e2e-selectors": "11.1.5", - "@grafana/plugin-configs": "11.1.5", + "@grafana/e2e-selectors": "11.1.8", + "@grafana/plugin-configs": "11.1.8", "@testing-library/react": "15.0.2", "@testing-library/user-event": "14.5.2", "@types/jest": "29.5.12", diff --git a/public/app/plugins/datasource/grafana-pyroscope-datasource/package.json b/public/app/plugins/datasource/grafana-pyroscope-datasource/package.json index e6e2b593cb2cd..b8d9443b9f468 100644 --- a/public/app/plugins/datasource/grafana-pyroscope-datasource/package.json +++ b/public/app/plugins/datasource/grafana-pyroscope-datasource/package.json @@ -2,13 +2,13 @@ "name": "@grafana-plugins/grafana-pyroscope-datasource", "description": "Continuous profiling for analysis of CPU and memory usage, down to the line number and throughout time. 
Saving infrastructure cost, improving performance, and increasing reliability.", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", - "@grafana/runtime": "11.1.5", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/data": "11.1.8", + "@grafana/runtime": "11.1.8", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "fast-deep-equal": "^3.1.3", "lodash": "4.17.21", "monaco-editor": "0.34.1", @@ -20,7 +20,7 @@ "tslib": "2.6.3" }, "devDependencies": { - "@grafana/plugin-configs": "11.1.5", + "@grafana/plugin-configs": "11.1.8", "@testing-library/dom": "10.0.0", "@testing-library/jest-dom": "6.4.2", "@testing-library/react": "15.0.2", diff --git a/public/app/plugins/datasource/grafana-testdata-datasource/package.json b/public/app/plugins/datasource/grafana-testdata-datasource/package.json index 387eb6cffeb90..aa8afe46c1e18 100644 --- a/public/app/plugins/datasource/grafana-testdata-datasource/package.json +++ b/public/app/plugins/datasource/grafana-testdata-datasource/package.json @@ -2,14 +2,14 @@ "name": "@grafana-plugins/grafana-testdata-datasource", "description": "Generates test data in different forms", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", + "@grafana/data": "11.1.8", "@grafana/experimental": "1.7.11", - "@grafana/runtime": "11.1.5", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "d3-random": "^3.0.1", "lodash": "4.17.21", "micro-memoize": "^4.1.2", @@ -22,8 +22,8 @@ "uuid": "9.0.1" }, "devDependencies": { - "@grafana/e2e-selectors": "11.1.5", - "@grafana/plugin-configs": "11.1.5", + "@grafana/e2e-selectors": "11.1.8", + "@grafana/plugin-configs": "11.1.8", "@testing-library/dom": "10.0.0", "@testing-library/react": "15.0.2", "@testing-library/user-event": 
"14.5.2", diff --git a/public/app/plugins/datasource/jaeger/package.json b/public/app/plugins/datasource/jaeger/package.json index d66f3bb0044f2..df81a354c05c3 100644 --- a/public/app/plugins/datasource/jaeger/package.json +++ b/public/app/plugins/datasource/jaeger/package.json @@ -2,7 +2,7 @@ "name": "@grafana-plugins/jaeger", "description": "Jaeger plugin for Grafana", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", "@grafana/data": "workspace:*", diff --git a/public/app/plugins/datasource/loki/LanguageProvider.test.ts b/public/app/plugins/datasource/loki/LanguageProvider.test.ts index 5a63f43c2e2c3..9371a451e3762 100644 --- a/public/app/plugins/datasource/loki/LanguageProvider.test.ts +++ b/public/app/plugins/datasource/loki/LanguageProvider.test.ts @@ -268,6 +268,24 @@ describe('Language completion provider', () => { end: expect.any(Number), }); }); + + it('should use a single promise to resolve values', async () => { + const datasource = setup({ testkey: ['label1_val1', 'label1_val2'], label2: [] }); + const provider = await getLanguageProvider(datasource); + const requestSpy = jest.spyOn(provider, 'request'); + const promise1 = provider.fetchLabelValues('testkey'); + const promise2 = provider.fetchLabelValues('testkey'); + const promise3 = provider.fetchLabelValues('testkeyNOPE'); + expect(requestSpy).toHaveBeenCalledTimes(2); + + const values1 = await promise1; + const values2 = await promise2; + const values3 = await promise3; + + expect(values1).toStrictEqual(values2); + expect(values2).not.toStrictEqual(values3); + expect(requestSpy).toHaveBeenCalledTimes(2); + }); }); describe('fetchLabels', () => { diff --git a/public/app/plugins/datasource/loki/LanguageProvider.ts b/public/app/plugins/datasource/loki/LanguageProvider.ts index b833308422db2..20f79dfa3757d 100644 --- a/public/app/plugins/datasource/loki/LanguageProvider.ts +++ b/public/app/plugins/datasource/loki/LanguageProvider.ts @@ -31,6 
+31,7 @@ export default class LokiLanguageProvider extends LanguageProvider { */ private seriesCache = new LRUCache>({ max: 10 }); private labelsCache = new LRUCache({ max: 10 }); + private labelsPromisesCache = new LRUCache>({ max: 10 }); constructor(datasource: LokiDatasource, initialValues?: any) { super(); @@ -272,18 +273,34 @@ export default class LokiLanguageProvider extends LanguageProvider { const cacheKey = this.generateCacheKey(url, start, end, paramCacheKey); - let labelValues = this.labelsCache.get(cacheKey); - if (!labelValues) { - // Clear value when requesting new one. Empty object being truthy also makes sure we don't request twice. - this.labelsCache.set(cacheKey, []); - const res = await this.request(url, params); - if (Array.isArray(res)) { - labelValues = res.slice().sort(); - this.labelsCache.set(cacheKey, labelValues); - } + // Values in cache, return + const labelValues = this.labelsCache.get(cacheKey); + if (labelValues) { + return labelValues; + } + + // Promise in cache, return + let labelValuesPromise = this.labelsPromisesCache.get(cacheKey); + if (labelValuesPromise) { + return labelValuesPromise; } - return labelValues ?? 
[]; + labelValuesPromise = new Promise(async (resolve) => { + try { + const data = await this.request(url, params); + if (Array.isArray(data)) { + const labelValues = data.slice().sort(); + this.labelsCache.set(cacheKey, labelValues); + this.labelsPromisesCache.delete(cacheKey); + resolve(labelValues); + } + } catch (error) { + console.error(error); + resolve([]); + } + }); + this.labelsPromisesCache.set(cacheKey, labelValuesPromise); + return labelValuesPromise; } /** diff --git a/public/app/plugins/datasource/mysql/package.json b/public/app/plugins/datasource/mysql/package.json index 8d8dcfa9d7c1e..3175c8dce2b45 100644 --- a/public/app/plugins/datasource/mysql/package.json +++ b/public/app/plugins/datasource/mysql/package.json @@ -2,22 +2,22 @@ "name": "@grafana-plugins/mysql", "description": "MySQL data source plugin", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", + "@grafana/data": "11.1.8", "@grafana/experimental": "1.7.11", - "@grafana/runtime": "11.1.5", - "@grafana/sql": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/runtime": "11.1.8", + "@grafana/sql": "11.1.8", + "@grafana/ui": "11.1.8", "lodash": "4.17.21", "react": "18.2.0", "rxjs": "7.8.1", "tslib": "2.6.3" }, "devDependencies": { - "@grafana/e2e-selectors": "11.1.5", - "@grafana/plugin-configs": "11.1.5", + "@grafana/e2e-selectors": "11.1.8", + "@grafana/plugin-configs": "11.1.8", "@testing-library/react": "15.0.2", "@testing-library/user-event": "14.5.2", "@types/jest": "29.5.12", diff --git a/public/app/plugins/datasource/parca/package.json b/public/app/plugins/datasource/parca/package.json index d3417d5c23821..677b489013534 100644 --- a/public/app/plugins/datasource/parca/package.json +++ b/public/app/plugins/datasource/parca/package.json @@ -2,13 +2,13 @@ "name": "@grafana-plugins/parca", "description": "Continuous profiling for analysis of CPU and memory usage, down to the line number and throughout 
time. Saving infrastructure cost, improving performance, and increasing reliability.", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", - "@grafana/data": "11.1.5", - "@grafana/runtime": "11.1.5", - "@grafana/schema": "11.1.5", - "@grafana/ui": "11.1.5", + "@grafana/data": "11.1.8", + "@grafana/runtime": "11.1.8", + "@grafana/schema": "11.1.8", + "@grafana/ui": "11.1.8", "lodash": "4.17.21", "monaco-editor": "0.34.1", "react": "18.2.0", @@ -18,7 +18,7 @@ "tslib": "2.6.3" }, "devDependencies": { - "@grafana/plugin-configs": "11.1.5", + "@grafana/plugin-configs": "11.1.8", "@testing-library/dom": "10.0.0", "@testing-library/react": "15.0.2", "@testing-library/user-event": "14.5.2", diff --git a/public/app/plugins/datasource/tempo/package.json b/public/app/plugins/datasource/tempo/package.json index 661a0b7182de2..8919db47093db 100644 --- a/public/app/plugins/datasource/tempo/package.json +++ b/public/app/plugins/datasource/tempo/package.json @@ -2,7 +2,7 @@ "name": "@grafana-plugins/tempo", "description": "Grafana plugin for the Tempo data source.", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", "@grafana/data": "workspace:*", @@ -39,7 +39,7 @@ "uuid": "9.0.1" }, "devDependencies": { - "@grafana/plugin-configs": "11.1.5", + "@grafana/plugin-configs": "11.1.8", "@testing-library/dom": "10.0.0", "@testing-library/jest-dom": "6.4.2", "@testing-library/react": "15.0.2", diff --git a/public/app/plugins/datasource/zipkin/package.json b/public/app/plugins/datasource/zipkin/package.json index 7f126522a61a3..66d49a9be69c6 100644 --- a/public/app/plugins/datasource/zipkin/package.json +++ b/public/app/plugins/datasource/zipkin/package.json @@ -2,7 +2,7 @@ "name": "@grafana-plugins/zipkin", "description": "Zipkin plugin for Grafana", "private": true, - "version": "11.1.5", + "version": "11.1.8", "dependencies": { "@emotion/css": "11.11.2", "@grafana/data": 
"workspace:*", diff --git a/public/app/plugins/panel/gettingstarted/components/TutorialCard.tsx b/public/app/plugins/panel/gettingstarted/components/TutorialCard.tsx index 029f8844a64a0..ff0d03ddecda9 100644 --- a/public/app/plugins/panel/gettingstarted/components/TutorialCard.tsx +++ b/public/app/plugins/panel/gettingstarted/components/TutorialCard.tsx @@ -36,7 +36,6 @@ export const TutorialCard = ({ card }: Props) => { }; const handleTutorialClick = (event: MouseEvent, card: TutorialCardType) => { - event.preventDefault(); const isSet = store.get(card.key); if (!isSet) { store.set(card.key, true); diff --git a/public/views/swagger.html b/public/views/swagger.html index 5b8fb35c99389..caf5c87a72d04 100644 --- a/public/views/swagger.html +++ b/public/views/swagger.html @@ -7,8 +7,8 @@ @@ -32,8 +32,10 @@ background: #fafafa; } - .topbar-wrapper img { + .swagger-ui .topbar a { content: url('public/img/grafana_icon.svg'); + height: 50px; + flex: 0; } @@ -43,17 +45,17 @@ @@ -105,4 +107,4 @@ }; - + \ No newline at end of file diff --git a/scripts/drone/events/release.star b/scripts/drone/events/release.star index 94af7b2335c44..1bf7680d91f01 100644 --- a/scripts/drone/events/release.star +++ b/scripts/drone/events/release.star @@ -112,22 +112,7 @@ def publish_artifacts_step(): "PRERELEASE_BUCKET": from_secret("prerelease_bucket"), }, "commands": [ - "./bin/build artifacts packages --tag $${DRONE_TAG} --src-bucket $${PRERELEASE_BUCKET}", - ], - "depends_on": ["compile-build-cmd"], - } - -def publish_static_assets_step(): - return { - "name": "publish-static-assets", - "image": images["publish"], - "environment": { - "GCP_KEY": from_secret(gcp_grafanauploads_base64), - "PRERELEASE_BUCKET": from_secret("prerelease_bucket"), - "STATIC_ASSET_EDITIONS": from_secret("static_asset_editions"), - }, - "commands": [ - "./bin/build artifacts static-assets --tag ${DRONE_TAG} --static-asset-editions=grafana-oss", + "./bin/build artifacts packages --artifacts-editions=oss --tag 
$${DRONE_TAG} --src-bucket $${PRERELEASE_BUCKET}", ], "depends_on": ["compile-build-cmd"], } @@ -163,9 +148,8 @@ def publish_artifacts_pipelines(mode): steps = [ compile_build_cmd(), publish_artifacts_step(), - publish_static_assets_step(), publish_storybook_step(), - release_pr_step(depends_on = ["publish-artifacts", "publish-static-assets"]), + release_pr_step(depends_on = ["publish-artifacts"]), ] return [ diff --git a/scripts/drone/pipelines/publish_images.star b/scripts/drone/pipelines/publish_images.star index 6fbcb7c475c86..a947d05d0b4ed 100644 --- a/scripts/drone/pipelines/publish_images.star +++ b/scripts/drone/pipelines/publish_images.star @@ -31,43 +31,44 @@ def publish_image_public_step(): """ command = """ bash -c ' + IMAGE_TAG=$(echo "$${TAG}" | sed -e "s/+/-/g") debug= if [[ -n $${DRY_RUN} ]]; then debug=echo; fi docker login -u $${DOCKER_USER} -p $${DOCKER_PASSWORD} # Push the grafana-image-tags images - $$debug docker push grafana/grafana-image-tags:$${TAG}-amd64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-arm64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-armv7 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 - $$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 + $$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 # Create the grafana manifests - $$debug docker manifest create grafana/grafana:${TAG} \ - grafana/grafana-image-tags:$${TAG}-amd64 \ - grafana/grafana-image-tags:$${TAG}-arm64 \ - grafana/grafana-image-tags:$${TAG}-armv7 + $$debug 
docker manifest create grafana/grafana:$${IMAGE_TAG} \ + grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 - $$debug docker manifest create grafana/grafana:${TAG}-ubuntu \ - grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 \ - grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 \ - grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + $$debug docker manifest create grafana/grafana:$${IMAGE_TAG}-ubuntu \ + grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 # Push the grafana manifests - $$debug docker manifest push grafana/grafana:$${TAG} - $$debug docker manifest push grafana/grafana:$${TAG}-ubuntu + $$debug docker manifest push grafana/grafana:$${IMAGE_TAG} + $$debug docker manifest push grafana/grafana:$${IMAGE_TAG}-ubuntu # if LATEST is set, then also create & push latest if [[ -n $${LATEST} ]]; then $$debug docker manifest create grafana/grafana:latest \ - grafana/grafana-image-tags:$${TAG}-amd64 \ - grafana/grafana-image-tags:$${TAG}-arm64 \ - grafana/grafana-image-tags:$${TAG}-armv7 + grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-armv7 $$debug docker manifest create grafana/grafana:latest-ubuntu \ - grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 \ - grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 \ - grafana/grafana-image-tags:$${TAG}-ubuntu-armv7 + grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 \ + grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7 $$debug docker manifest push grafana/grafana:latest $$debug docker manifest push grafana/grafana:latest-ubuntu diff --git a/scripts/drone/pipelines/shellcheck.star b/scripts/drone/pipelines/shellcheck.star index 7f56c553976f2..4b00a9314d9b7 
100644 --- a/scripts/drone/pipelines/shellcheck.star +++ b/scripts/drone/pipelines/shellcheck.star @@ -2,7 +2,6 @@ This module returns a Drone step and pipeline for linting with shellcheck. """ -load("scripts/drone/steps/lib.star", "compile_build_cmd") load( "scripts/drone/utils/images.star", "images", @@ -39,7 +38,6 @@ def shellcheck_step(): def shellcheck_pipeline(): environment = {"EDITION": "oss"} steps = [ - compile_build_cmd(), shellcheck_step(), ] return pipeline( diff --git a/scripts/drone/pipelines/verify_starlark.star b/scripts/drone/pipelines/verify_starlark.star index 9e058893d7bec..e939cd598ef20 100644 --- a/scripts/drone/pipelines/verify_starlark.star +++ b/scripts/drone/pipelines/verify_starlark.star @@ -4,7 +4,6 @@ This module returns a Drone pipeline that verifies all Starlark files are linted load( "scripts/drone/steps/lib.star", - "compile_build_cmd", "identify_runner_step", "lint_starlark_step", ) @@ -17,7 +16,6 @@ def verify_starlark(trigger, ver_mode): environment = {"EDITION": "oss"} steps = [ identify_runner_step(), - compile_build_cmd(), lint_starlark_step(), ] return pipeline( diff --git a/scripts/drone/rgm.star b/scripts/drone/rgm.star index 264924d1b6699..7efbb35d88a32 100644 --- a/scripts/drone/rgm.star +++ b/scripts/drone/rgm.star @@ -226,7 +226,6 @@ def rgm_tag(): name = "rgm-tag-prerelease", trigger = tag_trigger, steps = rgm_run("rgm-build", "drone_build_tag_grafana.sh"), - depends_on = ["release-test-backend", "release-test-frontend"], ) def rgm_tag_windows(): @@ -247,7 +246,6 @@ def rgm_version_branch(): name = "rgm-version-branch-prerelease", trigger = version_branch_trigger, steps = rgm_run("rgm-build", "drone_build_tag_grafana.sh"), - depends_on = ["release-test-backend", "release-test-frontend"], ) def rgm_nightly_build(): @@ -303,8 +301,6 @@ def rgm_nightly_pipeline(): def rgm_tag_pipeline(): return [ whats_new_checker_pipeline(tag_trigger), - test_frontend(tag_trigger, "release"), - test_backend(tag_trigger, "release"), 
rgm_tag(), rgm_tag_windows(), verify_release_pipeline( diff --git a/scripts/drone/steps/lib.star b/scripts/drone/steps/lib.star index 6fac057e7f3a4..d9c49bb184ff8 100644 --- a/scripts/drone/steps/lib.star +++ b/scripts/drone/steps/lib.star @@ -156,9 +156,7 @@ def lint_starlark_step(): "go install github.com/bazelbuild/buildtools/buildifier@latest", "buildifier --lint=warn -mode=check -r .", ], - "depends_on": [ - "compile-build-cmd", - ], + "depends_on": [], } def enterprise_downstream_step(ver_mode): @@ -1202,7 +1200,8 @@ def publish_linux_packages_step(package_manager = "deb"): }, } -def retry_command(command, attempts = 5, delay = 60): +# This retry will currently continue for 30 minutes until fail, unless successful. +def retry_command(command, attempts = 60, delay = 30): return [ "for i in $(seq 1 %d); do" % attempts, " if %s; then" % command, @@ -1221,13 +1220,14 @@ def retry_command(command, attempts = 5, delay = 60): ] def verify_linux_DEB_packages_step(depends_on = []): - install_command = "apt-get update >/dev/null 2>&1 && DEBIAN_FRONTEND=noninteractive apt-get install -yq grafana=${TAG} >/dev/null 2>&1" + install_command = "apt-get update >/dev/null 2>&1 && DEBIAN_FRONTEND=noninteractive apt-get install -yq grafana=$version >/dev/null 2>&1" return { "name": "verify-linux-DEB-packages", "image": images["ubuntu"], "environment": {}, "commands": [ + 'export version=$(echo ${TAG} | sed -e "s/+security-/-/g")', 'echo "Step 1: Updating package lists..."', "apt-get update >/dev/null 2>&1", 'echo "Step 2: Installing prerequisites..."', @@ -1239,12 +1239,12 @@ def verify_linux_DEB_packages_step(depends_on = []): 'echo "deb [signed-by=/etc/apt/keyrings/grafana.gpg] https://apt.grafana.com stable main" | tee -a /etc/apt/sources.list.d/grafana.list', 'echo "Step 5: Installing Grafana..."', # The packages take a bit of time to propogate within the repo. This retry will check their availability within 10 minutes. 
- ] + retry_command(install_command, attempts = 10) + [ + ] + retry_command(install_command) + [ 'echo "Step 6: Verifying Grafana installation..."', - 'if dpkg -s grafana | grep -q "Version: ${TAG}"; then', - ' echo "Successfully verified Grafana version ${TAG}"', + 'if dpkg -s grafana | grep -q "Version: $version"; then', + ' echo "Successfully verified Grafana version $version"', "else", - ' echo "Failed to verify Grafana version ${TAG}"', + ' echo "Failed to verify Grafana version $version"', " exit 1", "fi", 'echo "Verification complete."', @@ -1265,7 +1265,7 @@ def verify_linux_RPM_packages_step(depends_on = []): "sslcacert=/etc/pki/tls/certs/ca-bundle.crt\n" ) - repo_install_command = "dnf install -y --nogpgcheck grafana-${TAG} >/dev/null 2>&1" + install_command = "dnf install -y --nogpgcheck grafana-$version >/dev/null 2>&1" return { "name": "verify-linux-RPM-packages", @@ -1281,24 +1281,25 @@ def verify_linux_RPM_packages_step(depends_on = []): 'echo "Step 4: Configuring Grafana repository..."', "echo -e '" + repo_config + "' > /etc/yum.repos.d/grafana.repo", 'echo "Step 5: Checking RPM repository..."', - "dnf list available grafana-${TAG}", + 'export version=$(echo "${TAG}" | sed -e "s/+security-/^security_/g")', + "dnf list available grafana-$version", "if [ $? -eq 0 ]; then", ' echo "Grafana package found in repository. 
Installing from repo..."', - ] + retry_command(repo_install_command, attempts = 5) + [ + ] + retry_command(install_command) + [ ' echo "Verifying GPG key..."', " rpm --import https://rpm.grafana.com/gpg.key", " rpm -qa gpg-pubkey* | xargs rpm -qi | grep -i grafana", "else", - ' echo "Grafana package version ${TAG} not found in repository."', + ' echo "Grafana package version $version not found in repository."', " dnf repolist", " dnf list available grafana*", " exit 1", "fi", 'echo "Step 6: Verifying Grafana installation..."', - 'if rpm -q grafana | grep -q "${TAG}"; then', - ' echo "Successfully verified Grafana version ${TAG}"', + 'if rpm -q grafana | grep -q "$version"; then', + ' echo "Successfully verified Grafana version $version"', "else", - ' echo "Failed to verify Grafana version ${TAG}"', + ' echo "Failed to verify Grafana version $version"', " exit 1", "fi", 'echo "Verification complete."', diff --git a/scripts/drone/utils/images.star b/scripts/drone/utils/images.star index 7d39cbdea2c64..8ee8f14f667d4 100644 --- a/scripts/drone/utils/images.star +++ b/scripts/drone/utils/images.star @@ -16,7 +16,7 @@ images = { "node_deb": "node:{}-bookworm".format(nodejs_version[:2]), "cloudsdk": "google/cloud-sdk:431.0.0", "publish": "grafana/grafana-ci-deploy:1.3.3", - "alpine": "alpine:3.19.1", + "alpine": "alpine:3.20.3", "ubuntu": "ubuntu:22.04", "curl": "byrnedo/alpine-curl:0.1.8", "plugins_slack": "plugins/slack",
nodejs_version = "20.9.0" diff --git a/scripts/drone/vault.star b/scripts/drone/vault.star index 7c90b89ca5dca..a15a17f50600b 100644 --- a/scripts/drone/vault.star +++ b/scripts/drone/vault.star @@ -46,7 +46,7 @@ def secrets(): vault_secret("grafana_api_key", "infra/data/ci/grafana-release-eng/grafanacom", "api_key"), vault_secret(gcr_pull_secret, "secret/data/common/gcr", ".dockerconfigjson"), vault_secret(gar_pull_secret, "secret/data/common/gar", ".dockerconfigjson"), - vault_secret("github_token", "infra/data/ci/github/grafanabot", "pat"), + vault_secret("github_token", "ci/data/repo/grafana/grafana/grafanabot", "pat"), vault_secret(drone_token, "infra/data/ci/drone", "machine-user-token"), vault_secret(prerelease_bucket, "infra/data/ci/grafana/prerelease", "bucket"), vault_secret(docker_username, "infra/data/ci/grafanaci-docker-hub", "username"), diff --git a/yarn.lock b/yarn.lock index 3959e9d406d9b..7533593c62354 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2645,13 +2645,13 @@ __metadata: resolution: "@grafana-plugins/grafana-azure-monitor-datasource@workspace:public/app/plugins/datasource/azuremonitor" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" - "@grafana/plugin-configs": "npm:11.1.5" - "@grafana/runtime": "npm:11.1.5" - "@grafana/schema": "npm:11.1.5" - "@grafana/ui": "npm:11.1.5" + "@grafana/plugin-configs": "npm:11.1.8" + "@grafana/runtime": "npm:11.1.8" + "@grafana/schema": "npm:11.1.8" + "@grafana/ui": "npm:11.1.8" "@kusto/monaco-kusto": "npm:^10.0.0" "@testing-library/dom": "npm:10.0.0" "@testing-library/react": "npm:15.0.2" @@ -2689,13 +2689,13 @@ __metadata: resolution: "@grafana-plugins/grafana-postgresql-datasource@workspace:public/app/plugins/datasource/grafana-postgresql-datasource" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - 
"@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" - "@grafana/plugin-configs": "npm:11.1.5" - "@grafana/runtime": "npm:11.1.5" - "@grafana/sql": "npm:11.1.5" - "@grafana/ui": "npm:11.1.5" + "@grafana/plugin-configs": "npm:11.1.8" + "@grafana/runtime": "npm:11.1.8" + "@grafana/sql": "npm:11.1.8" + "@grafana/ui": "npm:11.1.8" "@testing-library/react": "npm:15.0.2" "@testing-library/user-event": "npm:14.5.2" "@types/jest": "npm:29.5.12" @@ -2720,11 +2720,11 @@ __metadata: resolution: "@grafana-plugins/grafana-pyroscope-datasource@workspace:public/app/plugins/datasource/grafana-pyroscope-datasource" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/plugin-configs": "npm:11.1.5" - "@grafana/runtime": "npm:11.1.5" - "@grafana/schema": "npm:11.1.5" - "@grafana/ui": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/plugin-configs": "npm:11.1.8" + "@grafana/runtime": "npm:11.1.8" + "@grafana/schema": "npm:11.1.8" + "@grafana/ui": "npm:11.1.8" "@testing-library/dom": "npm:10.0.0" "@testing-library/jest-dom": "npm:6.4.2" "@testing-library/react": "npm:15.0.2" @@ -2761,13 +2761,13 @@ __metadata: resolution: "@grafana-plugins/grafana-testdata-datasource@workspace:public/app/plugins/datasource/grafana-testdata-datasource" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" - "@grafana/plugin-configs": "npm:11.1.5" - "@grafana/runtime": "npm:11.1.5" - "@grafana/schema": "npm:11.1.5" - "@grafana/ui": "npm:11.1.5" + "@grafana/plugin-configs": "npm:11.1.8" + "@grafana/runtime": "npm:11.1.8" + "@grafana/schema": "npm:11.1.8" + "@grafana/ui": "npm:11.1.8" "@testing-library/dom": "npm:10.0.0" "@testing-library/react": "npm:15.0.2" "@testing-library/user-event": 
"npm:14.5.2" @@ -2844,13 +2844,13 @@ __metadata: resolution: "@grafana-plugins/mysql@workspace:public/app/plugins/datasource/mysql" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" - "@grafana/plugin-configs": "npm:11.1.5" - "@grafana/runtime": "npm:11.1.5" - "@grafana/sql": "npm:11.1.5" - "@grafana/ui": "npm:11.1.5" + "@grafana/plugin-configs": "npm:11.1.8" + "@grafana/runtime": "npm:11.1.8" + "@grafana/sql": "npm:11.1.8" + "@grafana/ui": "npm:11.1.8" "@testing-library/react": "npm:15.0.2" "@testing-library/user-event": "npm:14.5.2" "@types/jest": "npm:29.5.12" @@ -2875,11 +2875,11 @@ __metadata: resolution: "@grafana-plugins/parca@workspace:public/app/plugins/datasource/parca" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/plugin-configs": "npm:11.1.5" - "@grafana/runtime": "npm:11.1.5" - "@grafana/schema": "npm:11.1.5" - "@grafana/ui": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/plugin-configs": "npm:11.1.8" + "@grafana/runtime": "npm:11.1.8" + "@grafana/schema": "npm:11.1.8" + "@grafana/ui": "npm:11.1.8" "@testing-library/dom": "npm:10.0.0" "@testing-library/react": "npm:15.0.2" "@testing-library/user-event": "npm:14.5.2" @@ -2907,14 +2907,14 @@ __metadata: resolution: "@grafana-plugins/stackdriver@workspace:public/app/plugins/datasource/cloud-monitoring" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" "@grafana/google-sdk": "npm:0.1.2" - "@grafana/plugin-configs": "npm:11.1.5" - "@grafana/runtime": "npm:11.1.5" - "@grafana/schema": "npm:11.1.5" - "@grafana/ui": "npm:11.1.5" + "@grafana/plugin-configs": "npm:11.1.8" + "@grafana/runtime": "npm:11.1.8" + 
"@grafana/schema": "npm:11.1.8" + "@grafana/ui": "npm:11.1.8" "@testing-library/dom": "npm:10.0.0" "@testing-library/react": "npm:15.0.2" "@testing-library/user-event": "npm:14.5.2" @@ -2962,7 +2962,7 @@ __metadata: "@grafana/lezer-traceql": "npm:0.0.17" "@grafana/monaco-logql": "npm:^0.0.7" "@grafana/o11y-ds-frontend": "workspace:*" - "@grafana/plugin-configs": "npm:11.1.5" + "@grafana/plugin-configs": "npm:11.1.8" "@grafana/runtime": "workspace:*" "@grafana/schema": "workspace:*" "@grafana/ui": "workspace:*" @@ -3072,12 +3072,12 @@ __metadata: languageName: node linkType: hard -"@grafana/data@npm:11.1.5, @grafana/data@workspace:*, @grafana/data@workspace:packages/grafana-data": +"@grafana/data@npm:11.1.8, @grafana/data@workspace:*, @grafana/data@workspace:packages/grafana-data": version: 0.0.0-use.local resolution: "@grafana/data@workspace:packages/grafana-data" dependencies: "@braintree/sanitize-url": "npm:7.0.1" - "@grafana/schema": "npm:11.1.5" + "@grafana/schema": "npm:11.1.8" "@grafana/tsconfig": "npm:^1.3.0-rc1" "@rollup/plugin-node-resolve": "npm:15.2.3" "@types/d3-interpolate": "npm:^3.0.0" @@ -3125,7 +3125,7 @@ __metadata: languageName: unknown linkType: soft -"@grafana/e2e-selectors@npm:11.1.5, @grafana/e2e-selectors@npm:^11.0.0, @grafana/e2e-selectors@workspace:*, @grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors": +"@grafana/e2e-selectors@npm:11.1.8, @grafana/e2e-selectors@npm:^11.0.0, @grafana/e2e-selectors@workspace:*, @grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors": version: 0.0.0-use.local resolution: "@grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors" dependencies: @@ -3250,9 +3250,9 @@ __metadata: "@babel/preset-env": "npm:7.24.7" "@babel/preset-react": "npm:7.24.7" "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" "@grafana/tsconfig": "npm:^1.3.0-rc1" - "@grafana/ui": "npm:11.1.5" + "@grafana/ui": "npm:11.1.8" "@leeoniya/ufuzzy": "npm:1.0.14" 
"@rollup/plugin-node-resolve": "npm:15.2.3" "@testing-library/dom": "npm:10.0.0" @@ -3334,13 +3334,13 @@ __metadata: resolution: "@grafana/o11y-ds-frontend@workspace:packages/grafana-o11y-ds-frontend" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" - "@grafana/runtime": "npm:11.1.5" - "@grafana/schema": "npm:11.1.5" + "@grafana/runtime": "npm:11.1.8" + "@grafana/schema": "npm:11.1.8" "@grafana/tsconfig": "npm:^1.3.0-rc1" - "@grafana/ui": "npm:11.1.5" + "@grafana/ui": "npm:11.1.8" "@testing-library/dom": "npm:10.0.0" "@testing-library/jest-dom": "npm:^6.1.2" "@testing-library/react": "npm:15.0.2" @@ -3365,7 +3365,7 @@ __metadata: languageName: unknown linkType: soft -"@grafana/plugin-configs@npm:11.1.5, @grafana/plugin-configs@workspace:*, @grafana/plugin-configs@workspace:packages/grafana-plugin-configs": +"@grafana/plugin-configs@npm:11.1.8, @grafana/plugin-configs@workspace:*, @grafana/plugin-configs@workspace:packages/grafana-plugin-configs": version: 0.0.0-use.local resolution: "@grafana/plugin-configs@workspace:packages/grafana-plugin-configs" dependencies: @@ -3405,14 +3405,14 @@ __metadata: "@emotion/css": "npm:11.11.2" "@emotion/eslint-plugin": "npm:11.11.0" "@floating-ui/react": "npm:0.26.16" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" "@grafana/faro-web-sdk": "npm:1.7.3" - "@grafana/runtime": "npm:11.1.5" - "@grafana/schema": "npm:11.1.5" + "@grafana/runtime": "npm:11.1.8" + "@grafana/schema": "npm:11.1.8" "@grafana/tsconfig": "npm:^1.3.0-rc1" - "@grafana/ui": "npm:11.1.5" + "@grafana/ui": "npm:11.1.8" "@leeoniya/ufuzzy": "npm:1.0.14" "@lezer/common": "npm:1.2.1" "@lezer/highlight": "npm:1.2.0" @@ -3510,16 +3510,16 @@ __metadata: 
languageName: unknown linkType: soft -"@grafana/runtime@npm:11.1.5, @grafana/runtime@workspace:*, @grafana/runtime@workspace:packages/grafana-runtime": +"@grafana/runtime@npm:11.1.8, @grafana/runtime@workspace:*, @grafana/runtime@workspace:packages/grafana-runtime": version: 0.0.0-use.local resolution: "@grafana/runtime@workspace:packages/grafana-runtime" dependencies: - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/faro-web-sdk": "npm:^1.3.6" - "@grafana/schema": "npm:11.1.5" + "@grafana/schema": "npm:11.1.8" "@grafana/tsconfig": "npm:^1.3.0-rc1" - "@grafana/ui": "npm:11.1.5" + "@grafana/ui": "npm:11.1.8" "@rollup/plugin-node-resolve": "npm:15.2.3" "@rollup/plugin-terser": "npm:0.4.4" "@testing-library/dom": "npm:10.0.0" @@ -3608,7 +3608,7 @@ __metadata: languageName: node linkType: hard -"@grafana/schema@npm:11.1.5, @grafana/schema@workspace:*, @grafana/schema@workspace:packages/grafana-schema": +"@grafana/schema@npm:11.1.8, @grafana/schema@workspace:*, @grafana/schema@workspace:packages/grafana-schema": version: 0.0.0-use.local resolution: "@grafana/schema@workspace:packages/grafana-schema" dependencies: @@ -3626,17 +3626,17 @@ __metadata: languageName: unknown linkType: soft -"@grafana/sql@npm:11.1.5, @grafana/sql@workspace:*, @grafana/sql@workspace:packages/grafana-sql": +"@grafana/sql@npm:11.1.8, @grafana/sql@workspace:*, @grafana/sql@workspace:packages/grafana-sql": version: 0.0.0-use.local resolution: "@grafana/sql@workspace:packages/grafana-sql" dependencies: "@emotion/css": "npm:11.11.2" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/experimental": "npm:1.7.11" - "@grafana/runtime": "npm:11.1.5" + "@grafana/runtime": "npm:11.1.8" "@grafana/tsconfig": "npm:^1.3.0-rc1" - "@grafana/ui": "npm:11.1.5" + "@grafana/ui": "npm:11.1.8" 
"@react-awesome-query-builder/ui": "npm:6.5.2" "@testing-library/dom": "npm:10.0.0" "@testing-library/jest-dom": "npm:^6.1.2" @@ -3679,7 +3679,7 @@ __metadata: languageName: node linkType: hard -"@grafana/ui@npm:11.1.5, @grafana/ui@workspace:*, @grafana/ui@workspace:packages/grafana-ui": +"@grafana/ui@npm:11.1.8, @grafana/ui@workspace:*, @grafana/ui@workspace:packages/grafana-ui": version: 0.0.0-use.local resolution: "@grafana/ui@workspace:packages/grafana-ui" dependencies: @@ -3688,10 +3688,10 @@ __metadata: "@emotion/react": "npm:11.11.4" "@faker-js/faker": "npm:^8.4.1" "@floating-ui/react": "npm:0.26.16" - "@grafana/data": "npm:11.1.5" - "@grafana/e2e-selectors": "npm:11.1.5" + "@grafana/data": "npm:11.1.8" + "@grafana/e2e-selectors": "npm:11.1.8" "@grafana/faro-web-sdk": "npm:^1.3.6" - "@grafana/schema": "npm:11.1.5" + "@grafana/schema": "npm:11.1.8" "@grafana/tsconfig": "npm:^1.3.0-rc1" "@leeoniya/ufuzzy": "npm:1.0.14" "@monaco-editor/react": "npm:4.6.0"