diff --git a/.circleci/config.yml b/.circleci/config.yml index 0cd47bfa6d1d..1d74e6e64316 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -69,11 +69,11 @@ jobs: - run: name: cache server tests command: './scripts/circle-test-cache-servers.sh' - + end-to-end-test: docker: - image: circleci/node:8-browsers - - image: grafana/grafana:master + - image: grafana/grafana-dev:master-$CIRCLE_SHA1 steps: - run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s - checkout @@ -91,6 +91,12 @@ jobs: name: run end-to-end tests command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests' no_output_timeout: 5m + - store_artifacts: + path: public/e2e-test/screenShots/theTruth + destination: expected-screenshots + - store_artifacts: + path: public/e2e-test/screenShots/theOutput + destination: output-screenshots codespell: docker: @@ -103,7 +109,7 @@ jobs: - run: # Important: all words have to be in lowercase, and separated by "\n". name: exclude known exceptions - command: 'echo -e "unknwon" > words_to_ignore.txt' + command: 'echo -e "unknwon\nreferer\nerrorstring" > words_to_ignore.txt' - run: name: check documentation spelling errors command: 'codespell -I ./words_to_ignore.txt docs/' @@ -588,6 +594,7 @@ jobs: root: . 
paths: - dist/grafana-*.msi + - dist/grafana-*.msi.sha256 store-build-artifacts: docker: @@ -628,7 +635,7 @@ workflows: - mysql-integration-test - postgres-integration-test - build-oss-msi - filters: *filter-only-master + filters: *filter-only-master - grafana-docker-master: requires: - build-all @@ -661,7 +668,10 @@ workflows: - mysql-integration-test - postgres-integration-test filters: *filter-only-master - + - end-to-end-test: + requires: + - grafana-docker-master + filters: *filter-only-master release: jobs: - build-all: @@ -722,7 +732,7 @@ workflows: - backend-lint - mysql-integration-test - postgres-integration-test - filters: *filter-only-master + filters: *filter-only-release build-branches-and-prs: jobs: diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e8da7947b7b..64d8d41d5f5b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,33 @@ -# 6.2.0 (unreleased) +# 6.3.0 (unreleased) + +# 6.2.1 (2019-05-27) + +### Features / Enhancements + * **CLI**: Add command to migrate all datasources to use encrypted password fields . [#17118](https://github.com/grafana/grafana/pull/17118), [@aocenas](https://github.com/aocenas) + * **Gauge/BarGauge**: Improvements to auto value font size . [#17292](https://github.com/grafana/grafana/pull/17292), [@torkelo](https://github.com/torkelo) + +### Bug Fixes + * **Auth Proxy**: Resolve database is locked errors. [#17274](https://github.com/grafana/grafana/pull/17274), [@marefr](https://github.com/marefr) + * **Database**: Retry transaction if sqlite returns database is locked error. [#17276](https://github.com/grafana/grafana/pull/17276), [@marefr](https://github.com/marefr) + * **Explore**: Fixes so clicking in a Prometheus Table the query is filtered by clicked value. 
[#17083](https://github.com/grafana/grafana/pull/17083), [@hugohaggmark](https://github.com/hugohaggmark) + * **Singlestat**: Fixes issue with value placement and line wraps. [#17249](https://github.com/grafana/grafana/pull/17249), [@torkelo](https://github.com/torkelo) + * **Tech**: Update jQuery to 3.4.1 to fix issue on iOS 10 based browers as well as Chrome 53.x . [#17290](https://github.com/grafana/grafana/pull/17290), [@timbutler](https://github.com/timbutler) + +# 6.2.0 (2019-05-22) + +### Bug Fixes +* **BarGauge**: Fix for negative min values. [#17192](https://github.com/grafana/grafana/pull/17192), [@torkelo](https://github.com/torkelo) +* **Gauge/BarGauge**: Fix for issues editing min & max options. [#17174](https://github.com/grafana/grafana/pull/17174) +* **Search**: Make only folder name only open search with current folder filter. [#17226](https://github.com/grafana/grafana/pull/17226) +* **AzureMonitor**: Revert to clearing chained dropdowns. [#17212](https://github.com/grafana/grafana/pull/17212) + +### Breaking Changes +* **Plugins**: Data source plugins that process hidden queries need to add a "hiddenQueries: true" attribute in plugin.json. [#17124](https://github.com/grafana/grafana/pull/17124), [@ryantxu](https://github.com/ryantxu) + +### Removal of old deprecated package repository + +5 months ago we deprecated our old package cloud repository and [replaced it](https://grafana.com/blog/2019/01/05/moving-to-packages.grafana.com/) with our own. We will remove the old depreciated +repo on July 1st. Make sure you have switched to the new repo by then. The new repository has all our old releases so you are not required to upgrade just to switch package repository. # 6.2.0-beta2 (2019-05-15) @@ -56,6 +85,7 @@ * **Provisioning**: Add API endpoint to reload provisioning configs. 
[#16579](https://github.com/grafana/grafana/pull/16579), [@aocenas](https://github.com/aocenas) * **Provisioning**: Do not allow deletion of provisioned dashboards. [#16211](https://github.com/grafana/grafana/pull/16211), [@aocenas](https://github.com/aocenas) * **Provisioning**: Interpolate env vars in provisioning files. [#16499](https://github.com/grafana/grafana/pull/16499), [@aocenas](https://github.com/aocenas) + * **Provisioning**: Support FolderUid in Dashboard Provisioning Config. [#16559](https://github.com/grafana/grafana/pull/16559), [@swtch1](https://github.com/swtch1) * **Security**: Add new setting allow_embedding. [#16853](https://github.com/grafana/grafana/pull/16853), [@marefr](https://github.com/marefr) * **Security**: Store datasource passwords encrypted in secureJsonData. [#16175](https://github.com/grafana/grafana/pull/16175), [@aocenas](https://github.com/aocenas) * **UX**: Improve Grafana usage for smaller screens. [#16783](https://github.com/grafana/grafana/pull/16783), [@torkelo](https://github.com/torkelo) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 272e53787601..db02d4134948 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -31,6 +31,8 @@ To setup a local development environment we recommend reading [Building Grafana * Add tests relevant to the fixed bug or new feature. +* Follow [PR and commit messages guidelines](#PR-and-commit-messages-guidelines) + ### Pull requests with new features Commits should be as small as possible, while ensuring that each commit is correct independently (i.e., each commit should compile and pass tests). @@ -38,12 +40,12 @@ Make sure to include `Closes #` or `Fixes #` in the ### Pull requests with bug fixes Please make all changes in one commit if possible. Include `Closes #` in bottom of the commit message. -A commit message for a bug fix should look something like this. 
+A commit message for a bug fix should look something like this: ``` -avoid infinite loop in the dashboard provisioner +Dashboard: Avoid infinite loop in the dashboard provisioner -if one dashboard with an uid is refered to by two +If one dashboard with an uid is refered to by two provsioners each provisioner overwrite each other. filling up dashboard_versions quite fast if using default settings. @@ -51,6 +53,8 @@ default settings. Closes #12864 ``` +For more details about PR naming and commit messages please see [PR and commit messages guidelines](#PR-and-commit-messages-guidelines) + If the pull request needs changes before its merged the new commits should be rebased into one commit before its merged. ## Backend dependency management @@ -80,3 +84,22 @@ GO111MODULE=on go mod vendor ``` You have to commit the changes to `go.mod`, `go.sum` and the `vendor/` directory before submitting the pull request. + +## PR and commit messages guidelines +PR title and squash commit messages should follow guidelines below: + +``` +Area of changes: Message + +Detailed description +``` + +The `Area of changes` is related either to functional domain (i.e. Build, Release) or feature domain (i.e. Explore, Plugins, BarGaugePanel). + + +`Message` should be concise, written in present tense and start with capitalised verb. Detailed description should be provided as commit message body, by entering a blank line between commit title and the description. 
+ +### Examples of good PR titles/commit messages: +- `Explore: Adds Live option for supported datasources` +- `GraphPanel: Don't sort series when legend table & sort column is not visible` +- `Build: Support publishing MSI to grafana.com` diff --git a/Makefile b/Makefile index 8cfc7ce26812..1d71675dbaba 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ -include local/Makefile -.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive +.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive devenv devenv-down revive-alerting GO := GO111MODULE=on go GO_FILES := ./pkg/... @@ -84,6 +84,34 @@ revive: scripts/go/bin/revive -config ./scripts/go/configs/revive.toml \ $(GO_FILES) +revive-alerting: scripts/go/bin/revive + @scripts/go/bin/revive \ + -formatter stylish \ + ./pkg/services/alerting/... 
+ +# create docker-compose file with provided sources and start them +# example: make devenv sources=postgres,openldap +ifeq ($(sources),) +devenv: + @printf 'You have to define sources for this command \nexample: make devenv sources=postgres,openldap\n' +else +devenv: devenv-down + $(eval targets := $(shell echo '$(sources)' | tr "," " ")) + + @cd devenv; \ + ./create_docker_compose.sh $(targets) || \ + (rm -rf docker-compose.yaml; exit 1) + + @cd devenv; \ + docker-compose up -d --build +endif + +# drop down the envs +devenv-down: + @cd devenv; \ + test -f docker-compose.yaml && \ + docker-compose down || exit 0; + # TODO recheck the rules and leave only necessary exclusions gosec: scripts/go/bin/gosec @scripts/go/bin/gosec -quiet \ diff --git a/conf/defaults.ini b/conf/defaults.ini index ca49f1212698..ec83a5ea1a5a 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -47,6 +47,9 @@ enforce_domain = false # The full public facing url root_url = %(protocol)s://%(domain)s:%(http_port)s/ +# Serve Grafana from subpath specified in `root_url` setting. By default it is set to `false` for compatibility reasons. +serve_from_sub_path = false + # Log web requests router_logging = false diff --git a/conf/sample.ini b/conf/sample.ini index de684bc98f67..dc7d5bc54677 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -48,6 +48,9 @@ # If you use reverse proxy and sub path specify full url (with sub path) ;root_url = http://localhost:3000 +# Serve Grafana from subpath specified in `root_url` setting. By default it is set to `false` for compatibility reasons. 
+;serve_from_sub_path = false + # Log web requests ;router_logging = false diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index e0f63bef2999..33dde611bdcd 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -1,10 +1,11 @@ + apiVersion: 1 datasources: - name: gdev-graphite type: graphite access: proxy - url: http://localhost:8080 + url: http://localhost:8180 jsonData: graphiteVersion: "1.1" @@ -142,6 +143,17 @@ datasources: timeField: "@timestamp" esVersion: 70 + - name: gdev-elasticsearch-v7-metricbeat + type: elasticsearch + access: proxy + database: "[metricbeat-]YYYY.MM.DD" + url: http://localhost:12200 + jsonData: + interval: Daily + timeField: "@timestamp" + esVersion: 70 + timeInterval: "10s" + - name: gdev-mysql type: mysql url: localhost:3306 diff --git a/devenv/dev-dashboards/home.json b/devenv/dev-dashboards/home.json index 8608b0b82647..f4cdfed9d53e 100644 --- a/devenv/dev-dashboards/home.json +++ b/devenv/dev-dashboards/home.json @@ -48,39 +48,39 @@ "y": 0 }, "headings": false, - "id": 8, + "id": 2, "limit": 1000, "links": [], "query": "", "recent": false, "search": true, "starred": false, - "tags": ["panel-demo"], + "tags": ["panel-tests"], "timeFrom": null, "timeShift": null, - "title": "tag: panel-demo", + "title": "tag: panel-tests", "type": "dashlist" }, { "folderId": null, "gridPos": { - "h": 13, + "h": 26, "w": 6, "x": 12, "y": 0 }, "headings": false, - "id": 2, + "id": 3, "limit": 1000, "links": [], "query": "", "recent": false, "search": true, "starred": false, - "tags": ["panel-tests"], + "tags": ["gdev", "demo"], "timeFrom": null, "timeShift": null, - "title": "tag: panel-tests", + "title": "tag: dashboard-demo", "type": "dashlist" }, { @@ -114,28 +114,6 @@ "y": 13 }, "headings": false, - "id": 3, - "limit": 1000, - "links": [], - "query": "", - "recent": false, - "search": true, - "starred": false, - "tags": ["gdev", "demo"], - "timeFrom": null, - "timeShift": null, - "title": "tag: dashboard-demo", - 
"type": "dashlist" - }, - { - "folderId": null, - "gridPos": { - "h": 13, - "w": 6, - "x": 12, - "y": 13 - }, - "headings": false, "id": 4, "limit": 1000, "links": [], @@ -146,7 +124,7 @@ "tags": ["templating", "gdev"], "timeFrom": null, "timeShift": null, - "title": "tag: templating", + "title": "tag: templating ", "type": "dashlist" } ], @@ -167,5 +145,5 @@ "timezone": "", "title": "Grafana Dev Overview & Home", "uid": "j6T00KRZz", - "version": 1 + "version": 2 } diff --git a/devenv/dev-dashboards/panel-bargauge/animated_demo.json b/devenv/dev-dashboards/panel-bargauge/bar_gauge_demo.json similarity index 62% rename from devenv/dev-dashboards/panel-bargauge/animated_demo.json rename to devenv/dev-dashboards/panel-bargauge/bar_gauge_demo.json index a061f5766d1f..dcbc7fd1343d 100644 --- a/devenv/dev-dashboards/panel-bargauge/animated_demo.json +++ b/devenv/dev-dashboards/panel-bargauge/bar_gauge_demo.json @@ -15,26 +15,27 @@ "editable": true, "gnetId": null, "graphTooltip": 0, + "id": 7501, "links": [], "panels": [ { + "datasource": "gdev-testdata", "gridPos": { "h": 7, - "w": 18, + "w": 24, "x": 0, "y": 0 }, - "id": 7, + "id": 2, "links": [], "options": { - "displayMode": "gradient", + "displayMode": "lcd", "fieldOptions": { "calcs": ["mean"], "defaults": { - "decimals": null, "max": 100, "min": 0, - "unit": "watt" + "unit": "decgbytes" }, "mappings": [], "override": {}, @@ -47,7 +48,7 @@ { "color": "orange", "index": 1, - "value": 40 + "value": 60 }, { "color": "red", @@ -59,92 +60,103 @@ }, "orientation": "vertical" }, - "pluginVersion": "6.2.0-pre", "targets": [ { + "alias": "sda1", "refId": "A", "scenarioId": "random_walk" }, { + "alias": "sda2", "refId": "B", "scenarioId": "random_walk" }, { + "alias": "sda3", "refId": "C", "scenarioId": "random_walk" }, { + "alias": "sda4", "refId": "D", "scenarioId": "random_walk" }, { + "alias": "sda5", "refId": "E", - "scenarioId": "csv_metric_values", - "stringInput": "10003,33333" + "scenarioId": "random_walk" }, { + 
"alias": "sda6", "refId": "F", "scenarioId": "random_walk" }, { + "alias": "sda7", "refId": "G", "scenarioId": "random_walk" }, { + "alias": "sda8", "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" + "scenarioId": "random_walk" }, { + "alias": "sda9", "refId": "I", "scenarioId": "random_walk" }, { + "alias": "sda10", "refId": "J", "scenarioId": "random_walk" }, { + "alias": "sda11", "refId": "K", "scenarioId": "random_walk" }, { + "alias": "sda12", "refId": "L", "scenarioId": "random_walk" }, { + "alias": "sda13", "refId": "M", "scenarioId": "random_walk" }, { + "alias": "sda14", "refId": "N", "scenarioId": "random_walk" }, { + "alias": "sda15", "refId": "O", "scenarioId": "random_walk" }, { + "alias": "sda16", "refId": "P", "scenarioId": "random_walk" - }, - { - "refId": "Q", - "scenarioId": "random_walk" } ], "timeFrom": null, "timeShift": null, - "title": "Usage", + "title": "", + "transparent": true, "type": "bargauge" }, { + "datasource": "gdev-testdata", "gridPos": { - "h": 22, - "w": 6, - "x": 18, - "y": 0 + "h": 10, + "w": 16, + "x": 0, + "y": 7 }, - "id": 8, + "id": 4, "links": [], "options": { "displayMode": "gradient", @@ -154,25 +166,30 @@ "decimals": null, "max": 100, "min": 0, - "unit": "watt" + "unit": "celsius" }, "mappings": [], "override": {}, "thresholds": [ { - "color": "green", + "color": "blue", "index": 0, "value": null }, { - "color": "orange", + "color": "green", "index": 1, - "value": 55 + "value": 20 }, { - "color": "red", + "color": "orange", "index": 2, - "value": 95 + "value": 40 + }, + { + "color": "red", + "index": 3, + "value": 80 } ], "values": false @@ -182,32 +199,94 @@ "pluginVersion": "6.2.0-pre", "targets": [ { - "refId": "E", - "scenarioId": "random_walk" - }, - { + "alias": "Inside", "refId": "H", "scenarioId": "csv_metric_values", "stringInput": "100,100,100" }, { + "alias": "Outhouse", "refId": "A", "scenarioId": "random_walk" }, { + "alias": "Area B", "refId": "B", "scenarioId": 
"random_walk" }, { + "alias": "Basement", "refId": "C", "scenarioId": "random_walk" }, { + "alias": "Garage", "refId": "D", "scenarioId": "random_walk" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Gradient mode", + "type": "bargauge" + }, + { + "datasource": "gdev-testdata", + "gridPos": { + "h": 10, + "w": 6, + "x": 16, + "y": 7 + }, + "id": 6, + "links": [], + "options": { + "displayMode": "basic", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "decimals": null, + "max": 100, + "min": 0, + "unit": "watt" + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "blue", + "index": 0, + "value": null + }, + { + "color": "green", + "index": 1, + "value": 42.5 + }, + { + "color": "orange", + "index": 2, + "value": 80 + }, + { + "color": "red", + "index": 3, + "value": 90 + } + ], + "values": false + }, + "orientation": "horizontal" + }, + "pluginVersion": "6.2.0-pre", + "targets": [ + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "100,100,100" }, { - "refId": "I", + "refId": "A", "scenarioId": "random_walk" }, { @@ -241,47 +320,78 @@ { "refId": "Q", "scenarioId": "random_walk" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Basic", + "type": "bargauge" + }, + { + "datasource": "gdev-testdata", + "gridPos": { + "h": 22, + "w": 2, + "x": 22, + "y": 7 + }, + "id": 8, + "links": [], + "options": { + "displayMode": "lcd", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "max": 100, + "min": 0 + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "red", + "index": 0, + "value": null + }, + { + "color": "red", + "index": 1, + "value": 90 + } + ], + "values": false }, + "orientation": "vertical" + }, + "targets": [ { - "refId": "F", - "scenarioId": "random_walk" - }, - { - "refId": "G", - "scenarioId": "random_walk" - }, - { - "refId": "R", - "scenarioId": "random_walk" - }, - { - "refId": "S", + "refId": "A", "scenarioId": "random_walk" } ], "timeFrom": null, 
"timeShift": null, - "title": "Usage", + "title": "Completion", "type": "bargauge" }, { + "datasource": "gdev-testdata", "gridPos": { - "h": 15, - "w": 11, + "h": 12, + "w": 22, "x": 0, - "y": 7 + "y": 17 }, - "id": 6, + "id": 10, "links": [], "options": { "displayMode": "gradient", "fieldOptions": { "calcs": ["mean"], "defaults": { - "decimals": null, "max": 100, "min": 0, - "unit": "celsius" + "unit": "decgbytes" }, "mappings": [], "override": {}, @@ -294,12 +404,12 @@ { "color": "green", "index": 1, - "value": 20 + "value": 30 }, { "color": "orange", "index": 2, - "value": 40 + "value": 60 }, { "color": "red", @@ -309,69 +419,113 @@ ], "values": false }, - "orientation": "horizontal" + "orientation": "vertical" }, - "pluginVersion": "6.2.0-pre", "targets": [ { - "alias": "Inside", - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "alias": "Outhouse", + "alias": "sda1", "refId": "A", "scenarioId": "random_walk" }, { - "alias": "Area B", + "alias": "sda2", "refId": "B", "scenarioId": "random_walk" }, { - "alias": "Basement", + "alias": "sda3", "refId": "C", "scenarioId": "random_walk" }, { - "alias": "Garage", + "alias": "sda4", "refId": "D", "scenarioId": "random_walk" }, { - "alias": "Attic", + "alias": "sda5", "refId": "E", "scenarioId": "random_walk" }, { + "alias": "sda6", "refId": "F", "scenarioId": "random_walk" + }, + { + "alias": "sda7", + "refId": "G", + "scenarioId": "random_walk" + }, + { + "alias": "sda8", + "refId": "H", + "scenarioId": "random_walk" + }, + { + "alias": "sda9", + "refId": "I", + "scenarioId": "random_walk" + }, + { + "alias": "sda10", + "refId": "J", + "scenarioId": "random_walk" + }, + { + "alias": "sda11", + "refId": "K", + "scenarioId": "random_walk" + }, + { + "alias": "sda12", + "refId": "L", + "scenarioId": "random_walk" + }, + { + "alias": "sda13", + "refId": "M", + "scenarioId": "random_walk" + }, + { + "alias": "sda14", + "refId": "N", + "scenarioId": "random_walk" + }, + { + 
"alias": "sda15", + "refId": "O", + "scenarioId": "random_walk" + }, + { + "alias": "sda16", + "refId": "P", + "scenarioId": "random_walk" } ], "timeFrom": null, "timeShift": null, - "title": "Temperature", + "title": "", "type": "bargauge" }, { + "datasource": "gdev-testdata", "gridPos": { - "h": 15, - "w": 7, - "x": 11, - "y": 7 + "h": 8, + "w": 24, + "x": 0, + "y": 29 }, - "id": 9, + "id": 11, "links": [], "options": { "displayMode": "basic", "fieldOptions": { "calcs": ["mean"], "defaults": { - "decimals": null, "max": 100, "min": 0, - "unit": "celsius" + "unit": "decgbytes" }, "mappings": [], "override": {}, @@ -384,12 +538,12 @@ { "color": "green", "index": 1, - "value": 20 + "value": 30 }, { "color": "orange", "index": 2, - "value": 40 + "value": 60 }, { "color": "red", @@ -399,89 +553,113 @@ ], "values": false }, - "orientation": "horizontal" + "orientation": "vertical" }, - "pluginVersion": "6.2.0-pre", "targets": [ { - "alias": "Inside", - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "alias": "Outhouse", + "alias": "sda1", "refId": "A", "scenarioId": "random_walk" }, { - "alias": "Area B", + "alias": "sda2", "refId": "B", "scenarioId": "random_walk" }, { - "alias": "Basement", + "alias": "sda3", "refId": "C", "scenarioId": "random_walk" }, { - "alias": "Garage", + "alias": "sda4", "refId": "D", "scenarioId": "random_walk" }, { - "alias": "Attic", + "alias": "sda5", "refId": "E", "scenarioId": "random_walk" }, { + "alias": "sda6", "refId": "F", "scenarioId": "random_walk" }, { + "alias": "sda7", "refId": "G", "scenarioId": "random_walk" }, { + "alias": "sda8", + "refId": "H", + "scenarioId": "random_walk" + }, + { + "alias": "sda9", "refId": "I", "scenarioId": "random_walk" }, { + "alias": "sda10", "refId": "J", "scenarioId": "random_walk" }, { + "alias": "sda11", "refId": "K", "scenarioId": "random_walk" }, { + "alias": "sda12", "refId": "L", "scenarioId": "random_walk" + }, + { + "alias": "sda13", + "refId": 
"M", + "scenarioId": "random_walk" + }, + { + "alias": "sda14", + "refId": "N", + "scenarioId": "random_walk" + }, + { + "alias": "sda15", + "refId": "O", + "scenarioId": "random_walk" + }, + { + "alias": "sda16", + "refId": "P", + "scenarioId": "random_walk" } ], "timeFrom": null, "timeShift": null, - "title": "Temperature", + "title": "", "type": "bargauge" } ], - "refresh": false, + "refresh": "10s", "schemaVersion": 18, "style": "dark", - "tags": ["gdev", "bargauge", "panel-demo"], + "tags": ["gdev", "demo"], "templating": { "list": [] }, "time": { - "from": "now-30m", + "from": "now-6h", "to": "now" }, "timepicker": { - "refresh_intervals": ["1s", "5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], + "refresh_intervals": ["2s", "5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] }, "timezone": "", - "title": "Bar Gauge Animated Demo", - "uid": "k5IUwQeikaa", - "version": 1 + "title": "Bar Gauge Demo", + "uid": "vmie2cmWz", + "version": 3 } diff --git a/devenv/dev-dashboards/panel-bargauge/gradient_demo.json b/devenv/dev-dashboards/panel-bargauge/gradient_demo.json deleted file mode 100644 index ffe68eb7a863..000000000000 --- a/devenv/dev-dashboards/panel-bargauge/gradient_demo.json +++ /dev/null @@ -1,376 +0,0 @@ -{ - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": "-- Grafana --", - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "gnetId": null, - "graphTooltip": 0, - "links": [], - "panels": [ - { - "gridPos": { - "h": 7, - "w": 18, - "x": 0, - "y": 0 - }, - "id": 7, - "links": [], - "options": { - "displayMode": "gradient", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "watt" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - 
"index": 0, - "value": null - }, - { - "color": "orange", - "index": 1, - "value": 40 - }, - { - "color": "red", - "index": 2, - "value": 80 - } - ], - "values": false - }, - "orientation": "vertical" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "A", - "scenarioId": "random_walk" - }, - { - "refId": "B", - "scenarioId": "random_walk" - }, - { - "refId": "C", - "scenarioId": "random_walk" - }, - { - "refId": "D", - "scenarioId": "random_walk" - }, - { - "refId": "E", - "scenarioId": "csv_metric_values", - "stringInput": "10003,33333" - }, - { - "refId": "F", - "scenarioId": "random_walk" - }, - { - "refId": "G", - "scenarioId": "random_walk" - }, - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "refId": "I", - "scenarioId": "random_walk" - }, - { - "refId": "J", - "scenarioId": "random_walk" - }, - { - "refId": "K", - "scenarioId": "random_walk" - }, - { - "refId": "L", - "scenarioId": "random_walk" - }, - { - "refId": "M", - "scenarioId": "random_walk" - }, - { - "refId": "N", - "scenarioId": "random_walk" - }, - { - "refId": "O", - "scenarioId": "random_walk" - }, - { - "refId": "P", - "scenarioId": "random_walk" - }, - { - "refId": "Q", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Usage", - "type": "bargauge" - }, - { - "gridPos": { - "h": 20, - "w": 6, - "x": 18, - "y": 0 - }, - "id": 8, - "links": [], - "options": { - "displayMode": "gradient", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "watt" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - { - "color": "orange", - "index": 1, - "value": 65 - }, - { - "color": "red", - "index": 2, - "value": 95 - } - ], - "values": false - }, - "orientation": "horizontal" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "E", - "scenarioId": "random_walk" 
- }, - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "refId": "A", - "scenarioId": "random_walk" - }, - { - "refId": "B", - "scenarioId": "random_walk" - }, - { - "refId": "C", - "scenarioId": "random_walk" - }, - { - "refId": "D", - "scenarioId": "random_walk" - }, - { - "refId": "I", - "scenarioId": "random_walk" - }, - { - "refId": "J", - "scenarioId": "random_walk" - }, - { - "refId": "K", - "scenarioId": "random_walk" - }, - { - "refId": "L", - "scenarioId": "random_walk" - }, - { - "refId": "M", - "scenarioId": "random_walk" - }, - { - "refId": "N", - "scenarioId": "random_walk" - }, - { - "refId": "O", - "scenarioId": "random_walk" - }, - { - "refId": "P", - "scenarioId": "random_walk" - }, - { - "refId": "Q", - "scenarioId": "random_walk" - }, - { - "refId": "F", - "scenarioId": "random_walk" - }, - { - "refId": "G", - "scenarioId": "random_walk" - }, - { - "refId": "R", - "scenarioId": "random_walk" - }, - { - "refId": "S", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Usage", - "type": "bargauge" - }, - { - "gridPos": { - "h": 13, - "w": 18, - "x": 0, - "y": 7 - }, - "id": 6, - "links": [], - "options": { - "displayMode": "gradient", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "celsius" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "blue", - "index": 0, - "value": null - }, - { - "color": "green", - "index": 1, - "value": 20 - }, - { - "color": "orange", - "index": 2, - "value": 40 - }, - { - "color": "red", - "index": 3, - "value": 80 - } - ], - "values": false - }, - "orientation": "horizontal" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "alias": "Inside", - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "alias": "Outhouse", - "refId": "A", - "scenarioId": "random_walk" - }, - { - "alias": "Area B", - 
"refId": "B", - "scenarioId": "random_walk" - }, - { - "alias": "Basement", - "refId": "C", - "scenarioId": "random_walk" - }, - { - "alias": "Garage", - "refId": "D", - "scenarioId": "random_walk" - }, - { - "alias": "Attic", - "refId": "E", - "scenarioId": "random_walk" - }, - { - "refId": "F", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Temperature", - "type": "bargauge" - } - ], - "schemaVersion": 18, - "style": "dark", - "tags": ["gdev", "bargauge", "panel-demo"], - "templating": { - "list": [] - }, - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], - "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] - }, - "timezone": "", - "title": "Bar Gauge Gradient Demo", - "uid": "RndRQw6mz", - "version": 1 -} diff --git a/devenv/dev-dashboards/panel-bargauge/many_modes_demo.json b/devenv/dev-dashboards/panel-bargauge/many_modes_demo.json deleted file mode 100644 index 8b498e78ef15..000000000000 --- a/devenv/dev-dashboards/panel-bargauge/many_modes_demo.json +++ /dev/null @@ -1,405 +0,0 @@ -{ - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": "-- Grafana --", - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "gnetId": null, - "graphTooltip": 0, - "links": [], - "panels": [ - { - "gridPos": { - "h": 7, - "w": 22, - "x": 0, - "y": 0 - }, - "id": 7, - "links": [], - "options": { - "displayMode": "lcd", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "watt" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - { - "color": "orange", - "index": 1, - "value": 40 - }, - { - "color": "red", - "index": 2, - "value": 80 - } - ], - "values": false - 
}, - "orientation": "vertical" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "A", - "scenarioId": "random_walk" - }, - { - "refId": "B", - "scenarioId": "random_walk" - }, - { - "refId": "C", - "scenarioId": "random_walk" - }, - { - "refId": "D", - "scenarioId": "random_walk" - }, - { - "refId": "E", - "scenarioId": "csv_metric_values", - "stringInput": "10003,33333" - }, - { - "refId": "F", - "scenarioId": "random_walk" - }, - { - "refId": "G", - "scenarioId": "random_walk" - }, - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "refId": "I", - "scenarioId": "random_walk" - }, - { - "refId": "J", - "scenarioId": "random_walk" - }, - { - "refId": "K", - "scenarioId": "random_walk" - }, - { - "refId": "L", - "scenarioId": "random_walk" - }, - { - "refId": "M", - "scenarioId": "random_walk" - }, - { - "refId": "N", - "scenarioId": "random_walk" - }, - { - "refId": "O", - "scenarioId": "random_walk" - }, - { - "refId": "P", - "scenarioId": "random_walk" - }, - { - "refId": "Q", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Usage", - "type": "bargauge" - }, - { - "gridPos": { - "h": 20, - "w": 2, - "x": 22, - "y": 0 - }, - "id": 11, - "links": [], - "options": { - "displayMode": "lcd", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "percent" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - { - "color": "red", - "index": 1, - "value": 80 - } - ], - "values": false - }, - "orientation": "vertical" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "A", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Progress", - "type": "bargauge" - }, - { - "gridPos": { - "h": 13, - "w": 10, - "x": 0, - "y": 7 - }, - "id": 6, - "links": [], - "options": { - "displayMode": 
"gradient", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "celsius" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "blue", - "index": 0, - "value": null - }, - { - "color": "green", - "index": 1, - "value": 20 - }, - { - "color": "orange", - "index": 2, - "value": 40 - }, - { - "color": "red", - "index": 3, - "value": 80 - } - ], - "values": false - }, - "orientation": "horizontal" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "alias": "Inside", - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "alias": "Outhouse", - "refId": "A", - "scenarioId": "random_walk" - }, - { - "alias": "Area B", - "refId": "B", - "scenarioId": "random_walk" - }, - { - "alias": "Basement", - "refId": "C", - "scenarioId": "random_walk" - }, - { - "alias": "Garage", - "refId": "D", - "scenarioId": "random_walk" - }, - { - "alias": "Attic", - "refId": "E", - "scenarioId": "random_walk" - }, - { - "refId": "F", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Temperature", - "type": "bargauge" - }, - { - "gridPos": { - "h": 13, - "w": 12, - "x": 10, - "y": 7 - }, - "id": 8, - "links": [], - "options": { - "displayMode": "basic", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "watt" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - { - "color": "purple", - "index": 1, - "value": 50 - }, - { - "color": "blue", - "index": 2, - "value": 70 - } - ], - "values": false - }, - "orientation": "horizontal" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "refId": "A", - "scenarioId": "random_walk" - }, - { - "refId": "B", - "scenarioId": "random_walk" - }, - { - "refId": "C", - 
"scenarioId": "random_walk" - }, - { - "refId": "D", - "scenarioId": "random_walk" - }, - { - "refId": "I", - "scenarioId": "random_walk" - }, - { - "refId": "J", - "scenarioId": "random_walk" - }, - { - "refId": "K", - "scenarioId": "random_walk" - }, - { - "refId": "L", - "scenarioId": "random_walk" - }, - { - "refId": "M", - "scenarioId": "random_walk" - }, - { - "refId": "N", - "scenarioId": "random_walk" - }, - { - "refId": "O", - "scenarioId": "random_walk" - }, - { - "refId": "P", - "scenarioId": "random_walk" - }, - { - "refId": "Q", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Usage", - "type": "bargauge" - } - ], - "schemaVersion": 18, - "style": "dark", - "tags": ["gdev", "bargauge", "panel-demo"], - "templating": { - "list": [] - }, - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], - "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] - }, - "timezone": "", - "title": "Bar Gauge All Modes Demo", - "uid": "zt2f6NgZzaa", - "version": 1 -} diff --git a/devenv/dev-dashboards/panel-bargauge/panel_tests_bar_gauge.json b/devenv/dev-dashboards/panel-bargauge/panel_tests_bar_gauge.json new file mode 100644 index 000000000000..6230bb6c3055 --- /dev/null +++ b/devenv/dev-dashboards/panel-bargauge/panel_tests_bar_gauge.json @@ -0,0 +1,829 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "gridPos": { + "h": 7, + "w": 6, + "x": 0, + "y": 0 + }, + "id": 6, + "links": [], + "options": { + "displayMode": "gradient", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "decimals": null, + "max": 100, + "min": 0, + 
"unit": "celsius" + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "blue", + "index": 0, + "value": null + }, + { + "color": "green", + "index": 1, + "value": 20 + }, + { + "color": "orange", + "index": 2, + "value": 40 + }, + { + "color": "red", + "index": 3, + "value": 80 + } + ], + "values": false + }, + "orientation": "horizontal" + }, + "pluginVersion": "6.2.0-pre", + "targets": [ + { + "alias": "Inside", + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "100,100,100" + }, + { + "alias": "Outhouse", + "refId": "A", + "scenarioId": "random_walk" + }, + { + "refId": "F", + "scenarioId": "random_walk" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Title above bar", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 5, + "x": 6, + "y": 0 + }, + "id": 12, + "links": [], + "options": { + "displayMode": "gradient", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "decimals": null, + "max": 100, + "min": 0, + "unit": "celsius" + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "blue", + "index": 0, + "value": null + }, + { + "color": "green", + "index": 1, + "value": 20 + }, + { + "color": "orange", + "index": 2, + "value": 40 + }, + { + "color": "red", + "index": 3, + "value": 80 + } + ], + "values": false + }, + "orientation": "horizontal" + }, + "pluginVersion": "6.2.0-pre", + "targets": [ + { + "alias": "Inside", + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "100,100,100" + }, + { + "alias": "Outhouse", + "refId": "A", + "scenarioId": "random_walk" + }, + { + "refId": "F", + "scenarioId": "random_walk" + }, + { + "refId": "B", + "scenarioId": "random_walk" + }, + { + "refId": "C", + "scenarioId": "random_walk" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Title to left of bar", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 7, + "x": 11, + "y": 0 + }, + "id": 13, + "links": [], + "options": { + 
"displayMode": "basic", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "decimals": null, + "max": 100, + "min": 0, + "unit": "celsius" + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "blue", + "index": 0, + "value": null + }, + { + "color": "green", + "index": 1, + "value": 20 + }, + { + "color": "orange", + "index": 2, + "value": 40 + }, + { + "color": "red", + "index": 3, + "value": 80 + } + ], + "values": false + }, + "orientation": "horizontal" + }, + "pluginVersion": "6.2.0-pre", + "targets": [ + { + "alias": "Inside", + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "100,100,100" + }, + { + "alias": "Outhouse", + "refId": "A", + "scenarioId": "random_walk" + }, + { + "refId": "F", + "scenarioId": "random_walk" + }, + { + "refId": "B", + "scenarioId": "random_walk" + }, + { + "refId": "C", + "scenarioId": "random_walk" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Basic mode", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 14, + "links": [], + "options": { + "displayMode": "lcd", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "decimals": null, + "max": 100, + "min": 0, + "unit": "celsius" + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "blue", + "index": 0, + "value": null + }, + { + "color": "green", + "index": 1, + "value": 20 + }, + { + "color": "orange", + "index": 2, + "value": 40 + }, + { + "color": "red", + "index": 3, + "value": 80 + } + ], + "values": false + }, + "orientation": "horizontal" + }, + "pluginVersion": "6.2.0-pre", + "targets": [ + { + "alias": "Inside", + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "100,100,100" + }, + { + "alias": "Outhouse", + "refId": "A", + "scenarioId": "random_walk" + }, + { + "refId": "F", + "scenarioId": "random_walk" + }, + { + "refId": "B", + "scenarioId": "random_walk" + }, + { + "refId": "C", + "scenarioId": "random_walk" + 
} + ], + "timeFrom": null, + "timeShift": null, + "title": "LED", + "type": "bargauge" + }, + { + "gridPos": { + "h": 9, + "w": 11, + "x": 0, + "y": 7 + }, + "id": 7, + "links": [], + "options": { + "displayMode": "lcd", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "decimals": null, + "max": 100, + "min": 0, + "unit": "watt" + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "green", + "index": 0, + "value": null + }, + { + "color": "orange", + "index": 1, + "value": 40 + }, + { + "color": "red", + "index": 2, + "value": 80 + } + ], + "values": false + }, + "orientation": "vertical" + }, + "pluginVersion": "6.2.0-pre", + "targets": [ + { + "refId": "E", + "scenarioId": "csv_metric_values", + "stringInput": "10003,33333" + }, + { + "refId": "F", + "scenarioId": "random_walk" + }, + { + "refId": "G", + "scenarioId": "random_walk" + }, + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "100,100,100" + }, + { + "refId": "I", + "scenarioId": "random_walk" + }, + { + "refId": "J", + "scenarioId": "random_walk" + }, + { + "refId": "K", + "scenarioId": "random_walk" + }, + { + "refId": "L", + "scenarioId": "random_walk" + }, + { + "refId": "M", + "scenarioId": "random_walk" + }, + { + "refId": "N", + "scenarioId": "random_walk" + }, + { + "refId": "O", + "scenarioId": "random_walk" + }, + { + "refId": "P", + "scenarioId": "random_walk" + }, + { + "refId": "Q", + "scenarioId": "random_walk" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "LED Vertical", + "type": "bargauge" + }, + { + "gridPos": { + "h": 9, + "w": 13, + "x": 11, + "y": 7 + }, + "id": 8, + "links": [], + "options": { + "displayMode": "basic", + "fieldOptions": { + "calcs": ["mean"], + "defaults": { + "decimals": null, + "max": 100, + "min": 0, + "unit": "watt" + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "green", + "index": 0, + "value": null + }, + { + "color": "purple", + "index": 1, + "value": 50 + }, + { 
+ "color": "blue", + "index": 2, + "value": 70 + } + ], + "values": false + }, + "orientation": "vertical" + }, + "pluginVersion": "6.2.0-pre", + "targets": [ + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "100,100,100" + }, + { + "refId": "A", + "scenarioId": "random_walk" + }, + { + "refId": "B", + "scenarioId": "random_walk" + }, + { + "refId": "C", + "scenarioId": "random_walk" + }, + { + "refId": "D", + "scenarioId": "random_walk" + }, + { + "refId": "I", + "scenarioId": "random_walk" + }, + { + "refId": "J", + "scenarioId": "random_walk" + }, + { + "refId": "K", + "scenarioId": "random_walk" + }, + { + "refId": "L", + "scenarioId": "random_walk" + }, + { + "refId": "M", + "scenarioId": "random_walk" + }, + { + "refId": "N", + "scenarioId": "random_walk" + }, + { + "refId": "O", + "scenarioId": "random_walk" + }, + { + "refId": "P", + "scenarioId": "random_walk" + }, + { + "refId": "Q", + "scenarioId": "random_walk" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Basic vertical ", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 11, + "x": 0, + "y": 16 + }, + "id": 16, + "links": [], + "options": { + "displayMode": "lcd", + "fieldOptions": { + "calcs": ["last"], + "defaults": { + "max": 100, + "min": 0 + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "green", + "index": 0, + "value": null + }, + { + "color": "blue", + "index": 1, + "value": 40 + }, + { + "color": "red", + "index": 2, + "value": 80 + } + ], + "values": false + }, + "orientation": "horizontal" + }, + "pluginVersion": "6.3.0-pre", + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,-100" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Negative value below min", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 3, + "x": 11, + "y": 16 + }, + "id": 17, + "links": [], + "options": { + "displayMode": "lcd", + "fieldOptions": { + "calcs": 
["last"], + "defaults": { + "max": 100, + "min": 0 + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "green", + "index": 0, + "value": null + }, + { + "color": "blue", + "index": 1, + "value": 40 + }, + { + "color": "red", + "index": 2, + "value": 80 + } + ], + "values": false + }, + "orientation": "vertical" + }, + "pluginVersion": "6.3.0-pre", + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,-100" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Negative value below min", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 3, + "x": 14, + "y": 16 + }, + "id": 18, + "links": [], + "options": { + "displayMode": "lcd", + "fieldOptions": { + "calcs": ["last"], + "defaults": { + "max": 100, + "min": -10 + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "green", + "index": 0, + "value": null + }, + { + "color": "blue", + "index": 1, + "value": 40 + }, + { + "color": "red", + "index": 2, + "value": 80 + } + ], + "values": false + }, + "orientation": "vertical" + }, + "pluginVersion": "6.3.0-pre", + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,6" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Positive value above min", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 3, + "x": 17, + "y": 16 + }, + "id": 19, + "links": [], + "options": { + "displayMode": "lcd", + "fieldOptions": { + "calcs": ["last"], + "defaults": { + "max": 35, + "min": -20 + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "blue", + "index": 0, + "value": null + }, + { + "color": "green", + "index": 1, + "value": 5 + }, + { + "color": "#EAB839", + "index": 2, + "value": 25 + }, + { + "color": "red", + "index": 3, + "value": 30 + } + ], + "values": false + }, + "orientation": "vertical" + }, + "pluginVersion": "6.3.0-pre", + "targets": [ + { + "refId": "A", + "scenarioId": 
"csv_metric_values", + "stringInput": "1,20,90,30,5,6" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Negative min ", + "type": "bargauge" + }, + { + "gridPos": { + "h": 7, + "w": 4, + "x": 20, + "y": 16 + }, + "id": 20, + "links": [], + "options": { + "displayMode": "gradient", + "fieldOptions": { + "calcs": ["last"], + "defaults": { + "max": 35, + "min": -20 + }, + "mappings": [], + "override": {}, + "thresholds": [ + { + "color": "blue", + "index": 0, + "value": null + }, + { + "color": "green", + "index": 1, + "value": 5 + }, + { + "color": "#EAB839", + "index": 2, + "value": 25 + }, + { + "color": "red", + "index": 3, + "value": 30 + } + ], + "values": false + }, + "orientation": "vertical" + }, + "pluginVersion": "6.3.0-pre", + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "30,30" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Negative min", + "type": "bargauge" + } + ], + "schemaVersion": 18, + "style": "dark", + "tags": ["gdev", "panel-tests"], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], + "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] + }, + "timezone": "", + "title": "Panel Tests - Bar Gauge", + "uid": "O6f11TZWk", + "version": 12 +} diff --git a/devenv/dev-dashboards/panel-bargauge/retro_led_demo.json b/devenv/dev-dashboards/panel-bargauge/retro_led_demo.json deleted file mode 100644 index 3fe8272c4dfe..000000000000 --- a/devenv/dev-dashboards/panel-bargauge/retro_led_demo.json +++ /dev/null @@ -1,400 +0,0 @@ -{ - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": "-- Grafana --", - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "gnetId": null, - "graphTooltip": 0, - "links": [], - 
"panels": [ - { - "gridPos": { - "h": 8, - "w": 22, - "x": 0, - "y": 0 - }, - "id": 7, - "links": [], - "options": { - "displayMode": "lcd", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "watt" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - { - "color": "orange", - "index": 1, - "value": 40 - }, - { - "color": "red", - "index": 2, - "value": 80 - } - ], - "values": false - }, - "orientation": "vertical" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "A", - "scenarioId": "random_walk" - }, - { - "refId": "B", - "scenarioId": "random_walk" - }, - { - "refId": "C", - "scenarioId": "random_walk" - }, - { - "refId": "D", - "scenarioId": "random_walk" - }, - { - "refId": "E", - "scenarioId": "csv_metric_values", - "stringInput": "10003,33333" - }, - { - "refId": "F", - "scenarioId": "random_walk" - }, - { - "refId": "G", - "scenarioId": "random_walk" - }, - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "refId": "I", - "scenarioId": "random_walk" - }, - { - "refId": "J", - "scenarioId": "random_walk" - }, - { - "refId": "K", - "scenarioId": "random_walk" - }, - { - "refId": "L", - "scenarioId": "random_walk" - }, - { - "refId": "M", - "scenarioId": "random_walk" - }, - { - "refId": "N", - "scenarioId": "random_walk" - }, - { - "refId": "O", - "scenarioId": "random_walk" - }, - { - "refId": "P", - "scenarioId": "random_walk" - }, - { - "refId": "Q", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Usage", - "type": "bargauge" - }, - { - "gridPos": { - "h": 21, - "w": 2, - "x": 22, - "y": 0 - }, - "id": 11, - "links": [], - "options": { - "displayMode": "lcd", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "percent" - }, - "mappings": [], - "override": {}, - 
"thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - { - "color": "red", - "index": 1, - "value": 80 - } - ], - "values": false - }, - "orientation": "vertical" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "A", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Progress", - "type": "bargauge" - }, - { - "gridPos": { - "h": 13, - "w": 10, - "x": 0, - "y": 8 - }, - "id": 6, - "links": [], - "options": { - "displayMode": "lcd", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "celsius" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - { - "color": "orange", - "index": 1, - "value": 40 - }, - { - "color": "red", - "index": 2, - "value": 80 - } - ], - "values": false - }, - "orientation": "horizontal" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "alias": "Inside", - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "alias": "Outhouse", - "refId": "A", - "scenarioId": "random_walk" - }, - { - "alias": "Area B", - "refId": "B", - "scenarioId": "random_walk" - }, - { - "alias": "Basement", - "refId": "C", - "scenarioId": "random_walk" - }, - { - "alias": "Garage", - "refId": "D", - "scenarioId": "random_walk" - }, - { - "alias": "Attic", - "refId": "E", - "scenarioId": "random_walk" - }, - { - "refId": "F", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Temperature", - "type": "bargauge" - }, - { - "gridPos": { - "h": 13, - "w": 12, - "x": 10, - "y": 8 - }, - "id": 8, - "links": [], - "options": { - "displayMode": "lcd", - "fieldOptions": { - "calcs": ["mean"], - "defaults": { - "decimals": null, - "max": 100, - "min": 0, - "unit": "watt" - }, - "mappings": [], - "override": {}, - "thresholds": [ - { - "color": "green", - "index": 0, - "value": null - }, - 
{ - "color": "orange", - "index": 1, - "value": 85 - }, - { - "color": "red", - "index": 2, - "value": 95 - } - ], - "values": false - }, - "orientation": "horizontal" - }, - "pluginVersion": "6.2.0-pre", - "targets": [ - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "100,100,100" - }, - { - "refId": "A", - "scenarioId": "random_walk" - }, - { - "refId": "B", - "scenarioId": "random_walk" - }, - { - "refId": "C", - "scenarioId": "random_walk" - }, - { - "refId": "D", - "scenarioId": "random_walk" - }, - { - "refId": "I", - "scenarioId": "random_walk" - }, - { - "refId": "J", - "scenarioId": "random_walk" - }, - { - "refId": "K", - "scenarioId": "random_walk" - }, - { - "refId": "L", - "scenarioId": "random_walk" - }, - { - "refId": "M", - "scenarioId": "random_walk" - }, - { - "refId": "N", - "scenarioId": "random_walk" - }, - { - "refId": "O", - "scenarioId": "random_walk" - }, - { - "refId": "P", - "scenarioId": "random_walk" - }, - { - "refId": "Q", - "scenarioId": "random_walk" - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Usage", - "type": "bargauge" - } - ], - "schemaVersion": 18, - "style": "dark", - "tags": ["gdev", "bargauge", "panel-demo"], - "templating": { - "list": [] - }, - "time": { - "from": "now-6h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], - "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] - }, - "timezone": "", - "title": "Bar Gauge LED Demo", - "uid": "0G3rbkqmkaa", - "version": 1 -} diff --git a/devenv/docker/blocks/elastic7/docker-compose.yaml b/devenv/docker/blocks/elastic7/docker-compose.yaml index 3ef922c890c3..45e2836f870e 100644 --- a/devenv/docker/blocks/elastic7/docker-compose.yaml +++ b/devenv/docker/blocks/elastic7/docker-compose.yaml @@ -21,3 +21,19 @@ - ./docker/blocks/elastic7/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro - /var/log:/var/log:ro - ../data/log:/var/log/grafana:ro 
+ + metricbeat7: + image: docker.elastic.co/beats/metricbeat-oss:7.0.0 + network_mode: host + command: metricbeat -e -strict.perms=false + user: root + volumes: + - ./docker/blocks/elastic7/metricbeat.yml:/usr/share/metricbeat/metricbeat.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + + kibana7: + image: docker.elastic.co/kibana/kibana-oss:7.0.0 + ports: + - "5601:5601" + environment: + ELASTICSEARCH_HOSTS: http://elasticsearch7:9200 diff --git a/devenv/docker/blocks/elastic7/metricbeat.yml b/devenv/docker/blocks/elastic7/metricbeat.yml new file mode 100644 index 000000000000..4788c0cdd9a5 --- /dev/null +++ b/devenv/docker/blocks/elastic7/metricbeat.yml @@ -0,0 +1,38 @@ +metricbeat.config: + modules: + path: ${path.config}/modules.d/*.yml + # Reload module configs as they change: + reload.enabled: false + +metricbeat.autodiscover: + providers: + - type: docker + hints.enabled: true + +metricbeat.modules: +- module: docker + metricsets: + - "container" + - "cpu" + - "diskio" + - "healthcheck" + - "info" + #- "image" + - "memory" + - "network" + hosts: ["unix:///var/run/docker.sock"] + period: 10s + enabled: true + +processors: + - add_cloud_metadata: ~ + +output.elasticsearch: + hosts: ["localhost:12200"] + index: "metricbeat-%{+yyyy.MM.dd}" + +setup.template.name: "metricbeat" +setup.template.pattern: "metricbeat-*" +setup.template.settings: + index.number_of_shards: 1 + index.number_of_replicas: 1 \ No newline at end of file diff --git a/devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile b/devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile similarity index 74% rename from devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile rename to devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile index 979d01c7dad4..29e581d2b137 100644 --- a/devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile +++ b/devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile @@ -19,11 +19,11 @@ EXPOSE 389 
VOLUME ["/etc/ldap", "/var/lib/ldap"] -COPY modules/ /etc/ldap.dist/modules -COPY prepopulate/ /etc/ldap.dist/prepopulate +COPY admins-ldap-server/modules/ /etc/ldap.dist/modules +COPY admins-ldap-server/prepopulate/ /etc/ldap.dist/prepopulate -COPY ../entrypoint.sh /entrypoint.sh -COPY ../prepopulate.sh /prepopulate.sh +COPY ./entrypoint.sh /entrypoint.sh +COPY ./prepopulate.sh /prepopulate.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/devenv/docker/blocks/multiple-openldap/docker-compose.yaml b/devenv/docker/blocks/multiple-openldap/docker-compose.yaml index 74f5d29a90ff..7ed0ca2e840a 100644 --- a/devenv/docker/blocks/multiple-openldap/docker-compose.yaml +++ b/devenv/docker/blocks/multiple-openldap/docker-compose.yaml @@ -1,5 +1,7 @@ admins-openldap: - build: docker/blocks/multiple-openldap/admins-ldap-server + build: + context: docker/blocks/multiple-openldap + dockerfile: ./admins-ldap-server.Dockerfile environment: SLAPD_PASSWORD: grafana SLAPD_DOMAIN: grafana.org @@ -8,7 +10,9 @@ - "389:389" openldap: - build: docker/blocks/multiple-openldap/ldap-server + build: + context: docker/blocks/multiple-openldap + dockerfile: ./ldap-server.Dockerfile environment: SLAPD_PASSWORD: grafana SLAPD_DOMAIN: grafana.org diff --git a/devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile b/devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile similarity index 75% rename from devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile rename to devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile index 979d01c7dad4..7604d1118a3f 100644 --- a/devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile +++ b/devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile @@ -19,11 +19,11 @@ EXPOSE 389 VOLUME ["/etc/ldap", "/var/lib/ldap"] -COPY modules/ /etc/ldap.dist/modules -COPY prepopulate/ /etc/ldap.dist/prepopulate +COPY ldap-server/modules/ /etc/ldap.dist/modules +COPY ldap-server/prepopulate/ /etc/ldap.dist/prepopulate 
-COPY ../entrypoint.sh /entrypoint.sh -COPY ../prepopulate.sh /prepopulate.sh +COPY ./entrypoint.sh /entrypoint.sh +COPY ./prepopulate.sh /prepopulate.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/devenv/docker/loadtest/README.md b/devenv/docker/loadtest/README.md index ca70a77dc74f..95f5c38dc0eb 100644 --- a/devenv/docker/loadtest/README.md +++ b/devenv/docker/loadtest/README.md @@ -6,6 +6,9 @@ Runs load tests and checks using [k6](https://k6.io/). Docker +To run the auth proxy test you'll need to setup nginx proxy from docker block and +enable auth proxy together with configuring Grafana for auth proxy. + ## Run Run load test for 15 minutes using 2 virtual users and targeting http://localhost:3000. @@ -32,6 +35,13 @@ Run load test for 10 virtual users: $ ./run.sh -v 10 ``` +Run auth proxy test: + +```bash +$ ./run.sh -c auth_proxy_test +``` + + Example output: ```bash diff --git a/devenv/docker/loadtest/auth_proxy_test.js b/devenv/docker/loadtest/auth_proxy_test.js new file mode 100644 index 000000000000..f506701e44e0 --- /dev/null +++ b/devenv/docker/loadtest/auth_proxy_test.js @@ -0,0 +1,56 @@ +import { sleep, check, group } from 'k6'; +import { createBasicAuthClient } from './modules/client.js'; + +export let options = { + noCookiesReset: true +}; + +let endpoint = __ENV.URL || 'http://localhost:10080/grafana'; +const client = createBasicAuthClient(endpoint, 'user1', 'grafana'); +client.withOrgId(1); + +export const setup = () => { + const adminClient = createBasicAuthClient(endpoint, 'admin', 'admin'); + let res = adminClient.datasources.getByName('gdev-prometheus'); + if (res.status !== 200) { + throw new Error('Expected 200 response status when creating datasource'); + } + + return { + datasourceId: res.json().id, + }; +} + +export default (data) => { + group("auth proxy test", () => { + group("batch proxy requests", () => { + const d = new Date(); + const batchCount = 300; + const requests = []; + const query = encodeURI('topk(5, 
max(scrape_duration_seconds) by (job))'); + const start = (d.getTime() / 1000) - 3600; + const end = (d.getTime() / 1000); + const step = 20; + + requests.push({ method: 'GET', url: '/api/annotations?dashboardId=8&from=1558670300607&to=1558691900607' }); + + for (let n = 0; n < batchCount; n++) { + requests.push({ + method: 'GET', + url: `/api/datasources/proxy/${data.datasourceId}/api/v1/query_range?query=${query}&start=${start}&end=${end}&step=${step}`, + }); + } + + let responses = client.batch(requests); + for (let n = 0; n < batchCount; n++) { + check(responses[n], { + 'response status is 200': (r) => r.status === 200, + }); + } + }); + }); + + sleep(5) +} + +export const teardown = (data) => {} diff --git a/devenv/docker/loadtest/run.sh b/devenv/docker/loadtest/run.sh index 9517edf5d743..0d5d9cc441c8 100755 --- a/devenv/docker/loadtest/run.sh +++ b/devenv/docker/loadtest/run.sh @@ -6,8 +6,9 @@ run() { duration='15m' url='http://localhost:3000' vus='2' + testcase='auth_token_test' - while getopts ":d:u:v:" o; do + while getopts ":d:u:v:c:" o; do case "${o}" in d) duration=${OPTARG} @@ -18,11 +19,14 @@ run() { v) vus=${OPTARG} ;; + c) + testcase=${OPTARG} + ;; esac done shift $((OPTIND-1)) - docker run -t --network=host -v $PWD:/src -e URL=$url --rm -i loadimpact/k6:master run --vus $vus --duration $duration src/auth_token_test.js + docker run -t --network=host -v $PWD:/src -e URL=$url --rm -i loadimpact/k6:master run --vus $vus --duration $duration src/$testcase.js } run "$@" diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index da95a311635b..d09fb0bbc51b 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -179,6 +179,24 @@ Secure json data is a map of settings that will be encrypted with [secret key](/ | accessKey | string | Cloudwatch | Access key for connecting to Cloudwatch | | secretKey | string | Cloudwatch | Secret key for connecting to 
Cloudwatch | +#### Custom HTTP headers for datasources +Datasources managed by Grafanas provisioning can be configured to add HTTP headers to all requests +going to that datasource. The header name is configured in the `jsonData` field and the header value should be +configured in `secureJsonData`. + +```yaml +apiVersion: 1 + +datasources: +- name: Graphite + jsonData: + httpHeaderName1: "HeaderName" + httpHeaderName2: "Authorization" + secureJsonData: + httpHeaderValue1: "HeaderValue" + httpHeaderValue2: "Bearer XXXXXXXXX" +``` + ### Dashboards It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into Grafana from the local filesystem. @@ -204,7 +222,7 @@ providers: # enable dashboard editing editable: true # how often Grafana will scan for changed dashboards - updateIntervalSeconds: 10 + updateIntervalSeconds: 10 options: # path to dashboard files on disk. Required path: /var/lib/grafana/dashboards @@ -274,7 +292,7 @@ notifiers: # or org_name: Main Org. is_default: true - send_reminders: true + send_reminder: true frequency: 1h disable_resolve_message: false # See `Supported Settings` section for settings supporter for each diff --git a/docs/sources/auth/ldap.md b/docs/sources/auth/ldap.md index d0ae9094d66a..a3a01e6b6c67 100644 --- a/docs/sources/auth/ldap.md +++ b/docs/sources/auth/ldap.md @@ -215,6 +215,67 @@ email = "email" # [[servers.group_mappings]] omitted for clarity ``` +### Multiple LDAP servers + +Grafana does support receiving information from multiple LDAP servers. 
+ +**LDAP specific configuration file (ldap.toml):** +```bash +# --- First LDAP Server --- + +[[servers]] +host = "10.0.0.1" +port = 389 +use_ssl = false +start_tls = false +ssl_skip_verify = false +bind_dn = "cn=admin,dc=grafana,dc=org" +bind_password = 'grafana' +search_filter = "(cn=%s)" +search_base_dns = ["ou=users,dc=grafana,dc=org"] + +[servers.attributes] +name = "givenName" +surname = "sn" +username = "cn" +member_of = "memberOf" +email = "email" + +[[servers.group_mappings]] +group_dn = "cn=admins,ou=groups,dc=grafana,dc=org" +org_role = "Admin" +grafana_admin = true + +# --- Second LDAP Server --- + +[[servers]] +host = "10.0.0.2" +port = 389 +use_ssl = false +start_tls = false +ssl_skip_verify = false + +bind_dn = "cn=admin,dc=grafana,dc=org" +bind_password = 'grafana' +search_filter = "(cn=%s)" +search_base_dns = ["ou=users,dc=grafana,dc=org"] + +[servers.attributes] +name = "givenName" +surname = "sn" +username = "cn" +member_of = "memberOf" +email = "email" + +[[servers.group_mappings]] +group_dn = "cn=editors,ou=groups,dc=grafana,dc=org" +org_role = "Editor" + +[[servers.group_mappings]] +group_dn = "*" +org_role = "Viewer" +``` + ### Active Directory [Active Directory](https://technet.microsoft.com/en-us/library/hh831484(v=ws.11).aspx) is a directory service which is commonly used in Windows environments. @@ -247,6 +308,8 @@ email = "mail" # [[servers.group_mappings]] omitted for clarity ``` + + #### Port requirements In above example SSL is enabled and an encrypted port have been configured. If your Active Directory don't support SSL please change `enable_ssl = false` and `port = 389`. diff --git a/docs/sources/features/datasources/azuremonitor.md b/docs/sources/features/datasources/azuremonitor.md index d72f5273925f..114187499f17 100644 --- a/docs/sources/features/datasources/azuremonitor.md +++ b/docs/sources/features/datasources/azuremonitor.md @@ -31,34 +31,37 @@ The datasource can access metrics from four different services. 
You can configur - [Guide to setting up an Azure Active Directory Application for Azure Log Analytics.](https://dev.loganalytics.io/documentation/Authorization/AAD-Setup) - [Quickstart Guide for Application Insights.](https://dev.applicationinsights.io/quickstart/) -1. Accessed from the Grafana main menu, newly installed data sources can be added immediately within the Data Sources section. Next, click the "Add data source" button in the upper right. The data source will be available for selection in the Type select box. +1. Accessed from the Grafana main menu, newly installed data sources can be added immediately within the Data Sources section. Next, click the "Add data source" button in the upper right. The Azure Monitor data source will be available for selection in the Cloud section in the list of data sources. -2. Select Azure Monitor from the Type dropdown:
-![Data Source Type](https://raw.githubusercontent.com/grafana/azure-monitor-datasource/master/src/img/config_1_select_type.png) -3. In the name field, fill in a name for the data source. It can be anything. Some suggestions are Azure Monitor or App Insights. +2. In the name field, Grafana will automatically fill in a name for the data source - `Azure Monitor` or something like `Azure Monitor - 3`. If you are going to configure multiple data sources then change the name to something more informative. -4. If you are using Azure Monitor, then you need 4 pieces of information from the Azure portal (see link above for detailed instructions): - - **Tenant Id** (Azure Active Directory -> Properties -> Directory ID) - - **Subscription Id** (Subscriptions -> Choose subscription -> Overview -> Subscription ID) - - **Client Id** (Azure Active Directory -> App Registrations -> Choose your app -> Application ID) - - **Client Secret** ( Azure Active Directory -> App Registrations -> Choose your app -> Keys) +3. If you are using Azure Monitor, you need 4 pieces of information from the Azure portal (see link above for detailed instructions): -5. Paste these four items into the fields in the Azure Monitor API Details section:
-![Azure Monitor API Details](https://raw.githubusercontent.com/grafana/azure-monitor-datasource/master/src/img/config_2_azure_monitor_api_details.png) + - **Tenant Id** (Azure Active Directory -> Properties -> Directory ID) + - **Client Id** (Azure Active Directory -> App Registrations -> Choose your app -> Application ID) + - **Client Secret** ( Azure Active Directory -> App Registrations -> Choose your app -> Keys) + - **Default Subscription Id** (Subscriptions -> Choose subscription -> Overview -> Subscription ID) -6. If you are also using the Azure Log Analytics service, then you need to specify these two config values (or you can reuse the Client Id and Secret from the previous step). - - Client Id (Azure Active Directory -> App Registrations -> Choose your app -> Application ID) - - Client Secret ( Azure Active Directory -> App Registrations -> Choose your app -> Keys -> Create a key -> Use client secret) +4. Paste these four items into the fields in the Azure Monitor API Details section: + {{< docs-imagebox img="/img/docs/v62/config_1_azure_monitor_details.png" class="docs-image--no-shadow" caption="Azure Monitor Configuration Details" >}} -7. If you are are using Application Insights, then you need two pieces of information from the Azure Portal (see link above for detailed instructions): - - Application ID - - API Key + - The Subscription Id can be changed per query. Save the datasource and refresh the page to see the list of subscriptions available for the specified Client Id. -8. Paste these two items into the appropriate fields in the Application Insights API Details section:
-![Application Insights API Details](https://raw.githubusercontent.com/grafana/azure-monitor-datasource/master/src/img/config_3_app_insights_api_details.png) +5. If you are also using the Azure Log Analytics service, then you need to specify these two config values (or you can reuse the Client Id and Secret from the previous step). -9. Test that the configuration details are correct by clicking on the "Save & Test" button:
-![Azure Monitor API Details](https://raw.githubusercontent.com/grafana/azure-monitor-datasource/master/src/img/config_4_save_and_test.png) + - Client Id (Azure Active Directory -> App Registrations -> Choose your app -> Application ID) + - Client Secret (Azure Active Directory -> App Registrations -> Choose your app -> Keys -> Create a key -> Use client secret) + +6. If you are using Application Insights, then you need two pieces of information from the Azure Portal (see link above for detailed instructions): + + - Application ID + - API Key + +7. Paste these two items into the appropriate fields in the Application Insights API Details section: + {{< docs-imagebox img="/img/docs/v62/config_2_app_insights_api_details.png" class="docs-image--no-shadow" caption="Application Insights Configuration Details" >}} + +8. Test that the configuration details are correct by clicking on the "Save & Test" button: + {{< docs-imagebox img="/img/docs/v62/config_3_save_and_test.png" class="docs-image--no-shadow" caption="Save and Test" >}} Alternatively on step 4 if creating a new Azure Active Directory App, use the [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/?view=azure-cli-latest): @@ -68,17 +71,25 @@ az ad sp create-for-rbac -n "http://localhost:3000" ## Choose a Service -In the query editor for a panel, after choosing your Azure Monitor datasource, the first option is to choose a service. There are three options here: Azure Monitor, Application Insights and Azure Log Analytics. The query editor will change depending on which one you pick. Azure Monitor is the default. +In the query editor for a panel, after choosing your Azure Monitor datasource, the first option is to choose a service. There are three options here: + +- `Azure Monitor` +- `Application Insights` +- `Azure Log Analytics` + +The query editor will change depending on which one you pick. Azure Monitor is the default. 
## Querying the Azure Monitor Service The Azure Monitor service provides metrics for all the Azure services that you have running. It helps you understand how your applications on Azure are performing and to proactively find issues affecting your applications. +If your Azure Monitor credentials give you access to multiple subscriptions then choose the appropriate subscription first. + Examples of metrics that you can get from the service are: -- Microsoft.Compute/virtualMachines - Percentage CPU -- Microsoft.Network/networkInterfaces - Bytes sent -- Microsoft.Storage/storageAccounts - Used Capacity +- `Microsoft.Compute/virtualMachines - Percentage CPU` +- `Microsoft.Network/networkInterfaces - Bytes sent` +- `Microsoft.Storage/storageAccounts - Used Capacity` {{< docs-imagebox img="/img/docs/v60/azuremonitor-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Monitor Query Editor" >}} @@ -112,12 +123,17 @@ Note that the Azure Monitor service does not support multiple values yet. If you The Azure Monitor Datasource Plugin provides the following queries you can specify in the `Query` field in the Variable edit view. They allow you to fill a variable's options list. -| Name | Description | -| -------------------------------------------------------- | -------------------------------------------------------------- | -| *ResourceGroups()* | Returns a list of resource groups. | -| *Namespaces(aResourceGroup)* | Returns a list of namespaces for the specified resource group. | -| *ResourceNames(aResourceGroup, aNamespace)* | Returns a list of resource names. | -| *MetricNames(aResourceGroup, aNamespace, aResourceName)* | Returns a list of metric names. | +| Name | Description | +| -------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | +| _Subscriptions()_ | Returns a list of subscriptions. 
| +| _ResourceGroups()_ | Returns a list of resource groups. | +| _ResourceGroups(12345678-aaaa-bbbb-cccc-123456789aaa)_ | Returns a list of resource groups for a specified subscription. | +| _Namespaces(aResourceGroup)_ | Returns a list of namespaces for the specified resource group. | +| _Namespaces(12345678-aaaa-bbbb-cccc-123456789aaa, aResourceGroup)_ | Returns a list of namespaces for the specified resource group and subscription. | +| _ResourceNames(aResourceGroup, aNamespace)_ | Returns a list of resource names. | +| _ResourceNames(12345678-aaaa-bbbb-cccc-123456789aaa, aResourceGroup, aNamespace)_ | Returns a list of resource names for a specified subscription. | +| _MetricNames(aResourceGroup, aNamespace, aResourceName)_ | Returns a list of metric names. | +| _MetricNames(12345678-aaaa-bbbb-cccc-123456789aaa, aResourceGroup, aNamespace, aResourceName)_ | Returns a list of metric names for a specified subscription. | Examples: @@ -185,8 +201,8 @@ types of template variables. | Name | Description | | ---------------------------------- | ---------------------------------------------------------- | -| *AppInsightsMetricNames()* | Returns a list of metric names. | -| *AppInsightsGroupBys(aMetricName)* | Returns a list of group bys for the specified metric name. | +| _AppInsightsMetricNames()_ | Returns a list of metric names. | +| _AppInsightsGroupBys(aMetricName)_ | Returns a list of group bys for the specified metric name. | Examples: @@ -222,6 +238,8 @@ AzureActivity | order by TimeGenerated desc ``` +If your credentials give you access to multiple subscriptions then choose the appropriate subscription first. 
+ {{< docs-imagebox img="/img/docs/v60/azureloganalytics-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Log Analytics Query Editor" >}} ### Azure Log Analytics Macros @@ -229,25 +247,27 @@ AzureActivity To make writing queries easier there are several Grafana macros that can be used in the where clause of a query: - `$__timeFilter()` - Expands to - `TimeGenerated ≥ datetime(2018-06-05T18:09:58.907Z) and` - `TimeGenerated ≤ datetime(2018-06-05T20:09:58.907Z)` where the from and to datetimes are from the Grafana time picker. + `TimeGenerated ≥ datetime(2018-06-05T18:09:58.907Z) and` + `TimeGenerated ≤ datetime(2018-06-05T20:09:58.907Z)` where the from and to datetimes are from the Grafana time picker. - `$__timeFilter(datetimeColumn)` - Expands to - `datetimeColumn ≥ datetime(2018-06-05T18:09:58.907Z) and` - `datetimeColumn ≤ datetime(2018-06-05T20:09:58.907Z)` where the from and to datetimes are from the Grafana time picker. + `datetimeColumn ≥ datetime(2018-06-05T18:09:58.907Z) and` + `datetimeColumn ≤ datetime(2018-06-05T20:09:58.907Z)` where the from and to datetimes are from the Grafana time picker. + +- `$__timeFrom()` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T18:09:58.907Z)`. + +- `$__timeTo()` - Returns the To datetime from the Grafana picker. Example: `datetime(2018-06-05T20:09:58.907Z)`. -- `$__escapeMulti($myVar)` - is to be used with multi-value template variables that contains illegal characters. If $myVar has the value `'\\grafana-vm\Network(eth0)\Total','\\hello!'`, it expands to: `@'\\grafana-vm\Network(eth0)\Total', @'\\hello!'`. If using single value variables there no need for this macro, simply escape the variable inline instead - `@'\$myVar'` +- `$__escapeMulti($myVar)` - is to be used with multi-value template variables that contain illegal characters. 
If `$myVar` has the following two values as a string `'\\grafana-vm\Network(eth0)\Total','\\hello!'`, then it expands to: `@'\\grafana-vm\Network(eth0)\Total', @'\\hello!'`. If using single value variables there is no need for this macro, simply escape the variable inline instead - `@'\$myVar'`. -- `$__contains(colName, $myVar)` - is to be used with multi-value template variables. If $myVar has the value `'value1','value2'`, it expands to: `colName in ('value1','value2')`. +- `$__contains(colName, $myVar)` - is to be used with multi-value template variables. If `$myVar` has the value `'value1','value2'`, it expands to: `colName in ('value1','value2')`. - If using the `All` option, then check the `Include All Option` checkbox and in the `Custom all value` field type in the following value: `all`. If $myVar has value `all` then the macro will instead expand to `1 == 1`. For template variables with a lot of options, this will increase the query performance by not building a large where..in clause. + If using the `All` option, then check the `Include All Option` checkbox and in the `Custom all value` field type in the following value: `all`. If `$myVar` has value `all` then the macro will instead expand to `1 == 1`. For template variables with a lot of options, this will increase the query performance by not building a large where..in clause. ### Azure Log Analytics Builtin Variables There are also some Grafana variables that can be used in Azure Log Analytics queries: -- `$__from` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T18:09:58.907Z)`. -- `$__to` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T20:09:58.907Z)`. - `$__interval` - Grafana calculates the minimum time grain that can be used to group by time in queries. More details on how it works [here]({{< relref "reference/templating.md#interval-variables" >}}). It returns a time grain like `5m` or `1h` that can be used in the bin function. 
E.g. `summarize count() by bin(TimeGenerated, $__interval)` ### Azure Log Analytics Alerting diff --git a/docs/sources/guides/basic_concepts.md b/docs/sources/guides/basic_concepts.md index d3f8dd0ba633..3c610e21ce23 100644 --- a/docs/sources/guides/basic_concepts.md +++ b/docs/sources/guides/basic_concepts.md @@ -66,7 +66,7 @@ There are a wide variety of styling and formatting options that each Panel expos Panels can be dragged and dropped and rearranged on the Dashboard. They can also be resized. -There are currently four Panel types: [Graph](/reference/graph/), [Singlestat](/reference/singlestat/), [Dashlist](/reference/dashlist/), [Table](/reference/table_panel/),and [Text](/reference/text/). +There are currently five Panel types: [Graph](/reference/graph/), [Singlestat](/reference/singlestat/), [Dashlist](/reference/dashlist/), [Table](/reference/table_panel/), and [Text](/reference/text/). Panels like the [Graph](/reference/graph/) panel allow you to graph as many metrics and series as you want. Other panels like [Singlestat](/reference/singlestat/) require a reduction of a single query into a single number. [Dashlist](/reference/dashlist/) and [Text](/reference/text/) are special panels that do not connect to any Data Source. diff --git a/docs/sources/guides/whats-new-in-v6-2.md b/docs/sources/guides/whats-new-in-v6-2.md index 00337c8ef2a2..8ee2e7a66c70 100644 --- a/docs/sources/guides/whats-new-in-v6-2.md +++ b/docs/sources/guides/whats-new-in-v6-2.md @@ -12,19 +12,21 @@ weight = -13 # What's New in Grafana v6.2 -> More content will be added to this guide before the stable release. - -Grafana v6.2 Beta is now [available for download](https://grafana.com/grafana/download/beta). - For all details please read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) If you use a password for your datasources please read the [upgrade notes](/installation/upgrading/#upgrading-to-v6-2). 
+Check out the [demo dashboard](https://play.grafana.org/d/ZvPm55mWk/new-features-in-v6-2?orgId=1) of some of the new features in v6.2. + ## Improved security -- Ensure encryption of datasource secrets -- Embedding Grafana not allowed per default -- Disable browser caching for full page requests +Datasources now store passwords and basic auth passwords in `secureJsonData` encrypted by default. Existing datasource with unencrypted passwords will keep working. +Read the [upgrade notes](/installation/upgrading/#upgrading-to-v6-2) on how to migrate existing datasources to use encrypted storage. + +To mitigate the risk of [Clickjacking](https://www.owasp.org/index.php/Clickjacking), embedding Grafana is no longer allowed per default. +Read the [upgrade notes](/installation/upgrading/#upgrading-to-v6-2) for further details of how this may affect you. + +To mitigate the risk of sensitive information being cached in browser after a user has logged out, browser caching is now disabled for full page requests. ## Provisioning @@ -79,10 +81,11 @@ This release contains a lot of small features and fixes: - Explore - Adds user time zone support, reconnect for failing datasources and a fix that prevents killing Prometheus instances when Histogram metrics are loaded. - Alerting - Adds support for configuring timeout durations and retries, see [configuration](/installation/configuration/#evaluation-timeout-seconds) for more information. +- Azure Monitor - Adds support for multiple subscriptions per datasource. - Elasticsearch - A small bug fix to properly display percentiles metrics in table panel. - InfluxDB - Support for POST HTTP verb. - CloudWatch - Important fix for default alias disappearing in v6.1. -- Search - Works in a scope of dashboard's folder by default when viewing dashboard +- Search - Works in a scope of dashboard's folder by default when viewing dashboard. 
Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list of new features, changes, and bug fixes. diff --git a/docs/sources/http_api/alerting_notification_channels.md b/docs/sources/http_api/alerting_notification_channels.md index b8db1595aaa1..aa6e7297ac2d 100644 --- a/docs/sources/http_api/alerting_notification_channels.md +++ b/docs/sources/http_api/alerting_notification_channels.md @@ -54,7 +54,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -93,7 +93,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -130,7 +130,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -158,7 +158,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "isDefault": false, "sendReminder": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -177,7 +177,7 @@ Content-Type: application/json "isDefault": false, "sendReminder": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -206,7 +206,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "sendReminder": true, "frequency": "15m", "settings": { - "addresses": 
"carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -226,7 +226,7 @@ Content-Type: application/json "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" @@ -256,7 +256,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -276,7 +276,7 @@ Content-Type: application/json "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" @@ -353,7 +353,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "type": "email", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` diff --git a/docs/sources/http_api/data_source.md b/docs/sources/http_api/data_source.md index 364b55b0cfcb..4ddeb5a520ed 100644 --- a/docs/sources/http_api/data_source.md +++ b/docs/sources/http_api/data_source.md @@ -51,7 +51,7 @@ Content-Type: application/json ] ``` -## Get a single data sources by Id +## Get a single data source by Id `GET /api/datasources/:datasourceId` @@ -149,7 +149,7 @@ Content-Type: application/json } ``` -## Create data source +## Create a data source `POST /api/datasources` diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index a865234ebeba..af0261032d4b 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -154,6 +154,15 @@ callback URL to be correct). > in front of Grafana that exposes it through a subpath. In that > case add the subpath to the end of this URL setting. 
+### serve_from_sub_path + +Serve Grafana from subpath specified in `root_url` setting. By +default it is set to `false` for compatibility reasons. + +By enabling this setting and using a subpath in `root_url` above, e.g. +`root_url = http://localhost:3000/grafana`, Grafana will be accessible on +`http://localhost:3000/grafana`. + ### static_root_path The path to the directory where the front end files (HTML, JS, and CSS diff --git a/docs/versions.json b/docs/versions.json index a0857a7f3afa..e4960ff98ebe 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,5 +1,6 @@ [ - { "version": "v6.1", "path": "/", "archived": false, "current": true }, + { "version": "v6.2", "path": "/", "archived": false, "current": true }, + { "version": "v6.1", "path": "/v6.1", "archived": true }, { "version": "v6.0", "path": "/v6.0", "archived": true }, { "version": "v5.4", "path": "/v5.4", "archived": true }, { "version": "v5.3", "path": "/v5.3", "archived": true }, diff --git a/go.mod b/go.mod index 106d8b726617..1730235d6173 100644 --- a/go.mod +++ b/go.mod @@ -11,7 +11,6 @@ require ( github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd // indirect github.com/codegangsta/cli v1.20.0 - github.com/davecgh/go-spew v1.1.1 github.com/denisenkom/go-mssqldb v0.0.0-20190315220205-a8ed825ac853 github.com/facebookgo/ensure v0.0.0-20160127193407-b4ab57deab51 // indirect github.com/facebookgo/inject v0.0.0-20180706035515-f23751cae28b diff --git a/latest.json b/latest.json index ff5fac8e6d55..97724801882c 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "6.1.6", - "testing": "6.2.0-beta1" + "stable": "6.2.1", + "testing": "6.2.1" } diff --git a/package.json b/package.json index c7cc8a83644e..b4e4ee4f63ee 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "@types/react-window": "1.7.0", "angular-mocks": "1.6.6", 
"autoprefixer": "9.5.0", - "axios": "0.18.0", + "axios": "0.19.0", "babel-core": "7.0.0-bridge.0", "babel-jest": "24.8.0", "babel-loader": "8.0.5", @@ -131,7 +131,7 @@ "tslint-react": "3.6.0", "typescript": "3.4.1", "webpack": "4.29.6", - "webpack-bundle-analyzer": "3.1.0", + "webpack-bundle-analyzer": "3.3.2", "webpack-cleanup-plugin": "0.5.1", "webpack-cli": "3.2.3", "webpack-dev-server": "3.2.1", @@ -140,9 +140,9 @@ }, "scripts": { "dev": "webpack --progress --colors --mode development --config scripts/webpack/webpack.dev.js", - "start": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --watchTheme", - "start:hot": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot --watchTheme", - "start:ignoreTheme": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot", + "start": "npm run cli -- core:start --watchTheme", + "start:hot": "npm run cli -- core:start --hot --watchTheme", + "start:ignoreTheme": "npm run cli -- core:start --hot", "watch": "yarn start -d watch,start core:start --watchTheme ", "build": "grunt build", "test": "grunt test", @@ -155,13 +155,14 @@ "storybook:build": "cd packages/grafana-ui && yarn storybook:build", "themes:generate": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/generateSassVariableFiles.ts", "prettier:check": "prettier --list-different \"**/*.{ts,tsx,scss}\"", + "prettier:write": "prettier --list-different \"**/*.{ts,tsx,scss}\" --write", + "cli": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts", "gui:tslint": "tslint -c ./packages/grafana-ui/tslint.json --project ./packages/grafana-ui/tsconfig.json", - "gui:build": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:build", - "gui:releasePrepare": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release", + "gui:build": "npm run cli -- gui:build", + "gui:releasePrepare": "npm run cli -- gui:release", 
"gui:publish": "cd packages/grafana-ui/dist && npm publish --access public", - "gui:release": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release -p --createVersionCommit", - "precommit": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts precommit", - "cli": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts" + "gui:release": "npm run cli -- gui:release -p --createVersionCommit", + "precommit": "npm run cli -- precommit" }, "husky": { "hooks": { @@ -205,7 +206,7 @@ "fast-text-encoding": "^1.0.0", "file-saver": "1.3.8", "immutable": "3.8.2", - "jquery": "3.4.0", + "jquery": "3.4.1", "lodash": "4.17.11", "moment": "2.24.0", "mousetrap": "1.6.3", @@ -224,10 +225,12 @@ "react-sizeme": "2.5.2", "react-table": "6.9.2", "react-transition-group": "2.6.1", + "react-use": "9.0.0", "react-virtualized": "9.21.0", "react-window": "1.7.1", "redux": "4.0.1", "redux-logger": "3.0.6", + "redux-observable": "1.1.0", "redux-thunk": "2.3.0", "remarkable": "1.7.1", "reselect": "4.0.0", diff --git a/packages/grafana-runtime/README.md b/packages/grafana-runtime/README.md new file mode 100644 index 000000000000..f01cd35537c7 --- /dev/null +++ b/packages/grafana-runtime/README.md @@ -0,0 +1,3 @@ +# Grafana Runtime library + +Interfaces that let you use the runtime... 
\ No newline at end of file diff --git a/packages/grafana-runtime/index.js b/packages/grafana-runtime/index.js new file mode 100644 index 000000000000..d1a4363350e9 --- /dev/null +++ b/packages/grafana-runtime/index.js @@ -0,0 +1,7 @@ +'use strict' + +if (process.env.NODE_ENV === 'production') { + module.exports = require('./index.production.js'); +} else { + module.exports = require('./index.development.js'); +} diff --git a/packages/grafana-runtime/package.json b/packages/grafana-runtime/package.json new file mode 100644 index 000000000000..ed390d63b6af --- /dev/null +++ b/packages/grafana-runtime/package.json @@ -0,0 +1,37 @@ +{ + "name": "@grafana/runtime", + "version": "6.0.1-alpha.0", + "description": "Grafana Runtime Library", + "keywords": [ + "typescript", + "react", + "react-component" + ], + "main": "src/index.ts", + "scripts": { + "tslint": "tslint -c tslint.json --project tsconfig.json", + "typecheck": "tsc --noEmit", + "clean": "rimraf ./dist ./compiled", + "build": "rollup -c rollup.config.ts" + }, + "author": "Grafana Labs", + "license": "Apache-2.0", + "dependencies": { + }, + "devDependencies": { + "awesome-typescript-loader": "^5.2.1", + "lodash": "^4.17.10", + "pretty-format": "^24.5.0", + "rollup": "1.6.0", + "rollup-plugin-commonjs": "9.2.1", + "rollup-plugin-node-resolve": "4.0.1", + "rollup-plugin-sourcemaps": "0.4.2", + "rollup-plugin-terser": "4.0.4", + "rollup-plugin-typescript2": "0.19.3", + "rollup-plugin-visualizer": "0.9.2", + "typescript": "3.4.1" + }, + "resolutions": { + "@types/lodash": "4.14.119" + } +} diff --git a/packages/grafana-runtime/rollup.config.ts b/packages/grafana-runtime/rollup.config.ts new file mode 100644 index 000000000000..a2d6da109d96 --- /dev/null +++ b/packages/grafana-runtime/rollup.config.ts @@ -0,0 +1,50 @@ +import resolve from 'rollup-plugin-node-resolve'; +import commonjs from 'rollup-plugin-commonjs'; +import sourceMaps from 'rollup-plugin-sourcemaps'; +import { terser } from 'rollup-plugin-terser'; + 
+const pkg = require('./package.json'); + +const libraryName = pkg.name; + +const buildCjsPackage = ({ env }) => { + return { + input: `compiled/index.js`, + output: [ + { + file: `dist/index.${env}.js`, + name: libraryName, + format: 'cjs', + sourcemap: true, + exports: 'named', + globals: {}, + }, + ], + external: ['lodash'], // Use Lodash from grafana + plugins: [ + commonjs({ + include: /node_modules/, + namedExports: { + '../../node_modules/lodash/lodash.js': [ + 'flatten', + 'find', + 'upperFirst', + 'debounce', + 'isNil', + 'isNumber', + 'flattenDeep', + 'map', + 'chunk', + 'sortBy', + 'uniqueId', + 'zip', + ], + }, + }), + resolve(), + sourceMaps(), + env === 'production' && terser(), + ], + }; +}; +export default [buildCjsPackage({ env: 'development' }), buildCjsPackage({ env: 'production' })]; diff --git a/packages/grafana-runtime/src/index.ts b/packages/grafana-runtime/src/index.ts new file mode 100644 index 000000000000..e371345e62d8 --- /dev/null +++ b/packages/grafana-runtime/src/index.ts @@ -0,0 +1 @@ +export * from './services'; diff --git a/packages/grafana-runtime/src/services/AngularLoader.ts b/packages/grafana-runtime/src/services/AngularLoader.ts new file mode 100644 index 000000000000..9565a6d41f43 --- /dev/null +++ b/packages/grafana-runtime/src/services/AngularLoader.ts @@ -0,0 +1,19 @@ +export interface AngularComponent { + destroy(): void; + digest(): void; + getScope(): any; +} + +export interface AngularLoader { + load(elem: any, scopeProps: any, template: string): AngularComponent; +} + +let instance: AngularLoader; + +export function setAngularLoader(v: AngularLoader) { + instance = v; +} + +export function getAngularLoader(): AngularLoader { + return instance; +} diff --git a/packages/grafana-runtime/src/services/backendSrv.ts b/packages/grafana-runtime/src/services/backendSrv.ts new file mode 100644 index 000000000000..a30296eca8cc --- /dev/null +++ b/packages/grafana-runtime/src/services/backendSrv.ts @@ -0,0 +1,42 @@ +/** + * 
Currently implemented with: + * https://docs.angularjs.org/api/ng/service/$http#usage + * but that will likely change in the future + */ +export type BackendSrvRequest = { + url: string; + retry?: number; + headers?: any; + method?: string; + + // Show a message with the result + showSuccessAlert?: boolean; + + [key: string]: any; +}; + +export interface BackendSrv { + get(url: string, params?: any): Promise; + + delete(url: string): Promise; + + post(url: string, data: any): Promise; + + patch(url: string, data: any): Promise; + + put(url: string, data: any): Promise; + + // If there is an error, set: err.isHandled = true + // otherwise the backend will show a message for you + request(options: BackendSrvRequest): Promise; +} + +let singletonInstance: BackendSrv; + +export function setBackendSrv(instance: BackendSrv) { + singletonInstance = instance; +} + +export function getBackendSrv(): BackendSrv { + return singletonInstance; +} diff --git a/packages/grafana-runtime/src/services/dataSourceSrv.ts b/packages/grafana-runtime/src/services/dataSourceSrv.ts new file mode 100644 index 000000000000..1f3bbbb8436b --- /dev/null +++ b/packages/grafana-runtime/src/services/dataSourceSrv.ts @@ -0,0 +1,15 @@ +import { ScopedVars, DataSourceApi } from '@grafana/ui'; + +export interface DataSourceSrv { + get(name?: string, scopedVars?: ScopedVars): Promise; +} + +let singletonInstance: DataSourceSrv; + +export function setDataSourceSrv(instance: DataSourceSrv) { + singletonInstance = instance; +} + +export function getDataSourceSrv(): DataSourceSrv { + return singletonInstance; +} diff --git a/packages/grafana-runtime/src/services/index.ts b/packages/grafana-runtime/src/services/index.ts new file mode 100644 index 000000000000..08517c0650b5 --- /dev/null +++ b/packages/grafana-runtime/src/services/index.ts @@ -0,0 +1,3 @@ +export * from './backendSrv'; +export * from './AngularLoader'; +export * from './dataSourceSrv'; diff --git a/packages/grafana-runtime/tsconfig.build.json 
b/packages/grafana-runtime/tsconfig.build.json new file mode 100644 index 000000000000..34e37b5d0b84 --- /dev/null +++ b/packages/grafana-runtime/tsconfig.build.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": ["dist", "node_modules", "**/*.test.ts", "**/*.test.tsx"] +} diff --git a/packages/grafana-runtime/tsconfig.json b/packages/grafana-runtime/tsconfig.json new file mode 100644 index 000000000000..dcc4fd974360 --- /dev/null +++ b/packages/grafana-runtime/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "../../tsconfig.json", + "include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"], + "exclude": ["dist", "node_modules"], + "compilerOptions": { + "rootDirs": ["."], + "module": "esnext", + "outDir": "compiled", + "declaration": true, + "declarationDir": "dist", + "strict": true, + "alwaysStrict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "typeRoots": ["./node_modules/@types", "types"], + "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors, + "removeComments": false + } +} diff --git a/packages/grafana-runtime/tslint.json b/packages/grafana-runtime/tslint.json new file mode 100644 index 000000000000..f51293736244 --- /dev/null +++ b/packages/grafana-runtime/tslint.json @@ -0,0 +1,6 @@ +{ + "extends": "../../tslint.json", + "rules": { + "import-blacklist": [true, ["^@grafana/runtime.*"]] + } +} diff --git a/packages/grafana-ui/package.json b/packages/grafana-ui/package.json index a62de9ca003e..1e672eb46ebf 100644 --- a/packages/grafana-ui/package.json +++ b/packages/grafana-ui/package.json @@ -1,6 +1,6 @@ { "name": "@grafana/ui", - "version": "6.2.0-alpha.0", + "version": "6.3.0-alpha.0", "description": "Grafana Components Library", "keywords": [ "typescript", @@ -23,7 +23,7 @@ "@types/react-color": "2.17.0", "classnames": "2.2.6", "d3": "5.9.1", - "jquery": "3.4.0", + "jquery": "3.4.1", "lodash": "4.17.11", "moment": "2.24.0", "papaparse": "4.6.3", diff --git 
a/packages/grafana-ui/src/components/BarGauge/BarGauge.test.tsx b/packages/grafana-ui/src/components/BarGauge/BarGauge.test.tsx index 620222315fa8..0dea5e2c9b52 100644 --- a/packages/grafana-ui/src/components/BarGauge/BarGauge.test.tsx +++ b/packages/grafana-ui/src/components/BarGauge/BarGauge.test.tsx @@ -1,6 +1,14 @@ import React from 'react'; import { shallow } from 'enzyme'; -import { BarGauge, Props, getValueColor, getBasicAndGradientStyles, getBarGradient, getTitleStyles } from './BarGauge'; +import { + BarGauge, + Props, + getValueColor, + getBasicAndGradientStyles, + getBarGradient, + getTitleStyles, + getValuePercent, +} from './BarGauge'; import { VizOrientation, DisplayValue } from '../../types'; import { getTheme } from '../../themes'; @@ -63,6 +71,24 @@ describe('BarGauge', () => { }); }); + describe('Get value percent', () => { + it('0 to 100 and value 40', () => { + expect(getValuePercent(40, 0, 100)).toEqual(0.4); + }); + + it('50 to 100 and value 75', () => { + expect(getValuePercent(75, 50, 100)).toEqual(0.5); + }); + + it('-30 to 30 and value 0', () => { + expect(getValuePercent(0, -30, 30)).toEqual(0.5); + }); + + it('-30 to 30 and value 30', () => { + expect(getValuePercent(30, -30, 30)).toEqual(1); + }); + }); + describe('Vertical bar without title', () => { it('should not include title height in height', () => { const props = getProps({ diff --git a/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx b/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx index 9c5bf6563820..cb08d1b15aba 100644 --- a/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx +++ b/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx @@ -11,8 +11,9 @@ import { DisplayValue, Themeable, TimeSeriesValue, Threshold, VizOrientation } f const MIN_VALUE_HEIGHT = 18; const MAX_VALUE_HEIGHT = 50; const MIN_VALUE_WIDTH = 50; -const MAX_VALUE_WIDTH = 100; -const LINE_HEIGHT = 1.5; +const MAX_VALUE_WIDTH = 150; +const TITLE_LINE_HEIGHT = 1.5; +const 
VALUE_LINE_HEIGHT = 1; export interface Props extends Themeable { height: number; @@ -161,7 +162,7 @@ export class BarGauge extends PureComponent { const cells: JSX.Element[] = []; for (let i = 0; i < cellCount; i++) { - const currentValue = (valueRange / cellCount) * i; + const currentValue = minValue + (valueRange / cellCount) * i; const cellColor = this.getCellColor(currentValue); const cellStyles: CSSProperties = { borderRadius: '2px', @@ -227,7 +228,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { return { fontSize: 14, width: width, - height: 14 * LINE_HEIGHT, + height: 14 * TITLE_LINE_HEIGHT, placement: 'below', }; } @@ -238,7 +239,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { const titleHeight = Math.max(Math.min(height * maxTitleHeightRatio, MAX_VALUE_HEIGHT), 17); return { - fontSize: titleHeight / LINE_HEIGHT, + fontSize: titleHeight / TITLE_LINE_HEIGHT, width: 0, height: titleHeight, placement: 'above', @@ -251,7 +252,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { const titleHeight = Math.max(height * maxTitleHeightRatio, MIN_VALUE_HEIGHT); return { - fontSize: titleHeight / LINE_HEIGHT, + fontSize: titleHeight / TITLE_LINE_HEIGHT, height: 0, width: Math.min(Math.max(width * maxTitleWidthRatio, 50), 200), placement: 'left', @@ -345,11 +346,6 @@ function calculateBarAndValueDimensions(props: Props): BarAndValueDimensions { } } - // console.log('titleDim', titleDim); - // console.log('valueWidth', valueWidth); - // console.log('width', width); - // console.log('total', titleDim.width + maxBarWidth + valueWidth); - return { valueWidth, valueHeight, @@ -360,6 +356,10 @@ function calculateBarAndValueDimensions(props: Props): BarAndValueDimensions { }; } +export function getValuePercent(value: number, minValue: number, maxValue: number): number { + return Math.min((value - minValue) / (maxValue - minValue), 1); +} + /** * Only exported to for unit test */ @@ -367,7 +367,7 @@ export 
function getBasicAndGradientStyles(props: Props): BasicAndGradientStyles const { displayMode, maxValue, minValue, value } = props; const { valueWidth, valueHeight, maxBarHeight, maxBarWidth } = calculateBarAndValueDimensions(props); - const valuePercent = Math.min(value.numeric / (maxValue - minValue), 1); + const valuePercent = getValuePercent(value.numeric, minValue, maxValue); const valueColor = getValueColor(props); const valueStyles = getValueStyles(value.text, valueColor, valueWidth, valueHeight); const isBasic = displayMode === 'basic'; @@ -450,7 +450,7 @@ export function getBarGradient(props: Props, maxSize: number): string { for (let i = 0; i < thresholds.length; i++) { const threshold = thresholds[i]; const color = getColorFromHexRgbOrName(threshold.color); - const valuePercent = Math.min(threshold.value / (maxValue - minValue), 1); + const valuePercent = getValuePercent(threshold.value, minValue, maxValue); const pos = valuePercent * maxSize; const offset = Math.round(pos - (pos - lastpos) / 2); @@ -486,7 +486,7 @@ export function getValueColor(props: Props): string { * Only exported to for unit test */ function getValueStyles(value: string, color: string, width: number, height: number): CSSProperties { - const heightFont = height / LINE_HEIGHT; + const heightFont = height / VALUE_LINE_HEIGHT; const guess = width / (value.length * 1.1); const fontSize = Math.min(Math.max(guess, 14), heightFont); @@ -496,33 +496,15 @@ function getValueStyles(value: string, color: string, width: number, height: num width: `${width}px`, display: 'flex', alignItems: 'center', - fontSize: fontSize.toFixed(2) + 'px', + lineHeight: VALUE_LINE_HEIGHT, + fontSize: fontSize.toFixed(4) + 'px', }; } -// let canvasElement: HTMLCanvasElement | null = null; -// -// interface TextDimensions { -// width: number; -// height: number; -// } -// -// /** -// * Uses canvas.measureText to compute and return the width of the given text of given font in pixels. 
-// * -// * @param {String} text The text to be rendered. -// * @param {String} font The css font descriptor that text is to be rendered with (e.g. "bold 14px verdana"). -// * -// * @see https://stackoverflow.com/questions/118241/calculate-text-width-with-javascript/21015393#21015393 -// */ // function getTextWidth(text: string): number { -// // re-use canvas object for better performance -// canvasElement = canvasElement || document.createElement('canvas'); -// const context = canvasElement.getContext('2d'); -// if (context) { -// context.font = 'normal 16px Roboto'; -// const metrics = context.measureText(text); -// return metrics.width; -// } -// return 16; +// const canvas = getTextWidth.canvas || (getTextWidth.canvas = document.createElement("canvas")); +// var context = canvas.getContext("2d"); +// context.font = "'Roboto', 'Helvetica Neue', Arial, sans-serif"; +// var metrics = context.measureText(text); +// return metrics.width; // } diff --git a/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap b/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap index 4bb9395dd96e..1d341a9b0d4c 100644 --- a/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap +++ b/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap @@ -18,8 +18,9 @@ exports[`BarGauge Render with basic options should render 1`] = ` "alignItems": "center", "color": "#73BF69", "display": "flex", - "fontSize": "27.27px", + "fontSize": "27.2727px", "height": "300px", + "lineHeight": 1, "paddingLeft": "10px", "width": "60px", } diff --git a/packages/grafana-ui/src/components/Button/AbstractButton.tsx b/packages/grafana-ui/src/components/Button/AbstractButton.tsx index 38f225273adb..ee59272794e3 100644 --- a/packages/grafana-ui/src/components/Button/AbstractButton.tsx +++ b/packages/grafana-ui/src/components/Button/AbstractButton.tsx @@ -75,6 +75,12 @@ const getButtonStyles = (theme: 
GrafanaTheme, size: ButtonSize, variant: ButtonV iconDistance = theme.spacing.xs; height = theme.height.sm; break; + case ButtonSize.Medium: + padding = `${theme.spacing.sm} ${theme.spacing.md}`; + fontSize = theme.typography.size.md; + iconDistance = theme.spacing.sm; + height = theme.height.md; + break; case ButtonSize.Large: padding = `${theme.spacing.md} ${theme.spacing.lg}`; fontSize = theme.typography.size.lg; diff --git a/packages/grafana-ui/src/components/Gauge/Gauge.tsx b/packages/grafana-ui/src/components/Gauge/Gauge.tsx index eb49891d298e..0a0495c4848d 100644 --- a/packages/grafana-ui/src/components/Gauge/Gauge.tsx +++ b/packages/grafana-ui/src/components/Gauge/Gauge.tsx @@ -58,7 +58,7 @@ export class Gauge extends PureComponent { if (length > 12) { return FONT_SCALE - (length * 5) / 110; } - return FONT_SCALE - (length * 5) / 100; + return FONT_SCALE - (length * 5) / 101; } draw() { @@ -78,7 +78,8 @@ export class Gauge extends PureComponent { const gaugeWidthReduceRatio = showThresholdLabels ? 1.5 : 1; const gaugeWidth = Math.min(dimension / 5.5, 40) / gaugeWidthReduceRatio; const thresholdMarkersWidth = gaugeWidth / 5; - const fontSize = Math.min(dimension / 5.5, 100) * (value.text !== null ? this.getFontScale(value.text.length) : 1); + const fontSize = Math.min(dimension / 4, 100) * (value.text !== null ? 
this.getFontScale(value.text.length) : 1); + const thresholdLabelFontSize = fontSize / 2.5; const options: any = { diff --git a/packages/grafana-ui/src/components/RefreshPicker/RefreshPicker.tsx b/packages/grafana-ui/src/components/RefreshPicker/RefreshPicker.tsx index 2046a5a50cee..60a8973ca095 100644 --- a/packages/grafana-ui/src/components/RefreshPicker/RefreshPicker.tsx +++ b/packages/grafana-ui/src/components/RefreshPicker/RefreshPicker.tsx @@ -5,7 +5,9 @@ import { Tooltip } from '../Tooltip/Tooltip'; import { ButtonSelect } from '../Select/ButtonSelect'; export const offOption = { label: 'Off', value: '' }; +export const liveOption = { label: 'Live', value: 'LIVE' }; export const defaultIntervals = ['5s', '10s', '30s', '1m', '5m', '15m', '30m', '1h', '2h', '1d']; +export const isLive = (refreshInterval: string): boolean => refreshInterval === liveOption.value; export interface Props { intervals?: string[]; @@ -13,6 +15,7 @@ export interface Props { onIntervalChanged: (interval: string) => void; value?: string; tooltip: string; + hasLiveOption?: boolean; } export class RefreshPicker extends PureComponent { @@ -36,6 +39,9 @@ export class RefreshPicker extends PureComponent { intervalsToOptions = (intervals: string[] = defaultIntervals): Array> => { const options = intervals.map(interval => ({ label: interval, value: interval })); + if (this.props.hasLiveOption) { + options.unshift(liveOption); + } options.unshift(offOption); return options; }; @@ -57,6 +63,7 @@ export class RefreshPicker extends PureComponent { const cssClasses = classNames({ 'refresh-picker': true, 'refresh-picker--off': selectedValue.label === offOption.label, + 'refresh-picker--live': selectedValue === liveOption, }); return ( @@ -68,7 +75,7 @@ export class RefreshPicker extends PureComponent { extends PureComponent> { isSearchable={false} options={options} onChange={this.onChange} - defaultValue={value} + value={value} maxMenuHeight={maxMenuHeight} components={combinedComponents} 
className="gf-form-select-box-button-select" diff --git a/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx b/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx index b44a49f9603e..026aa5600a15 100644 --- a/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx +++ b/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx @@ -1,8 +1,10 @@ import { PureComponent } from 'react'; -import { interval, Subscription, empty, Subject } from 'rxjs'; +import { interval, Subscription, Subject, of, NEVER } from 'rxjs'; import { tap, switchMap } from 'rxjs/operators'; +import _ from 'lodash'; import { stringToMs } from '../../utils/string'; +import { isLive } from '../RefreshPicker/RefreshPicker'; interface Props { func: () => any; // TODO @@ -24,7 +26,10 @@ export class SetInterval extends PureComponent { this.subscription = this.propsSubject .pipe( switchMap(props => { - return props.loading ? empty() : interval(stringToMs(props.interval)); + if (isLive(props.interval)) { + return of({}); + } + return props.loading ? 
NEVER : interval(stringToMs(props.interval)); }), tap(() => this.props.func()) ) @@ -32,7 +37,11 @@ export class SetInterval extends PureComponent { this.propsSubject.next(this.props); } - componentDidUpdate() { + componentDidUpdate(prevProps: Props) { + if ((isLive(prevProps.interval) && isLive(this.props.interval)) || _.isEqual(prevProps, this.props)) { + return; + } + this.propsSubject.next(this.props); } diff --git a/packages/grafana-ui/src/components/SingleStatShared/FieldPropertiesEditor.tsx b/packages/grafana-ui/src/components/SingleStatShared/FieldPropertiesEditor.tsx index 0c7858aef69d..8205599e9368 100644 --- a/packages/grafana-ui/src/components/SingleStatShared/FieldPropertiesEditor.tsx +++ b/packages/grafana-ui/src/components/SingleStatShared/FieldPropertiesEditor.tsx @@ -1,5 +1,5 @@ // Libraries -import React, { PureComponent, ChangeEvent } from 'react'; +import React, { ChangeEvent, useState, useCallback } from 'react'; // Components import { FormField } from '../FormField/FormField'; @@ -8,7 +8,7 @@ import { UnitPicker } from '../UnitPicker/UnitPicker'; // Types import { Field } from '../../types/data'; -import { toNumberString, toIntegerOrUndefined } from '../../utils'; +import { toIntegerOrUndefined } from '../../utils'; import { SelectOptionItem } from '../Select/Select'; import { VAR_SERIES_NAME, VAR_FIELD_NAME, VAR_CALC, VAR_CELL_PREFIX } from '../../utils/fieldDisplay'; @@ -21,92 +21,108 @@ export interface Props { onChange: (value: Partial, event?: React.SyntheticEvent) => void; } -export class FieldPropertiesEditor extends PureComponent { - onTitleChange = (event: ChangeEvent) => - this.props.onChange({ ...this.props.value, title: event.target.value }); +export const FieldPropertiesEditor: React.FC = ({ value, onChange, showMinMax }) => { + const { unit, title } = value; - // @ts-ignore - onUnitChange = (unit: SelectOptionItem) => this.props.onChange({ ...this.props.value, unit: unit.value }); + const [decimals, setDecimals] = useState( + 
value.decimals !== undefined && value.decimals !== null ? value.decimals.toString() : '' + ); + const [min, setMin] = useState(value.min !== undefined && value.min !== null ? value.min.toString() : ''); + const [max, setMax] = useState(value.max !== undefined && value.max !== null ? value.max.toString() : ''); - onDecimalChange = (event: ChangeEvent) => { - this.props.onChange({ - ...this.props.value, - decimals: toIntegerOrUndefined(event.target.value), - }); + const onTitleChange = (event: ChangeEvent) => { + onChange({ ...value, title: event.target.value }); }; - onMinChange = (event: ChangeEvent) => { - this.props.onChange({ - ...this.props.value, - min: toIntegerOrUndefined(event.target.value), - }); + const onDecimalChange = useCallback( + (event: ChangeEvent) => { + setDecimals(event.target.value); + }, + [value.decimals, onChange] + ); + + const onMinChange = useCallback( + (event: ChangeEvent) => { + setMin(event.target.value); + }, + [value.min, onChange] + ); + + const onMaxChange = useCallback( + (event: ChangeEvent) => { + setMax(event.target.value); + }, + [value.max, onChange] + ); + + const onUnitChange = (unit: SelectOptionItem) => { + onChange({ ...value, unit: unit.value }); }; - onMaxChange = (event: ChangeEvent) => { - this.props.onChange({ - ...this.props.value, - max: toIntegerOrUndefined(event.target.value), + const commitChanges = useCallback(() => { + onChange({ + ...value, + decimals: toIntegerOrUndefined(decimals), + min: toIntegerOrUndefined(min), + max: toIntegerOrUndefined(max), }); - }; + }, [min, max, decimals]); - render() { - const { showMinMax } = this.props; - const { unit, decimals, min, max } = this.props.value; + const titleTooltip = ( +
+ Template Variables: +
+ {'$' + VAR_SERIES_NAME} +
+ {'$' + VAR_FIELD_NAME} +
+ {'$' + VAR_CELL_PREFIX + '{N}'} / {'$' + VAR_CALC} +
+ ); + return ( + <> + - const titleTooltip = ( -
- Template Variables: -
- {'$' + VAR_SERIES_NAME} -
- {'$' + VAR_FIELD_NAME} -
- {'$' + VAR_CELL_PREFIX + '{N}'} / {'$' + VAR_CALC} +
+ Unit +
- ); - - return ( - <> - - -
- Unit - -
- {showMinMax && ( - <> - - - - )} - - - ); - } -} + {showMinMax && ( + <> + + + + )} + + + ); +}; diff --git a/packages/grafana-ui/src/components/Table/Table.test.tsx b/packages/grafana-ui/src/components/Table/Table.test.tsx new file mode 100644 index 000000000000..43947c9455f8 --- /dev/null +++ b/packages/grafana-ui/src/components/Table/Table.test.tsx @@ -0,0 +1,48 @@ +import React from 'react'; + +import { readCSV } from '../../utils/csv'; +import { Table, Props } from './Table'; +import { getTheme } from '../../themes/index'; +import { GrafanaThemeType } from '../../types/theme'; +import renderer from 'react-test-renderer'; + +const series = readCSV('a,b,c\n1,2,3\n4,5,6')[0]; +const setup = (propOverrides?: object) => { + const props: Props = { + data: series, + + minColumnWidth: 100, + showHeader: true, + fixedHeader: true, + fixedColumns: 0, + rotate: false, + styles: [], + replaceVariables: (value: string) => value, + width: 600, + height: 800, + + theme: getTheme(GrafanaThemeType.Dark), + }; // partial + + Object.assign(props, propOverrides); + + const tree = renderer.create(); + const instance = (tree.getInstance() as unknown) as Table; + + return { + tree, + instance, + }; +}; + +describe('Table', () => { + it('ignore invalid properties', () => { + const { tree, instance } = setup(); + expect(tree.toJSON() + '').toEqual( + setup({ + id: 3, // Don't pass invalid parameters to MultiGrid + }).tree.toJSON() + '' + ); + expect(instance.measurer.has(0, 0)).toBeTruthy(); + }); +}); diff --git a/packages/grafana-ui/src/components/Table/Table.tsx b/packages/grafana-ui/src/components/Table/Table.tsx index 72b7d6392b93..71e96b486fd4 100644 --- a/packages/grafana-ui/src/components/Table/Table.tsx +++ b/packages/grafana-ui/src/components/Table/Table.tsx @@ -282,14 +282,16 @@ export class Table extends Component { this.scrollToTop = false; } + // Force MultiGrid to rerender if these options change + // See: 
https://github.com/bvaughn/react-virtualized#pass-thru-props + const refreshKeys = { + ...this.state, // Includes data and sort parameters + d1: this.props.data, + s0: this.props.styles, + }; return ( { const { thresholds, colors } = this.style; diff --git a/packages/grafana-ui/src/components/Table/_Table.scss b/packages/grafana-ui/src/components/Table/_Table.scss index d9fb2dafe6c7..bb82d021b4b5 100644 --- a/packages/grafana-ui/src/components/Table/_Table.scss +++ b/packages/grafana-ui/src/components/Table/_Table.scss @@ -70,6 +70,7 @@ text-overflow: ellipsis; white-space: nowrap; + overflow: hidden; border-right: 2px solid $body-bg; border-bottom: 2px solid $body-bg; diff --git a/packages/grafana-ui/src/types/app.ts b/packages/grafana-ui/src/types/app.ts index 01dd6e1a40a9..79d6fce50664 100644 --- a/packages/grafana-ui/src/types/app.ts +++ b/packages/grafana-ui/src/types/app.ts @@ -1,12 +1,12 @@ import { ComponentClass } from 'react'; import { NavModel } from './navModel'; -import { PluginMeta, PluginIncludeType, GrafanaPlugin } from './plugin'; +import { PluginMeta, PluginIncludeType, GrafanaPlugin, KeyValue } from './plugin'; -export interface AppRootProps { - meta: AppPluginMeta; +export interface AppRootProps { + meta: AppPluginMeta; path: string; // The URL path to this page - query: { [s: string]: any }; // The URL query parameters + query: KeyValue; // The URL query parameters /** * Pass the nav model to the container... is there a better way? @@ -14,23 +14,30 @@ export interface AppRootProps { onNavChanged: (nav: NavModel) => void; } -export interface AppPluginMeta extends PluginMeta { +export interface AppPluginMeta extends PluginMeta { // TODO anything specific to apps? 
} -export class AppPlugin extends GrafanaPlugin { +export class AppPlugin extends GrafanaPlugin> { // Content under: /a/${plugin-id}/* - root?: ComponentClass; + root?: ComponentClass>; rootNav?: NavModel; // Initial navigation model // Old style pages angularPages?: { [component: string]: any }; + /** + * Called after the module has loaded, and before the app is used. + * This function may be called multiple times on the same instance. + * The first time, `this.meta` will be undefined + */ + init(meta: AppPluginMeta) {} + /** * Set the component displayed under: * /a/${plugin-id}/* */ - setRootPage(root: ComponentClass, rootNav?: NavModel) { + setRootPage(root: ComponentClass>, rootNav?: NavModel) { this.root = root; this.rootNav = rootNav; return this; diff --git a/packages/grafana-ui/src/types/datasource.ts b/packages/grafana-ui/src/types/datasource.ts index 2b40064d7d9d..a2629ec6f6de 100644 --- a/packages/grafana-ui/src/types/datasource.ts +++ b/packages/grafana-ui/src/types/datasource.ts @@ -3,6 +3,7 @@ import { TimeRange } from './time'; import { PluginMeta, GrafanaPlugin } from './plugin'; import { TableData, TimeSeries, SeriesData, LoadingState } from './data'; import { PanelData } from './panel'; +import { LogRowModel } from './logs'; // NOTE: this seems more general than just DataSource export interface DataSourcePluginOptionsEditorProps { @@ -74,9 +75,7 @@ export class DataSourcePlugin< export interface DataSourcePluginMeta extends PluginMeta { builtIn?: boolean; // Is this for all metrics?: boolean; - tables?: boolean; logs?: boolean; - explore?: boolean; annotations?: boolean; alerting?: boolean; mixed?: boolean; @@ -84,6 +83,14 @@ export interface DataSourcePluginMeta extends PluginMeta { category?: string; queryOptions?: PluginMetaQueryOptions; sort?: number; + streaming?: boolean; + + /** + * By default, hidden queries are not passed to the datasource + * Set this to true in plugin.json to have hidden queries passed to the + * DataSource query 
method + */ + hiddenQueries?: boolean; } interface PluginMetaQueryOptions { @@ -172,6 +179,14 @@ export abstract class DataSourceApi< */ getQueryDisplayText?(query: TQuery): string; + /** + * Retrieve context for a given log row + */ + getLogRowContext?: ( + row: LogRowModel, + options?: TContextQueryOptions + ) => Promise; + /** * Set after constructor call, as the data source instance is the most common thing to pass around * we attach the components to this instance for easy access diff --git a/packages/grafana-ui/src/types/logs.ts b/packages/grafana-ui/src/types/logs.ts index 63f264fff4f3..3d8cc0951b83 100644 --- a/packages/grafana-ui/src/types/logs.ts +++ b/packages/grafana-ui/src/types/logs.ts @@ -1,3 +1,5 @@ +import { Labels, TimeSeries } from './data'; + /** * Mapping of log level abbreviation to canonical log level. * Supported levels are reduce to limit color variation. @@ -19,3 +21,85 @@ export enum LogLevel { trace = 'trace', unknown = 'unknown', } + +export enum LogsMetaKind { + Number, + String, + LabelsMap, +} + +export interface LogsMetaItem { + label: string; + value: string | number | Labels; + kind: LogsMetaKind; +} + +export interface LogRowModel { + duplicates?: number; + entry: string; + hasAnsi: boolean; + labels: Labels; + logLevel: LogLevel; + raw: string; + searchWords?: string[]; + timestamp: string; // ISO with nanosec precision + timeFromNow: string; + timeEpochMs: number; + timeLocal: string; + uniqueLabels?: Labels; +} + +export interface LogsModel { + hasUniqueLabels: boolean; + meta?: LogsMetaItem[]; + rows: LogRowModel[]; + series?: TimeSeries[]; +} + +export interface LogSearchMatch { + start: number; + length: number; + text: string; +} + +export interface LogLabelStatsModel { + active?: boolean; + count: number; + proportion: number; + value: string; +} + +export enum LogsDedupStrategy { + none = 'none', + exact = 'exact', + numbers = 'numbers', + signature = 'signature', +} + +export interface LogsParser { + /** + * 
Value-agnostic matcher for a field label. + * Used to filter rows, and first capture group contains the value. + */ + buildMatcher: (label: string) => RegExp; + + /** + * Returns all parsable substrings from a line, used for highlighting + */ + getFields: (line: string) => string[]; + + /** + * Gets the label name from a parsable substring of a line + */ + getLabelFromField: (field: string) => string; + + /** + * Gets the label value from a parsable substring of a line + */ + getValueFromField: (field: string) => string; + /** + * Function to verify if this is a valid parser for the given line. + * The parser accepts the line unless it returns undefined. + */ + test: (line: string) => any; +} diff --git a/packages/grafana-ui/src/types/panel.ts b/packages/grafana-ui/src/types/panel.ts index a1756ff02fea..aedb5b30a877 100644 --- a/packages/grafana-ui/src/types/panel.ts +++ b/packages/grafana-ui/src/types/panel.ts @@ -31,6 +31,7 @@ export interface PanelProps { options: T; onOptionsChange: (options: T) => void; renderCounter: number; + transparent: boolean; width: number; height: number; replaceVariables: InterpolateFunction; diff --git a/packages/grafana-ui/src/types/plugin.ts b/packages/grafana-ui/src/types/plugin.ts index 72e9d2453fd5..a9885e00cf82 100644 --- a/packages/grafana-ui/src/types/plugin.ts +++ b/packages/grafana-ui/src/types/plugin.ts @@ -11,7 +11,9 @@ export enum PluginType { app = 'app', } -export interface PluginMeta { +export type KeyValue = { [s: string]: T }; + +export interface PluginMeta { id: string; name: string; type: PluginType; @@ -27,8 +29,8 @@ export interface PluginMeta { dependencies?: PluginDependencies; // Filled in by the backend - jsonData?: { [str: string]: any }; - secureJsonData?: { [str: string]: any }; + jsonData?: T; + secureJsonData?: KeyValue; enabled?: boolean; defaultNavUrl?: string; hasUpdate?: boolean; @@ -93,7 +95,7 @@ export interface PluginMetaInfo { export interface PluginConfigPageProps { plugin: T; - query: { [s: 
string]: any }; // The URL query parameters + query: KeyValue; // The URL query parameters } export interface PluginConfigPage { diff --git a/packages/grafana-ui/src/utils/logs.ts b/packages/grafana-ui/src/utils/logs.ts index fb8c7977e2ad..b5c45b635daf 100644 --- a/packages/grafana-ui/src/utils/logs.ts +++ b/packages/grafana-ui/src/utils/logs.ts @@ -23,6 +23,15 @@ export function getLogLevel(line: string): LogLevel { return LogLevel.unknown; } +export function getLogLevelFromKey(key: string): LogLevel { + const level = (LogLevel as any)[key]; + if (level) { + return level; + } + + return LogLevel.unknown; +} + export function addLogLevelToSeries(series: SeriesData, lineIndex: number): SeriesData { return { ...series, // Keeps Tags, RefID etc diff --git a/packages/grafana-ui/src/utils/moment_wrapper.ts b/packages/grafana-ui/src/utils/moment_wrapper.ts index 063c427372b6..755f92a899af 100644 --- a/packages/grafana-ui/src/utils/moment_wrapper.ts +++ b/packages/grafana-ui/src/utils/moment_wrapper.ts @@ -43,6 +43,9 @@ export interface DateTimeLocale { export interface DateTimeDuration { asHours: () => number; + hours: () => number; + minutes: () => number; + seconds: () => number; } export interface DateTime extends Object { diff --git a/packages/grafana-ui/src/utils/processSeriesData.test.ts b/packages/grafana-ui/src/utils/processSeriesData.test.ts index 96afa79aa8c0..ea582e89b3a8 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.test.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.test.ts @@ -6,7 +6,7 @@ import { guessFieldTypes, guessFieldTypeFromValue, } from './processSeriesData'; -import { FieldType, TimeSeries } from '../types/data'; +import { FieldType, TimeSeries, SeriesData, TableData } from '../types/data'; import { dateTime } from './moment_wrapper'; describe('toSeriesData', () => { @@ -99,4 +99,25 @@ describe('SerisData backwards compatibility', () => { expect(isTableData(roundtrip)).toBeTruthy(); expect(roundtrip).toMatchObject(table); 
}); + + it('converts SeriesData to TableData to series and back again', () => { + const series: SeriesData = { + refId: 'Z', + meta: { + somethign: 8, + }, + fields: [ + { name: 'T', type: FieldType.time }, // first + { name: 'N', type: FieldType.number, filterable: true }, + { name: 'S', type: FieldType.string, filterable: true }, + ], + rows: [[1, 100, '1'], [2, 200, '2'], [3, 300, '3']], + }; + const table = toLegacyResponseData(series) as TableData; + expect(table.meta).toBe(series.meta); + expect(table.refId).toBe(series.refId); + + const names = table.columns.map(c => c.text); + expect(names).toEqual(['T', 'N', 'S']); + }); }); diff --git a/packages/grafana-ui/src/utils/processSeriesData.ts b/packages/grafana-ui/src/utils/processSeriesData.ts index 1ba31e3bbdee..38e9abf91358 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.ts @@ -4,7 +4,7 @@ import isString from 'lodash/isString'; import isBoolean from 'lodash/isBoolean'; // Types -import { SeriesData, Field, TimeSeries, FieldType, TableData } from '../types/index'; +import { SeriesData, Field, TimeSeries, FieldType, TableData, Column } from '../types/index'; import { isDateTime } from './moment_wrapper'; function convertTableToSeriesData(table: TableData): SeriesData { @@ -160,6 +160,7 @@ export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData const type = guessFieldTypeFromSeries(series, 1); if (type === FieldType.time) { return { + alias: fields[0].name || series.name, target: fields[0].name || series.name, datapoints: rows, unit: fields[0].unit, @@ -171,14 +172,12 @@ export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData return { columns: fields.map(f => { - return { - text: f.name, - filterable: f.filterable, - unit: f.unit, - refId: series.refId, - meta: series.meta, - }; + const { name, ...column } = f; + (column as Column).text = name; + return column as Column; }), + refId: 
series.refId, + meta: series.meta, rows, }; }; diff --git a/packages/grafana-ui/tslint.json b/packages/grafana-ui/tslint.json index 937aa29800e5..1033e1962fc7 100644 --- a/packages/grafana-ui/tslint.json +++ b/packages/grafana-ui/tslint.json @@ -1,6 +1,6 @@ { "extends": "../../tslint.json", "rules": { - "import-blacklist": [true, "moment", ["^@grafana/ui.*"]] + "import-blacklist": [true, "moment", ["^@grafana/ui.*"], ["^@grafana/runtime.*"]] } } diff --git a/pkg/api/admin_ldap.go b/pkg/api/admin_ldap.go index 6e1b40bfa3e8..8aca63c5394f 100644 --- a/pkg/api/admin_ldap.go +++ b/pkg/api/admin_ldap.go @@ -4,7 +4,7 @@ import ( "github.com/grafana/grafana/pkg/services/ldap" ) -func (server *HTTPServer) ReloadLdapCfg() Response { +func (server *HTTPServer) ReloadLDAPCfg() Response { if !ldap.IsEnabled() { return Error(400, "LDAP is not enabled", nil) } @@ -13,5 +13,5 @@ func (server *HTTPServer) ReloadLdapCfg() Response { if err != nil { return Error(500, "Failed to reload ldap config.", err) } - return Success("Ldap config reloaded") + return Success("LDAP config reloaded") } diff --git a/pkg/api/admin_users.go b/pkg/api/admin_users.go index 76193771eb91..9fedbc4ddf52 100644 --- a/pkg/api/admin_users.go +++ b/pkg/api/admin_users.go @@ -4,12 +4,12 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/infra/metrics" - m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/util" ) -func AdminCreateUser(c *m.ReqContext, form dtos.AdminCreateUserForm) { - cmd := m.CreateUserCommand{ +func AdminCreateUser(c *models.ReqContext, form dtos.AdminCreateUserForm) { + cmd := models.CreateUserCommand{ Login: form.Login, Email: form.Email, Password: form.Password, @@ -38,7 +38,7 @@ func AdminCreateUser(c *m.ReqContext, form dtos.AdminCreateUserForm) { user := cmd.Result - result := m.UserIdDTO{ + 
result := models.UserIdDTO{ Message: "User created", Id: user.Id, } @@ -46,7 +46,7 @@ func AdminCreateUser(c *m.ReqContext, form dtos.AdminCreateUserForm) { c.JSON(200, result) } -func AdminUpdateUserPassword(c *m.ReqContext, form dtos.AdminUpdateUserPasswordForm) { +func AdminUpdateUserPassword(c *models.ReqContext, form dtos.AdminUpdateUserPasswordForm) { userID := c.ParamsInt64(":id") if len(form.Password) < 4 { @@ -54,7 +54,7 @@ func AdminUpdateUserPassword(c *m.ReqContext, form dtos.AdminUpdateUserPasswordF return } - userQuery := m.GetUserByIdQuery{Id: userID} + userQuery := models.GetUserByIdQuery{Id: userID} if err := bus.Dispatch(&userQuery); err != nil { c.JsonApiErr(500, "Could not read user from database", err) @@ -63,7 +63,7 @@ func AdminUpdateUserPassword(c *m.ReqContext, form dtos.AdminUpdateUserPasswordF passwordHashed := util.EncodePassword(form.Password, userQuery.Result.Salt) - cmd := m.ChangeUserPasswordCommand{ + cmd := models.ChangeUserPasswordCommand{ UserId: userID, NewPassword: passwordHashed, } @@ -77,17 +77,17 @@ func AdminUpdateUserPassword(c *m.ReqContext, form dtos.AdminUpdateUserPasswordF } // PUT /api/admin/users/:id/permissions -func AdminUpdateUserPermissions(c *m.ReqContext, form dtos.AdminUpdateUserPermissionsForm) { +func AdminUpdateUserPermissions(c *models.ReqContext, form dtos.AdminUpdateUserPermissionsForm) { userID := c.ParamsInt64(":id") - cmd := m.UpdateUserPermissionsCommand{ + cmd := models.UpdateUserPermissionsCommand{ UserId: userID, IsGrafanaAdmin: form.IsGrafanaAdmin, } if err := bus.Dispatch(&cmd); err != nil { - if err == m.ErrLastGrafanaAdmin { - c.JsonApiErr(400, m.ErrLastGrafanaAdmin.Error(), nil) + if err == models.ErrLastGrafanaAdmin { + c.JsonApiErr(400, models.ErrLastGrafanaAdmin.Error(), nil) return } @@ -98,10 +98,10 @@ func AdminUpdateUserPermissions(c *m.ReqContext, form dtos.AdminUpdateUserPermis c.JsonOK("User permissions updated") } -func AdminDeleteUser(c *m.ReqContext) { +func AdminDeleteUser(c 
*models.ReqContext) { userID := c.ParamsInt64(":id") - cmd := m.DeleteUserCommand{UserId: userID} + cmd := models.DeleteUserCommand{UserId: userID} if err := bus.Dispatch(&cmd); err != nil { c.JsonApiErr(500, "Failed to delete user", err) @@ -111,8 +111,49 @@ func AdminDeleteUser(c *m.ReqContext) { c.JsonOK("User deleted") } +// POST /api/admin/users/:id/disable +func (server *HTTPServer) AdminDisableUser(c *models.ReqContext) Response { + userID := c.ParamsInt64(":id") + + // External users shouldn't be disabled from API + authInfoQuery := &models.GetAuthInfoQuery{UserId: userID} + if err := bus.Dispatch(authInfoQuery); err != models.ErrUserNotFound { + return Error(500, "Could not disable external user", nil) + } + + disableCmd := models.DisableUserCommand{UserId: userID, IsDisabled: true} + if err := bus.Dispatch(&disableCmd); err != nil { + return Error(500, "Failed to disable user", err) + } + + err := server.AuthTokenService.RevokeAllUserTokens(c.Req.Context(), userID) + if err != nil { + return Error(500, "Failed to disable user", err) + } + + return Success("User disabled") +} + +// POST /api/admin/users/:id/enable +func AdminEnableUser(c *models.ReqContext) Response { + userID := c.ParamsInt64(":id") + + // External users shouldn't be disabled from API + authInfoQuery := &models.GetAuthInfoQuery{UserId: userID} + if err := bus.Dispatch(authInfoQuery); err != models.ErrUserNotFound { + return Error(500, "Could not enable external user", nil) + } + + disableCmd := models.DisableUserCommand{UserId: userID, IsDisabled: false} + if err := bus.Dispatch(&disableCmd); err != nil { + return Error(500, "Failed to enable user", err) + } + + return Success("User enabled") +} + // POST /api/admin/users/:id/logout -func (server *HTTPServer) AdminLogoutUser(c *m.ReqContext) Response { +func (server *HTTPServer) AdminLogoutUser(c *models.ReqContext) Response { userID := c.ParamsInt64(":id") if c.UserId == userID { @@ -123,13 +164,13 @@ func (server *HTTPServer) 
AdminLogoutUser(c *m.ReqContext) Response { } // GET /api/admin/users/:id/auth-tokens -func (server *HTTPServer) AdminGetUserAuthTokens(c *m.ReqContext) Response { +func (server *HTTPServer) AdminGetUserAuthTokens(c *models.ReqContext) Response { userID := c.ParamsInt64(":id") return server.getUserAuthTokensInternal(c, userID) } // POST /api/admin/users/:id/revoke-auth-token -func (server *HTTPServer) AdminRevokeUserAuthToken(c *m.ReqContext, cmd m.RevokeAuthTokenCmd) Response { +func (server *HTTPServer) AdminRevokeUserAuthToken(c *models.ReqContext, cmd models.RevokeAuthTokenCmd) Response { userID := c.ParamsInt64(":id") return server.revokeUserAuthTokenInternal(c, userID, cmd) } diff --git a/pkg/api/admin_users_test.go b/pkg/api/admin_users_test.go index b94f09b0b753..4d858183545f 100644 --- a/pkg/api/admin_users_test.go +++ b/pkg/api/admin_users_test.go @@ -5,6 +5,7 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/auth" @@ -84,6 +85,36 @@ func TestAdminApiEndpoint(t *testing.T) { So(userId, ShouldEqual, 200) }) }) + + Convey("When a server admin attempts to disable/enable external user", t, func() { + userId := int64(0) + bus.AddHandler("test", func(cmd *m.GetAuthInfoQuery) error { + userId = cmd.UserId + return nil + }) + + adminDisableUserScenario("Should return Could not disable external user error", "disable", "/api/admin/users/42/disable", "/api/admin/users/:id/disable", func(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 500) + + respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) + So(err, ShouldBeNil) + So(respJSON.Get("message").MustString(), ShouldEqual, "Could not disable external user") + + So(userId, ShouldEqual, 42) + }) + + 
adminDisableUserScenario("Should return Could not enable external user error", "enable", "/api/admin/users/42/enable", "/api/admin/users/:id/enable", func(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 500) + + respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) + So(err, ShouldBeNil) + So(respJSON.Get("message").MustString(), ShouldEqual, "Could not enable external user") + + So(userId, ShouldEqual, 42) + }) + }) } func putAdminScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.AdminUpdateUserPermissionsForm, fn scenarioFunc) { @@ -186,3 +217,32 @@ func adminGetUserAuthTokensScenario(desc string, url string, routePattern string fn(sc) }) } + +func adminDisableUserScenario(desc string, action string, url string, routePattern string, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + fakeAuthTokenService := auth.NewFakeUserAuthTokenService() + + hs := HTTPServer{ + Bus: bus.GetBus(), + AuthTokenService: fakeAuthTokenService, + } + + sc := setupScenarioContext(url) + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.UserId = TestUserID + + if action == "enable" { + return AdminEnableUser(c) + } + + return hs.AdminDisableUser(c) + }) + + sc.m.Post(routePattern, sc.defaultHandler) + + fn(sc) + }) +} diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go index 0cd00d3b015f..e5e943260275 100644 --- a/pkg/api/alerting.go +++ b/pkg/api/alerting.go @@ -131,9 +131,9 @@ func AlertTest(c *m.ReqContext, dto dtos.AlertTestCommand) Response { } backendCmd := alerting.AlertTestCommand{ - OrgId: c.OrgId, + OrgID: c.OrgId, Dashboard: dto.Dashboard, - PanelId: dto.PanelId, + PanelID: dto.PanelId, User: c.SignedInUser, } diff --git a/pkg/api/api.go b/pkg/api/api.go index 9b5aae105cba..9f80f1cb4fba 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -381,6 +381,8 @@ func (hs *HTTPServer) registerRoutes() 
{ adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword) adminRoute.Put("/users/:id/permissions", bind(dtos.AdminUpdateUserPermissionsForm{}), AdminUpdateUserPermissions) adminRoute.Delete("/users/:id", AdminDeleteUser) + adminRoute.Post("/users/:id/disable", Wrap(hs.AdminDisableUser)) + adminRoute.Post("/users/:id/enable", Wrap(AdminEnableUser)) adminRoute.Get("/users/:id/quotas", Wrap(GetUserQuotas)) adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), Wrap(UpdateUserQuota)) adminRoute.Get("/stats", AdminGetStats) @@ -393,7 +395,7 @@ func (hs *HTTPServer) registerRoutes() { adminRoute.Post("/provisioning/dashboards/reload", Wrap(hs.AdminProvisioningReloadDasboards)) adminRoute.Post("/provisioning/datasources/reload", Wrap(hs.AdminProvisioningReloadDatasources)) adminRoute.Post("/provisioning/notifications/reload", Wrap(hs.AdminProvisioningReloadNotifications)) - adminRoute.Post("/ldap/reload", Wrap(hs.ReloadLdapCfg)) + adminRoute.Post("/ldap/reload", Wrap(hs.ReloadLDAPCfg)) }, reqGrafanaAdmin) // rendering diff --git a/pkg/api/dashboard.go b/pkg/api/dashboard.go index ed153c02efe3..27f70d4d388a 100644 --- a/pkg/api/dashboard.go +++ b/pkg/api/dashboard.go @@ -316,6 +316,7 @@ func GetHomeDashboard(c *m.ReqContext) Response { if err != nil { return Error(500, "Failed to load home dashboard", err) } + defer file.Close() dash := dtos.DashboardFullWithMeta{} dash.Meta.IsHome = true diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index e881fc1539de..643a53d18fbd 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -176,7 +176,7 @@ func (hs *HTTPServer) getFrontendSettingsMap(c *m.ReqContext) (map[string]interf "appSubUrl": setting.AppSubUrl, "allowOrgCreate": (setting.AllowUserOrgCreate && c.IsSignedIn) || c.IsGrafanaAdmin, "authProxyEnabled": setting.AuthProxyEnabled, - "ldapEnabled": setting.LdapEnabled, + "ldapEnabled": setting.LDAPEnabled, 
"alertingEnabled": setting.AlertingEnabled, "alertingErrorOrTimeout": setting.AlertingErrorOrTimeout, "alertingNoDataOrNullValues": setting.AlertingNoDataOrNullValues, diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 7ec4fbaa3b3c..d2094b33cb1a 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -30,7 +30,7 @@ import ( "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" - "gopkg.in/macaron.v1" + macaron "gopkg.in/macaron.v1" ) func init() { @@ -227,6 +227,10 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() { m.Use(middleware.AddDefaultResponseHeaders()) + if setting.ServeFromSubPath && setting.AppSubUrl != "" { + m.SetURLPrefix(setting.AppSubUrl) + } + m.Use(macaron.Renderer(macaron.RenderOptions{ Directory: path.Join(setting.StaticRootPath, "views"), IndentJSON: macaron.Env != macaron.PROD, diff --git a/pkg/api/login.go b/pkg/api/login.go index e7fe6db93477..fe4b8f5d1be2 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -105,6 +105,10 @@ func (hs *HTTPServer) LoginPost(c *m.ReqContext, cmd dtos.LoginCommand) Response return Error(401, "Invalid username or password", err) } + if err == login.ErrUserDisabled { + return Error(401, "User is disabled", err) + } + return Error(500, "Error while trying to authenticate user", err) } diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index 6e5ae0f8761b..5b988686cfba 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -2,6 +2,7 @@ package api import ( "context" + "sort" "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" @@ -66,7 +67,14 @@ func (hs *HTTPServer) QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) R func GetTestDataScenarios(c *m.ReqContext) Response { result := make([]interface{}, 0) - for _, scenario := range testdata.ScenarioRegistry { + scenarioIds := make([]string, 0) + for id 
:= range testdata.ScenarioRegistry { + scenarioIds = append(scenarioIds, id) + } + sort.Strings(scenarioIds) + + for _, scenarioId := range scenarioIds { + scenario := testdata.ScenarioRegistry[scenarioId] result = append(result, map[string]interface{}{ "id": scenario.Id, "name": scenario.Name, diff --git a/pkg/api/password.go b/pkg/api/password.go index 4776c6a30649..278408ee9afe 100644 --- a/pkg/api/password.go +++ b/pkg/api/password.go @@ -9,7 +9,7 @@ import ( ) func SendResetPasswordEmail(c *m.ReqContext, form dtos.SendResetPasswordEmailForm) Response { - if setting.LdapEnabled || setting.AuthProxyEnabled { + if setting.LDAPEnabled || setting.AuthProxyEnabled { return Error(401, "Not allowed to reset password when LDAP or Auth Proxy is enabled", nil) } if setting.DisableLoginForm { diff --git a/pkg/api/team_members.go b/pkg/api/team_members.go index 54a4d8220e5a..bc622f8662ae 100644 --- a/pkg/api/team_members.go +++ b/pkg/api/team_members.go @@ -21,7 +21,7 @@ func GetTeamMembers(c *m.ReqContext) Response { member.AvatarUrl = dtos.GetGravatarUrl(member.Email) member.Labels = []string{} - if setting.IsEnterprise && setting.LdapEnabled && member.External { + if setting.IsEnterprise && setting.LDAPEnabled && member.External { member.Labels = append(member.Labels, "LDAP") } } diff --git a/pkg/api/user.go b/pkg/api/user.go index 9eb7e75eab0e..cde5b7b92fdd 100644 --- a/pkg/api/user.go +++ b/pkg/api/user.go @@ -202,7 +202,7 @@ func (hs *HTTPServer) ChangeActiveOrgAndRedirectToHome(c *m.ReqContext) { } func ChangeUserPassword(c *m.ReqContext, cmd m.ChangeUserPasswordCommand) Response { - if setting.LdapEnabled || setting.AuthProxyEnabled { + if setting.LDAPEnabled || setting.AuthProxyEnabled { return Error(400, "Not allowed to change password when LDAP or Auth Proxy is enabled", nil) } diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index d5add2b71684..ebaee5573482 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ 
b/pkg/cmd/grafana-cli/commands/commands.go @@ -7,14 +7,16 @@ import ( "github.com/codegangsta/cli" "github.com/fatih/color" "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/commands/datamigrations" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" ) -func runDbCommand(command func(commandLine CommandLine) error) func(context *cli.Context) { +func runDbCommand(command func(commandLine utils.CommandLine, sqlStore *sqlstore.SqlStore) error) func(context *cli.Context) { return func(context *cli.Context) { - cmd := &contextCommandLine{context} + cmd := &utils.ContextCommandLine{Context: context} cfg := setting.NewCfg() cfg.Load(&setting.CommandLineArgs{ @@ -28,7 +30,7 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli engine.Bus = bus.GetBus() engine.Init() - if err := command(cmd); err != nil { + if err := command(cmd, engine); err != nil { logger.Errorf("\n%s: ", color.RedString("Error")) logger.Errorf("%s\n\n", err) @@ -40,10 +42,10 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli } } -func runPluginCommand(command func(commandLine CommandLine) error) func(context *cli.Context) { +func runPluginCommand(command func(commandLine utils.CommandLine) error) func(context *cli.Context) { return func(context *cli.Context) { - cmd := &contextCommandLine{context} + cmd := &utils.ContextCommandLine{Context: context} if err := command(cmd); err != nil { logger.Errorf("\n%s: ", color.RedString("Error")) logger.Errorf("%s %s\n\n", color.RedString("✗"), err) @@ -107,6 +109,17 @@ var adminCommands = []cli.Command{ }, }, }, + { + Name: "data-migration", + Usage: "Runs a script that migrates or cleanups data in your db", + Subcommands: []cli.Command{ + { + 
Name: "encrypt-datasource-passwords", + Usage: "Migrates passwords from unsecured fields to secure_json_data field. Return ok unless there is an error. Safe to execute multiple times.", + Action: runDbCommand(datamigrations.EncryptDatasourcePaswords), + }, + }, + }, } var Commands = []cli.Command{ diff --git a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go new file mode 100644 index 000000000000..e55fa2d70b88 --- /dev/null +++ b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go @@ -0,0 +1,126 @@ +package datamigrations + +import ( + "context" + "encoding/json" + + "github.com/fatih/color" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" + "github.com/grafana/grafana/pkg/util/errutil" +) + +var ( + datasourceTypes = []string{ + "mysql", + "influxdb", + "elasticsearch", + "graphite", + "prometheus", + "opentsdb", + } +) + +// EncryptDatasourcePaswords migrates un-encrypted secrets on datasources +// to the secureJson Column. 
+func EncryptDatasourcePaswords(c utils.CommandLine, sqlStore *sqlstore.SqlStore) error { + return sqlStore.WithDbSession(context.Background(), func(session *sqlstore.DBSession) error { + passwordsUpdated, err := migrateColumn(session, "password") + if err != nil { + return err + } + + basicAuthUpdated, err := migrateColumn(session, "basic_auth_password") + if err != nil { + return err + } + + logger.Info("\n") + if passwordsUpdated > 0 { + logger.Infof("%s Encrypted password field for %d datasources \n", color.GreenString("✔"), passwordsUpdated) + } + + if basicAuthUpdated > 0 { + logger.Infof("%s Encrypted basic_auth_password field for %d datasources \n", color.GreenString("✔"), basicAuthUpdated) + } + + if passwordsUpdated == 0 && basicAuthUpdated == 0 { + logger.Infof("%s All datasources secrets are allready encrypted\n", color.GreenString("✔")) + } + + logger.Info("\n") + + logger.Warn("Warning: Datasource provisioning files need to be manually changed to prevent overwriting of " + + "the data during provisioning. 
See https://grafana.com/docs/installation/upgrading/#upgrading-to-v6-2 for " + + "details") + return nil + }) +} + +func migrateColumn(session *sqlstore.DBSession, column string) (int, error) { + var rows []map[string]string + + session.Cols("id", column, "secure_json_data") + session.Table("data_source") + session.In("type", datasourceTypes) + session.Where(column + " IS NOT NULL AND " + column + " != ''") + err := session.Find(&rows) + + if err != nil { + return 0, errutil.Wrapf(err, "failed to select column: %s", column) + } + + rowsUpdated, err := updateRows(session, rows, column) + return rowsUpdated, errutil.Wrapf(err, "failed to update column: %s", column) +} + +func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordFieldName string) (int, error) { + var rowsUpdated int + + for _, row := range rows { + newSecureJSONData, err := getUpdatedSecureJSONData(row, passwordFieldName) + if err != nil { + return 0, err + } + + data, err := json.Marshal(newSecureJSONData) + if err != nil { + return 0, errutil.Wrap("marshaling newSecureJsonData failed", err) + } + + newRow := map[string]interface{}{"secure_json_data": data, passwordFieldName: ""} + session.Table("data_source") + session.Where("id = ?", row["id"]) + // Setting both columns while having value only for secure_json_data should clear the [passwordFieldName] column + session.Cols("secure_json_data", passwordFieldName) + + _, err = session.Update(newRow) + if err != nil { + return 0, err + } + + rowsUpdated++ + } + return rowsUpdated, nil +} + +func getUpdatedSecureJSONData(row map[string]string, passwordFieldName string) (map[string]interface{}, error) { + encryptedPassword, err := util.Encrypt([]byte(row[passwordFieldName]), setting.SecretKey) + if err != nil { + return nil, err + } + + var secureJSONData map[string]interface{} + + if err := json.Unmarshal([]byte(row["secure_json_data"]), &secureJSONData); err != nil { + return nil, err + } + + jsonFieldName := 
util.ToCamelCase(passwordFieldName) + secureJSONData[jsonFieldName] = encryptedPassword + return secureJSONData, nil +} diff --git a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go new file mode 100644 index 000000000000..64987423decd --- /dev/null +++ b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go @@ -0,0 +1,67 @@ +package datamigrations + +import ( + "testing" + "time" + + "github.com/grafana/grafana/pkg/cmd/grafana-cli/commands/commandstest" + "github.com/grafana/grafana/pkg/components/securejsondata" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/stretchr/testify/assert" +) + +func TestPasswordMigrationCommand(t *testing.T) { + //setup datasources with password, basic_auth and none + sqlstore := sqlstore.InitTestDB(t) + session := sqlstore.NewSession() + defer session.Close() + + datasources := []*models.DataSource{ + {Type: "influxdb", Name: "influxdb", Password: "foobar"}, + {Type: "graphite", Name: "graphite", BasicAuthPassword: "foobar"}, + {Type: "prometheus", Name: "prometheus", SecureJsonData: securejsondata.GetEncryptedJsonData(map[string]string{})}, + } + + // set required default values + for _, ds := range datasources { + ds.Created = time.Now() + ds.Updated = time.Now() + ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{}) + } + + _, err := session.Insert(&datasources) + assert.Nil(t, err) + + //run migration + err = EncryptDatasourcePaswords(&commandstest.FakeCommandLine{}, sqlstore) + assert.Nil(t, err) + + //verify that no datasources still have password or basic_auth + var dss []*models.DataSource + err = session.SQL("select * from data_source").Find(&dss) + assert.Nil(t, err) + assert.Equal(t, len(dss), 3) + + for _, ds := range dss { + sj := 
ds.SecureJsonData.Decrypt() + + if ds.Name == "influxdb" { + assert.Equal(t, ds.Password, "") + v, exist := sj["password"] + assert.True(t, exist) + assert.Equal(t, v, "foobar", "expected password to be moved to securejson") + } + + if ds.Name == "graphite" { + assert.Equal(t, ds.BasicAuthPassword, "") + v, exist := sj["basicAuthPassword"] + assert.True(t, exist) + assert.Equal(t, v, "foobar", "expected basic_auth_password to be moved to securejson") + } + + if ds.Name == "prometheus" { + assert.Equal(t, len(sj), 0) + } + } +} diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index 99cef15e50e3..db3907682638 100644 --- a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -14,13 +14,14 @@ import ( "strings" "github.com/fatih/color" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" ) -func validateInput(c CommandLine, pluginFolder string) error { +func validateInput(c utils.CommandLine, pluginFolder string) error { arg := c.Args().First() if arg == "" { return errors.New("please specify plugin to install") @@ -46,7 +47,7 @@ func validateInput(c CommandLine, pluginFolder string) error { return nil } -func installCommand(c CommandLine) error { +func installCommand(c utils.CommandLine) error { pluginFolder := c.PluginDirectory() if err := validateInput(c, pluginFolder); err != nil { return err @@ -60,7 +61,7 @@ func installCommand(c CommandLine) error { // InstallPlugin downloads the plugin code as a zip file from the Grafana.com API // and then extracts the zip into the plugins directory. 
-func InstallPlugin(pluginName, version string, c CommandLine) error { +func InstallPlugin(pluginName, version string, c utils.CommandLine) error { pluginFolder := c.PluginDirectory() downloadURL := c.PluginURL() if downloadURL == "" { diff --git a/pkg/cmd/grafana-cli/commands/listremote_command.go b/pkg/cmd/grafana-cli/commands/listremote_command.go index 4798369def11..7351ee58a371 100644 --- a/pkg/cmd/grafana-cli/commands/listremote_command.go +++ b/pkg/cmd/grafana-cli/commands/listremote_command.go @@ -3,9 +3,10 @@ package commands import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func listremoteCommand(c CommandLine) error { +func listremoteCommand(c utils.CommandLine) error { plugin, err := s.ListAllPlugins(c.RepoDirectory()) if err != nil { diff --git a/pkg/cmd/grafana-cli/commands/listversions_command.go b/pkg/cmd/grafana-cli/commands/listversions_command.go index 95c536e94f0a..78d681c06a3a 100644 --- a/pkg/cmd/grafana-cli/commands/listversions_command.go +++ b/pkg/cmd/grafana-cli/commands/listversions_command.go @@ -5,9 +5,10 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func validateVersionInput(c CommandLine) error { +func validateVersionInput(c utils.CommandLine) error { arg := c.Args().First() if arg == "" { return errors.New("please specify plugin to list versions for") @@ -16,7 +17,7 @@ func validateVersionInput(c CommandLine) error { return nil } -func listversionsCommand(c CommandLine) error { +func listversionsCommand(c utils.CommandLine) error { if err := validateVersionInput(c); err != nil { return err } diff --git a/pkg/cmd/grafana-cli/commands/ls_command.go b/pkg/cmd/grafana-cli/commands/ls_command.go index 
30745ce3172d..63492d732e98 100644 --- a/pkg/cmd/grafana-cli/commands/ls_command.go +++ b/pkg/cmd/grafana-cli/commands/ls_command.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) var ls_getPlugins func(path string) []m.InstalledPlugin = s.GetLocalPlugins @@ -31,7 +32,7 @@ var validateLsCommand = func(pluginDir string) error { return nil } -func lsCommand(c CommandLine) error { +func lsCommand(c utils.CommandLine) error { pluginDir := c.PluginDirectory() if err := validateLsCommand(pluginDir); err != nil { return err diff --git a/pkg/cmd/grafana-cli/commands/remove_command.go b/pkg/cmd/grafana-cli/commands/remove_command.go index e51929dc95cb..eb536d7b8c7a 100644 --- a/pkg/cmd/grafana-cli/commands/remove_command.go +++ b/pkg/cmd/grafana-cli/commands/remove_command.go @@ -5,12 +5,13 @@ import ( "fmt" "strings" - services "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) var removePlugin func(pluginPath, id string) error = services.RemoveInstalledPlugin -func removeCommand(c CommandLine) error { +func removeCommand(c utils.CommandLine) error { pluginPath := c.PluginDirectory() plugin := c.Args().First() diff --git a/pkg/cmd/grafana-cli/commands/reset_password_command.go b/pkg/cmd/grafana-cli/commands/reset_password_command.go index af2b8b3f89ae..4a6a4b674f2e 100644 --- a/pkg/cmd/grafana-cli/commands/reset_password_command.go +++ b/pkg/cmd/grafana-cli/commands/reset_password_command.go @@ -6,13 +6,15 @@ import ( "github.com/fatih/color" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + 
"github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/util" ) const AdminUserId = 1 -func resetPasswordCommand(c CommandLine) error { +func resetPasswordCommand(c utils.CommandLine, sqlStore *sqlstore.SqlStore) error { newPassword := c.Args().First() password := models.Password(newPassword) diff --git a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go index e01df2dab602..a5aadbbb0c23 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go @@ -4,6 +4,7 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/hashicorp/go-version" ) @@ -27,7 +28,7 @@ func ShouldUpgrade(installed string, remote m.Plugin) bool { return false } -func upgradeAllCommand(c CommandLine) error { +func upgradeAllCommand(c utils.CommandLine) error { pluginsDir := c.PluginDirectory() localPlugins := s.GetLocalPlugins(pluginsDir) diff --git a/pkg/cmd/grafana-cli/commands/upgrade_command.go b/pkg/cmd/grafana-cli/commands/upgrade_command.go index 396371d35772..f32961ce5895 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_command.go @@ -4,9 +4,10 @@ import ( "github.com/fatih/color" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func upgradeCommand(c CommandLine) error { +func upgradeCommand(c utils.CommandLine) error { pluginsDir := c.PluginDirectory() pluginName 
:= c.Args().First() diff --git a/pkg/cmd/grafana-cli/commands/command_line.go b/pkg/cmd/grafana-cli/utils/command_line.go similarity index 64% rename from pkg/cmd/grafana-cli/commands/command_line.go rename to pkg/cmd/grafana-cli/utils/command_line.go index d487aff8aaaa..d3142d0f195e 100644 --- a/pkg/cmd/grafana-cli/commands/command_line.go +++ b/pkg/cmd/grafana-cli/utils/command_line.go @@ -1,4 +1,4 @@ -package commands +package utils import ( "github.com/codegangsta/cli" @@ -22,30 +22,30 @@ type CommandLine interface { PluginURL() string } -type contextCommandLine struct { +type ContextCommandLine struct { *cli.Context } -func (c *contextCommandLine) ShowHelp() { +func (c *ContextCommandLine) ShowHelp() { cli.ShowCommandHelp(c.Context, c.Command.Name) } -func (c *contextCommandLine) ShowVersion() { +func (c *ContextCommandLine) ShowVersion() { cli.ShowVersion(c.Context) } -func (c *contextCommandLine) Application() *cli.App { +func (c *ContextCommandLine) Application() *cli.App { return c.App } -func (c *contextCommandLine) PluginDirectory() string { +func (c *ContextCommandLine) PluginDirectory() string { return c.GlobalString("pluginsDir") } -func (c *contextCommandLine) RepoDirectory() string { +func (c *ContextCommandLine) RepoDirectory() string { return c.GlobalString("repo") } -func (c *contextCommandLine) PluginURL() string { +func (c *ContextCommandLine) PluginURL() string { return c.GlobalString("pluginUrl") } diff --git a/pkg/components/gtime/gtime.go b/pkg/components/gtime/gtime.go new file mode 100644 index 000000000000..e3e4e449f661 --- /dev/null +++ b/pkg/components/gtime/gtime.go @@ -0,0 +1,28 @@ +package gtime + +import ( + "regexp" + "strconv" + "time" +) + +// ParseInterval parses and interval with support for all units that Grafana uses. 
+func ParseInterval(interval string) (time.Duration, error) { + re := regexp.MustCompile(`(\d+)([wdy])`) + result := re.FindSubmatch([]byte(interval)) + + if len(result) == 3 { + num, _ := strconv.Atoi(string(result[1])) + period := string(result[2]) + + if period == `d` { + return time.Hour * 24 * time.Duration(num), nil + } else if period == `w` { + return time.Hour * 24 * 7 * time.Duration(num), nil + } else { + return time.Hour * 24 * 7 * 365 * time.Duration(num), nil + } + } else { + return time.ParseDuration(interval) + } +} diff --git a/pkg/components/gtime/gtime_test.go b/pkg/components/gtime/gtime_test.go new file mode 100644 index 000000000000..e683184023fa --- /dev/null +++ b/pkg/components/gtime/gtime_test.go @@ -0,0 +1,34 @@ +package gtime + +import ( + "errors" + "fmt" + "testing" + "time" +) + +func TestParseInterval(t *testing.T) { + tcs := []struct { + interval string + duration time.Duration + err error + }{ + {interval: "1d", duration: time.Hour * 24}, + {interval: "1w", duration: time.Hour * 24 * 7}, + {interval: "1y", duration: time.Hour * 24 * 7 * 365}, + {interval: "1M", err: errors.New("time: unknown unit M in duration 1M")}, + {interval: "invalid-duration", err: errors.New("time: invalid duration invalid-duration")}, + } + + for i, tc := range tcs { + t.Run(fmt.Sprintf("testcase %d", i), func(t *testing.T) { + res, err := ParseInterval(tc.interval) + if err != nil && err.Error() != tc.err.Error() { + t.Fatalf("expected '%v' got '%v'", tc.err, err) + } + if res != tc.duration { + t.Errorf("expected %v got %v", tc.duration, res) + } + }) + } +} diff --git a/pkg/components/imguploader/azureblobuploader.go b/pkg/components/imguploader/azureblobuploader.go index bfcb901dd0c2..fd39f70678b0 100644 --- a/pkg/components/imguploader/azureblobuploader.go +++ b/pkg/components/imguploader/azureblobuploader.go @@ -46,10 +46,11 @@ func (az *AzureBlobUploader) Upload(ctx context.Context, imageDiskPath string) ( blob := NewStorageClient(az.account_name, 
az.account_key) file, err := os.Open(imageDiskPath) - if err != nil { return "", err } + defer file.Close() + randomFileName := util.GetRandomString(30) + ".png" // upload image az.log.Debug("Uploading image to azure_blob", "container_name", az.container_name, "blob_name", randomFileName) diff --git a/pkg/components/imguploader/gcsuploader.go b/pkg/components/imguploader/gcsuploader.go index 8932e96e59eb..443534ff76ad 100644 --- a/pkg/components/imguploader/gcsuploader.go +++ b/pkg/components/imguploader/gcsuploader.go @@ -67,6 +67,7 @@ func (u *GCSUploader) uploadFile(client *http.Client, imageDiskPath, key string) if err != nil { return err } + defer fileReader.Close() reqUrl := fmt.Sprintf(uploadUrl, u.bucket, key) u.log.Debug("Request URL: ", reqUrl) diff --git a/pkg/components/imguploader/s3uploader.go b/pkg/components/imguploader/s3uploader.go index d690f629f577..7ad8a0f7bebc 100644 --- a/pkg/components/imguploader/s3uploader.go +++ b/pkg/components/imguploader/s3uploader.go @@ -69,6 +69,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, if err != nil { return "", err } + defer file.Close() sess, err = session.NewSession(cfg) if err != nil { diff --git a/pkg/infra/usagestats/usage_stats.go b/pkg/infra/usagestats/usage_stats.go index 8b20391cbb43..e5b9e8fd8cb7 100644 --- a/pkg/infra/usagestats/usage_stats.go +++ b/pkg/infra/usagestats/usage_stats.go @@ -132,7 +132,7 @@ func (uss *UsageStatsService) sendUsageStats(oauthProviders map[string]bool) { authTypes := map[string]bool{} authTypes["anonymous"] = setting.AnonymousEnabled authTypes["basic_auth"] = setting.BasicAuthEnabled - authTypes["ldap"] = setting.LdapEnabled + authTypes["ldap"] = setting.LDAPEnabled authTypes["auth_proxy"] = setting.AuthProxyEnabled for provider, enabled := range oauthProviders { diff --git a/pkg/infra/usagestats/usage_stats_test.go b/pkg/infra/usagestats/usage_stats_test.go index ea5b95d6ef08..07f2df221924 100644 --- 
a/pkg/infra/usagestats/usage_stats_test.go +++ b/pkg/infra/usagestats/usage_stats_test.go @@ -182,7 +182,7 @@ func TestMetrics(t *testing.T) { setting.BuildVersion = "5.0.0" setting.AnonymousEnabled = true setting.BasicAuthEnabled = true - setting.LdapEnabled = true + setting.LDAPEnabled = true setting.AuthProxyEnabled = true setting.Packaging = "deb" diff --git a/pkg/login/auth.go b/pkg/login/auth.go index 56f614d92dea..ca5b572deb83 100644 --- a/pkg/login/auth.go +++ b/pkg/login/auth.go @@ -4,8 +4,8 @@ import ( "errors" "github.com/grafana/grafana/pkg/bus" - m "github.com/grafana/grafana/pkg/models" - LDAP "github.com/grafana/grafana/pkg/services/ldap" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/ldap" ) var ( @@ -19,13 +19,15 @@ var ( ErrPasswordEmpty = errors.New("No password provided") ErrUsersQuotaReached = errors.New("Users quota reached") ErrGettingUserQuota = errors.New("Error getting user quota") + ErrUserDisabled = errors.New("User is disabled") ) func Init() { bus.AddHandler("auth", AuthenticateUser) } -func AuthenticateUser(query *m.LoginUserQuery) error { +// AuthenticateUser authenticates the user via username & password +func AuthenticateUser(query *models.LoginUserQuery) error { if err := validateLoginAttempts(query.Username); err != nil { return err } @@ -35,29 +37,33 @@ func AuthenticateUser(query *m.LoginUserQuery) error { } err := loginUsingGrafanaDB(query) - if err == nil || (err != m.ErrUserNotFound && err != ErrInvalidCredentials) { + if err == nil || (err != models.ErrUserNotFound && err != ErrInvalidCredentials && err != ErrUserDisabled) { return err } - ldapEnabled, ldapErr := loginUsingLdap(query) + ldapEnabled, ldapErr := loginUsingLDAP(query) if ldapEnabled { - if ldapErr == nil || ldapErr != LDAP.ErrInvalidCredentials { + if ldapErr == nil || ldapErr != ldap.ErrInvalidCredentials { return ldapErr } - err = ldapErr + if err != ErrUserDisabled || ldapErr 
!= ldap.ErrInvalidCredentials { + err = ldapErr + } } - if err == ErrInvalidCredentials || err == LDAP.ErrInvalidCredentials { + if err == ErrInvalidCredentials || err == ldap.ErrInvalidCredentials { saveInvalidLoginAttempt(query) + return ErrInvalidCredentials } - if err == m.ErrUserNotFound { + if err == models.ErrUserNotFound { return ErrInvalidCredentials } return err } + func validatePasswordSet(password string) error { if len(password) == 0 { return ErrPasswordEmpty diff --git a/pkg/login/auth_test.go b/pkg/login/auth_test.go index 85ad3bc07dce..06a78cc6d5df 100644 --- a/pkg/login/auth_test.go +++ b/pkg/login/auth_test.go @@ -6,8 +6,8 @@ import ( . "github.com/smartystreets/goconvey/convey" - m "github.com/grafana/grafana/pkg/models" - LDAP "github.com/grafana/grafana/pkg/services/ldap" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/ldap" ) func TestAuthenticateUser(t *testing.T) { @@ -15,9 +15,9 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When a user authenticates without setting a password", func(sc *authScenarioContext) { mockLoginAttemptValidation(nil, sc) mockLoginUsingGrafanaDB(nil, sc) - mockLoginUsingLdap(false, nil, sc) + mockLoginUsingLDAP(false, nil, sc) - loginQuery := m.LoginUserQuery{ + loginQuery := models.LoginUserQuery{ Username: "user", Password: "", } @@ -33,7 +33,7 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When a user authenticates having too many login attempts", func(sc *authScenarioContext) { mockLoginAttemptValidation(ErrTooManyLoginAttempts, sc) mockLoginUsingGrafanaDB(nil, sc) - mockLoginUsingLdap(true, nil, sc) + mockLoginUsingLDAP(true, nil, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) @@ -50,7 +50,7 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When grafana user authenticate with valid credentials", func(sc *authScenarioContext) { mockLoginAttemptValidation(nil, sc) 
mockLoginUsingGrafanaDB(nil, sc) - mockLoginUsingLdap(true, ErrInvalidCredentials, sc) + mockLoginUsingLDAP(true, ErrInvalidCredentials, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) @@ -68,7 +68,7 @@ func TestAuthenticateUser(t *testing.T) { customErr := errors.New("custom") mockLoginAttemptValidation(nil, sc) mockLoginUsingGrafanaDB(customErr, sc) - mockLoginUsingLdap(true, ErrInvalidCredentials, sc) + mockLoginUsingLDAP(true, ErrInvalidCredentials, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) @@ -84,8 +84,8 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When a non-existing grafana user authenticate and ldap disabled", func(sc *authScenarioContext) { mockLoginAttemptValidation(nil, sc) - mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) - mockLoginUsingLdap(false, nil, sc) + mockLoginUsingGrafanaDB(models.ErrUserNotFound, sc) + mockLoginUsingLDAP(false, nil, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) @@ -101,14 +101,14 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When a non-existing grafana user authenticate and invalid ldap credentials", func(sc *authScenarioContext) { mockLoginAttemptValidation(nil, sc) - mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) - mockLoginUsingLdap(true, LDAP.ErrInvalidCredentials, sc) + mockLoginUsingGrafanaDB(models.ErrUserNotFound, sc) + mockLoginUsingLDAP(true, ldap.ErrInvalidCredentials, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) Convey("it should result in", func() { - So(err, ShouldEqual, LDAP.ErrInvalidCredentials) + So(err, ShouldEqual, ErrInvalidCredentials) So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) So(sc.grafanaLoginWasCalled, ShouldBeTrue) So(sc.ldapLoginWasCalled, ShouldBeTrue) @@ -118,8 +118,8 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When a non-existing grafana user authenticate and valid ldap credentials", func(sc *authScenarioContext) 
{ mockLoginAttemptValidation(nil, sc) - mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) - mockLoginUsingLdap(true, nil, sc) + mockLoginUsingGrafanaDB(models.ErrUserNotFound, sc) + mockLoginUsingLDAP(true, nil, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) @@ -136,8 +136,8 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When a non-existing grafana user authenticate and ldap returns unexpected error", func(sc *authScenarioContext) { customErr := errors.New("custom") mockLoginAttemptValidation(nil, sc) - mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) - mockLoginUsingLdap(true, customErr, sc) + mockLoginUsingGrafanaDB(models.ErrUserNotFound, sc) + mockLoginUsingLDAP(true, customErr, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) @@ -154,13 +154,13 @@ func TestAuthenticateUser(t *testing.T) { authScenario("When grafana user authenticate with invalid credentials and invalid ldap credentials", func(sc *authScenarioContext) { mockLoginAttemptValidation(nil, sc) mockLoginUsingGrafanaDB(ErrInvalidCredentials, sc) - mockLoginUsingLdap(true, LDAP.ErrInvalidCredentials, sc) + mockLoginUsingLDAP(true, ldap.ErrInvalidCredentials, sc) mockSaveInvalidLoginAttempt(sc) err := AuthenticateUser(sc.loginUserQuery) Convey("it should result in", func() { - So(err, ShouldEqual, LDAP.ErrInvalidCredentials) + So(err, ShouldEqual, ErrInvalidCredentials) So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) So(sc.grafanaLoginWasCalled, ShouldBeTrue) So(sc.ldapLoginWasCalled, ShouldBeTrue) @@ -171,7 +171,7 @@ func TestAuthenticateUser(t *testing.T) { } type authScenarioContext struct { - loginUserQuery *m.LoginUserQuery + loginUserQuery *models.LoginUserQuery grafanaLoginWasCalled bool ldapLoginWasCalled bool loginAttemptValidationWasCalled bool @@ -181,14 +181,14 @@ type authScenarioContext struct { type authScenarioFunc func(sc *authScenarioContext) func mockLoginUsingGrafanaDB(err error, sc *authScenarioContext) { - 
loginUsingGrafanaDB = func(query *m.LoginUserQuery) error { + loginUsingGrafanaDB = func(query *models.LoginUserQuery) error { sc.grafanaLoginWasCalled = true return err } } -func mockLoginUsingLdap(enabled bool, err error, sc *authScenarioContext) { - loginUsingLdap = func(query *m.LoginUserQuery) (bool, error) { +func mockLoginUsingLDAP(enabled bool, err error, sc *authScenarioContext) { + loginUsingLDAP = func(query *models.LoginUserQuery) (bool, error) { sc.ldapLoginWasCalled = true return enabled, err } @@ -202,7 +202,7 @@ func mockLoginAttemptValidation(err error, sc *authScenarioContext) { } func mockSaveInvalidLoginAttempt(sc *authScenarioContext) { - saveInvalidLoginAttempt = func(query *m.LoginUserQuery) { + saveInvalidLoginAttempt = func(query *models.LoginUserQuery) { sc.saveInvalidLoginAttemptWasCalled = true } } @@ -210,12 +210,12 @@ func mockSaveInvalidLoginAttempt(sc *authScenarioContext) { func authScenario(desc string, fn authScenarioFunc) { Convey(desc, func() { origLoginUsingGrafanaDB := loginUsingGrafanaDB - origLoginUsingLdap := loginUsingLdap + origLoginUsingLDAP := loginUsingLDAP origValidateLoginAttempts := validateLoginAttempts origSaveInvalidLoginAttempt := saveInvalidLoginAttempt sc := &authScenarioContext{ - loginUserQuery: &m.LoginUserQuery{ + loginUserQuery: &models.LoginUserQuery{ Username: "user", Password: "pwd", IpAddress: "192.168.1.1:56433", @@ -224,7 +224,7 @@ func authScenario(desc string, fn authScenarioFunc) { defer func() { loginUsingGrafanaDB = origLoginUsingGrafanaDB - loginUsingLdap = origLoginUsingLdap + loginUsingLDAP = origLoginUsingLDAP validateLoginAttempts = origValidateLoginAttempts saveInvalidLoginAttempt = origSaveInvalidLoginAttempt }() diff --git a/pkg/login/grafana_login.go b/pkg/login/grafana_login.go index e8594fdd190d..64bd4b201e7b 100644 --- a/pkg/login/grafana_login.go +++ b/pkg/login/grafana_login.go @@ -26,6 +26,10 @@ var loginUsingGrafanaDB = func(query *m.LoginUserQuery) error { user := 
userQuery.Result + if user.IsDisabled { + return ErrUserDisabled + } + if err := validatePassword(query.Password, user.Password, user.Salt); err != nil { return err } diff --git a/pkg/login/grafana_login_test.go b/pkg/login/grafana_login_test.go index 2c189ae00729..eddc6e12f5f6 100644 --- a/pkg/login/grafana_login_test.go +++ b/pkg/login/grafana_login_test.go @@ -63,6 +63,23 @@ func TestGrafanaLogin(t *testing.T) { So(sc.loginUserQuery.User.Password, ShouldEqual, sc.loginUserQuery.Password) }) }) + + grafanaLoginScenario("When login with disabled user", func(sc *grafanaLoginScenarioContext) { + sc.withDisabledUser() + err := loginUsingGrafanaDB(sc.loginUserQuery) + + Convey("it should return user is disabled error", func() { + So(err, ShouldEqual, ErrUserDisabled) + }) + + Convey("it should not call password validation", func() { + So(sc.validatePasswordCalled, ShouldBeFalse) + }) + + Convey("it should not pupulate user object", func() { + So(sc.loginUserQuery.User, ShouldBeNil) + }) + }) }) } @@ -138,3 +155,9 @@ func (sc *grafanaLoginScenarioContext) withInvalidPassword() { }) mockPasswordValidation(false, sc) } + +func (sc *grafanaLoginScenarioContext) withDisabledUser() { + sc.getUserByLoginQueryReturns(&m.User{ + IsDisabled: true, + }) +} diff --git a/pkg/login/ldap_login.go b/pkg/login/ldap_login.go index abd861cbe6dd..33693212ac7f 100644 --- a/pkg/login/ldap_login.go +++ b/pkg/login/ldap_login.go @@ -2,17 +2,24 @@ package login import ( "github.com/grafana/grafana/pkg/models" - LDAP "github.com/grafana/grafana/pkg/services/ldap" + "github.com/grafana/grafana/pkg/services/multildap" + "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util/errutil" ) -var newLDAP = LDAP.New -var getLDAPConfig = LDAP.GetConfig -var isLDAPEnabled = LDAP.IsEnabled +// getLDAPConfig gets LDAP config +var getLDAPConfig = multildap.GetConfig -// loginUsingLdap 
logs in user using LDAP. It returns whether LDAP is enabled and optional error and query arg will be +// isLDAPEnabled checks if LDAP is enabled +var isLDAPEnabled = multildap.IsEnabled + +// newLDAP creates multiple LDAP instance +var newLDAP = multildap.New + +// loginUsingLDAP logs in user using LDAP. It returns whether LDAP is enabled and optional error and query arg will be // populated with the logged in user if successful. -var loginUsingLdap = func(query *models.LoginUserQuery) (bool, error) { +var loginUsingLDAP = func(query *models.LoginUserQuery) (bool, error) { enabled := isLDAPEnabled() if !enabled { @@ -23,18 +30,21 @@ var loginUsingLdap = func(query *models.LoginUserQuery) (bool, error) { if err != nil { return true, errutil.Wrap("Failed to get LDAP config", err) } - if len(config.Servers) == 0 { - return true, ErrNoLDAPServers - } - for _, server := range config.Servers { - auth := newLDAP(server) + externalUser, err := newLDAP(config.Servers).Login(query) + if err != nil { + return true, err + } - err := auth.Login(query) - if err == nil || err != LDAP.ErrInvalidCredentials { - return true, err - } + login, err := user.Upsert(&user.UpsertArgs{ + ExternalUser: externalUser, + SignupAllowed: setting.LDAPAllowSignup, + }) + if err != nil { + return true, err } - return true, LDAP.ErrInvalidCredentials + query.User = login + + return true, nil } diff --git a/pkg/login/ldap_login_test.go b/pkg/login/ldap_login_test.go index 3ea82d0a8ed6..ac5ba49e2f1d 100644 --- a/pkg/login/ldap_login_test.go +++ b/pkg/login/ldap_login_test.go @@ -6,50 +6,51 @@ import ( . 
"github.com/smartystreets/goconvey/convey" - m "github.com/grafana/grafana/pkg/models" - LDAP "github.com/grafana/grafana/pkg/services/ldap" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/ldap" + "github.com/grafana/grafana/pkg/services/multildap" "github.com/grafana/grafana/pkg/setting" ) var errTest = errors.New("Test error") -func TestLdapLogin(t *testing.T) { +func TestLDAPLogin(t *testing.T) { Convey("Login using ldap", t, func() { Convey("Given ldap enabled and no server configured", func() { - setting.LdapEnabled = true + setting.LDAPEnabled = true - ldapLoginScenario("When login", func(sc *ldapLoginScenarioContext) { + LDAPLoginScenario("When login", func(sc *LDAPLoginScenarioContext) { sc.withLoginResult(false) - getLDAPConfig = func() (*LDAP.Config, error) { - config := &LDAP.Config{ - Servers: []*LDAP.ServerConfig{}, + getLDAPConfig = func() (*ldap.Config, error) { + config := &ldap.Config{ + Servers: []*ldap.ServerConfig{}, } return config, nil } - enabled, err := loginUsingLdap(sc.loginUserQuery) + enabled, err := loginUsingLDAP(sc.loginUserQuery) Convey("it should return true", func() { So(enabled, ShouldBeTrue) }) Convey("it should return no LDAP servers error", func() { - So(err, ShouldEqual, ErrNoLDAPServers) + So(err, ShouldEqual, errTest) }) Convey("it should not call ldap login", func() { - So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeFalse) + So(sc.LDAPAuthenticatorMock.loginCalled, ShouldBeTrue) }) }) }) Convey("Given ldap disabled", func() { - setting.LdapEnabled = false + setting.LDAPEnabled = false - ldapLoginScenario("When login", func(sc *ldapLoginScenarioContext) { + LDAPLoginScenario("When login", func(sc *LDAPLoginScenarioContext) { sc.withLoginResult(false) - enabled, err := loginUsingLdap(&m.LoginUserQuery{ + enabled, err := loginUsingLDAP(&models.LoginUserQuery{ Username: "user", Password: "pwd", }) @@ -63,75 +64,88 @@ func 
TestLdapLogin(t *testing.T) { }) Convey("it should not call ldap login", func() { - So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeFalse) + So(sc.LDAPAuthenticatorMock.loginCalled, ShouldBeFalse) }) }) }) }) } -func mockLdapAuthenticator(valid bool) *mockAuth { - mock := &mockAuth{ - validLogin: valid, - } - - newLDAP = func(server *LDAP.ServerConfig) LDAP.IAuth { - return mock - } - - return mock -} - type mockAuth struct { validLogin bool loginCalled bool } -func (auth *mockAuth) Login(query *m.LoginUserQuery) error { +func (auth *mockAuth) Login(query *models.LoginUserQuery) ( + *models.ExternalUserInfo, + error, +) { auth.loginCalled = true if !auth.validLogin { - return errTest + return nil, errTest } - return nil + return nil, nil } -func (auth *mockAuth) Users() ([]*LDAP.UserInfo, error) { +func (auth *mockAuth) Users(logins []string) ( + []*models.ExternalUserInfo, + error, +) { return nil, nil } -func (auth *mockAuth) SyncUser(query *m.LoginUserQuery) error { +func (auth *mockAuth) User(login string) ( + *models.ExternalUserInfo, + error, +) { + return nil, nil +} + +func (auth *mockAuth) Add(dn string, values map[string][]string) error { return nil } -func (auth *mockAuth) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LDAP.UserInfo) (*m.User, error) { - return nil, nil +func (auth *mockAuth) Remove(dn string) error { + return nil +} + +func mockLDAPAuthenticator(valid bool) *mockAuth { + mock := &mockAuth{ + validLogin: valid, + } + + newLDAP = func(servers []*ldap.ServerConfig) multildap.IMultiLDAP { + return mock + } + + return mock } -type ldapLoginScenarioContext struct { - loginUserQuery *m.LoginUserQuery - ldapAuthenticatorMock *mockAuth +type LDAPLoginScenarioContext struct { + loginUserQuery *models.LoginUserQuery + LDAPAuthenticatorMock *mockAuth } -type ldapLoginScenarioFunc func(c *ldapLoginScenarioContext) +type LDAPLoginScenarioFunc func(c *LDAPLoginScenarioContext) -func ldapLoginScenario(desc string, fn ldapLoginScenarioFunc) { +func 
LDAPLoginScenario(desc string, fn LDAPLoginScenarioFunc) { Convey(desc, func() { mock := &mockAuth{} - sc := &ldapLoginScenarioContext{ - loginUserQuery: &m.LoginUserQuery{ + sc := &LDAPLoginScenarioContext{ + loginUserQuery: &models.LoginUserQuery{ Username: "user", Password: "pwd", IpAddress: "192.168.1.1:56433", }, - ldapAuthenticatorMock: mock, + LDAPAuthenticatorMock: mock, } - getLDAPConfig = func() (*LDAP.Config, error) { - config := &LDAP.Config{ - Servers: []*LDAP.ServerConfig{ + getLDAPConfig = func() (*ldap.Config, error) { + config := &ldap.Config{ + Servers: []*ldap.ServerConfig{ { Host: "", }, @@ -141,19 +155,19 @@ func ldapLoginScenario(desc string, fn ldapLoginScenarioFunc) { return config, nil } - newLDAP = func(server *LDAP.ServerConfig) LDAP.IAuth { + newLDAP = func(server []*ldap.ServerConfig) multildap.IMultiLDAP { return mock } defer func() { - newLDAP = LDAP.New - getLDAPConfig = LDAP.GetConfig + newLDAP = multildap.New + getLDAPConfig = multildap.GetConfig }() fn(sc) }) } -func (sc *ldapLoginScenarioContext) withLoginResult(valid bool) { - sc.ldapAuthenticatorMock = mockLdapAuthenticator(valid) +func (sc *LDAPLoginScenarioContext) withLoginResult(valid bool) { + sc.LDAPAuthenticatorMock = mockLDAPAuthenticator(valid) } diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go index d3d8d8e77d8a..9ec5852b73dc 100644 --- a/pkg/middleware/auth_proxy.go +++ b/pkg/middleware/auth_proxy.go @@ -31,13 +31,15 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Check if allowed to continue with this IP if result, err := auth.IsAllowedIP(); !result { + ctx.Logger.Error("auth proxy: failed to check whitelisted ip addresses", "message", err.Error(), "error", err.DetailsError) ctx.Handle(407, err.Error(), err.DetailsError) return true } - // Try to get user id from various sources - id, err := auth.GetUserID() + // Try to log in user from various providers + id, err := auth.Login() if err != nil { + 
ctx.Logger.Error("auth proxy: failed to login", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } @@ -45,6 +47,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Get full user info user, err := auth.GetSignedUser(id) if err != nil { + ctx.Logger.Error("auth proxy: failed to get signed in user", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } @@ -54,7 +57,8 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, ctx.IsSignedIn = true // Remember user data it in cache - if err := auth.Remember(); err != nil { + if err := auth.Remember(id); err != nil { + ctx.Logger.Error("auth proxy: failed to store user in cache", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } diff --git a/pkg/middleware/auth_proxy/auth_proxy.go b/pkg/middleware/auth_proxy/auth_proxy.go index 98bacbeccf47..e078aaf84251 100644 --- a/pkg/middleware/auth_proxy/auth_proxy.go +++ b/pkg/middleware/auth_proxy/auth_proxy.go @@ -12,6 +12,8 @@ import ( "github.com/grafana/grafana/pkg/infra/remotecache" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/ldap" + "github.com/grafana/grafana/pkg/services/multildap" + "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/setting" ) @@ -21,10 +23,14 @@ const ( CachePrefix = "auth-proxy-sync-ttl:%s" ) -var ( - getLDAPConfig = ldap.GetConfig - isLDAPEnabled = ldap.IsEnabled -) +// getLDAPConfig gets LDAP config +var getLDAPConfig = ldap.GetConfig + +// isLDAPEnabled checks if LDAP is enabled +var isLDAPEnabled = ldap.IsEnabled + +// newLDAP creates multiple LDAP instance +var newLDAP = multildap.New // AuthProxy struct type AuthProxy struct { @@ -33,13 +39,13 @@ type AuthProxy struct { orgID int64 header 
string - LDAP func(server *ldap.ServerConfig) ldap.IAuth - - enabled bool - whitelistIP string - headerType string - headers map[string]string - cacheTTL int + enabled bool + LDAPAllowSignup bool + AuthProxyAutoSignUp bool + whitelistIP string + headerType string + headers map[string]string + cacheTTL int } // Error auth proxy specific error @@ -58,7 +64,7 @@ func newError(message string, err error) *Error { // Error returns a Error error string func (err *Error) Error() string { - return fmt.Sprintf("%s", err.Message) + return err.Message } // Options for the AuthProxy @@ -78,13 +84,13 @@ func New(options *Options) *AuthProxy { orgID: options.OrgID, header: header, - LDAP: ldap.New, - - enabled: setting.AuthProxyEnabled, - headerType: setting.AuthProxyHeaderProperty, - headers: setting.AuthProxyHeaders, - whitelistIP: setting.AuthProxyWhitelist, - cacheTTL: setting.AuthProxyLdapSyncTtl, + enabled: setting.AuthProxyEnabled, + headerType: setting.AuthProxyHeaderProperty, + headers: setting.AuthProxyHeaders, + whitelistIP: setting.AuthProxyWhitelist, + cacheTTL: setting.AuthProxyLDAPSyncTtl, + LDAPAllowSignup: setting.LDAPAllowSignup, + AuthProxyAutoSignUp: setting.AuthProxyAutoSignUp, } } @@ -92,20 +98,12 @@ func New(options *Options) *AuthProxy { func (auth *AuthProxy) IsEnabled() bool { // Bail if the setting is not enabled - if !auth.enabled { - return false - } - - return true + return auth.enabled } // HasHeader checks if the we have specified header func (auth *AuthProxy) HasHeader() bool { - if len(auth.header) == 0 { - return false - } - - return true + return len(auth.header) != 0 } // IsAllowedIP compares presented IP with the whitelist one @@ -144,34 +142,22 @@ func (auth *AuthProxy) IsAllowedIP() (bool, *Error) { return false, newError("Proxy authentication required", err) } -// InCache checks if we have user in cache -func (auth *AuthProxy) InCache() bool { - userID, _ := auth.GetUserIDViaCache() - - if userID == 0 { - return false - } - - return true 
-} - // getKey forms a key for the cache func (auth *AuthProxy) getKey() string { return fmt.Sprintf(CachePrefix, auth.header) } -// GetUserID gets user id with whatever means possible -func (auth *AuthProxy) GetUserID() (int64, *Error) { - if auth.InCache() { +// Login logs in user id with whatever means possible +func (auth *AuthProxy) Login() (int64, *Error) { + id, _ := auth.GetUserViaCache() + if id != 0 { // Error here means absent cache - we don't need to handle that - id, _ := auth.GetUserIDViaCache() - return id, nil } if isLDAPEnabled() { - id, err := auth.GetUserIDViaLDAP() + id, err := auth.LoginViaLDAP() if err == ldap.ErrInvalidCredentials { return 0, newError( @@ -181,16 +167,16 @@ func (auth *AuthProxy) GetUserID() (int64, *Error) { } if err != nil { - return 0, newError("Failed to sync user", err) + return 0, newError("Failed to get the user", err) } return id, nil } - id, err := auth.GetUserIDViaHeader() + id, err := auth.LoginViaHeader() if err != nil { return 0, newError( - "Failed to login as user specified in auth proxy header", + "Failed to log in as user, specified in auth proxy header", err, ) } @@ -198,8 +184,8 @@ func (auth *AuthProxy) GetUserID() (int64, *Error) { return id, nil } -// GetUserIDViaCache gets the user from cache -func (auth *AuthProxy) GetUserIDViaCache() (int64, error) { +// GetUserViaCache gets user id from cache +func (auth *AuthProxy) GetUserViaCache() (int64, error) { var ( cacheKey = auth.getKey() userID, err = auth.store.Get(cacheKey) @@ -212,33 +198,34 @@ func (auth *AuthProxy) GetUserIDViaCache() (int64, error) { return userID.(int64), nil } -// GetUserIDViaLDAP gets user via LDAP request -func (auth *AuthProxy) GetUserIDViaLDAP() (int64, *Error) { - query := &models.LoginUserQuery{ - ReqContext: auth.ctx, - Username: auth.header, - } - +// LoginViaLDAP logs in user via LDAP request +func (auth *AuthProxy) LoginViaLDAP() (int64, *Error) { config, err := getLDAPConfig() if err != nil { return 0, newError("Failed to 
get LDAP config", nil) } - if len(config.Servers) == 0 { - return 0, newError("No LDAP servers available", nil) + + extUser, err := newLDAP(config.Servers).User(auth.header) + if err != nil { + return 0, newError(err.Error(), nil) } - for _, server := range config.Servers { - author := auth.LDAP(server) - if err := author.SyncUser(query); err != nil { - return 0, newError(err.Error(), nil) - } + // Have to sync grafana and LDAP user during log in + user, err := user.Upsert(&user.UpsertArgs{ + ReqContext: auth.ctx, + SignupAllowed: auth.LDAPAllowSignup, + ExternalUser: extUser, + }) + if err != nil { + return 0, newError(err.Error(), nil) } - return query.User.Id, nil + return user.Id, nil } -// GetUserIDViaHeader gets user from the header only -func (auth *AuthProxy) GetUserIDViaHeader() (int64, error) { +// LoginViaHeader logs in user from the header only +// TODO: refactor - cyclomatic complexity should be much lower +func (auth *AuthProxy) LoginViaHeader() (int64, error) { extUser := &models.ExternalUserInfo{ AuthModule: "authproxy", AuthId: auth.header, @@ -269,18 +256,16 @@ func (auth *AuthProxy) GetUserIDViaHeader() (int64, error) { } } - // add/update user in grafana - cmd := &models.UpsertUserCommand{ + result, err := user.Upsert(&user.UpsertArgs{ ReqContext: auth.ctx, + SignupAllowed: true, ExternalUser: extUser, - SignupAllowed: setting.AuthProxyAutoSignUp, - } - err := bus.Dispatch(cmd) + }) if err != nil { return 0, err } - return cmd.Result.Id, nil + return result.Id, nil } // GetSignedUser get full signed user info @@ -298,21 +283,18 @@ func (auth *AuthProxy) GetSignedUser(userID int64) (*models.SignedInUser, *Error } // Remember user in cache -func (auth *AuthProxy) Remember() *Error { +func (auth *AuthProxy) Remember(id int64) *Error { + key := auth.getKey() - // Make sure we do not rewrite the expiration time - if auth.InCache() { + // Check if user already in cache + userID, _ := auth.store.Get(key) + if userID != nil { return nil } - var ( - key 
= auth.getKey() - value, _ = auth.GetUserIDViaCache() - expiration = time.Duration(-auth.cacheTTL) * time.Minute - - err = auth.store.Set(key, value, expiration) - ) + expiration := time.Duration(-auth.cacheTTL) * time.Minute + err := auth.store.Set(key, id, expiration) if err != nil { return newError(err.Error(), nil) } diff --git a/pkg/middleware/auth_proxy/auth_proxy_test.go b/pkg/middleware/auth_proxy/auth_proxy_test.go index fbddca81d401..4a1edc66ba5a 100644 --- a/pkg/middleware/auth_proxy/auth_proxy_test.go +++ b/pkg/middleware/auth_proxy/auth_proxy_test.go @@ -1,6 +1,7 @@ package authproxy import ( + "errors" "fmt" "net/http" "testing" @@ -8,24 +9,40 @@ import ( . "github.com/smartystreets/goconvey/convey" "gopkg.in/macaron.v1" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/infra/remotecache" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/ldap" + "github.com/grafana/grafana/pkg/services/multildap" "github.com/grafana/grafana/pkg/setting" ) -type TestLDAP struct { - ldap.Auth - ID int64 - syncCalled bool +type TestMultiLDAP struct { + multildap.MultiLDAP + ID int64 + userCalled bool + loginCalled bool } -func (stub *TestLDAP) SyncUser(query *models.LoginUserQuery) error { - stub.syncCalled = true - query.User = &models.User{ - Id: stub.ID, +func (stub *TestMultiLDAP) Login(query *models.LoginUserQuery) ( + *models.ExternalUserInfo, error, +) { + stub.loginCalled = true + result := &models.ExternalUserInfo{ + UserId: stub.ID, } - return nil + return result, nil +} + +func (stub *TestMultiLDAP) User(login string) ( + *models.ExternalUserInfo, + error, +) { + stub.userCalled = true + result := &models.ExternalUserInfo{ + UserId: stub.ID, + } + return result, nil } func TestMiddlewareContext(t *testing.T) { @@ -44,7 +61,7 @@ func TestMiddlewareContext(t *testing.T) { }, } - Convey("gets data from the cache", func() { + Convey("logs in user from 
the cache", func() { store := remotecache.NewFakeStore(t) key := fmt.Sprintf(CachePrefix, name) store.Set(key, int64(33), 0) @@ -55,53 +72,64 @@ func TestMiddlewareContext(t *testing.T) { OrgID: 4, }) - id, err := auth.GetUserID() + id, err := auth.Login() So(err, ShouldBeNil) So(id, ShouldEqual, 33) }) Convey("LDAP", func() { - Convey("gets data from the LDAP", func() { + Convey("logs in via LDAP", func() { + bus.AddHandler("test", func(cmd *models.UpsertUserCommand) error { + cmd.Result = &models.User{ + Id: 42, + } + + return nil + }) + isLDAPEnabled = func() bool { return true } + stub := &TestMultiLDAP{ + ID: 42, + } + getLDAPConfig = func() (*ldap.Config, error) { config := &ldap.Config{ Servers: []*ldap.ServerConfig{ - {}, + { + SearchBaseDNs: []string{"BaseDNHere"}, + }, }, } return config, nil } + newLDAP = func(servers []*ldap.ServerConfig) multildap.IMultiLDAP { + return stub + } + defer func() { + newLDAP = multildap.New isLDAPEnabled = ldap.IsEnabled getLDAPConfig = ldap.GetConfig }() store := remotecache.NewFakeStore(t) - auth := New(&Options{ + server := New(&Options{ Store: store, Ctx: ctx, OrgID: 4, }) - stub := &TestLDAP{ - ID: 42, - } - - auth.LDAP = func(server *ldap.ServerConfig) ldap.IAuth { - return stub - } - - id, err := auth.GetUserID() + id, err := server.Login() So(err, ShouldBeNil) So(id, ShouldEqual, 42) - So(stub.syncCalled, ShouldEqual, true) + So(stub.userCalled, ShouldEqual, true) }) Convey("gets nice error if ldap is enabled but not configured", func() { @@ -110,13 +138,11 @@ func TestMiddlewareContext(t *testing.T) { } getLDAPConfig = func() (*ldap.Config, error) { - config := &ldap.Config{ - Servers: []*ldap.ServerConfig{}, - } - return config, nil + return nil, errors.New("Something went wrong") } defer func() { + newLDAP = multildap.New isLDAPEnabled = ldap.IsEnabled getLDAPConfig = ldap.GetConfig }() @@ -129,20 +155,20 @@ func TestMiddlewareContext(t *testing.T) { OrgID: 4, }) - stub := &TestLDAP{ + stub := &TestMultiLDAP{ 
ID: 42, } - auth.LDAP = func(server *ldap.ServerConfig) ldap.IAuth { + newLDAP = func(servers []*ldap.ServerConfig) multildap.IMultiLDAP { return stub } - id, err := auth.GetUserID() + id, err := auth.Login() So(err, ShouldNotBeNil) - So(err.Error(), ShouldContainSubstring, "Failed to sync user") + So(err.Error(), ShouldContainSubstring, "Failed to get the user") So(id, ShouldNotEqual, 42) - So(stub.syncCalled, ShouldEqual, false) + So(stub.loginCalled, ShouldEqual, false) }) }) diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index ec7194a7fc66..1b465e7a4e54 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -7,6 +7,8 @@ import ( "strings" "time" + macaron "gopkg.in/macaron.v1" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/apikeygen" "github.com/grafana/grafana/pkg/infra/log" @@ -14,7 +16,6 @@ import ( m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" - macaron "gopkg.in/macaron.v1" ) var ( diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index 694ecef14f41..c50960569b63 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -300,7 +300,7 @@ func TestMiddlewareContext(t *testing.T) { setting.AuthProxyEnabled = true setting.AuthProxyWhitelist = "" setting.AuthProxyAutoSignUp = true - setting.LdapEnabled = true + setting.LDAPEnabled = true setting.AuthProxyHeaderName = "X-WEBAUTH-USER" setting.AuthProxyHeaderProperty = "username" name := "markelog" @@ -326,7 +326,7 @@ func TestMiddlewareContext(t *testing.T) { }) middlewareScenario(t, "should create an user from a header", func(sc *scenarioContext) { - setting.LdapEnabled = false + setting.LDAPEnabled = false setting.AuthProxyAutoSignUp = true bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { @@ -354,7 +354,7 @@ func 
TestMiddlewareContext(t *testing.T) { }) middlewareScenario(t, "should get an existing user from header", func(sc *scenarioContext) { - setting.LdapEnabled = false + setting.LDAPEnabled = false bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { query.Result = &m.SignedInUser{OrgId: 2, UserId: 12} @@ -379,7 +379,7 @@ func TestMiddlewareContext(t *testing.T) { middlewareScenario(t, "should allow the request from whitelist IP", func(sc *scenarioContext) { setting.AuthProxyWhitelist = "192.168.1.0/24, 2001::0/120" - setting.LdapEnabled = false + setting.LDAPEnabled = false bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { query.Result = &m.SignedInUser{OrgId: 4, UserId: 33} @@ -405,7 +405,7 @@ func TestMiddlewareContext(t *testing.T) { middlewareScenario(t, "should not allow the request from whitelist IP", func(sc *scenarioContext) { setting.AuthProxyWhitelist = "8.8.8.8" - setting.LdapEnabled = false + setting.LDAPEnabled = false bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { query.Result = &m.SignedInUser{OrgId: 4, UserId: 33} diff --git a/pkg/models/user.go b/pkg/models/user.go index 69031e403386..de61150512d2 100644 --- a/pkg/models/user.go +++ b/pkg/models/user.go @@ -30,6 +30,7 @@ type User struct { EmailVerified bool Theme string HelpFlags1 HelpFlags1 + IsDisabled bool IsAdmin bool OrgId int64 @@ -88,6 +89,16 @@ type UpdateUserPermissionsCommand struct { UserId int64 `json:"-"` } +type DisableUserCommand struct { + UserId int64 + IsDisabled bool +} + +type BatchDisableUsersCommand struct { + UserIds []int64 + IsDisabled bool +} + type DeleteUserCommand struct { UserId int64 } @@ -203,6 +214,7 @@ type UserProfileDTO struct { Theme string `json:"theme"` OrgId int64 `json:"orgId"` IsGrafanaAdmin bool `json:"isGrafanaAdmin"` + IsDisabled bool `json:"isDisabled"` } type UserSearchHitDTO struct { @@ -212,6 +224,7 @@ type UserSearchHitDTO struct { Email string `json:"email"` AvatarUrl string `json:"avatarUrl"` 
IsAdmin bool `json:"isAdmin"` + IsDisabled bool `json:"isDisabled"` LastSeenAt time.Time `json:"lastSeenAt"` LastSeenAtAge string `json:"lastSeenAtAge"` } diff --git a/pkg/models/user_auth.go b/pkg/models/user_auth.go index 11018c7fd716..e6f02212ebad 100644 --- a/pkg/models/user_auth.go +++ b/pkg/models/user_auth.go @@ -6,6 +6,10 @@ import ( "golang.org/x/oauth2" ) +const ( + AuthModuleLDAP = "ldap" +) + type UserAuth struct { Id int64 UserId int64 @@ -29,6 +33,7 @@ type ExternalUserInfo struct { Groups []string OrgRoles map[int64]RoleType IsGrafanaAdmin *bool // This is a pointer to know if we should sync this or not (nil = ignore sync) + IsDisabled bool } // --------------------- @@ -81,6 +86,12 @@ type GetUserByAuthInfoQuery struct { Result *User } +type GetExternalUserInfoByLoginQuery struct { + LoginOrEmail string + + Result *ExternalUserInfo +} + type GetAuthInfoQuery struct { UserId int64 AuthModule string diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 57dacbe43b51..1379daf5a6da 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -18,16 +18,18 @@ import ( // DataSourcePlugin contains all metadata about a datasource plugin type DataSourcePlugin struct { FrontendPluginBase - Annotations bool `json:"annotations"` - Metrics bool `json:"metrics"` - Alerting bool `json:"alerting"` - Explore bool `json:"explore"` - Table bool `json:"tables"` - Logs bool `json:"logs"` - QueryOptions map[string]bool `json:"queryOptions,omitempty"` - BuiltIn bool `json:"builtIn,omitempty"` - Mixed bool `json:"mixed,omitempty"` - Routes []*AppPluginRoute `json:"routes"` + Annotations bool `json:"annotations"` + Metrics bool `json:"metrics"` + Alerting bool `json:"alerting"` + Explore bool `json:"explore"` + Table bool `json:"tables"` + HiddenQueries bool `json:"hiddenQueries"` + Logs bool `json:"logs"` + QueryOptions map[string]bool `json:"queryOptions,omitempty"` + BuiltIn bool `json:"builtIn,omitempty"` + 
Mixed bool `json:"mixed,omitempty"` + Routes []*AppPluginRoute `json:"routes"` + Streaming bool `json:"streaming"` Backend bool `json:"backend,omitempty"` Executable string `json:"executable,omitempty"` diff --git a/pkg/services/alerting/conditions/evaluator.go b/pkg/services/alerting/conditions/evaluator.go index eef593d39e23..3045b633f1e4 100644 --- a/pkg/services/alerting/conditions/evaluator.go +++ b/pkg/services/alerting/conditions/evaluator.go @@ -14,22 +14,25 @@ var ( rangedTypes = []string{"within_range", "outside_range"} ) +// AlertEvaluator evaluates the reduced value of a timeserie. +// Returning true if a timeserie is violating the condition +// ex: ThresholdEvaluator, NoValueEvaluator, RangeEvaluator type AlertEvaluator interface { Eval(reducedValue null.Float) bool } -type NoValueEvaluator struct{} +type noValueEvaluator struct{} -func (e *NoValueEvaluator) Eval(reducedValue null.Float) bool { +func (e *noValueEvaluator) Eval(reducedValue null.Float) bool { return !reducedValue.Valid } -type ThresholdEvaluator struct { +type thresholdEvaluator struct { Type string Threshold float64 } -func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvaluator, error) { +func newThresholdEvaluator(typ string, model *simplejson.Json) (*thresholdEvaluator, error) { params := model.Get("params").MustArray() if len(params) == 0 { return nil, fmt.Errorf("Evaluator missing threshold parameter") @@ -40,12 +43,12 @@ func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvalua return nil, fmt.Errorf("Evaluator has invalid parameter") } - defaultEval := &ThresholdEvaluator{Type: typ} + defaultEval := &thresholdEvaluator{Type: typ} defaultEval.Threshold, _ = firstParam.Float64() return defaultEval, nil } -func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool { +func (e *thresholdEvaluator) Eval(reducedValue null.Float) bool { if !reducedValue.Valid { return false } @@ -60,13 +63,13 @@ func (e *ThresholdEvaluator) 
Eval(reducedValue null.Float) bool { return false } -type RangedEvaluator struct { +type rangedEvaluator struct { Type string Lower float64 Upper float64 } -func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, error) { +func newRangedEvaluator(typ string, model *simplejson.Json) (*rangedEvaluator, error) { params := model.Get("params").MustArray() if len(params) == 0 { return nil, alerting.ValidationError{Reason: "Evaluator missing threshold parameter"} @@ -82,13 +85,13 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e return nil, alerting.ValidationError{Reason: "Evaluator has invalid second parameter"} } - rangedEval := &RangedEvaluator{Type: typ} + rangedEval := &rangedEvaluator{Type: typ} rangedEval.Lower, _ = firstParam.Float64() rangedEval.Upper, _ = secondParam.Float64() return rangedEval, nil } -func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { +func (e *rangedEvaluator) Eval(reducedValue null.Float) bool { if !reducedValue.Valid { return false } @@ -105,6 +108,8 @@ func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { return false } +// NewAlertEvaluator is a factory function for returning +// an `AlertEvaluator` depending on the json model. 
func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { typ := model.Get("type").MustString() if typ == "" { @@ -120,7 +125,7 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { } if typ == "no_value" { - return &NoValueEvaluator{}, nil + return &noValueEvaluator{}, nil } return nil, fmt.Errorf("Evaluator invalid evaluator type: %s", typ) diff --git a/pkg/services/alerting/conditions/query.go b/pkg/services/alerting/conditions/query.go index 37dbd9b3f7a6..b29f39b49169 100644 --- a/pkg/services/alerting/conditions/query.go +++ b/pkg/services/alerting/conditions/query.go @@ -17,26 +17,31 @@ import ( func init() { alerting.RegisterCondition("query", func(model *simplejson.Json, index int) (alerting.Condition, error) { - return NewQueryCondition(model, index) + return newQueryCondition(model, index) }) } +// QueryCondition is responsible for issue and query, reduce the +// timeseries into single values and evaluate if they are firing or not. type QueryCondition struct { Index int Query AlertQuery - Reducer QueryReducer + Reducer *queryReducer Evaluator AlertEvaluator Operator string HandleRequest tsdb.HandleRequestFunc } +// AlertQuery contains information about what datasource a query +// should be sent to and the query object. type AlertQuery struct { Model *simplejson.Json - DatasourceId int64 + DatasourceID int64 From string To string } +// Eval evaluates the `QueryCondition`. 
func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.ConditionResult, error) { timeRange := tsdb.NewTimeRange(c.Query.From, c.Query.To) @@ -101,8 +106,8 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) { getDsInfo := &models.GetDataSourceByIdQuery{ - Id: c.Query.DatasourceId, - OrgId: context.Rule.OrgId, + Id: c.Query.DatasourceID, + OrgId: context.Rule.OrgID, } if err := bus.Dispatch(getDsInfo); err != nil { @@ -154,16 +159,16 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, t return req } -func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { +func newQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { condition := QueryCondition{} condition.Index = index condition.HandleRequest = tsdb.HandleRequest - queryJson := model.Get("query") + queryJSON := model.Get("query") - condition.Query.Model = queryJson.Get("model") - condition.Query.From = queryJson.Get("params").MustArray()[1].(string) - condition.Query.To = queryJson.Get("params").MustArray()[2].(string) + condition.Query.Model = queryJSON.Get("model") + condition.Query.From = queryJSON.Get("params").MustArray()[1].(string) + condition.Query.To = queryJSON.Get("params").MustArray()[2].(string) if err := validateFromValue(condition.Query.From); err != nil { return nil, err @@ -173,20 +178,20 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro return nil, err } - condition.Query.DatasourceId = queryJson.Get("datasourceId").MustInt64() + condition.Query.DatasourceID = queryJSON.Get("datasourceId").MustInt64() - reducerJson := model.Get("reducer") - condition.Reducer = NewSimpleReducer(reducerJson.Get("type").MustString()) + reducerJSON := model.Get("reducer") + condition.Reducer = 
newSimpleReducer(reducerJSON.Get("type").MustString()) - evaluatorJson := model.Get("evaluator") - evaluator, err := NewAlertEvaluator(evaluatorJson) + evaluatorJSON := model.Get("evaluator") + evaluator, err := NewAlertEvaluator(evaluatorJSON) if err != nil { return nil, err } condition.Evaluator = evaluator - operatorJson := model.Get("operator") - operator := operatorJson.Get("type").MustString("and") + operatorJSON := model.Get("operator") + operator := operatorJSON.Get("type").MustString("and") condition.Operator = operator return &condition, nil diff --git a/pkg/services/alerting/conditions/query_test.go b/pkg/services/alerting/conditions/query_test.go index 2e1ecf5f39c5..4c2b1689277a 100644 --- a/pkg/services/alerting/conditions/query_test.go +++ b/pkg/services/alerting/conditions/query_test.go @@ -27,16 +27,15 @@ func TestQueryCondition(t *testing.T) { So(ctx.condition.Query.From, ShouldEqual, "5m") So(ctx.condition.Query.To, ShouldEqual, "now") - So(ctx.condition.Query.DatasourceId, ShouldEqual, 1) + So(ctx.condition.Query.DatasourceID, ShouldEqual, 1) Convey("Can read query reducer", func() { - reducer, ok := ctx.condition.Reducer.(*SimpleReducer) - So(ok, ShouldBeTrue) + reducer := ctx.condition.Reducer So(reducer.Type, ShouldEqual, "avg") }) Convey("Can read evaluator", func() { - evaluator, ok := ctx.condition.Evaluator.(*ThresholdEvaluator) + evaluator, ok := ctx.condition.Evaluator.(*thresholdEvaluator) So(ok, ShouldBeTrue) So(evaluator.Type, ShouldEqual, "gt") }) @@ -163,7 +162,7 @@ func (ctx *queryConditionTestContext) exec() (*alerting.ConditionResult, error) }`)) So(err, ShouldBeNil) - condition, err := NewQueryCondition(jsonModel, 0) + condition, err := newQueryCondition(jsonModel, 0) So(err, ShouldBeNil) ctx.condition = condition diff --git a/pkg/services/alerting/conditions/reducer.go b/pkg/services/alerting/conditions/reducer.go index f55545be311f..bf57110ea1c9 100644 --- a/pkg/services/alerting/conditions/reducer.go +++ 
b/pkg/services/alerting/conditions/reducer.go @@ -9,15 +9,15 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) -type QueryReducer interface { - Reduce(timeSeries *tsdb.TimeSeries) null.Float -} +// queryReducer reduces an timeserie to a nullable float +type queryReducer struct { -type SimpleReducer struct { + // Type is how the timeserie should be reduced. + // Ex avg, sum, max, min, count Type string } -func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { +func (s *queryReducer) Reduce(series *tsdb.TimeSeries) null.Float { if len(series.Points) == 0 { return null.FloatFromPtr(nil) } @@ -31,7 +31,7 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { for _, point := range series.Points { if point[0].Valid { value += point[0].Float64 - validPointsCount += 1 + validPointsCount++ allNull = false } } @@ -117,8 +117,8 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { return null.FloatFrom(value) } -func NewSimpleReducer(typ string) *SimpleReducer { - return &SimpleReducer{Type: typ} +func newSimpleReducer(t string) *queryReducer { + return &queryReducer{Type: t} } func calculateDiff(series *tsdb.TimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) { diff --git a/pkg/services/alerting/conditions/reducer_test.go b/pkg/services/alerting/conditions/reducer_test.go index d2c21771d0b1..eac71378f3d7 100644 --- a/pkg/services/alerting/conditions/reducer_test.go +++ b/pkg/services/alerting/conditions/reducer_test.go @@ -53,7 +53,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("median should ignore null values", func() { - reducer := NewSimpleReducer("median") + reducer := newSimpleReducer("median") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -76,7 +76,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("avg with only nulls", func() { - reducer := NewSimpleReducer("avg") + reducer := newSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time 
serie", } @@ -87,7 +87,7 @@ func TestSimpleReducer(t *testing.T) { Convey("count_non_null", func() { Convey("with null values and real values", func() { - reducer := NewSimpleReducer("count_non_null") + reducer := newSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -102,7 +102,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("with null values", func() { - reducer := NewSimpleReducer("count_non_null") + reducer := newSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -115,7 +115,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("avg of number values and null values should ignore nulls", func() { - reducer := NewSimpleReducer("avg") + reducer := newSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -144,7 +144,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("diff with only nulls", func() { - reducer := NewSimpleReducer("diff") + reducer := newSimpleReducer("diff") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -171,7 +171,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("percent_diff with only nulls", func() { - reducer := NewSimpleReducer("percent_diff") + reducer := newSimpleReducer("percent_diff") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -184,8 +184,8 @@ func TestSimpleReducer(t *testing.T) { }) } -func testReducer(typ string, datapoints ...float64) float64 { - reducer := NewSimpleReducer(typ) +func testReducer(reducerType string, datapoints ...float64) float64 { + reducer := newSimpleReducer(reducerType) series := &tsdb.TimeSeries{ Name: "test time serie", } diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index c67b59839d79..8794eb51c325 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -17,45 +17,44 @@ import ( "golang.org/x/sync/errgroup" ) -type AlertingService struct { +// AlertEngine is the background process that +// schedules alert 
evaluations and makes sure notifications +// are sent. +type AlertEngine struct { RenderService rendering.Service `inject:""` - execQueue chan *Job - //clock clock.Clock + execQueue chan *Job ticker *Ticker - scheduler Scheduler - evalHandler EvalHandler - ruleReader RuleReader + scheduler scheduler + evalHandler evalHandler + ruleReader ruleReader log log.Logger - resultHandler ResultHandler + resultHandler resultHandler } func init() { - registry.RegisterService(&AlertingService{}) + registry.RegisterService(&AlertEngine{}) } -func NewEngine() *AlertingService { - e := &AlertingService{} - e.Init() - return e -} - -func (e *AlertingService) IsDisabled() bool { +// IsDisabled returns true if the alerting service is disable for this instance. +func (e *AlertEngine) IsDisabled() bool { return !setting.AlertingEnabled || !setting.ExecuteAlerts } -func (e *AlertingService) Init() error { +// Init initalizes the AlertingService. +func (e *AlertEngine) Init() error { e.ticker = NewTicker(time.Now(), time.Second*0, clock.New()) e.execQueue = make(chan *Job, 1000) - e.scheduler = NewScheduler() + e.scheduler = newScheduler() e.evalHandler = NewEvalHandler() - e.ruleReader = NewRuleReader() + e.ruleReader = newRuleReader() e.log = log.New("alerting.engine") - e.resultHandler = NewResultHandler(e.RenderService) + e.resultHandler = newResultHandler(e.RenderService) return nil } -func (e *AlertingService) Run(ctx context.Context) error { +// Run starts the alerting service background process. 
+func (e *AlertEngine) Run(ctx context.Context) error { alertGroup, ctx := errgroup.WithContext(ctx) alertGroup.Go(func() error { return e.alertingTicker(ctx) }) alertGroup.Go(func() error { return e.runJobDispatcher(ctx) }) @@ -64,7 +63,7 @@ func (e *AlertingService) Run(ctx context.Context) error { return err } -func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { +func (e *AlertEngine) alertingTicker(grafanaCtx context.Context) error { defer func() { if err := recover(); err != nil { e.log.Error("Scheduler Panic: stopping alertingTicker", "error", err, "stack", log.Stack(1)) @@ -80,7 +79,7 @@ func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { case tick := <-e.ticker.C: // TEMP SOLUTION update rules ever tenth tick if tickIndex%10 == 0 { - e.scheduler.Update(e.ruleReader.Fetch()) + e.scheduler.Update(e.ruleReader.fetch()) } e.scheduler.Tick(tick, e.execQueue) @@ -89,7 +88,7 @@ func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { } } -func (e *AlertingService) runJobDispatcher(grafanaCtx context.Context) error { +func (e *AlertEngine) runJobDispatcher(grafanaCtx context.Context) error { dispatcherGroup, alertCtx := errgroup.WithContext(grafanaCtx) for { @@ -106,7 +105,7 @@ var ( unfinishedWorkTimeout = time.Second * 5 ) -func (e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *Job) error { +func (e *AlertEngine) processJobWithRetry(grafanaCtx context.Context, job *Job) error { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) @@ -141,7 +140,7 @@ func (e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *J } } -func (e *AlertingService) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { +func (e *AlertEngine) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { job.Running = false close(cancelChan) for cancelFn := range cancelChan { @@ -150,7 +149,7 
@@ func (e *AlertingService) endJob(err error, cancelChan chan context.CancelFunc, return err } -func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { +func (e *AlertEngine) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) @@ -181,8 +180,8 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel e.evalHandler.Eval(evalContext) - span.SetTag("alertId", evalContext.Rule.Id) - span.SetTag("dashboardId", evalContext.Rule.DashboardId) + span.SetTag("alertId", evalContext.Rule.ID) + span.SetTag("dashboardId", evalContext.Rule.DashboardID) span.SetTag("firing", evalContext.Firing) span.SetTag("nodatapoints", evalContext.NoDataFound) span.SetTag("attemptID", attemptID) @@ -195,7 +194,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel ) if attemptID < setting.AlertingMaxAttempts { span.Finish() - e.log.Debug("Job Execution attempt triggered retry", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) + e.log.Debug("Job Execution attempt triggered retry", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.ID, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) attemptChan <- (attemptID + 1) return } @@ -211,9 +210,9 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel // dont reuse the evalContext and get its own context. 
evalContext.Ctx = resultHandleCtx evalContext.Rule.State = evalContext.GetNewState() - e.resultHandler.Handle(evalContext) + e.resultHandler.handle(evalContext) span.Finish() - e.log.Debug("Job Execution completed", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) + e.log.Debug("Job Execution completed", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.ID, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) close(attemptChan) }() } diff --git a/pkg/services/alerting/engine_integration_test.go b/pkg/services/alerting/engine_integration_test.go index 3d54bdc3b4af..6b6fab389d02 100644 --- a/pkg/services/alerting/engine_integration_test.go +++ b/pkg/services/alerting/engine_integration_test.go @@ -17,7 +17,8 @@ import ( func TestEngineTimeouts(t *testing.T) { Convey("Alerting engine timeout tests", t, func() { - engine := NewEngine() + engine := &AlertEngine{} + engine.Init() setting.AlertingNotificationTimeout = 30 * time.Second setting.AlertingMaxAttempts = 3 engine.resultHandler = &FakeResultHandler{} @@ -89,7 +90,7 @@ func (handler *FakeCommonTimeoutHandler) Eval(evalContext *EvalContext) { evalContext.Error = errors.New("Fake evaluation timeout test failure; wrong response") } -func (handler *FakeCommonTimeoutHandler) Handle(evalContext *EvalContext) error { +func (handler *FakeCommonTimeoutHandler) handle(evalContext *EvalContext) error { // 1. prepare mock server path := "/resulthandle" srv := runBusyServer(path, handler.ServerBusySleepDuration) diff --git a/pkg/services/alerting/engine_test.go b/pkg/services/alerting/engine_test.go index 2e2ed0c9b163..86980c21bd48 100644 --- a/pkg/services/alerting/engine_test.go +++ b/pkg/services/alerting/engine_test.go @@ -6,9 +6,10 @@ import ( "math" "testing" + "time" + "github.com/grafana/grafana/pkg/setting" . 
"github.com/smartystreets/goconvey/convey" - "time" ) type FakeEvalHandler struct { @@ -32,13 +33,14 @@ func (handler *FakeEvalHandler) Eval(evalContext *EvalContext) { type FakeResultHandler struct{} -func (handler *FakeResultHandler) Handle(evalContext *EvalContext) error { +func (handler *FakeResultHandler) handle(evalContext *EvalContext) error { return nil } func TestEngineProcessJob(t *testing.T) { Convey("Alerting engine job processing", t, func() { - engine := NewEngine() + engine := &AlertEngine{} + engine.Init() setting.AlertingEvaluationTimeout = 30 * time.Second setting.AlertingNotificationTimeout = 30 * time.Second setting.AlertingMaxAttempts = 3 diff --git a/pkg/services/alerting/eval_context.go b/pkg/services/alerting/eval_context.go index 7d9a9014086b..8436e9c9a780 100644 --- a/pkg/services/alerting/eval_context.go +++ b/pkg/services/alerting/eval_context.go @@ -11,6 +11,7 @@ import ( "github.com/grafana/grafana/pkg/setting" ) +// EvalContext is the context object for an alert evaluation. type EvalContext struct { Firing bool IsTestRun bool @@ -25,7 +26,7 @@ type EvalContext struct { dashboardRef *models.DashboardRef - ImagePublicUrl string + ImagePublicURL string ImageOnDiskPath string NoDataFound bool PrevAlertState models.AlertStateType @@ -33,6 +34,7 @@ type EvalContext struct { Ctx context.Context } +// NewEvalContext is the EvalContext constructor. func NewEvalContext(alertCtx context.Context, rule *Rule) *EvalContext { return &EvalContext{ Ctx: alertCtx, @@ -45,12 +47,14 @@ func NewEvalContext(alertCtx context.Context, rule *Rule) *EvalContext { } } +// StateDescription contains visual information about the alert state. type StateDescription struct { Color string Text string Data string } +// GetStateModel returns the `StateDescription` based on current state. 
func (c *EvalContext) GetStateModel() *StateDescription { switch c.Rule.State { case models.AlertStateOK: @@ -78,24 +82,27 @@ func (c *EvalContext) GetStateModel() *StateDescription { } } -func (c *EvalContext) ShouldUpdateAlertState() bool { +func (c *EvalContext) shouldUpdateAlertState() bool { return c.Rule.State != c.PrevAlertState } -func (a *EvalContext) GetDurationMs() float64 { - return float64(a.EndTime.Nanosecond()-a.StartTime.Nanosecond()) / float64(1000000) +// GetDurationMs returns the duration of the alert evaluation. +func (c *EvalContext) GetDurationMs() float64 { + return float64(c.EndTime.Nanosecond()-c.StartTime.Nanosecond()) / float64(1000000) } +// GetNotificationTitle returns the title of the alert rule including alert state. func (c *EvalContext) GetNotificationTitle() string { return "[" + c.GetStateModel().Text + "] " + c.Rule.Name } +// GetDashboardUID returns the dashboard uid for the alert rule. func (c *EvalContext) GetDashboardUID() (*models.DashboardRef, error) { if c.dashboardRef != nil { return c.dashboardRef, nil } - uidQuery := &models.GetDashboardRefByIdQuery{Id: c.Rule.DashboardId} + uidQuery := &models.GetDashboardRefByIdQuery{Id: c.Rule.DashboardID} if err := bus.Dispatch(uidQuery); err != nil { return nil, err } @@ -106,7 +113,8 @@ func (c *EvalContext) GetDashboardUID() (*models.DashboardRef, error) { const urlFormat = "%s?fullscreen&edit&tab=alert&panelId=%d&orgId=%d" -func (c *EvalContext) GetRuleUrl() (string, error) { +// GetRuleURL returns the url to the dashboard containing the alert. 
+func (c *EvalContext) GetRuleURL() (string, error) { if c.IsTestRun { return setting.AppUrl, nil } @@ -115,10 +123,10 @@ func (c *EvalContext) GetRuleUrl() (string, error) { if err != nil { return "", err } - return fmt.Sprintf(urlFormat, models.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelId, c.Rule.OrgId), nil + return fmt.Sprintf(urlFormat, models.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelID, c.Rule.OrgID), nil } -// GetNewState returns the new state from the alert rule evaluation +// GetNewState returns the new state from the alert rule evaluation. func (c *EvalContext) GetNewState() models.AlertStateType { ns := getNewStateInternal(c) if ns != models.AlertStateAlerting || c.Rule.For == 0 { @@ -140,7 +148,7 @@ func (c *EvalContext) GetNewState() models.AlertStateType { func getNewStateInternal(c *EvalContext) models.AlertStateType { if c.Error != nil { c.log.Error("Alert Rule Result Error", - "ruleId", c.Rule.Id, + "ruleId", c.Rule.ID, "name", c.Rule.Name, "error", c.Error, "changing state to", c.Rule.ExecutionErrorState.ToAlertState()) @@ -157,7 +165,7 @@ func getNewStateInternal(c *EvalContext) models.AlertStateType { if c.NoDataFound { c.log.Info("Alert Rule returned no data", - "ruleId", c.Rule.Id, + "ruleId", c.Rule.ID, "name", c.Rule.Name, "changing state to", c.Rule.NoDataState.ToAlertState()) diff --git a/pkg/services/alerting/eval_context_test.go b/pkg/services/alerting/eval_context_test.go index 06d3ae16ed80..76cbc90c640c 100644 --- a/pkg/services/alerting/eval_context_test.go +++ b/pkg/services/alerting/eval_context_test.go @@ -18,7 +18,7 @@ func TestStateIsUpdatedWhenNeeded(t *testing.T) { ctx.PrevAlertState = models.AlertStateOK ctx.Rule.State = models.AlertStateAlerting - if !ctx.ShouldUpdateAlertState() { + if !ctx.shouldUpdateAlertState() { t.Fatalf("expected should updated to be true") } }) @@ -27,7 +27,7 @@ func TestStateIsUpdatedWhenNeeded(t *testing.T) { ctx.PrevAlertState = models.AlertStateOK ctx.Rule.State = 
models.AlertStateOK - if ctx.ShouldUpdateAlertState() { + if ctx.shouldUpdateAlertState() { t.Fatalf("expected should updated to be false") } }) diff --git a/pkg/services/alerting/eval_handler.go b/pkg/services/alerting/eval_handler.go index 22d172568f55..572345362a86 100644 --- a/pkg/services/alerting/eval_handler.go +++ b/pkg/services/alerting/eval_handler.go @@ -9,11 +9,13 @@ import ( "github.com/grafana/grafana/pkg/infra/metrics" ) +// DefaultEvalHandler is responsible for evaluating the alert rule. type DefaultEvalHandler struct { log log.Logger alertJobTimeout time.Duration } +// NewEvalHandler is the `DefaultEvalHandler` constructor. func NewEvalHandler() *DefaultEvalHandler { return &DefaultEvalHandler{ log: log.New("alerting.evalHandler"), @@ -21,6 +23,7 @@ func NewEvalHandler() *DefaultEvalHandler { } } +// Eval evaluated the alert rule. func (e *DefaultEvalHandler) Eval(context *EvalContext) { firing := true noDataFound := true diff --git a/pkg/services/alerting/extractor.go b/pkg/services/alerting/extractor.go index 6bf5e786c198..6fb109d71a23 100644 --- a/pkg/services/alerting/extractor.go +++ b/pkg/services/alerting/extractor.go @@ -11,7 +11,7 @@ import ( "github.com/grafana/grafana/pkg/models" ) -// DashAlertExtractor extracts alerts from the dashboard json +// DashAlertExtractor extracts alerts from the dashboard json. type DashAlertExtractor struct { User *models.SignedInUser Dash *models.Dashboard @@ -19,7 +19,7 @@ type DashAlertExtractor struct { log log.Logger } -// NewDashAlertExtractor returns a new DashAlertExtractor +// NewDashAlertExtractor returns a new DashAlertExtractor. 
func NewDashAlertExtractor(dash *models.Dashboard, orgID int64, user *models.SignedInUser) *DashAlertExtractor { return &DashAlertExtractor{ User: user, @@ -207,7 +207,7 @@ func validateAlertRule(alert *models.Alert) bool { return alert.ValidToSave() } -// GetAlerts extracts alerts from the dashboard json and does full validation on the alert json data +// GetAlerts extracts alerts from the dashboard json and does full validation on the alert json data. func (e *DashAlertExtractor) GetAlerts() ([]*models.Alert, error) { return e.extractAlerts(validateAlertRule) } @@ -247,7 +247,7 @@ func (e *DashAlertExtractor) extractAlerts(validateFunc func(alert *models.Alert } // ValidateAlerts validates alerts in the dashboard json but does not require a valid dashboard id -// in the first validation pass +// in the first validation pass. func (e *DashAlertExtractor) ValidateAlerts() error { _, err := e.extractAlerts(func(alert *models.Alert) bool { return alert.OrgId != 0 && alert.PanelId != 0 }) return err diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go index 716ff746cd84..86f0725e4cec 100644 --- a/pkg/services/alerting/extractor_test.go +++ b/pkg/services/alerting/extractor_test.go @@ -52,10 +52,10 @@ func TestAlertRuleExtraction(t *testing.T) { So(err, ShouldBeNil) Convey("Extractor should not modify the original json", func() { - dashJson, err := simplejson.NewJson(json) + dashJSON, err := simplejson.NewJson(json) So(err, ShouldBeNil) - dash := models.NewDashboardFromJson(dashJson) + dash := models.NewDashboardFromJson(dashJSON) getTarget := func(j *simplejson.Json) string { rowObj := j.Get("rows").MustArray()[0] @@ -68,23 +68,23 @@ func TestAlertRuleExtraction(t *testing.T) { } Convey("Dashboard json rows.panels.alert.query.model.target should be empty", func() { - So(getTarget(dashJson), ShouldEqual, "") + So(getTarget(dashJSON), ShouldEqual, "") }) extractor := NewDashAlertExtractor(dash, 1, nil) _, _ = 
extractor.GetAlerts() Convey("Dashboard json should not be updated after extracting rules", func() { - So(getTarget(dashJson), ShouldEqual, "") + So(getTarget(dashJSON), ShouldEqual, "") }) }) Convey("Parsing and validating dashboard containing graphite alerts", func() { - dashJson, err := simplejson.NewJson(json) + dashJSON, err := simplejson.NewJson(json) So(err, ShouldBeNil) - dash := models.NewDashboardFromJson(dashJson) + dash := models.NewDashboardFromJson(dashJSON) extractor := NewDashAlertExtractor(dash, 1, nil) alerts, err := extractor.GetAlerts() @@ -147,12 +147,12 @@ func TestAlertRuleExtraction(t *testing.T) { }) Convey("Panels missing id should return error", func() { - panelWithoutId, err := ioutil.ReadFile("./testdata/panels-missing-id.json") + panelWithoutID, err := ioutil.ReadFile("./testdata/panels-missing-id.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson(panelWithoutId) + dashJSON, err := simplejson.NewJson(panelWithoutID) So(err, ShouldBeNil) - dash := models.NewDashboardFromJson(dashJson) + dash := models.NewDashboardFromJson(dashJSON) extractor := NewDashAlertExtractor(dash, 1, nil) _, err = extractor.GetAlerts() @@ -163,12 +163,12 @@ func TestAlertRuleExtraction(t *testing.T) { }) Convey("Panel with id set to zero should return error", func() { - panelWithIdZero, err := ioutil.ReadFile("./testdata/panel-with-id-0.json") + panelWithIDZero, err := ioutil.ReadFile("./testdata/panel-with-id-0.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson(panelWithIdZero) + dashJSON, err := simplejson.NewJson(panelWithIDZero) So(err, ShouldBeNil) - dash := models.NewDashboardFromJson(dashJson) + dash := models.NewDashboardFromJson(dashJSON) extractor := NewDashAlertExtractor(dash, 1, nil) _, err = extractor.GetAlerts() @@ -182,9 +182,9 @@ func TestAlertRuleExtraction(t *testing.T) { json, err := ioutil.ReadFile("./testdata/v5-dashboard.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson(json) + dashJSON, err := 
simplejson.NewJson(json) So(err, ShouldBeNil) - dash := models.NewDashboardFromJson(dashJson) + dash := models.NewDashboardFromJson(dashJSON) extractor := NewDashAlertExtractor(dash, 1, nil) alerts, err := extractor.GetAlerts() @@ -211,9 +211,9 @@ func TestAlertRuleExtraction(t *testing.T) { json, err := ioutil.ReadFile("./testdata/influxdb-alert.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson(json) + dashJSON, err := simplejson.NewJson(json) So(err, ShouldBeNil) - dash := models.NewDashboardFromJson(dashJson) + dash := models.NewDashboardFromJson(dashJSON) extractor := NewDashAlertExtractor(dash, 1, nil) alerts, err := extractor.GetAlerts() @@ -240,10 +240,10 @@ func TestAlertRuleExtraction(t *testing.T) { json, err := ioutil.ReadFile("./testdata/collapsed-panels.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson(json) + dashJSON, err := simplejson.NewJson(json) So(err, ShouldBeNil) - dash := models.NewDashboardFromJson(dashJson) + dash := models.NewDashboardFromJson(dashJSON) extractor := NewDashAlertExtractor(dash, 1, nil) alerts, err := extractor.GetAlerts() diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go index bd7ca0877699..93d3127d6bdd 100644 --- a/pkg/services/alerting/interfaces.go +++ b/pkg/services/alerting/interfaces.go @@ -7,15 +7,16 @@ import ( "github.com/grafana/grafana/pkg/models" ) -type EvalHandler interface { +type evalHandler interface { Eval(evalContext *EvalContext) } -type Scheduler interface { +type scheduler interface { Tick(time time.Time, execQueue chan *Job) Update(rules []*Rule) } +// Notifier is responsible for sending alert notifications. 
type Notifier interface { Notify(evalContext *EvalContext) error GetType() string @@ -24,7 +25,7 @@ type Notifier interface { // ShouldNotify checks this evaluation should send an alert notification ShouldNotify(ctx context.Context, evalContext *EvalContext, notificationState *models.AlertNotificationState) bool - GetNotifierUid() string + GetNotifierUID() string GetIsDefault() bool GetSendReminder() bool GetDisableResolveMessage() bool @@ -48,6 +49,7 @@ func (notifiers notifierStateSlice) ShouldUploadImage() bool { return false } +// ConditionResult is the result of a condition evaluation. type ConditionResult struct { Firing bool NoDataFound bool @@ -55,6 +57,7 @@ type ConditionResult struct { EvalMatches []*EvalMatch } +// Condition is responsible for evaluating an alert condition. type Condition interface { Eval(result *EvalContext) (*ConditionResult, error) } diff --git a/pkg/services/alerting/models.go b/pkg/services/alerting/models.go index bbd8b98eea27..e802ab5a7a17 100644 --- a/pkg/services/alerting/models.go +++ b/pkg/services/alerting/models.go @@ -2,6 +2,8 @@ package alerting import "github.com/grafana/grafana/pkg/components/null" +// Job holds state about when the alert rule should +// be evaluated. type Job struct { Offset int64 OffsetWait bool @@ -10,18 +12,15 @@ type Job struct { Rule *Rule } +// ResultLogEntry represents log data for the alert evaluation. type ResultLogEntry struct { Message string Data interface{} } +// EvalMatch represents the serie violating the threshold. 
type EvalMatch struct { Value null.Float `json:"value"` Metric string `json:"metric"` Tags map[string]string `json:"tags"` } - -type Level struct { - Operator string - Value float64 -} diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index a2824da4a67c..84a26e9a64a0 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -13,6 +13,7 @@ import ( "github.com/grafana/grafana/pkg/setting" ) +// NotifierPlugin holds meta information about a notifier. type NotifierPlugin struct { Type string `json:"type"` Name string `json:"name"` @@ -21,11 +22,7 @@ type NotifierPlugin struct { Factory NotifierFactory `json:"-"` } -type NotificationService interface { - SendIfNeeded(context *EvalContext) error -} - -func NewNotificationService(renderService rendering.Service) NotificationService { +func newNotificationService(renderService rendering.Service) *notificationService { return ¬ificationService{ log: log.New("alerting.notifier"), renderService: renderService, @@ -38,7 +35,7 @@ type notificationService struct { } func (n *notificationService) SendIfNeeded(context *EvalContext) error { - notifierStates, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) + notifierStates, err := n.getNeededNotifiers(context.Rule.OrgID, context.Rule.Notifications, context) if err != nil { return err } @@ -59,13 +56,13 @@ func (n *notificationService) SendIfNeeded(context *EvalContext) error { func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, notifierState *notifierState) error { notifier := notifierState.notifier - n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUid(), "isDefault", notifier.GetIsDefault()) + n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUID(), "isDefault", notifier.GetIsDefault()) metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc() 
err := notifier.Notify(evalContext) if err != nil { - n.log.Error("failed to send notification", "uid", notifier.GetNotifierUid(), "error", err) + n.log.Error("failed to send notification", "uid", notifier.GetNotifierUID(), "error", err) } if evalContext.IsTestRun { @@ -109,7 +106,7 @@ func (n *notificationService) sendNotifications(evalContext *EvalContext, notifi for _, notifierState := range notifierStates { err := n.sendNotification(evalContext, notifierState) if err != nil { - n.log.Error("failed to send notification", "uid", notifierState.notifier.GetNotifierUid(), "error", err) + n.log.Error("failed to send notification", "uid", notifierState.notifier.GetNotifierUID(), "error", err) } } @@ -126,7 +123,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { Width: 1000, Height: 500, Timeout: setting.AlertingEvaluationTimeout, - OrgId: context.Rule.OrgId, + OrgId: context.Rule.OrgID, OrgRole: models.ROLE_ADMIN, ConcurrentLimit: setting.AlertingRenderLimit, } @@ -136,7 +133,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { return err } - renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?orgId=%d&panelId=%d", ref.Uid, ref.Slug, context.Rule.OrgId, context.Rule.PanelId) + renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?orgId=%d&panelId=%d", ref.Uid, ref.Slug, context.Rule.OrgID, context.Rule.PanelID) result, err := n.renderService.Render(context.Ctx, renderOpts) if err != nil { @@ -144,20 +141,20 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { } context.ImageOnDiskPath = result.FilePath - context.ImagePublicUrl, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) + context.ImagePublicURL, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) if err != nil { return err } - if context.ImagePublicUrl != "" { - n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicUrl) + if context.ImagePublicURL != "" { + n.log.Info("uploaded 
screenshot of alert to external image store", "url", context.ImagePublicURL) } return nil } -func (n *notificationService) getNeededNotifiers(orgId int64, notificationUids []string, evalContext *EvalContext) (notifierStateSlice, error) { - query := &models.GetAlertNotificationsWithUidToSendQuery{OrgId: orgId, Uids: notificationUids} +func (n *notificationService) getNeededNotifiers(orgID int64, notificationUids []string, evalContext *EvalContext) (notifierStateSlice, error) { + query := &models.GetAlertNotificationsWithUidToSendQuery{OrgId: orgID, Uids: notificationUids} if err := bus.Dispatch(query); err != nil { return nil, err @@ -173,8 +170,8 @@ func (n *notificationService) getNeededNotifiers(orgId int64, notificationUids [ query := &models.GetOrCreateNotificationStateQuery{ NotifierId: notification.Id, - AlertId: evalContext.Rule.Id, - OrgId: evalContext.Rule.OrgId, + AlertId: evalContext.Rule.ID, + OrgId: evalContext.Rule.OrgID, } err = bus.DispatchCtx(evalContext.Ctx, query) @@ -194,7 +191,7 @@ func (n *notificationService) getNeededNotifiers(orgId int64, notificationUids [ return result, nil } -// InitNotifier instantiate a new notifier based on the model +// InitNotifier instantiate a new notifier based on the model. func InitNotifier(model *models.AlertNotification) (Notifier, error) { notifierPlugin, found := notifierFactories[model.Type] if !found { @@ -204,6 +201,7 @@ func InitNotifier(model *models.AlertNotification) (Notifier, error) { return notifierPlugin.Factory(model) } +// NotifierFactory is a signature for creating notifiers. type NotifierFactory func(notification *models.AlertNotification) (Notifier, error) var notifierFactories = make(map[string]*NotifierPlugin) @@ -213,6 +211,7 @@ func RegisterNotifier(plugin *NotifierPlugin) { notifierFactories[plugin.Type] = plugin } +// GetNotifiers returns a list of metadata about available notifiers. 
func GetNotifiers() []*NotifierPlugin { list := make([]*NotifierPlugin, 0) diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go index bc2807d0d3cd..a8fd7db2f5ee 100644 --- a/pkg/services/alerting/notifiers/alertmanager.go +++ b/pkg/services/alerting/notifiers/alertmanager.go @@ -51,7 +51,7 @@ type AlertmanagerNotifier struct { // ShouldNotify returns true if the notifiers should be used depending on state func (am *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *models.AlertNotificationState) bool { - am.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) + am.log.Debug("Should notify", "ruleId", evalContext.Rule.ID, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) // Do not notify when we become OK for the first time. if (evalContext.PrevAlertState == models.AlertStatePending) && (evalContext.Rule.State == models.AlertStateOK) { @@ -89,8 +89,8 @@ func (am *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, m if description != "" { alertJSON.SetPath([]string{"annotations", "description"}, description) } - if evalContext.ImagePublicUrl != "" { - alertJSON.SetPath([]string{"annotations", "image"}, evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + alertJSON.SetPath([]string{"annotations", "image"}, evalContext.ImagePublicURL) } // Labels (from metrics tags + mandatory alertname). 
@@ -111,9 +111,9 @@ func (am *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, m // Notify sends alert notifications to the alert manager func (am *AlertmanagerNotifier) Notify(evalContext *alerting.EvalContext) error { - am.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.Id, "notification", am.Name) + am.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.ID, "notification", am.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { am.log.Error("Failed get rule link", "error", err) return err diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go index 6bd375b53bcb..f31c8b36d9c0 100644 --- a/pkg/services/alerting/notifiers/base.go +++ b/pkg/services/alerting/notifiers/base.go @@ -13,10 +13,11 @@ const ( triggMetrString = "Triggered metrics:\n\n" ) +// NotifierBase is the base implementation of a notifier. type NotifierBase struct { Name string Type string - Uid string + UID string IsDeault bool UploadImage bool SendReminder bool @@ -26,6 +27,7 @@ type NotifierBase struct { log log.Logger } +// NewNotifierBase returns a new `NotifierBase`. func NewNotifierBase(model *models.AlertNotification) NotifierBase { uploadImage := true value, exist := model.Settings.CheckGet("uploadImage") @@ -34,7 +36,7 @@ func NewNotifierBase(model *models.AlertNotification) NotifierBase { } return NotifierBase{ - Uid: model.Uid, + UID: model.Uid, Name: model.Name, IsDeault: model.IsDefault, Type: model.Type, @@ -108,30 +110,40 @@ func (n *NotifierBase) ShouldNotify(ctx context.Context, context *alerting.EvalC return true } +// GetType returns the notifier type. func (n *NotifierBase) GetType() string { return n.Type } +// NeedsImage returns true if an image is expected in the notification. 
func (n *NotifierBase) NeedsImage() bool { return n.UploadImage } -func (n *NotifierBase) GetNotifierUid() string { - return n.Uid +// GetNotifierUID returns the notifier `uid`. +func (n *NotifierBase) GetNotifierUID() string { + return n.UID } +// GetIsDefault returns true if the notifiers should +// be used for all alerts. func (n *NotifierBase) GetIsDefault() bool { return n.IsDeault } +// GetSendReminder returns true if reminders should be sent. func (n *NotifierBase) GetSendReminder() bool { return n.SendReminder } +// GetDisableResolveMessage returns true if ok alert notifications +// should be skipped. func (n *NotifierBase) GetDisableResolveMessage() bool { return n.DisableResolveMessage } +// GetFrequency returns the freqency for how often +// alerts should be evaluated. func (n *NotifierBase) GetFrequency() time.Duration { return n.Frequency } diff --git a/pkg/services/alerting/notifiers/base_test.go b/pkg/services/alerting/notifiers/base_test.go index 84294bfb29d5..799a843de2d4 100644 --- a/pkg/services/alerting/notifiers/base_test.go +++ b/pkg/services/alerting/notifiers/base_test.go @@ -178,24 +178,24 @@ func TestShouldSendAlertNotification(t *testing.T) { func TestBaseNotifier(t *testing.T) { Convey("default constructor for notifiers", t, func() { - bJson := simplejson.New() + bJSON := simplejson.New() model := &models.AlertNotification{ Uid: "1", Name: "name", Type: "email", - Settings: bJson, + Settings: bJSON, } Convey("can parse false value", func() { - bJson.Set("uploadImage", false) + bJSON.Set("uploadImage", false) base := NewNotifierBase(model) So(base.UploadImage, ShouldBeFalse) }) Convey("can parse true value", func() { - bJson.Set("uploadImage", true) + bJSON.Set("uploadImage", true) base := NewNotifierBase(model) So(base.UploadImage, ShouldBeTrue) diff --git a/pkg/services/alerting/notifiers/dingding.go b/pkg/services/alerting/notifiers/dingding.go index 45ce24c9aaa1..fc8ce477ecb9 100644 --- a/pkg/services/alerting/notifiers/dingding.go 
+++ b/pkg/services/alerting/notifiers/dingding.go @@ -12,8 +12,8 @@ import ( "github.com/grafana/grafana/pkg/services/alerting" ) -const DefaultDingdingMsgType = "link" -const DingdingOptionsTemplate = ` +const defaultDingdingMsgType = "link" +const dingdingOptionsTemplate = `

DingDing settings

Url @@ -21,7 +21,7 @@ const DingdingOptionsTemplate = `
MessageType - +
` @@ -30,57 +30,59 @@ func init() { Type: "dingding", Name: "DingDing", Description: "Sends HTTP POST request to DingDing", - Factory: NewDingDingNotifier, - OptionsTemplate: DingdingOptionsTemplate, + Factory: newDingDingNotifier, + OptionsTemplate: dingdingOptionsTemplate, }) } -func NewDingDingNotifier(model *models.AlertNotification) (alerting.Notifier, error) { +func newDingDingNotifier(model *models.AlertNotification) (alerting.Notifier, error) { url := model.Settings.Get("url").MustString() if url == "" { return nil, alerting.ValidationError{Reason: "Could not find url property in settings"} } - msgType := model.Settings.Get("msgType").MustString(DefaultDingdingMsgType) + msgType := model.Settings.Get("msgType").MustString(defaultDingdingMsgType) return &DingDingNotifier{ NotifierBase: NewNotifierBase(model), MsgType: msgType, - Url: url, + URL: url, log: log.New("alerting.notifier.dingding"), }, nil } +// DingDingNotifier is responsible for sending alert notifications to ding ding. type DingDingNotifier struct { NotifierBase MsgType string - Url string + URL string log log.Logger } -func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Sending dingding") +// Notify sends the alert notification to dingding. 
+func (dd *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { + dd.log.Info("Sending dingding") - messageUrl, err := evalContext.GetRuleUrl() + messageURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed to get messageUrl", "error", err, "dingding", this.Name) - messageUrl = "" + dd.log.Error("Failed to get messageUrl", "error", err, "dingding", dd.Name) + messageURL = "" } q := url.Values{ "pc_slide": {"false"}, - "url": {messageUrl}, + "url": {messageURL}, } // Use special link to auto open the message url outside of Dingding // Refer: https://open-doc.dingtalk.com/docs/doc.htm?treeId=385&articleId=104972&docType=1#s9 - messageUrl = "dingtalk://dingtalkclient/page/link?" + q.Encode() + messageURL = "dingtalk://dingtalkclient/page/link?" + q.Encode() - this.log.Info("messageUrl:" + messageUrl) + dd.log.Info("messageUrl:" + messageURL) message := evalContext.Rule.Message - picUrl := evalContext.ImagePublicUrl + picURL := evalContext.ImagePublicURL title := evalContext.GetNotificationTitle() if message == "" { message = title @@ -91,10 +93,10 @@ func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { } var bodyStr string - if this.MsgType == "actionCard" { + if dd.MsgType == "actionCard" { // Embed the pic into the markdown directly because actionCard doesn't have a picUrl field - if picUrl != "" { - message = "![](" + picUrl + ")\\n\\n" + message + if picURL != "" { + message = "![](" + picURL + ")\\n\\n" + message } bodyStr = `{ @@ -103,7 +105,7 @@ func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { "text": "` + strings.Replace(message, `"`, "'", -1) + `", "title": "` + strings.Replace(title, `"`, "'", -1) + `", "singleTitle": "More", - "singleURL": "` + messageUrl + `" + "singleURL": "` + messageURL + `" } }` } else { @@ -112,8 +114,8 @@ func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { "link": { "text": "` + message + `", "title": "` + 
title + `", - "picUrl": "` + picUrl + `", - "messageUrl": "` + messageUrl + `" + "picUrl": "` + picURL + `", + "messageUrl": "` + messageURL + `" } }` } @@ -121,7 +123,7 @@ func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON, err := simplejson.NewJson([]byte(bodyStr)) if err != nil { - this.log.Error("Failed to create Json data", "error", err, "dingding", this.Name) + dd.log.Error("Failed to create Json data", "error", err, "dingding", dd.Name) } body, err := bodyJSON.MarshalJSON() @@ -130,12 +132,12 @@ func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { } cmd := &models.SendWebhookSync{ - Url: this.Url, + Url: dd.URL, Body: string(body), } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send DingDing", "error", err, "dingding", this.Name) + dd.log.Error("Failed to send DingDing", "error", err, "dingding", dd.Name) return err } diff --git a/pkg/services/alerting/notifiers/dingding_test.go b/pkg/services/alerting/notifiers/dingding_test.go index 7d645d7efb62..09101af2f8f6 100644 --- a/pkg/services/alerting/notifiers/dingding_test.go +++ b/pkg/services/alerting/notifiers/dingding_test.go @@ -20,7 +20,7 @@ func TestDingDingNotifier(t *testing.T) { Settings: settingsJSON, } - _, err := NewDingDingNotifier(model) + _, err := newDingDingNotifier(model) So(err, ShouldNotBeNil) }) @@ -34,14 +34,13 @@ func TestDingDingNotifier(t *testing.T) { Settings: settingsJSON, } - not, err := NewDingDingNotifier(model) + not, err := newDingDingNotifier(model) notifier := not.(*DingDingNotifier) So(err, ShouldBeNil) So(notifier.Name, ShouldEqual, "dingding_testing") So(notifier.Type, ShouldEqual, "dingding") - So(notifier.Url, ShouldEqual, "https://www.google.com") + So(notifier.URL, ShouldEqual, "https://www.google.com") }) - }) } diff --git a/pkg/services/alerting/notifiers/discord.go b/pkg/services/alerting/notifiers/discord.go index 9933ad5e5871..e011ec0c3e93 100644 --- 
a/pkg/services/alerting/notifiers/discord.go +++ b/pkg/services/alerting/notifiers/discord.go @@ -21,18 +21,30 @@ func init() { Type: "discord", Name: "Discord", Description: "Sends notifications to Discord", - Factory: NewDiscordNotifier, + Factory: newDiscordNotifier, OptionsTemplate: `

Discord settings

-
- Webhook URL - +
+ Message Content + + + + Mention a group using @ or a user using <@ID> when notifying in a channel + +
+
+ Webhook URL +
`, }) } -func NewDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, error) { +func newDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, error) { + content := model.Settings.Get("content").MustString() url := model.Settings.Get("url").MustString() if url == "" { return nil, alerting.ValidationError{Reason: "Could not find webhook url property in settings"} @@ -40,29 +52,38 @@ func NewDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, err return &DiscordNotifier{ NotifierBase: NewNotifierBase(model), + Content: content, WebhookURL: url, log: log.New("alerting.notifier.discord"), }, nil } +// DiscordNotifier is responsible for sending alert +// notifications to discord. type DiscordNotifier struct { NotifierBase + Content string WebhookURL string log log.Logger } -func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Sending alert notification to", "webhook_url", this.WebhookURL) +// Notify send an alert notification to Discord. 
+func (dn *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { + dn.log.Info("Sending alert notification to", "webhook_url", dn.WebhookURL) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + dn.log.Error("Failed get rule link", "error", err) return err } bodyJSON := simplejson.New() bodyJSON.Set("username", "Grafana") + if dn.Content != "" { + bodyJSON.Set("content", dn.Content) + } + fields := make([]map[string]interface{}, 0) for _, evt := range evalContext.EvalMatches { @@ -85,7 +106,7 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { embed.Set("title", evalContext.GetNotificationTitle()) //Discord takes integer for color embed.Set("color", color) - embed.Set("url", ruleUrl) + embed.Set("url", ruleURL) embed.Set("description", evalContext.Rule.Message) embed.Set("type", "rich") embed.Set("fields", fields) @@ -94,9 +115,9 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { var image map[string]interface{} var embeddedImage = false - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { image = map[string]interface{}{ - "url": evalContext.ImagePublicUrl, + "url": evalContext.ImagePublicURL, } embed.Set("image", image) } else { @@ -112,7 +133,7 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { json, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: this.WebhookURL, + Url: dn.WebhookURL, HttpMethod: "POST", ContentType: "application/json", } @@ -120,22 +141,22 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { if !embeddedImage { cmd.Body = string(json) } else { - err := this.embedImage(cmd, evalContext.ImageOnDiskPath, json) + err := dn.embedImage(cmd, evalContext.ImageOnDiskPath, json) if err != nil { - this.log.Error("failed to embed image", "error", err) + dn.log.Error("failed 
to embed image", "error", err) return err } } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to Discord", "error", err) + dn.log.Error("Failed to send notification to Discord", "error", err) return err } return nil } -func (this *DiscordNotifier) embedImage(cmd *models.SendWebhookSync, imagePath string, existingJSONBody []byte) error { +func (dn *DiscordNotifier) embedImage(cmd *models.SendWebhookSync, imagePath string, existingJSONBody []byte) error { f, err := os.Open(imagePath) defer f.Close() if err != nil { @@ -171,7 +192,7 @@ func (this *DiscordNotifier) embedImage(cmd *models.SendWebhookSync, imagePath s w.Close() - cmd.Body = string(b.Bytes()) + cmd.Body = b.String() cmd.ContentType = w.FormDataContentType() return nil diff --git a/pkg/services/alerting/notifiers/discord_test.go b/pkg/services/alerting/notifiers/discord_test.go index dfc6bbe9aee0..d1cbff6b859a 100644 --- a/pkg/services/alerting/notifiers/discord_test.go +++ b/pkg/services/alerting/notifiers/discord_test.go @@ -22,14 +22,15 @@ func TestDiscordNotifier(t *testing.T) { Settings: settingsJSON, } - _, err := NewDiscordNotifier(model) + _, err := newDiscordNotifier(model) So(err, ShouldNotBeNil) }) Convey("settings should trigger incident", func() { json := ` { - "url": "https://web.hook/" + "content": "@everyone Please check this notification", + "url": "https://web.hook/" }` settingsJSON, _ := simplejson.NewJson([]byte(json)) @@ -39,12 +40,13 @@ func TestDiscordNotifier(t *testing.T) { Settings: settingsJSON, } - not, err := NewDiscordNotifier(model) + not, err := newDiscordNotifier(model) discordNotifier := not.(*DiscordNotifier) So(err, ShouldBeNil) So(discordNotifier.Name, ShouldEqual, "discord_testing") So(discordNotifier.Type, ShouldEqual, "discord") + So(discordNotifier.Content, ShouldEqual, "@everyone Please check this notification") So(discordNotifier.WebhookURL, ShouldEqual, "https://web.hook/") }) }) diff --git 
a/pkg/services/alerting/notifiers/email.go b/pkg/services/alerting/notifiers/email.go index 61b7b11d893c..5d3422e608b5 100644 --- a/pkg/services/alerting/notifiers/email.go +++ b/pkg/services/alerting/notifiers/email.go @@ -30,12 +30,16 @@ func init() { }) } +// EmailNotifier is responsible for sending +// alert notifications over email. type EmailNotifier struct { NotifierBase Addresses []string log log.Logger } +// NewEmailNotifier is the constructor function +// for the EmailNotifier. func NewEmailNotifier(model *models.AlertNotification) (alerting.Notifier, error) { addressesString := model.Settings.Get("addresses").MustString() @@ -59,12 +63,13 @@ func NewEmailNotifier(model *models.AlertNotification) (alerting.Notifier, error }, nil } -func (this *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Sending alert notification to", "addresses", this.Addresses) +// Notify sends the alert notification. +func (en *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { + en.log.Info("Sending alert notification to", "addresses", en.Addresses) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + en.log.Error("Failed get rule link", "error", err) return err } @@ -83,20 +88,20 @@ func (this *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { "StateModel": evalContext.GetStateModel(), "Message": evalContext.Rule.Message, "Error": error, - "RuleUrl": ruleUrl, + "RuleUrl": ruleURL, "ImageLink": "", "EmbeddedImage": "", "AlertPageUrl": setting.AppUrl + "alerting", "EvalMatches": evalContext.EvalMatches, }, - To: this.Addresses, + To: en.Addresses, Template: "alert_notification.html", EmbededFiles: []string{}, }, } - if evalContext.ImagePublicUrl != "" { - cmd.Data["ImageLink"] = evalContext.ImagePublicUrl + if evalContext.ImagePublicURL != "" { + cmd.Data["ImageLink"] = evalContext.ImagePublicURL } else { file, err 
:= os.Stat(evalContext.ImageOnDiskPath) if err == nil { @@ -108,9 +113,9 @@ func (this *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { err = bus.DispatchCtx(evalContext.Ctx, cmd) if err != nil { - this.log.Error("Failed to send alert notification email", "error", err) + en.log.Error("Failed to send alert notification email", "error", err) return err } - return nil + return nil } diff --git a/pkg/services/alerting/notifiers/googlechat.go b/pkg/services/alerting/notifiers/googlechat.go index c00089e0dc57..2d81787fb916 100644 --- a/pkg/services/alerting/notifiers/googlechat.go +++ b/pkg/services/alerting/notifiers/googlechat.go @@ -18,7 +18,7 @@ func init() { Name: "Google Hangouts Chat", Description: "Sends notifications to Google Hangouts Chat via webhooks based on the official JSON message " + "format (https://developers.google.com/hangouts/chat/reference/message-formats/).", - Factory: NewGoogleChatNotifier, + Factory: newGoogleChatNotifier, OptionsTemplate: `

Google Hangouts Chat settings

@@ -29,7 +29,7 @@ func init() { }) } -func NewGoogleChatNotifier(model *models.AlertNotification) (alerting.Notifier, error) { +func newGoogleChatNotifier(model *models.AlertNotification) (alerting.Notifier, error) { url := model.Settings.Get("url").MustString() if url == "" { return nil, alerting.ValidationError{Reason: "Could not find url property in settings"} @@ -37,14 +37,16 @@ func NewGoogleChatNotifier(model *models.AlertNotification) (alerting.Notifier, return &GoogleChatNotifier{ NotifierBase: NewNotifierBase(model), - Url: url, + URL: url, log: log.New("alerting.notifier.googlechat"), }, nil } +// GoogleChatNotifier is responsible for sending +// alert notifications to Google chat. type GoogleChatNotifier struct { NotifierBase - Url string + URL string log log.Logger } @@ -90,7 +92,7 @@ type imageWidget struct { } type image struct { - ImageUrl string `json:"imageUrl"` + ImageURL string `json:"imageUrl"` } type button struct { @@ -107,19 +109,20 @@ type onClick struct { } type openLink struct { - Url string `json:"url"` + URL string `json:"url"` } -func (this *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Executing Google Chat notification") +// Notify send an alert notification to Google Chat. 
+func (gcn *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error { + gcn.log.Info("Executing Google Chat notification") headers := map[string]string{ "Content-Type": "application/json; charset=UTF-8", } - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("evalContext returned an invalid rule URL") + gcn.log.Error("evalContext returned an invalid rule URL") } // add a text paragraph widget for the message @@ -149,14 +152,14 @@ func (this *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error widgets = append(widgets, fields) // if an image exists, add it as an image widget - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { widgets = append(widgets, imageWidget{ Image: image{ - ImageUrl: evalContext.ImagePublicUrl, + ImageURL: evalContext.ImagePublicURL, }, }) } else { - this.log.Info("Could not retrieve a public image URL.") + gcn.log.Info("Could not retrieve a public image URL.") } // add a button widget (link to Grafana) @@ -167,7 +170,7 @@ func (this *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error Text: "OPEN IN GRAFANA", OnClick: onClick{ OpenLink: openLink{ - Url: ruleUrl, + URL: ruleURL, }, }, }, @@ -200,14 +203,14 @@ func (this *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error body, _ := json.Marshal(res1D) cmd := &models.SendWebhookSync{ - Url: this.Url, + Url: gcn.URL, HttpMethod: "POST", HttpHeader: headers, Body: string(body), } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send Google Hangouts Chat alert", "error", err, "webhook", this.Name) + gcn.log.Error("Failed to send Google Hangouts Chat alert", "error", err, "webhook", gcn.Name) return err } diff --git a/pkg/services/alerting/notifiers/googlechat_test.go b/pkg/services/alerting/notifiers/googlechat_test.go index 5368eb63c96b..25dd5053397d 100644 --- 
a/pkg/services/alerting/notifiers/googlechat_test.go +++ b/pkg/services/alerting/notifiers/googlechat_test.go @@ -22,7 +22,7 @@ func TestGoogleChatNotifier(t *testing.T) { Settings: settingsJSON, } - _, err := NewGoogleChatNotifier(model) + _, err := newGoogleChatNotifier(model) So(err, ShouldNotBeNil) }) @@ -39,15 +39,14 @@ func TestGoogleChatNotifier(t *testing.T) { Settings: settingsJSON, } - not, err := NewGoogleChatNotifier(model) + not, err := newGoogleChatNotifier(model) webhookNotifier := not.(*GoogleChatNotifier) So(err, ShouldBeNil) So(webhookNotifier.Name, ShouldEqual, "ops") So(webhookNotifier.Type, ShouldEqual, "googlechat") - So(webhookNotifier.Url, ShouldEqual, "http://google.com") + So(webhookNotifier.URL, ShouldEqual, "http://google.com") }) - }) }) } diff --git a/pkg/services/alerting/notifiers/hipchat.go b/pkg/services/alerting/notifiers/hipchat.go index 6b94c41065fb..2e8be00576bb 100644 --- a/pkg/services/alerting/notifiers/hipchat.go +++ b/pkg/services/alerting/notifiers/hipchat.go @@ -46,6 +46,8 @@ const ( maxFieldCount int = 4 ) +// NewHipChatNotifier is the constructor functions +// for the HipChatNotifier func NewHipChatNotifier(model *models.AlertNotification) (alerting.Notifier, error) { url := model.Settings.Get("url").MustString() if strings.HasSuffix(url, "/") { @@ -56,31 +58,34 @@ func NewHipChatNotifier(model *models.AlertNotification) (alerting.Notifier, err } apikey := model.Settings.Get("apikey").MustString() - roomId := model.Settings.Get("roomid").MustString() + roomID := model.Settings.Get("roomid").MustString() return &HipChatNotifier{ NotifierBase: NewNotifierBase(model), - Url: url, - ApiKey: apikey, - RoomId: roomId, + URL: url, + APIKey: apikey, + RoomID: roomID, log: log.New("alerting.notifier.hipchat"), }, nil } +// HipChatNotifier is responsible for sending +// alert notifications to Hipchat. 
type HipChatNotifier struct { NotifierBase - Url string - ApiKey string - RoomId string + URL string + APIKey string + RoomID string log log.Logger } -func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Executing hipchat notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) +// Notify sends an alert notification to HipChat +func (hc *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { + hc.log.Info("Executing hipchat notification", "ruleId", evalContext.Rule.ID, "notification", hc.Name) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + hc.log.Error("Failed get rule link", "error", err) return err } @@ -133,7 +138,7 @@ func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { // Add a card with link to the dashboard card := map[string]interface{}{ "style": "application", - "url": ruleUrl, + "url": ruleURL, "id": "1", "title": evalContext.GetNotificationTitle(), "description": message, @@ -143,10 +148,10 @@ func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { "date": evalContext.EndTime.Unix(), "attributes": attributes, } - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { card["thumbnail"] = map[string]interface{}{ - "url": evalContext.ImagePublicUrl, - "url@2x": evalContext.ImagePublicUrl, + "url": evalContext.ImagePublicURL, + "url@2x": evalContext.ImagePublicURL, "width": 1193, "height": 564, } @@ -160,13 +165,13 @@ func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { "card": card, } - hipUrl := fmt.Sprintf("%s/v2/room/%s/notification?auth_token=%s", this.Url, this.RoomId, this.ApiKey) + hipURL := fmt.Sprintf("%s/v2/room/%s/notification?auth_token=%s", hc.URL, hc.RoomID, hc.APIKey) data, _ := json.Marshal(&body) - this.log.Info("Request payload", "json", string(data)) - cmd := 
&models.SendWebhookSync{Url: hipUrl, Body: string(data)} + hc.log.Info("Request payload", "json", string(data)) + cmd := &models.SendWebhookSync{Url: hipURL, Body: string(data)} if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send hipchat notification", "error", err, "webhook", this.Name) + hc.log.Error("Failed to send hipchat notification", "error", err, "webhook", hc.Name) return err } diff --git a/pkg/services/alerting/notifiers/hipchat_test.go b/pkg/services/alerting/notifiers/hipchat_test.go index 57ad03ed7c21..6d9b38f83933 100644 --- a/pkg/services/alerting/notifiers/hipchat_test.go +++ b/pkg/services/alerting/notifiers/hipchat_test.go @@ -45,9 +45,9 @@ func TestHipChatNotifier(t *testing.T) { So(err, ShouldBeNil) So(hipchatNotifier.Name, ShouldEqual, "ops") So(hipchatNotifier.Type, ShouldEqual, "hipchat") - So(hipchatNotifier.Url, ShouldEqual, "http://google.com") - So(hipchatNotifier.ApiKey, ShouldEqual, "") - So(hipchatNotifier.RoomId, ShouldEqual, "") + So(hipchatNotifier.URL, ShouldEqual, "http://google.com") + So(hipchatNotifier.APIKey, ShouldEqual, "") + So(hipchatNotifier.RoomID, ShouldEqual, "") }) Convey("from settings with Recipient and Mention", func() { @@ -71,11 +71,10 @@ func TestHipChatNotifier(t *testing.T) { So(err, ShouldBeNil) So(hipchatNotifier.Name, ShouldEqual, "ops") So(hipchatNotifier.Type, ShouldEqual, "hipchat") - So(hipchatNotifier.Url, ShouldEqual, "http://www.hipchat.com") - So(hipchatNotifier.ApiKey, ShouldEqual, "1234") - So(hipchatNotifier.RoomId, ShouldEqual, "1234") + So(hipchatNotifier.URL, ShouldEqual, "http://www.hipchat.com") + So(hipchatNotifier.APIKey, ShouldEqual, "1234") + So(hipchatNotifier.RoomID, ShouldEqual, "1234") }) - }) }) } diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go index 168b1646b16b..ed795453c42a 100644 --- a/pkg/services/alerting/notifiers/kafka.go +++ b/pkg/services/alerting/notifiers/kafka.go @@ -32,6 +32,7 
@@ func init() { }) } +// NewKafkaNotifier is the constructor function for the Kafka notifier. func NewKafkaNotifier(model *models.AlertNotification) (alerting.Notifier, error) { endpoint := model.Settings.Get("kafkaRestProxy").MustString() if endpoint == "" { @@ -50,6 +51,8 @@ func NewKafkaNotifier(model *models.AlertNotification) (alerting.Notifier, error }, nil } +// KafkaNotifier is responsible for sending +// alert notifications to Kafka. type KafkaNotifier struct { NotifierBase Endpoint string @@ -57,8 +60,8 @@ type KafkaNotifier struct { log log.Logger } -func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { - +// Notify sends the alert notification. +func (kn *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { state := evalContext.Rule.State customData := triggMetrString @@ -66,7 +69,7 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } - this.log.Info("Notifying Kafka", "alert_state", state) + kn.log.Info("Notifying Kafka", "alert_state", state) recordJSON := simplejson.New() records := make([]interface{}, 1) @@ -75,20 +78,20 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("description", evalContext.Rule.Name+" - "+evalContext.Rule.Message) bodyJSON.Set("client", "Grafana") bodyJSON.Set("details", customData) - bodyJSON.Set("incident_key", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("incident_key", "alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + kn.log.Error("Failed get rule link", "error", err) return err } - bodyJSON.Set("client_url", ruleUrl) + bodyJSON.Set("client_url", ruleURL) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { contexts := 
make([]interface{}, 1) imageJSON := simplejson.New() imageJSON.Set("type", "image") - imageJSON.Set("src", evalContext.ImagePublicUrl) + imageJSON.Set("src", evalContext.ImagePublicURL) contexts[0] = imageJSON bodyJSON.Set("contexts", contexts) } @@ -99,10 +102,10 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { recordJSON.Set("records", records) body, _ := recordJSON.MarshalJSON() - topicUrl := this.Endpoint + "/topics/" + this.Topic + topicURL := kn.Endpoint + "/topics/" + kn.Topic cmd := &models.SendWebhookSync{ - Url: topicUrl, + Url: topicURL, Body: string(body), HttpMethod: "POST", HttpHeader: map[string]string{ @@ -112,7 +115,7 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to Kafka", "error", err, "body", string(body)) + kn.log.Error("Failed to send notification to Kafka", "error", err, "body", string(body)) return err } diff --git a/pkg/services/alerting/notifiers/kafka_test.go b/pkg/services/alerting/notifiers/kafka_test.go index 03a343835e15..ba3b60ec23e9 100644 --- a/pkg/services/alerting/notifiers/kafka_test.go +++ b/pkg/services/alerting/notifiers/kafka_test.go @@ -49,7 +49,6 @@ func TestKafkaNotifier(t *testing.T) { So(kafkaNotifier.Endpoint, ShouldEqual, "http://localhost:8082") So(kafkaNotifier.Topic, ShouldEqual, "topic1") }) - }) }) } diff --git a/pkg/services/alerting/notifiers/line.go b/pkg/services/alerting/notifiers/line.go index d8bf70f8b9c9..2048495b6465 100644 --- a/pkg/services/alerting/notifiers/line.go +++ b/pkg/services/alerting/notifiers/line.go @@ -29,9 +29,10 @@ func init() { } const ( - lineNotifyUrl string = "https://notify-api.line.me/api/notify" + lineNotifyURL string = "https://notify-api.line.me/api/notify" ) +// NewLINENotifier is the constructor for the LINE notifier func NewLINENotifier(model *models.AlertNotification) (alerting.Notifier, error) { token 
:= model.Settings.Get("token").MustString() if token == "" { @@ -45,52 +46,55 @@ func NewLINENotifier(model *models.AlertNotification) (alerting.Notifier, error) }, nil } +// LineNotifier is responsible for sending +// alert notifications to LINE. type LineNotifier struct { NotifierBase Token string log log.Logger } -func (this *LineNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Executing line notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) +// Notify send an alert notification to LINE +func (ln *LineNotifier) Notify(evalContext *alerting.EvalContext) error { + ln.log.Info("Executing line notification", "ruleId", evalContext.Rule.ID, "notification", ln.Name) var err error switch evalContext.Rule.State { case models.AlertStateAlerting: - err = this.createAlert(evalContext) + err = ln.createAlert(evalContext) } return err } -func (this *LineNotifier) createAlert(evalContext *alerting.EvalContext) error { - this.log.Info("Creating Line notify", "ruleId", evalContext.Rule.Id, "notification", this.Name) - ruleUrl, err := evalContext.GetRuleUrl() +func (ln *LineNotifier) createAlert(evalContext *alerting.EvalContext) error { + ln.log.Info("Creating Line notify", "ruleId", evalContext.Rule.ID, "notification", ln.Name) + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + ln.log.Error("Failed get rule link", "error", err) return err } form := url.Values{} - body := fmt.Sprintf("%s - %s\n%s", evalContext.Rule.Name, ruleUrl, evalContext.Rule.Message) + body := fmt.Sprintf("%s - %s\n%s", evalContext.Rule.Name, ruleURL, evalContext.Rule.Message) form.Add("message", body) - if evalContext.ImagePublicUrl != "" { - form.Add("imageThumbnail", evalContext.ImagePublicUrl) - form.Add("imageFullsize", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + form.Add("imageThumbnail", evalContext.ImagePublicURL) + form.Add("imageFullsize", 
evalContext.ImagePublicURL) } cmd := &models.SendWebhookSync{ - Url: lineNotifyUrl, + Url: lineNotifyURL, HttpMethod: "POST", HttpHeader: map[string]string{ - "Authorization": fmt.Sprintf("Bearer %s", this.Token), + "Authorization": fmt.Sprintf("Bearer %s", ln.Token), "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8", }, Body: form.Encode(), } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to LINE", "error", err, "body", body) + ln.log.Error("Failed to send notification to LINE", "error", err, "body", body) return err } diff --git a/pkg/services/alerting/notifiers/line_test.go b/pkg/services/alerting/notifiers/line_test.go index 69082d0e066e..f8f50cc9b958 100644 --- a/pkg/services/alerting/notifiers/line_test.go +++ b/pkg/services/alerting/notifiers/line_test.go @@ -44,6 +44,5 @@ func TestLineNotifier(t *testing.T) { So(lineNotifier.Type, ShouldEqual, "line") So(lineNotifier.Token, ShouldEqual, "abcdefgh0123456789") }) - }) } diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 23dd453fe92e..833927dee9f5 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -44,54 +44,57 @@ var ( opsgenieAlertURL = "https://api.opsgenie.com/v2/alerts" ) +// NewOpsGenieNotifier is the constructor for OpsGenie. 
func NewOpsGenieNotifier(model *models.AlertNotification) (alerting.Notifier, error) { autoClose := model.Settings.Get("autoClose").MustBool(true) apiKey := model.Settings.Get("apiKey").MustString() - apiUrl := model.Settings.Get("apiUrl").MustString() + apiURL := model.Settings.Get("apiUrl").MustString() if apiKey == "" { return nil, alerting.ValidationError{Reason: "Could not find api key property in settings"} } - if apiUrl == "" { - apiUrl = opsgenieAlertURL + if apiURL == "" { + apiURL = opsgenieAlertURL } return &OpsGenieNotifier{ NotifierBase: NewNotifierBase(model), - ApiKey: apiKey, - ApiUrl: apiUrl, + APIKey: apiKey, + APIUrl: apiURL, AutoClose: autoClose, log: log.New("alerting.notifier.opsgenie"), }, nil } +// OpsGenieNotifier is responsible for sending +// alert notifications to OpsGenie type OpsGenieNotifier struct { NotifierBase - ApiKey string - ApiUrl string + APIKey string + APIUrl string AutoClose bool log log.Logger } -func (this *OpsGenieNotifier) Notify(evalContext *alerting.EvalContext) error { - +// Notify sends an alert notification to OpsGenie. 
+func (on *OpsGenieNotifier) Notify(evalContext *alerting.EvalContext) error { var err error switch evalContext.Rule.State { case models.AlertStateOK: - if this.AutoClose { - err = this.closeAlert(evalContext) + if on.AutoClose { + err = on.closeAlert(evalContext) } case models.AlertStateAlerting: - err = this.createAlert(evalContext) + err = on.createAlert(evalContext) } return err } -func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error { - this.log.Info("Creating OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", this.Name) +func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error { + on.log.Info("Creating OpsGenie alert", "ruleId", evalContext.Rule.ID, "notification", on.Name) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + on.log.Error("Failed get rule link", "error", err) return err } @@ -103,54 +106,54 @@ func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) err bodyJSON := simplejson.New() bodyJSON.Set("message", evalContext.Rule.Name) bodyJSON.Set("source", "Grafana") - bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) - bodyJSON.Set("description", fmt.Sprintf("%s - %s\n%s\n%s", evalContext.Rule.Name, ruleUrl, evalContext.Rule.Message, customData)) + bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) + bodyJSON.Set("description", fmt.Sprintf("%s - %s\n%s\n%s", evalContext.Rule.Name, ruleURL, evalContext.Rule.Message, customData)) details := simplejson.New() - details.Set("url", ruleUrl) - if evalContext.ImagePublicUrl != "" { - details.Set("image", evalContext.ImagePublicUrl) + details.Set("url", ruleURL) + if evalContext.ImagePublicURL != "" { + details.Set("image", evalContext.ImagePublicURL) } bodyJSON.Set("details", details) body, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: 
this.ApiUrl, + Url: on.APIUrl, Body: string(body), HttpMethod: "POST", HttpHeader: map[string]string{ "Content-Type": "application/json", - "Authorization": fmt.Sprintf("GenieKey %s", this.ApiKey), + "Authorization": fmt.Sprintf("GenieKey %s", on.APIKey), }, } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to OpsGenie", "error", err, "body", string(body)) + on.log.Error("Failed to send notification to OpsGenie", "error", err, "body", string(body)) } return nil } -func (this *OpsGenieNotifier) closeAlert(evalContext *alerting.EvalContext) error { - this.log.Info("Closing OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", this.Name) +func (on *OpsGenieNotifier) closeAlert(evalContext *alerting.EvalContext) error { + on.log.Info("Closing OpsGenie alert", "ruleId", evalContext.Rule.ID, "notification", on.Name) bodyJSON := simplejson.New() bodyJSON.Set("source", "Grafana") body, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: fmt.Sprintf("%s/alertId-%d/close?identifierType=alias", this.ApiUrl, evalContext.Rule.Id), + Url: fmt.Sprintf("%s/alertId-%d/close?identifierType=alias", on.APIUrl, evalContext.Rule.ID), Body: string(body), HttpMethod: "POST", HttpHeader: map[string]string{ "Content-Type": "application/json", - "Authorization": fmt.Sprintf("GenieKey %s", this.ApiKey), + "Authorization": fmt.Sprintf("GenieKey %s", on.APIKey), }, } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to OpsGenie", "error", err, "body", string(body)) + on.log.Error("Failed to send notification to OpsGenie", "error", err, "body", string(body)) return err } diff --git a/pkg/services/alerting/notifiers/opsgenie_test.go b/pkg/services/alerting/notifiers/opsgenie_test.go index e4954ed904af..1df57676b38b 100644 --- a/pkg/services/alerting/notifiers/opsgenie_test.go +++ b/pkg/services/alerting/notifiers/opsgenie_test.go @@ -45,7 +45,7 @@ func 
TestOpsGenieNotifier(t *testing.T) { So(err, ShouldBeNil) So(opsgenieNotifier.Name, ShouldEqual, "opsgenie_testing") So(opsgenieNotifier.Type, ShouldEqual, "opsgenie") - So(opsgenieNotifier.ApiKey, ShouldEqual, "abcdefgh0123456789") + So(opsgenieNotifier.APIKey, ShouldEqual, "abcdefgh0123456789") }) }) }) diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go index 2b60058ecd93..d771bfd1ad68 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -40,9 +40,10 @@ func init() { } var ( - pagerdutyEventApiUrl = "https://events.pagerduty.com/v2/enqueue" + pagerdutyEventAPIURL = "https://events.pagerduty.com/v2/enqueue" ) +// NewPagerdutyNotifier is the constructor for the PagerDuty notifier func NewPagerdutyNotifier(model *models.AlertNotification) (alerting.Notifier, error) { autoResolve := model.Settings.Get("autoResolve").MustBool(false) key := model.Settings.Get("integrationKey").MustString() @@ -58,6 +59,8 @@ func NewPagerdutyNotifier(model *models.AlertNotification) (alerting.Notifier, e }, nil } +// PagerdutyNotifier is responsible for sending +// alert notifications to pagerduty type PagerdutyNotifier struct { NotifierBase Key string @@ -65,10 +68,11 @@ type PagerdutyNotifier struct { log log.Logger } -func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { +// Notify sends an alert notification to PagerDuty +func (pn *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { - if evalContext.Rule.State == models.AlertStateOK && !this.AutoResolve { - this.log.Info("Not sending a trigger to Pagerduty", "state", evalContext.Rule.State, "auto resolve", this.AutoResolve) + if evalContext.Rule.State == models.AlertStateOK && !pn.AutoResolve { + pn.log.Info("Not sending a trigger to Pagerduty", "state", evalContext.Rule.State, "auto resolve", pn.AutoResolve) return nil } @@ -81,7 +85,7 @@ func (this *PagerdutyNotifier) 
Notify(evalContext *alerting.EvalContext) error { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } - this.log.Info("Notifying Pagerduty", "event_type", eventType) + pn.log.Info("Notifying Pagerduty", "event_type", eventType) payloadJSON := simplejson.New() payloadJSON.Set("summary", evalContext.Rule.Name+" - "+evalContext.Rule.Message) @@ -94,28 +98,28 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { payloadJSON.Set("custom_details", customData) bodyJSON := simplejson.New() - bodyJSON.Set("routing_key", this.Key) + bodyJSON.Set("routing_key", pn.Key) bodyJSON.Set("event_action", eventType) - bodyJSON.Set("dedup_key", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("dedup_key", "alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) bodyJSON.Set("payload", payloadJSON) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + pn.log.Error("Failed get rule link", "error", err) return err } links := make([]interface{}, 1) linkJSON := simplejson.New() - linkJSON.Set("href", ruleUrl) - bodyJSON.Set("client_url", ruleUrl) + linkJSON.Set("href", ruleURL) + bodyJSON.Set("client_url", ruleURL) bodyJSON.Set("client", "Grafana") links[0] = linkJSON bodyJSON.Set("links", links) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { contexts := make([]interface{}, 1) imageJSON := simplejson.New() - imageJSON.Set("src", evalContext.ImagePublicUrl) + imageJSON.Set("src", evalContext.ImagePublicURL) contexts[0] = imageJSON bodyJSON.Set("images", contexts) } @@ -123,7 +127,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { body, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: pagerdutyEventApiUrl, + Url: pagerdutyEventAPIURL, Body: string(body), HttpMethod: "POST", HttpHeader: map[string]string{ @@ -132,7 +136,7 @@ func 
(this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to Pagerduty", "error", err, "body", string(body)) + pn.log.Error("Failed to send notification to Pagerduty", "error", err, "body", string(body)) return err } diff --git a/pkg/services/alerting/notifiers/pushover.go b/pkg/services/alerting/notifiers/pushover.go index a54fb1ee084d..5da1a457e679 100644 --- a/pkg/services/alerting/notifiers/pushover.go +++ b/pkg/services/alerting/notifiers/pushover.go @@ -14,7 +14,7 @@ import ( "github.com/grafana/grafana/pkg/services/alerting" ) -const PUSHOVER_ENDPOINT = "https://api.pushover.net/1/messages.json" +const pushoverEndpoint = "https://api.pushover.net/1/messages.json" func init() { sounds := ` @@ -95,9 +95,10 @@ func init() { }) } +// NewPushoverNotifier is the constructor for the Pushover Notifier func NewPushoverNotifier(model *models.AlertNotification) (alerting.Notifier, error) { userKey := model.Settings.Get("userKey").MustString() - apiToken := model.Settings.Get("apiToken").MustString() + APIToken := model.Settings.Get("apiToken").MustString() device := model.Settings.Get("device").MustString() priority, _ := strconv.Atoi(model.Settings.Get("priority").MustString()) retry, _ := strconv.Atoi(model.Settings.Get("retry").MustString()) @@ -109,13 +110,13 @@ func NewPushoverNotifier(model *models.AlertNotification) (alerting.Notifier, er if userKey == "" { return nil, alerting.ValidationError{Reason: "User key not given"} } - if apiToken == "" { + if APIToken == "" { return nil, alerting.ValidationError{Reason: "API token not given"} } return &PushoverNotifier{ NotifierBase: NewNotifierBase(model), UserKey: userKey, - ApiToken: apiToken, + APIToken: APIToken, Priority: priority, Retry: retry, Expire: expire, @@ -127,10 +128,12 @@ func NewPushoverNotifier(model *models.AlertNotification) (alerting.Notifier, er }, nil } +// 
PushoverNotifier is responsible for sending +// alert notifications to Pushover type PushoverNotifier struct { NotifierBase UserKey string - ApiToken string + APIToken string Priority int Retry int Expire int @@ -141,10 +144,11 @@ type PushoverNotifier struct { log log.Logger } -func (this *PushoverNotifier) Notify(evalContext *alerting.EvalContext) error { - ruleUrl, err := evalContext.GetRuleUrl() +// Notify sends a alert notification to Pushover +func (pn *PushoverNotifier) Notify(evalContext *alerting.EvalContext) error { + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + pn.log.Error("Failed get rule link", "error", err) return err } @@ -163,34 +167,34 @@ func (this *PushoverNotifier) Notify(evalContext *alerting.EvalContext) error { message = "Notification message missing (Set a notification message to replace this text.)" } - headers, uploadBody, err := this.genPushoverBody(evalContext, message, ruleUrl) + headers, uploadBody, err := pn.genPushoverBody(evalContext, message, ruleURL) if err != nil { - this.log.Error("Failed to generate body for pushover", "error", err) + pn.log.Error("Failed to generate body for pushover", "error", err) return err } cmd := &models.SendWebhookSync{ - Url: PUSHOVER_ENDPOINT, + Url: pushoverEndpoint, HttpMethod: "POST", HttpHeader: headers, Body: uploadBody.String(), } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send pushover notification", "error", err, "webhook", this.Name) + pn.log.Error("Failed to send pushover notification", "error", err, "webhook", pn.Name) return err } return nil } -func (this *PushoverNotifier) genPushoverBody(evalContext *alerting.EvalContext, message string, ruleUrl string) (map[string]string, bytes.Buffer, error) { +func (pn *PushoverNotifier) genPushoverBody(evalContext *alerting.EvalContext, message string, ruleURL string) (map[string]string, bytes.Buffer, error) { var b bytes.Buffer var 
err error w := multipart.NewWriter(&b) // Add image only if requested and available - if this.Upload && evalContext.ImageOnDiskPath != "" { + if pn.Upload && evalContext.ImageOnDiskPath != "" { f, err := os.Open(evalContext.ImageOnDiskPath) if err != nil { return nil, b, err @@ -209,47 +213,47 @@ func (this *PushoverNotifier) genPushoverBody(evalContext *alerting.EvalContext, } // Add the user token - err = w.WriteField("user", this.UserKey) + err = w.WriteField("user", pn.UserKey) if err != nil { return nil, b, err } // Add the api token - err = w.WriteField("token", this.ApiToken) + err = w.WriteField("token", pn.APIToken) if err != nil { return nil, b, err } // Add priority - err = w.WriteField("priority", strconv.Itoa(this.Priority)) + err = w.WriteField("priority", strconv.Itoa(pn.Priority)) if err != nil { return nil, b, err } - if this.Priority == 2 { - err = w.WriteField("retry", strconv.Itoa(this.Retry)) + if pn.Priority == 2 { + err = w.WriteField("retry", strconv.Itoa(pn.Retry)) if err != nil { return nil, b, err } - err = w.WriteField("expire", strconv.Itoa(this.Expire)) + err = w.WriteField("expire", strconv.Itoa(pn.Expire)) if err != nil { return nil, b, err } } // Add device - if this.Device != "" { - err = w.WriteField("device", this.Device) + if pn.Device != "" { + err = w.WriteField("device", pn.Device) if err != nil { return nil, b, err } } // Add sound - sound := this.AlertingSound + sound := pn.AlertingSound if evalContext.Rule.State == models.AlertStateOK { - sound = this.OkSound + sound = pn.OkSound } if sound != "default" { err = w.WriteField("sound", sound) @@ -265,7 +269,7 @@ func (this *PushoverNotifier) genPushoverBody(evalContext *alerting.EvalContext, } // Add URL - err = w.WriteField("url", ruleUrl) + err = w.WriteField("url", ruleURL) if err != nil { return nil, b, err } diff --git a/pkg/services/alerting/notifiers/pushover_test.go b/pkg/services/alerting/notifiers/pushover_test.go index f862a500618a..7b2d51a176f3 100644 --- 
a/pkg/services/alerting/notifiers/pushover_test.go +++ b/pkg/services/alerting/notifiers/pushover_test.go @@ -53,7 +53,7 @@ func TestPushoverNotifier(t *testing.T) { So(err, ShouldBeNil) So(pushoverNotifier.Name, ShouldEqual, "Pushover") So(pushoverNotifier.Type, ShouldEqual, "pushover") - So(pushoverNotifier.ApiToken, ShouldEqual, "4SrUFQL4A5V5TQ1z5Pg9nxHXPXSTve") + So(pushoverNotifier.APIToken, ShouldEqual, "4SrUFQL4A5V5TQ1z5Pg9nxHXPXSTve") So(pushoverNotifier.UserKey, ShouldEqual, "tzNZYf36y0ohWwXo4XoUrB61rz1A4o") So(pushoverNotifier.Priority, ShouldEqual, 1) So(pushoverNotifier.AlertingSound, ShouldEqual, "pushover") diff --git a/pkg/services/alerting/notifiers/sensu.go b/pkg/services/alerting/notifiers/sensu.go index cad9fc2286a3..7f60178d10f5 100644 --- a/pkg/services/alerting/notifiers/sensu.go +++ b/pkg/services/alerting/notifiers/sensu.go @@ -44,6 +44,7 @@ func init() { } +// NewSensuNotifier is the constructor for the Sensu Notifier. func NewSensuNotifier(model *models.AlertNotification) (alerting.Notifier, error) { url := model.Settings.Get("url").MustString() if url == "" { @@ -52,7 +53,7 @@ func NewSensuNotifier(model *models.AlertNotification) (alerting.Notifier, error return &SensuNotifier{ NotifierBase: NewNotifierBase(model), - Url: url, + URL: url, User: model.Settings.Get("username").MustString(), Source: model.Settings.Get("source").MustString(), Password: model.Settings.Get("password").MustString(), @@ -61,9 +62,11 @@ func NewSensuNotifier(model *models.AlertNotification) (alerting.Notifier, error }, nil } +// SensuNotifier is responsible for sending +// alert notifications to Sensu. 
type SensuNotifier struct { NotifierBase - Url string + URL string Source string User string Password string @@ -71,19 +74,20 @@ type SensuNotifier struct { log log.Logger } -func (this *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Sending sensu result") +// Notify send alert notification to Sensu +func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { + sn.log.Info("Sending sensu result") bodyJSON := simplejson.New() - bodyJSON.Set("ruleId", evalContext.Rule.Id) + bodyJSON.Set("ruleId", evalContext.Rule.ID) // Sensu alerts cannot have spaces in them bodyJSON.Set("name", strings.Replace(evalContext.Rule.Name, " ", "_", -1)) // Sensu alerts require a source. We set it to the user-specified value (optional), // else we fallback and use the grafana ruleID. - if this.Source != "" { - bodyJSON.Set("source", this.Source) + if sn.Source != "" { + bodyJSON.Set("source", sn.Source) } else { - bodyJSON.Set("source", "grafana_rule_"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("source", "grafana_rule_"+strconv.FormatInt(evalContext.Rule.ID, 10)) } // Finally, sensu expects an output // We set it to a default output @@ -98,17 +102,17 @@ func (this *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("status", 0) } - if this.Handler != "" { - bodyJSON.Set("handler", this.Handler) + if sn.Handler != "" { + bodyJSON.Set("handler", sn.Handler) } - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { - bodyJSON.Set("ruleUrl", ruleUrl) + bodyJSON.Set("ruleUrl", ruleURL) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("imageUrl", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("imageUrl", evalContext.ImagePublicURL) } if evalContext.Rule.Message != "" { @@ -118,15 +122,15 @@ func (this *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { body, _ := bodyJSON.MarshalJSON() cmd := 
&models.SendWebhookSync{ - Url: this.Url, - User: this.User, - Password: this.Password, + Url: sn.URL, + User: sn.User, + Password: sn.Password, Body: string(body), HttpMethod: "POST", } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send sensu event", "error", err, "sensu", this.Name) + sn.log.Error("Failed to send sensu event", "error", err, "sensu", sn.Name) return err } diff --git a/pkg/services/alerting/notifiers/sensu_test.go b/pkg/services/alerting/notifiers/sensu_test.go index 40d39a5d1c3f..0c73a493ee8b 100644 --- a/pkg/services/alerting/notifiers/sensu_test.go +++ b/pkg/services/alerting/notifiers/sensu_test.go @@ -47,7 +47,7 @@ func TestSensuNotifier(t *testing.T) { So(err, ShouldBeNil) So(sensuNotifier.Name, ShouldEqual, "sensu") So(sensuNotifier.Type, ShouldEqual, "sensu") - So(sensuNotifier.Url, ShouldEqual, "http://sensu-api.example.com:4567/results") + So(sensuNotifier.URL, ShouldEqual, "http://sensu-api.example.com:4567/results") So(sensuNotifier.Source, ShouldEqual, "grafana_instance_01") So(sensuNotifier.Handler, ShouldEqual, "myhandler") }) diff --git a/pkg/services/alerting/notifiers/slack.go b/pkg/services/alerting/notifiers/slack.go index 117674448877..b9a10c4d5d10 100644 --- a/pkg/services/alerting/notifiers/slack.go +++ b/pkg/services/alerting/notifiers/slack.go @@ -99,6 +99,7 @@ func init() { } +// NewSlackNotifier is the constructor for the Slack notifier func NewSlackNotifier(model *models.AlertNotification) (alerting.Notifier, error) { url := model.Settings.Get("url").MustString() if url == "" { @@ -108,18 +109,18 @@ func NewSlackNotifier(model *models.AlertNotification) (alerting.Notifier, error recipient := model.Settings.Get("recipient").MustString() username := model.Settings.Get("username").MustString() iconEmoji := model.Settings.Get("icon_emoji").MustString() - iconUrl := model.Settings.Get("icon_url").MustString() + iconURL := model.Settings.Get("icon_url").MustString() mention := 
model.Settings.Get("mention").MustString() token := model.Settings.Get("token").MustString() uploadImage := model.Settings.Get("uploadImage").MustBool(true) return &SlackNotifier{ NotifierBase: NewNotifierBase(model), - Url: url, + URL: url, Recipient: recipient, Username: username, IconEmoji: iconEmoji, - IconUrl: iconUrl, + IconURL: iconURL, Mention: mention, Token: token, Upload: uploadImage, @@ -127,25 +128,28 @@ func NewSlackNotifier(model *models.AlertNotification) (alerting.Notifier, error }, nil } +// SlackNotifier is responsible for sending +// alert notification to Slack. type SlackNotifier struct { NotifierBase - Url string + URL string Recipient string Username string IconEmoji string - IconUrl string + IconURL string Mention string Token string Upload bool log log.Logger } -func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Executing slack notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) +// Notify send alert notification to Slack. +func (sn *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { + sn.log.Info("Executing slack notification", "ruleId", evalContext.Rule.ID, "notification", sn.Name) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + sn.log.Error("Failed get rule link", "error", err) return err } @@ -170,14 +174,14 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { }) } - message := this.Mention + message := sn.Mention if evalContext.Rule.State != models.AlertStateOK { //don't add message when going back to alert state ok. 
message += " " + evalContext.Rule.Message } - image_url := "" + imageURL := "" // default to file.upload API method if a token is provided - if this.Token == "" { - image_url = evalContext.ImagePublicUrl + if sn.Token == "" { + imageURL = evalContext.ImagePublicURL } body := map[string]interface{}{ @@ -186,10 +190,10 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { "fallback": evalContext.GetNotificationTitle(), "color": evalContext.GetStateModel().Color, "title": evalContext.GetNotificationTitle(), - "title_link": ruleUrl, + "title_link": ruleURL, "text": message, "fields": fields, - "image_url": image_url, + "image_url": imageURL, "footer": "Grafana v" + setting.BuildVersion, "footer_icon": "https://grafana.com/assets/img/fav32.png", "ts": time.Now().Unix(), @@ -199,26 +203,26 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { } //recipient override - if this.Recipient != "" { - body["channel"] = this.Recipient + if sn.Recipient != "" { + body["channel"] = sn.Recipient } - if this.Username != "" { - body["username"] = this.Username + if sn.Username != "" { + body["username"] = sn.Username } - if this.IconEmoji != "" { - body["icon_emoji"] = this.IconEmoji + if sn.IconEmoji != "" { + body["icon_emoji"] = sn.IconEmoji } - if this.IconUrl != "" { - body["icon_url"] = this.IconUrl + if sn.IconURL != "" { + body["icon_url"] = sn.IconURL } data, _ := json.Marshal(&body) - cmd := &models.SendWebhookSync{Url: this.Url, Body: string(data)} + cmd := &models.SendWebhookSync{Url: sn.URL, Body: string(data)} if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send slack notification", "error", err, "webhook", this.Name) + sn.log.Error("Failed to send slack notification", "error", err, "webhook", sn.Name) return err } - if this.Token != "" && this.UploadImage { - err = SlackFileUpload(evalContext, this.log, "https://slack.com/api/files.upload", this.Recipient, this.Token) + if 
sn.Token != "" && sn.UploadImage { + err = slackFileUpload(evalContext, sn.log, "https://slack.com/api/files.upload", sn.Recipient, sn.Token) if err != nil { return err } @@ -226,12 +230,12 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { return nil } -func SlackFileUpload(evalContext *alerting.EvalContext, log log.Logger, url string, recipient string, token string) error { +func slackFileUpload(evalContext *alerting.EvalContext, log log.Logger, url string, recipient string, token string) error { if evalContext.ImageOnDiskPath == "" { evalContext.ImageOnDiskPath = filepath.Join(setting.HomePath, "public/img/mixed_styles.png") } log.Info("Uploading to slack via file.upload API") - headers, uploadBody, err := GenerateSlackBody(evalContext.ImageOnDiskPath, token, recipient) + headers, uploadBody, err := generateSlackBody(evalContext.ImageOnDiskPath, token, recipient) if err != nil { return err } @@ -246,7 +250,7 @@ func SlackFileUpload(evalContext *alerting.EvalContext, log log.Logger, url stri return nil } -func GenerateSlackBody(file string, token string, recipient string) (map[string]string, bytes.Buffer, error) { +func generateSlackBody(file string, token string, recipient string) (map[string]string, bytes.Buffer, error) { // Slack requires all POSTs to files.upload to present // an "application/x-www-form-urlencoded" encoded querystring // See https://api.slack.com/methods/files.upload diff --git a/pkg/services/alerting/notifiers/slack_test.go b/pkg/services/alerting/notifiers/slack_test.go index 7dceb12676c9..c8c5b1220cf6 100644 --- a/pkg/services/alerting/notifiers/slack_test.go +++ b/pkg/services/alerting/notifiers/slack_test.go @@ -45,11 +45,11 @@ func TestSlackNotifier(t *testing.T) { So(err, ShouldBeNil) So(slackNotifier.Name, ShouldEqual, "ops") So(slackNotifier.Type, ShouldEqual, "slack") - So(slackNotifier.Url, ShouldEqual, "http://google.com") + So(slackNotifier.URL, ShouldEqual, "http://google.com") 
So(slackNotifier.Recipient, ShouldEqual, "") So(slackNotifier.Username, ShouldEqual, "") So(slackNotifier.IconEmoji, ShouldEqual, "") - So(slackNotifier.IconUrl, ShouldEqual, "") + So(slackNotifier.IconURL, ShouldEqual, "") So(slackNotifier.Mention, ShouldEqual, "") So(slackNotifier.Token, ShouldEqual, "") }) @@ -79,11 +79,11 @@ func TestSlackNotifier(t *testing.T) { So(err, ShouldBeNil) So(slackNotifier.Name, ShouldEqual, "ops") So(slackNotifier.Type, ShouldEqual, "slack") - So(slackNotifier.Url, ShouldEqual, "http://google.com") + So(slackNotifier.URL, ShouldEqual, "http://google.com") So(slackNotifier.Recipient, ShouldEqual, "#ds-opentsdb") So(slackNotifier.Username, ShouldEqual, "Grafana Alerts") So(slackNotifier.IconEmoji, ShouldEqual, ":smile:") - So(slackNotifier.IconUrl, ShouldEqual, "https://grafana.com/img/fav32.png") + So(slackNotifier.IconURL, ShouldEqual, "https://grafana.com/img/fav32.png") So(slackNotifier.Mention, ShouldEqual, "@carl") So(slackNotifier.Token, ShouldEqual, "xoxb-XXXXXXXX-XXXXXXXX-XXXXXXXXXX") }) diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 57f5d6e91c04..4d0c47ddad23 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -26,6 +26,7 @@ func init() { } +// NewTeamsNotifier is the constructor for Teams notifier. func NewTeamsNotifier(model *models.AlertNotification) (alerting.Notifier, error) { url := model.Settings.Get("url").MustString() if url == "" { @@ -34,23 +35,26 @@ func NewTeamsNotifier(model *models.AlertNotification) (alerting.Notifier, error return &TeamsNotifier{ NotifierBase: NewNotifierBase(model), - Url: url, + URL: url, log: log.New("alerting.notifier.teams"), }, nil } +// TeamsNotifier is responsible for sending +// alert notifications to Microsoft teams. 
type TeamsNotifier struct { NotifierBase - Url string + URL string log log.Logger } -func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Executing teams notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) +// Notify send an alert notification to Microsoft teams. +func (tn *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { + tn.log.Info("Executing teams notification", "ruleId", evalContext.Rule.ID, "notification", tn.Name) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + tn.log.Error("Failed get rule link", "error", err) return err } @@ -79,9 +83,9 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { } images := make([]map[string]interface{}, 0) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { images = append(images, map[string]interface{}{ - "image": evalContext.ImagePublicUrl, + "image": evalContext.ImagePublicURL, }) } @@ -108,7 +112,7 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { "name": "View Rule", "targets": []map[string]interface{}{ { - "os": "default", "uri": ruleUrl, + "os": "default", "uri": ruleURL, }, }, }, @@ -118,7 +122,7 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { "name": "View Graph", "targets": []map[string]interface{}{ { - "os": "default", "uri": evalContext.ImagePublicUrl, + "os": "default", "uri": evalContext.ImagePublicURL, }, }, }, @@ -126,10 +130,10 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { } data, _ := json.Marshal(&body) - cmd := &models.SendWebhookSync{Url: this.Url, Body: string(data)} + cmd := &models.SendWebhookSync{Url: tn.URL, Body: string(data)} if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send teams notification", "error", err, "webhook", 
this.Name) + tn.log.Error("Failed to send teams notification", "error", err, "webhook", tn.Name) return err } diff --git a/pkg/services/alerting/notifiers/teams_test.go b/pkg/services/alerting/notifiers/teams_test.go index 1dd35c899605..10ec7edca285 100644 --- a/pkg/services/alerting/notifiers/teams_test.go +++ b/pkg/services/alerting/notifiers/teams_test.go @@ -45,7 +45,7 @@ func TestTeamsNotifier(t *testing.T) { So(err, ShouldBeNil) So(teamsNotifier.Name, ShouldEqual, "ops") So(teamsNotifier.Type, ShouldEqual, "teams") - So(teamsNotifier.Url, ShouldEqual, "http://google.com") + So(teamsNotifier.URL, ShouldEqual, "http://google.com") }) Convey("from settings with Recipient and Mention", func() { @@ -67,9 +67,8 @@ func TestTeamsNotifier(t *testing.T) { So(err, ShouldBeNil) So(teamsNotifier.Name, ShouldEqual, "ops") So(teamsNotifier.Type, ShouldEqual, "teams") - So(teamsNotifier.Url, ShouldEqual, "http://google.com") + So(teamsNotifier.URL, ShouldEqual, "http://google.com") }) - }) }) } diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go index 741c0fe732c5..be354bc2733e 100644 --- a/pkg/services/alerting/notifiers/telegram.go +++ b/pkg/services/alerting/notifiers/telegram.go @@ -18,7 +18,7 @@ const ( ) var ( - telegramApiUrl = "https://api.telegram.org/bot%s/%s" + telegramAPIURL = "https://api.telegram.org/bot%s/%s" ) func init() { @@ -52,6 +52,8 @@ func init() { } +// TelegramNotifier is responsible for sending +// alert notifications to Telegram. 
type TelegramNotifier struct { NotifierBase BotToken string @@ -60,54 +62,55 @@ type TelegramNotifier struct { log log.Logger } +// NewTelegramNotifier is the constructor for the Telegram notifier func NewTelegramNotifier(model *models.AlertNotification) (alerting.Notifier, error) { if model.Settings == nil { return nil, alerting.ValidationError{Reason: "No Settings Supplied"} } botToken := model.Settings.Get("bottoken").MustString() - chatId := model.Settings.Get("chatid").MustString() + chatID := model.Settings.Get("chatid").MustString() uploadImage := model.Settings.Get("uploadImage").MustBool() if botToken == "" { return nil, alerting.ValidationError{Reason: "Could not find Bot Token in settings"} } - if chatId == "" { + if chatID == "" { return nil, alerting.ValidationError{Reason: "Could not find Chat Id in settings"} } return &TelegramNotifier{ NotifierBase: NewNotifierBase(model), BotToken: botToken, - ChatID: chatId, + ChatID: chatID, UploadImage: uploadImage, log: log.New("alerting.notifier.telegram"), }, nil } -func (this *TelegramNotifier) buildMessage(evalContext *alerting.EvalContext, sendImageInline bool) *models.SendWebhookSync { +func (tn *TelegramNotifier) buildMessage(evalContext *alerting.EvalContext, sendImageInline bool) *models.SendWebhookSync { if sendImageInline { - cmd, err := this.buildMessageInlineImage(evalContext) + cmd, err := tn.buildMessageInlineImage(evalContext) if err == nil { return cmd } - this.log.Error("Could not generate Telegram message with inline image.", "err", err) + tn.log.Error("Could not generate Telegram message with inline image.", "err", err) } - return this.buildMessageLinkedImage(evalContext) + return tn.buildMessageLinkedImage(evalContext) } -func (this *TelegramNotifier) buildMessageLinkedImage(evalContext *alerting.EvalContext) *models.SendWebhookSync { +func (tn *TelegramNotifier) buildMessageLinkedImage(evalContext *alerting.EvalContext) *models.SendWebhookSync { message := fmt.Sprintf("%s\nState: 
%s\nMessage: %s\n", evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { - message = message + fmt.Sprintf("URL: %s\n", ruleUrl) + message = message + fmt.Sprintf("URL: %s\n", ruleURL) } - if evalContext.ImagePublicUrl != "" { - message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicURL) } metrics := generateMetricsMessage(evalContext) @@ -115,14 +118,14 @@ func (this *TelegramNotifier) buildMessageLinkedImage(evalContext *alerting.Eval message = message + fmt.Sprintf("\nMetrics:%s", metrics) } - cmd := this.generateTelegramCmd(message, "text", "sendMessage", func(w *multipart.Writer) { + cmd := tn.generateTelegramCmd(message, "text", "sendMessage", func(w *multipart.Writer) { fw, _ := w.CreateFormField("parse_mode") fw.Write([]byte("html")) }) return cmd } -func (this *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.EvalContext) (*models.SendWebhookSync, error) { +func (tn *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.EvalContext) (*models.SendWebhookSync, error) { var imageFile *os.File var err error @@ -130,7 +133,7 @@ func (this *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.Eval defer func() { err := imageFile.Close() if err != nil { - this.log.Error("Could not close Telegram inline image.", "err", err) + tn.log.Error("Could not close Telegram inline image.", "err", err) } }() @@ -138,27 +141,27 @@ func (this *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.Eval return nil, err } - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { return nil, err } metrics := generateMetricsMessage(evalContext) - message := generateImageCaption(evalContext, ruleUrl, metrics) + message := 
generateImageCaption(evalContext, ruleURL, metrics) - cmd := this.generateTelegramCmd(message, "caption", "sendPhoto", func(w *multipart.Writer) { + cmd := tn.generateTelegramCmd(message, "caption", "sendPhoto", func(w *multipart.Writer) { fw, _ := w.CreateFormFile("photo", evalContext.ImageOnDiskPath) io.Copy(fw, imageFile) }) return cmd, nil } -func (this *TelegramNotifier) generateTelegramCmd(message string, messageField string, apiAction string, extraConf func(writer *multipart.Writer)) *models.SendWebhookSync { +func (tn *TelegramNotifier) generateTelegramCmd(message string, messageField string, apiAction string, extraConf func(writer *multipart.Writer)) *models.SendWebhookSync { var body bytes.Buffer w := multipart.NewWriter(&body) fw, _ := w.CreateFormField("chat_id") - fw.Write([]byte(this.ChatID)) + fw.Write([]byte(tn.ChatID)) fw, _ = w.CreateFormField(messageField) fw.Write([]byte(message)) @@ -167,8 +170,8 @@ func (this *TelegramNotifier) generateTelegramCmd(message string, messageField s w.Close() - this.log.Info("Sending telegram notification", "chat_id", this.ChatID, "bot_token", this.BotToken, "apiAction", apiAction) - url := fmt.Sprintf(telegramApiUrl, this.BotToken, apiAction) + tn.log.Info("Sending telegram notification", "chat_id", tn.ChatID, "bot_token", tn.BotToken, "apiAction", apiAction) + url := fmt.Sprintf(telegramAPIURL, tn.BotToken, apiAction) cmd := &models.SendWebhookSync{ Url: url, @@ -193,7 +196,7 @@ func generateMetricsMessage(evalContext *alerting.EvalContext) string { return metrics } -func generateImageCaption(evalContext *alerting.EvalContext, ruleUrl string, metrics string) string { +func generateImageCaption(evalContext *alerting.EvalContext, ruleURL string, metrics string) string { message := evalContext.GetNotificationTitle() if len(evalContext.Rule.Message) > 0 { @@ -205,8 +208,8 @@ func generateImageCaption(evalContext *alerting.EvalContext, ruleUrl string, met } - if len(ruleUrl) > 0 { - urlLine := fmt.Sprintf("\nURL: %s", 
ruleUrl) + if len(ruleURL) > 0 { + urlLine := fmt.Sprintf("\nURL: %s", ruleURL) message = appendIfPossible(message, urlLine, captionLengthLimit) } @@ -226,16 +229,17 @@ func appendIfPossible(message string, extra string, sizeLimit int) string { return message } -func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { +// Notify send an alert notification to Telegram. +func (tn *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { var cmd *models.SendWebhookSync - if evalContext.ImagePublicUrl == "" && this.UploadImage { - cmd = this.buildMessage(evalContext, true) + if evalContext.ImagePublicURL == "" && tn.UploadImage { + cmd = tn.buildMessage(evalContext, true) } else { - cmd = this.buildMessage(evalContext, false) + cmd = tn.buildMessage(evalContext, false) } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send webhook", "error", err, "webhook", this.Name) + tn.log.Error("Failed to send webhook", "error", err, "webhook", tn.Name) return err } diff --git a/pkg/services/alerting/notifiers/threema.go b/pkg/services/alerting/notifiers/threema.go index 6e4aa7bc946e..560e8c12e80b 100644 --- a/pkg/services/alerting/notifiers/threema.go +++ b/pkg/services/alerting/notifiers/threema.go @@ -68,6 +68,8 @@ func init() { } +// ThreemaNotifier is responsible for sending +// alert notifications to Threema. 
type ThreemaNotifier struct { NotifierBase GatewayID string @@ -76,6 +78,7 @@ type ThreemaNotifier struct { log log.Logger } +// NewThreemaNotifier is the constructor for the Threema notifer func NewThreemaNotifier(model *models.AlertNotification) (alerting.Notifier, error) { if model.Settings == nil { return nil, alerting.ValidationError{Reason: "No Settings Supplied"} @@ -114,6 +117,7 @@ func NewThreemaNotifier(model *models.AlertNotification) (alerting.Notifier, err }, nil } +// Notify send an alert notification to Threema func (notifier *ThreemaNotifier) Notify(evalContext *alerting.EvalContext) error { notifier.log.Info("Sending alert notification from", "threema_id", notifier.GatewayID) notifier.log.Info("Sending alert notification to", "threema_id", notifier.RecipientID) @@ -139,12 +143,12 @@ func (notifier *ThreemaNotifier) Notify(evalContext *alerting.EvalContext) error message := fmt.Sprintf("%s%s\n\n*State:* %s\n*Message:* %s\n", stateEmoji, evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { message = message + fmt.Sprintf("*URL:* %s\n", ruleURL) } - if evalContext.ImagePublicUrl != "" { - message = message + fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + message = message + fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicURL) } data.Set("text", message) diff --git a/pkg/services/alerting/notifiers/threema_test.go b/pkg/services/alerting/notifiers/threema_test.go index 2c50b7d2058c..8ec77db42129 100644 --- a/pkg/services/alerting/notifiers/threema_test.go +++ b/pkg/services/alerting/notifiers/threema_test.go @@ -113,7 +113,6 @@ func TestThreemaNotifier(t *testing.T) { So(not, ShouldBeNil) So(err.(alerting.ValidationError).Reason, ShouldEqual, "Invalid Threema Recipient ID: Must be 8 characters long") }) - }) }) } diff --git 
a/pkg/services/alerting/notifiers/victorops.go b/pkg/services/alerting/notifiers/victorops.go index 74988c21ed8f..d19ea356547b 100644 --- a/pkg/services/alerting/notifiers/victorops.go +++ b/pkg/services/alerting/notifiers/victorops.go @@ -14,7 +14,7 @@ import ( // AlertStateCritical - Victorops uses "CRITICAL" string to indicate "Alerting" state const AlertStateCritical = "CRITICAL" -const AlertStateRecovery = "RECOVERY" +const alertStateRecovery = "RECOVERY" func init() { alerting.RegisterNotifier(&alerting.NotifierPlugin{ @@ -69,17 +69,17 @@ type VictoropsNotifier struct { } // Notify sends notification to Victorops via POST to URL endpoint -func (this *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) +func (vn *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { + vn.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.ID, "notification", vn.Name) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + vn.log.Error("Failed get rule link", "error", err) return err } - if evalContext.Rule.State == models.AlertStateOK && !this.AutoResolve { - this.log.Info("Not alerting VictorOps", "state", evalContext.Rule.State, "auto resolve", this.AutoResolve) + if evalContext.Rule.State == models.AlertStateOK && !vn.AutoResolve { + vn.log.Info("Not alerting VictorOps", "state", evalContext.Rule.State, "auto resolve", vn.AutoResolve) return nil } @@ -89,10 +89,10 @@ func (this *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { } if evalContext.Rule.State == models.AlertStateOK { - messageType = AlertStateRecovery + messageType = alertStateRecovery } - fields := make(map[string]interface{}, 0) + fields := make(map[string]interface{}) fieldLimitCount := 4 for index, evt := range 
evalContext.EvalMatches { fields[evt.Metric] = evt.Value @@ -109,22 +109,22 @@ func (this *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("state_start_time", evalContext.StartTime.Unix()) bodyJSON.Set("state_message", evalContext.Rule.Message) bodyJSON.Set("monitoring_tool", "Grafana v"+setting.BuildVersion) - bodyJSON.Set("alert_url", ruleUrl) + bodyJSON.Set("alert_url", ruleURL) bodyJSON.Set("metrics", fields) if evalContext.Error != nil { bodyJSON.Set("error_message", evalContext.Error.Error()) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("image_url", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("image_url", evalContext.ImagePublicURL) } data, _ := bodyJSON.MarshalJSON() - cmd := &models.SendWebhookSync{Url: this.URL, Body: string(data)} + cmd := &models.SendWebhookSync{Url: vn.URL, Body: string(data)} if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send Victorops notification", "error", err, "webhook", this.Name) + vn.log.Error("Failed to send Victorops notification", "error", err, "webhook", vn.Name) return err } diff --git a/pkg/services/alerting/notifiers/webhook.go b/pkg/services/alerting/notifiers/webhook.go index 31b5f7a82080..f5ee99245127 100644 --- a/pkg/services/alerting/notifiers/webhook.go +++ b/pkg/services/alerting/notifiers/webhook.go @@ -40,6 +40,8 @@ func init() { } +// NewWebHookNotifier is the constructor for +// the WebHook notifier. 
func NewWebHookNotifier(model *models.AlertNotification) (alerting.Notifier, error) { url := model.Settings.Get("url").MustString() if url == "" { @@ -48,40 +50,44 @@ func NewWebHookNotifier(model *models.AlertNotification) (alerting.Notifier, err return &WebhookNotifier{ NotifierBase: NewNotifierBase(model), - Url: url, + URL: url, User: model.Settings.Get("username").MustString(), Password: model.Settings.Get("password").MustString(), - HttpMethod: model.Settings.Get("httpMethod").MustString("POST"), + HTTPMethod: model.Settings.Get("httpMethod").MustString("POST"), log: log.New("alerting.notifier.webhook"), }, nil } +// WebhookNotifier is responsible for sending +// alert notifications as webhooks. type WebhookNotifier struct { NotifierBase - Url string + URL string User string Password string - HttpMethod string + HTTPMethod string log log.Logger } -func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Sending webhook") +// Notify send alert notifications as +// webhook as http requests. 
+func (wn *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error { + wn.log.Info("Sending webhook") bodyJSON := simplejson.New() bodyJSON.Set("title", evalContext.GetNotificationTitle()) - bodyJSON.Set("ruleId", evalContext.Rule.Id) + bodyJSON.Set("ruleId", evalContext.Rule.ID) bodyJSON.Set("ruleName", evalContext.Rule.Name) bodyJSON.Set("state", evalContext.Rule.State) bodyJSON.Set("evalMatches", evalContext.EvalMatches) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { - bodyJSON.Set("ruleUrl", ruleUrl) + bodyJSON.Set("ruleUrl", ruleURL) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("imageUrl", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("imageUrl", evalContext.ImagePublicURL) } if evalContext.Rule.Message != "" { @@ -91,15 +97,15 @@ func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error { body, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: this.Url, - User: this.User, - Password: this.Password, + Url: wn.URL, + User: wn.User, + Password: wn.Password, Body: string(body), - HttpMethod: this.HttpMethod, + HttpMethod: wn.HTTPMethod, } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send webhook", "error", err, "webhook", this.Name) + wn.log.Error("Failed to send webhook", "error", err, "webhook", wn.Name) return err } diff --git a/pkg/services/alerting/notifiers/webhook_test.go b/pkg/services/alerting/notifiers/webhook_test.go index af48f1f1aa6d..b8c001b6e60b 100644 --- a/pkg/services/alerting/notifiers/webhook_test.go +++ b/pkg/services/alerting/notifiers/webhook_test.go @@ -45,7 +45,7 @@ func TestWebhookNotifier(t *testing.T) { So(err, ShouldBeNil) So(webhookNotifier.Name, ShouldEqual, "ops") So(webhookNotifier.Type, ShouldEqual, "webhook") - So(webhookNotifier.Url, ShouldEqual, "http://google.com") + So(webhookNotifier.URL, ShouldEqual, "http://google.com") }) }) }) 
diff --git a/pkg/services/alerting/reader.go b/pkg/services/alerting/reader.go index 0df826e9ea5d..c8020510ef63 100644 --- a/pkg/services/alerting/reader.go +++ b/pkg/services/alerting/reader.go @@ -2,7 +2,6 @@ package alerting import ( "sync" - "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/infra/log" @@ -10,35 +9,24 @@ import ( "github.com/grafana/grafana/pkg/models" ) -type RuleReader interface { - Fetch() []*Rule +type ruleReader interface { + fetch() []*Rule } -type DefaultRuleReader struct { +type defaultRuleReader struct { sync.RWMutex - serverPosition int - clusterSize int - log log.Logger + log log.Logger } -func NewRuleReader() *DefaultRuleReader { - ruleReader := &DefaultRuleReader{ +func newRuleReader() *defaultRuleReader { + ruleReader := &defaultRuleReader{ log: log.New("alerting.ruleReader"), } - go ruleReader.initReader() return ruleReader } -func (arr *DefaultRuleReader) initReader() { - heartbeat := time.NewTicker(time.Second * 10) - - for range heartbeat.C { - arr.heartbeat() - } -} - -func (arr *DefaultRuleReader) Fetch() []*Rule { +func (arr *defaultRuleReader) fetch() []*Rule { cmd := &models.GetAllAlertsQuery{} if err := bus.Dispatch(cmd); err != nil { @@ -58,8 +46,3 @@ func (arr *DefaultRuleReader) Fetch() []*Rule { metrics.M_Alerting_Active_Alerts.Set(float64(len(res))) return res } - -func (arr *DefaultRuleReader) heartbeat() { - arr.clusterSize = 1 - arr.serverPosition = 1 -} diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index d82421d7506d..814a3f8a21e2 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -13,23 +13,23 @@ import ( "github.com/grafana/grafana/pkg/services/rendering" ) -type ResultHandler interface { - Handle(evalContext *EvalContext) error +type resultHandler interface { + handle(evalContext *EvalContext) error } -type DefaultResultHandler struct { - notifier 
NotificationService +type defaultResultHandler struct { + notifier *notificationService log log.Logger } -func NewResultHandler(renderService rendering.Service) *DefaultResultHandler { - return &DefaultResultHandler{ +func newResultHandler(renderService rendering.Service) *defaultResultHandler { + return &defaultResultHandler{ log: log.New("alerting.resultHandler"), - notifier: NewNotificationService(renderService), + notifier: newNotificationService(renderService), } } -func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { +func (handler *defaultResultHandler) handle(evalContext *EvalContext) error { executionError := "" annotationData := simplejson.New() @@ -45,12 +45,12 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { } metrics.M_Alerting_Result_State.WithLabelValues(string(evalContext.Rule.State)).Inc() - if evalContext.ShouldUpdateAlertState() { - handler.log.Info("New state change", "alertId", evalContext.Rule.Id, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState) + if evalContext.shouldUpdateAlertState() { + handler.log.Info("New state change", "alertId", evalContext.Rule.ID, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState) cmd := &models.SetAlertStateCommand{ - AlertId: evalContext.Rule.Id, - OrgId: evalContext.Rule.OrgId, + AlertId: evalContext.Rule.ID, + OrgId: evalContext.Rule.OrgID, State: evalContext.Rule.State, Error: executionError, EvalData: annotationData, @@ -81,10 +81,10 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { // save annotation item := annotations.Item{ - OrgId: evalContext.Rule.OrgId, - DashboardId: evalContext.Rule.DashboardId, - PanelId: evalContext.Rule.PanelId, - AlertId: evalContext.Rule.Id, + OrgId: evalContext.Rule.OrgID, + DashboardId: evalContext.Rule.DashboardID, + PanelId: evalContext.Rule.PanelID, + AlertId: evalContext.Rule.ID, Text: "", NewState: string(evalContext.Rule.State), 
PrevState: string(evalContext.PrevAlertState), diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go index b5b6f4660e64..9a4065e279da 100644 --- a/pkg/services/alerting/rule.go +++ b/pkg/services/alerting/rule.go @@ -12,15 +12,19 @@ import ( ) var ( + // ErrFrequencyCannotBeZeroOrLess frequency cannot be below zero ErrFrequencyCannotBeZeroOrLess = errors.New(`"evaluate every" cannot be zero or below`) - ErrFrequencyCouldNotBeParsed = errors.New(`"evaluate every" field could not be parsed`) + + // ErrFrequencyCouldNotBeParsed frequency cannot be parsed + ErrFrequencyCouldNotBeParsed = errors.New(`"evaluate every" field could not be parsed`) ) +// Rule is the in-memory version of an alert rule. type Rule struct { - Id int64 - OrgId int64 - DashboardId int64 - PanelId int64 + ID int64 + OrgID int64 + DashboardID int64 + PanelID int64 Frequency int64 Name string Message string @@ -35,26 +39,28 @@ type Rule struct { StateChanges int64 } +// ValidationError is a typed error with meta data +// about the validation error. 
type ValidationError struct { Reason string Err error - Alertid int64 - DashboardId int64 - PanelId int64 + AlertID int64 + DashboardID int64 + PanelID int64 } func (e ValidationError) Error() string { extraInfo := e.Reason - if e.Alertid != 0 { - extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid) + if e.AlertID != 0 { + extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.AlertID) } - if e.PanelId != 0 { - extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelId) + if e.PanelID != 0 { + extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelID) } - if e.DashboardId != 0 { - extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardId) + if e.DashboardID != 0 { + extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardID) } if e.Err != nil { @@ -65,8 +71,8 @@ func (e ValidationError) Error() string { } var ( - ValueFormatRegex = regexp.MustCompile(`^\d+`) - UnitFormatRegex = regexp.MustCompile(`\w{1}$`) + valueFormatRegex = regexp.MustCompile(`^\d+`) + unitFormatRegex = regexp.MustCompile(`\w{1}$`) ) var unitMultiplier = map[string]int{ @@ -79,7 +85,7 @@ var unitMultiplier = map[string]int{ func getTimeDurationStringToSeconds(str string) (int64, error) { multiplier := 1 - matches := ValueFormatRegex.FindAllString(str, 1) + matches := valueFormatRegex.FindAllString(str, 1) if len(matches) <= 0 { return 0, ErrFrequencyCouldNotBeParsed @@ -94,7 +100,7 @@ func getTimeDurationStringToSeconds(str string) (int64, error) { return 0, ErrFrequencyCannotBeZeroOrLess } - unit := UnitFormatRegex.FindAllString(str, 1)[0] + unit := unitFormatRegex.FindAllString(str, 1)[0] if val, ok := unitMultiplier[unit]; ok { multiplier = val @@ -103,12 +109,14 @@ func getTimeDurationStringToSeconds(str string) (int64, error) { return int64(value * multiplier), nil } +// NewRuleFromDBAlert mappes an db version of +// alert to an in-memory version. 
func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { model := &Rule{} - model.Id = ruleDef.Id - model.OrgId = ruleDef.OrgId - model.DashboardId = ruleDef.DashboardId - model.PanelId = ruleDef.PanelId + model.ID = ruleDef.Id + model.OrgID = ruleDef.OrgId + model.DashboardID = ruleDef.DashboardId + model.PanelID = ruleDef.PanelId model.Name = ruleDef.Name model.Message = ruleDef.Message model.State = ruleDef.State @@ -132,7 +140,7 @@ func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { } else { uid, err := jsonModel.Get("uid").String() if err != nil { - return nil, ValidationError{Reason: "Neither id nor uid is specified, " + err.Error(), DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Reason: "Neither id nor uid is specified, " + err.Error(), DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } model.Notifications = append(model.Notifications, uid) } @@ -143,11 +151,11 @@ func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { conditionType := conditionModel.Get("type").MustString() factory, exist := conditionFactories[conditionType] if !exist { - return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } queryCondition, err := factory(conditionModel, index) if err != nil { - return nil, ValidationError{Err: err, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Err: err, DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } model.Conditions = append(model.Conditions, queryCondition) } @@ -159,10 +167,12 @@ func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { return model, nil } +// ConditionFactory is the function 
signature for creating `Conditions`. type ConditionFactory func(model *simplejson.Json, index int) (Condition, error) var conditionFactories = make(map[string]ConditionFactory) +// RegisterCondition adds support for alerting conditions. func RegisterCondition(typeName string, factory ConditionFactory) { conditionFactories[typeName] = factory } diff --git a/pkg/services/alerting/scheduler.go b/pkg/services/alerting/scheduler.go index 9a0769d25f24..b01618f40955 100644 --- a/pkg/services/alerting/scheduler.go +++ b/pkg/services/alerting/scheduler.go @@ -8,27 +8,27 @@ import ( "github.com/grafana/grafana/pkg/models" ) -type SchedulerImpl struct { +type schedulerImpl struct { jobs map[int64]*Job log log.Logger } -func NewScheduler() Scheduler { - return &SchedulerImpl{ +func newScheduler() scheduler { + return &schedulerImpl{ jobs: make(map[int64]*Job), log: log.New("alerting.scheduler"), } } -func (s *SchedulerImpl) Update(rules []*Rule) { +func (s *schedulerImpl) Update(rules []*Rule) { s.log.Debug("Scheduling update", "ruleCount", len(rules)) jobs := make(map[int64]*Job) for i, rule := range rules { var job *Job - if s.jobs[rule.Id] != nil { - job = s.jobs[rule.Id] + if s.jobs[rule.ID] != nil { + job = s.jobs[rule.ID] } else { job = &Job{ Running: false, @@ -42,13 +42,13 @@ func (s *SchedulerImpl) Update(rules []*Rule) { if job.Offset == 0 { //zero offset causes division with 0 panics. 
job.Offset = 1 } - jobs[rule.Id] = job + jobs[rule.ID] = job } s.jobs = jobs } -func (s *SchedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) { +func (s *schedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) { now := tickTime.Unix() for _, job := range s.jobs { @@ -72,7 +72,7 @@ func (s *SchedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) { } } -func (s *SchedulerImpl) enqueue(job *Job, execQueue chan *Job) { - s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.Id) +func (s *schedulerImpl) enqueue(job *Job, execQueue chan *Job) { + s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.ID) execQueue <- job } diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go index 5cb18d2b42ee..311109ed6078 100644 --- a/pkg/services/alerting/test_notification.go +++ b/pkg/services/alerting/test_notification.go @@ -11,6 +11,8 @@ import ( "github.com/grafana/grafana/pkg/models" ) +// NotificationTestCommand initiates an test +// execution of an alert notification. 
type NotificationTestCommand struct { State models.AlertStateType Name string @@ -27,7 +29,7 @@ func init() { } func handleNotificationTestCommand(cmd *NotificationTestCommand) error { - notifier := NewNotificationService(nil).(*notificationService) + notifier := newNotificationService(nil) model := &models.AlertNotification{ Name: cmd.Name, @@ -47,8 +49,8 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error { func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { testRule := &Rule{ - DashboardId: 1, - PanelId: 1, + DashboardID: 1, + PanelID: 1, Name: "Test notification", Message: "Someone is testing the alert notification within grafana.", State: models.AlertStateAlerting, @@ -56,7 +58,7 @@ func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { ctx := NewEvalContext(context.Background(), testRule) if cmd.Settings.Get("uploadImage").MustBool(true) { - ctx.ImagePublicUrl = "https://grafana.com/assets/img/blog/mixed_styles.png" + ctx.ImagePublicURL = "https://grafana.com/assets/img/blog/mixed_styles.png" } ctx.IsTestRun = true ctx.Firing = true diff --git a/pkg/services/alerting/test_rule.go b/pkg/services/alerting/test_rule.go index 736dd287dbec..1575490ea324 100644 --- a/pkg/services/alerting/test_rule.go +++ b/pkg/services/alerting/test_rule.go @@ -9,10 +9,12 @@ import ( "github.com/grafana/grafana/pkg/models" ) +// AlertTestCommand initiates an test evaluation +// of an alert rule. 
type AlertTestCommand struct { Dashboard *simplejson.Json - PanelId int64 - OrgId int64 + PanelID int64 + OrgID int64 User *models.SignedInUser Result *EvalContext @@ -26,14 +28,14 @@ func handleAlertTestCommand(cmd *AlertTestCommand) error { dash := models.NewDashboardFromJson(cmd.Dashboard) - extractor := NewDashAlertExtractor(dash, cmd.OrgId, cmd.User) + extractor := NewDashAlertExtractor(dash, cmd.OrgID, cmd.User) alerts, err := extractor.GetAlerts() if err != nil { return err } for _, alert := range alerts { - if alert.PanelId == cmd.PanelId { + if alert.PanelId == cmd.PanelID { rule, err := NewRuleFromDBAlert(alert) if err != nil { return err @@ -44,7 +46,7 @@ func handleAlertTestCommand(cmd *AlertTestCommand) error { } } - return fmt.Errorf("Could not find alert with panel id %d", cmd.PanelId) + return fmt.Errorf("Could not find alert with panel id %d", cmd.PanelID) } func testAlertRule(rule *Rule) *EvalContext { diff --git a/pkg/services/alerting/ticker.go b/pkg/services/alerting/ticker.go index 8cee2653ee9d..9702a2cda63a 100644 --- a/pkg/services/alerting/ticker.go +++ b/pkg/services/alerting/ticker.go @@ -6,7 +6,7 @@ import ( "github.com/benbjohnson/clock" ) -// ticker is a ticker to power the alerting scheduler. it's like a time.Ticker, except: +// Ticker is a ticker to power the alerting scheduler. it's like a time.Ticker, except: // * it doesn't drop ticks for slow receivers, rather, it queues up. so that callers are in control to instrument what's going on. // * it automatically ticks every second, which is the right thing in our current design // * it ticks on second marks or very shortly after. 
this provides a predictable load pattern diff --git a/pkg/services/auth/auth_token.go b/pkg/services/auth/auth_token.go index 527d054f6ee9..af23d773f65c 100644 --- a/pkg/services/auth/auth_token.go +++ b/pkg/services/auth/auth_token.go @@ -4,6 +4,7 @@ import ( "context" "crypto/sha256" "encoding/hex" + "strings" "time" "github.com/grafana/grafana/pkg/infra/serverlock" @@ -305,6 +306,36 @@ func (s *UserAuthTokenService) RevokeAllUserTokens(ctx context.Context, userId i }) } +func (s *UserAuthTokenService) BatchRevokeAllUserTokens(ctx context.Context, userIds []int64) error { + return s.SQLStore.WithTransactionalDbSession(ctx, func(dbSession *sqlstore.DBSession) error { + if len(userIds) == 0 { + return nil + } + + user_id_params := strings.Repeat(",?", len(userIds)-1) + sql := "DELETE from user_auth_token WHERE user_id IN (?" + user_id_params + ")" + + params := []interface{}{sql} + for _, v := range userIds { + params = append(params, v) + } + + res, err := dbSession.Exec(params...) 
+ if err != nil { + return err + } + + affected, err := res.RowsAffected() + if err != nil { + return err + } + + s.log.Debug("all user tokens for given users revoked", "usersCount", len(userIds), "count", affected) + + return err + }) +} + func (s *UserAuthTokenService) GetUserToken(ctx context.Context, userId, userTokenId int64) (*models.UserToken, error) { var result models.UserToken diff --git a/pkg/services/auth/auth_token_test.go b/pkg/services/auth/auth_token_test.go index 802b4602cbfc..bf12d914e970 100644 --- a/pkg/services/auth/auth_token_test.go +++ b/pkg/services/auth/auth_token_test.go @@ -117,6 +117,26 @@ func TestUserAuthToken(t *testing.T) { So(model2, ShouldBeNil) }) }) + + Convey("When revoking users tokens in a batch", func() { + Convey("Can revoke all users tokens", func() { + userIds := []int64{} + for i := 0; i < 3; i++ { + userId := userID + int64(i+1) + userIds = append(userIds, userId) + userAuthTokenService.CreateToken(context.Background(), userId, "192.168.10.11:1234", "some user agent") + } + + err := userAuthTokenService.BatchRevokeAllUserTokens(context.Background(), userIds) + So(err, ShouldBeNil) + + for _, v := range userIds { + tokens, err := userAuthTokenService.GetUserTokens(context.Background(), v) + So(err, ShouldBeNil) + So(len(tokens), ShouldEqual, 0) + } + }) + }) }) Convey("expires correctly", func() { diff --git a/pkg/services/ldap/hooks.go b/pkg/services/ldap/hooks.go deleted file mode 100644 index ece98e5e73b0..000000000000 --- a/pkg/services/ldap/hooks.go +++ /dev/null @@ -1,5 +0,0 @@ -package ldap - -var ( - hookDial func(*Auth) error -) diff --git a/pkg/services/ldap/ldap.go b/pkg/services/ldap/ldap.go index d6b5b69d7525..418673be4463 100644 --- a/pkg/services/ldap/ldap.go +++ b/pkg/services/ldap/ldap.go @@ -8,39 +8,37 @@ import ( "io/ioutil" "strings" - "github.com/davecgh/go-spew/spew" - LDAP "gopkg.in/ldap.v3" + "gopkg.in/ldap.v3" "github.com/grafana/grafana/pkg/bus" 
"github.com/grafana/grafana/pkg/infra/log" - models "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/models" ) // IConnection is interface for LDAP connection manipulation type IConnection interface { Bind(username, password string) error UnauthenticatedBind(username string) error - Search(*LDAP.SearchRequest) (*LDAP.SearchResult, error) + Add(*ldap.AddRequest) error + Del(*ldap.DelRequest) error + Search(*ldap.SearchRequest) (*ldap.SearchResult, error) StartTLS(*tls.Config) error Close() } -// IAuth is interface for LDAP authorization -type IAuth interface { - Login(query *models.LoginUserQuery) error - SyncUser(query *models.LoginUserQuery) error - GetGrafanaUserFor( - ctx *models.ReqContext, - user *UserInfo, - ) (*models.User, error) - Users() ([]*UserInfo, error) +// IServer is interface for LDAP authorization +type IServer interface { + Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error) + Users([]string) ([]*models.ExternalUserInfo, error) + InitialBind(string, string) error + Dial() error + Close() } -// Auth is basic struct of LDAP authorization -type Auth struct { - server *ServerConfig - conn IConnection +// Server is basic struct of LDAP authorization +type Server struct { + Config *ServerConfig + Connection IConnection requireSecondBind bool log log.Logger } @@ -52,28 +50,24 @@ var ( ) var dial = func(network, addr string) (IConnection, error) { - return LDAP.Dial(network, addr) + return ldap.Dial(network, addr) } // New creates the new LDAP auth -func New(server *ServerConfig) IAuth { - return &Auth{ - server: server, +func New(config *ServerConfig) IServer { + return &Server{ + Config: config, log: log.New("ldap"), } } // Dial dials in the LDAP -func (auth *Auth) Dial() error { - if hookDial != nil { - return hookDial(auth) - } - +func (server *Server) Dial() error { var err error var certPool *x509.CertPool - if auth.server.RootCACert 
!= "" { + if server.Config.RootCACert != "" { certPool = x509.NewCertPool() - for _, caCertFile := range strings.Split(auth.server.RootCACert, " ") { + for _, caCertFile := range strings.Split(server.Config.RootCACert, " ") { pem, err := ioutil.ReadFile(caCertFile) if err != nil { return err @@ -84,35 +78,35 @@ func (auth *Auth) Dial() error { } } var clientCert tls.Certificate - if auth.server.ClientCert != "" && auth.server.ClientKey != "" { - clientCert, err = tls.LoadX509KeyPair(auth.server.ClientCert, auth.server.ClientKey) + if server.Config.ClientCert != "" && server.Config.ClientKey != "" { + clientCert, err = tls.LoadX509KeyPair(server.Config.ClientCert, server.Config.ClientKey) if err != nil { return err } } - for _, host := range strings.Split(auth.server.Host, " ") { - address := fmt.Sprintf("%s:%d", host, auth.server.Port) - if auth.server.UseSSL { + for _, host := range strings.Split(server.Config.Host, " ") { + address := fmt.Sprintf("%s:%d", host, server.Config.Port) + if server.Config.UseSSL { tlsCfg := &tls.Config{ - InsecureSkipVerify: auth.server.SkipVerifySSL, + InsecureSkipVerify: server.Config.SkipVerifySSL, ServerName: host, RootCAs: certPool, } if len(clientCert.Certificate) > 0 { tlsCfg.Certificates = append(tlsCfg.Certificates, clientCert) } - if auth.server.StartTLS { - auth.conn, err = dial("tcp", address) + if server.Config.StartTLS { + server.Connection, err = dial("tcp", address) if err == nil { - if err = auth.conn.StartTLS(tlsCfg); err == nil { + if err = server.Connection.StartTLS(tlsCfg); err == nil { return nil } } } else { - auth.conn, err = LDAP.DialTLS("tcp", address, tlsCfg) + server.Connection, err = ldap.DialTLS("tcp", address, tlsCfg) } } else { - auth.conn, err = dial("tcp", address) + server.Connection, err = dial("tcp", address) } if err == nil { @@ -122,91 +116,180 @@ func (auth *Auth) Dial() error { return err } -// Login logs in the user -func (auth *Auth) Login(query *models.LoginUserQuery) error { - // connect to 
ldap server - if err := auth.Dial(); err != nil { - return err - } - defer auth.conn.Close() +// Close closes the LDAP connection +func (server *Server) Close() { + server.Connection.Close() +} - // perform initial authentication - if err := auth.initialBind(query.Username, query.Password); err != nil { - return err +// Login user by searching and serializing it +func (server *Server) Login(query *models.LoginUserQuery) ( + *models.ExternalUserInfo, error, +) { + + // Perform initial authentication + err := server.InitialBind(query.Username, query.Password) + if err != nil { + return nil, err } - // find user entry & attributes - user, err := auth.searchForUser(query.Username) + // Find user entry & attributes + users, err := server.Users([]string{query.Username}) if err != nil { - return err + return nil, err + } + + // If we couldn't find the user - + // we should show incorrect credentials err + if len(users) == 0 { + server.disableExternalUser(query.Username) + return nil, ErrInvalidCredentials } - auth.log.Debug("Ldap User found", "info", spew.Sdump(user)) + // Check if a second user bind is needed + user := users[0] - // check if a second user bind is needed - if auth.requireSecondBind { - err = auth.secondBind(user, query.Password) + if err := server.validateGrafanaUser(user); err != nil { + return nil, err + } + + if server.requireSecondBind { + err = server.secondBind(user, query.Password) if err != nil { - return err + return nil, err + } + } + + return user, nil +} + +// Users gets LDAP users +func (server *Server) Users(logins []string) ( + []*models.ExternalUserInfo, + error, +) { + var result *ldap.SearchResult + var err error + var Config = server.Config + + for _, base := range Config.SearchBaseDNs { + result, err = server.Connection.Search( + server.getSearchRequest(base, logins), + ) + if err != nil { + return nil, err + } + + if len(result.Entries) > 0 { + break } } - grafanaUser, err := auth.GetGrafanaUserFor(query.ReqContext, user) + 
serializedUsers, err := server.serializeUsers(result) if err != nil { - return err + return nil, err + } + + return serializedUsers, nil +} + +// validateGrafanaUser validates user access. +// If there are no ldap group mappings access is true +// otherwise a single group must match +func (server *Server) validateGrafanaUser(user *models.ExternalUserInfo) error { + if len(server.Config.Groups) > 0 && len(user.OrgRoles) < 1 { + server.log.Error( + "user does not belong in any of the specified LDAP groups", + "username", user.Login, + "groups", user.Groups, + ) + return ErrInvalidCredentials } - query.User = grafanaUser return nil } -// SyncUser syncs user with Grafana -func (auth *Auth) SyncUser(query *models.LoginUserQuery) error { - // connect to ldap server - err := auth.Dial() - if err != nil { - return err +// disableExternalUser marks external user as disabled in Grafana db +func (server *Server) disableExternalUser(username string) error { + // Check if external user exist in Grafana + userQuery := &models.GetExternalUserInfoByLoginQuery{ + LoginOrEmail: username, } - defer auth.conn.Close() - err = auth.serverBind() - if err != nil { + if err := bus.Dispatch(userQuery); err != nil { return err } - // find user entry & attributes - user, err := auth.searchForUser(query.Username) - if err != nil { - auth.log.Error("Failed searching for user in ldap", "error", err) - return err + userInfo := userQuery.Result + if !userInfo.IsDisabled { + server.log.Debug("Disabling external user", "user", userQuery.Result.Login) + // Mark user as disabled in grafana db + disableUserCmd := &models.DisableUserCommand{ + UserId: userQuery.Result.UserId, + IsDisabled: true, + } + + if err := bus.Dispatch(disableUserCmd); err != nil { + server.log.Debug("Error disabling external user", "user", userQuery.Result.Login, "message", err.Error()) + return err + } } + return nil +} - auth.log.Debug("Ldap User found", "info", spew.Sdump(user)) +// getSearchRequest returns LDAP search 
request for users +func (server *Server) getSearchRequest( + base string, + logins []string, +) *ldap.SearchRequest { + attributes := []string{} + + inputs := server.Config.Attr + attributes = appendIfNotEmpty( + attributes, + inputs.Username, + inputs.Surname, + inputs.Email, + inputs.Name, + inputs.MemberOf, + ) + + search := "" + for _, login := range logins { + query := strings.Replace( + server.Config.SearchFilter, + "%s", ldap.EscapeFilter(login), + -1, + ) - grafanaUser, err := auth.GetGrafanaUserFor(query.ReqContext, user) - if err != nil { - return err + search = search + query } - query.User = grafanaUser - return nil + filter := fmt.Sprintf("(|%s)", search) + + return &ldap.SearchRequest{ + BaseDN: base, + Scope: ldap.ScopeWholeSubtree, + DerefAliases: ldap.NeverDerefAliases, + Attributes: attributes, + Filter: filter, + } } -func (auth *Auth) GetGrafanaUserFor( - ctx *models.ReqContext, - user *UserInfo, -) (*models.User, error) { +// buildGrafanaUser extracts info from UserInfo model to ExternalUserInfo +func (server *Server) buildGrafanaUser(user *UserInfo) *models.ExternalUserInfo { extUser := &models.ExternalUserInfo{ - AuthModule: "ldap", + AuthModule: models.AuthModuleLDAP, AuthId: user.DN, - Name: fmt.Sprintf("%s %s", user.FirstName, user.LastName), - Login: user.Username, - Email: user.Email, - Groups: user.MemberOf, - OrgRoles: map[int64]models.RoleType{}, + Name: strings.TrimSpace( + fmt.Sprintf("%s %s", user.FirstName, user.LastName), + ), + Login: user.Username, + Email: user.Email, + Groups: user.MemberOf, + OrgRoles: map[int64]models.RoleType{}, } - for _, group := range auth.server.Groups { + for _, group := range server.Config.Groups { // only use the first match for each org if extUser.OrgRoles[group.OrgId] != "" { continue @@ -220,49 +303,28 @@ func (auth *Auth) GetGrafanaUserFor( } } - // validate that the user has access - // if there are no ldap group mappings access is true - // otherwise a single group must match - if 
len(auth.server.Groups) > 0 && len(extUser.OrgRoles) < 1 { - auth.log.Info( - "Ldap Auth: user does not belong in any of the specified ldap groups", - "username", user.Username, - "groups", user.MemberOf, - ) - return nil, ErrInvalidCredentials - } - - // add/update user in grafana - upsertUserCmd := &models.UpsertUserCommand{ - ReqContext: ctx, - ExternalUser: extUser, - SignupAllowed: setting.LdapAllowSignup, - } - - err := bus.Dispatch(upsertUserCmd) - if err != nil { - return nil, err - } - - return upsertUserCmd.Result, nil + return extUser } -func (auth *Auth) serverBind() error { +func (server *Server) serverBind() error { bindFn := func() error { - return auth.conn.Bind(auth.server.BindDN, auth.server.BindPassword) + return server.Connection.Bind( + server.Config.BindDN, + server.Config.BindPassword, + ) } - if auth.server.BindPassword == "" { + if server.Config.BindPassword == "" { bindFn = func() error { - return auth.conn.UnauthenticatedBind(auth.server.BindDN) + return server.Connection.UnauthenticatedBind(server.Config.BindDN) } } // bind_dn and bind_password to bind if err := bindFn(); err != nil { - auth.log.Info("LDAP initial bind failed, %v", err) + server.log.Info("LDAP initial bind failed, %v", err) - if ldapErr, ok := err.(*LDAP.Error); ok { + if ldapErr, ok := err.(*ldap.Error); ok { if ldapErr.ResultCode == 49 { return ErrInvalidCredentials } @@ -273,11 +335,15 @@ func (auth *Auth) serverBind() error { return nil } -func (auth *Auth) secondBind(user *UserInfo, userPassword string) error { - if err := auth.conn.Bind(user.DN, userPassword); err != nil { - auth.log.Info("Second bind failed", "error", err) +func (server *Server) secondBind( + user *models.ExternalUserInfo, + userPassword string, +) error { + err := server.Connection.Bind(user.AuthId, userPassword) + if err != nil { + server.log.Info("Second bind failed", "error", err) - if ldapErr, ok := err.(*LDAP.Error); ok { + if ldapErr, ok := err.(*ldap.Error); ok { if ldapErr.ResultCode == 
49 { return ErrInvalidCredentials } @@ -288,31 +354,32 @@ func (auth *Auth) secondBind(user *UserInfo, userPassword string) error { return nil } -func (auth *Auth) initialBind(username, userPassword string) error { - if auth.server.BindPassword != "" || auth.server.BindDN == "" { - userPassword = auth.server.BindPassword - auth.requireSecondBind = true +// InitialBind intiates first bind to LDAP server +func (server *Server) InitialBind(username, userPassword string) error { + if server.Config.BindPassword != "" || server.Config.BindDN == "" { + userPassword = server.Config.BindPassword + server.requireSecondBind = true } - bindPath := auth.server.BindDN + bindPath := server.Config.BindDN if strings.Contains(bindPath, "%s") { - bindPath = fmt.Sprintf(auth.server.BindDN, username) + bindPath = fmt.Sprintf(server.Config.BindDN, username) } bindFn := func() error { - return auth.conn.Bind(bindPath, userPassword) + return server.Connection.Bind(bindPath, userPassword) } if userPassword == "" { bindFn = func() error { - return auth.conn.UnauthenticatedBind(bindPath) + return server.Connection.UnauthenticatedBind(bindPath) } } if err := bindFn(); err != nil { - auth.log.Info("Initial bind failed", "error", err) + server.log.Info("Initial bind failed", "error", err) - if ldapErr, ok := err.(*LDAP.Error); ok { + if ldapErr, ok := err.(*ldap.Error); ok { if ldapErr.ResultCode == 49 { return ErrInvalidCredentials } @@ -323,199 +390,124 @@ func (auth *Auth) initialBind(username, userPassword string) error { return nil } -func (auth *Auth) searchForUser(username string) (*UserInfo, error) { - var searchResult *LDAP.SearchResult - var err error - - for _, searchBase := range auth.server.SearchBaseDNs { - attributes := make([]string, 0) - inputs := auth.server.Attr - attributes = appendIfNotEmpty(attributes, - inputs.Username, - inputs.Surname, - inputs.Email, - inputs.Name, - inputs.MemberOf) - - searchReq := LDAP.SearchRequest{ - BaseDN: searchBase, - Scope: 
LDAP.ScopeWholeSubtree, - DerefAliases: LDAP.NeverDerefAliases, - Attributes: attributes, - Filter: strings.Replace( - auth.server.SearchFilter, - "%s", LDAP.EscapeFilter(username), - -1, - ), - } - - auth.log.Debug("Ldap Search For User Request", "info", spew.Sdump(searchReq)) - - searchResult, err = auth.conn.Search(&searchReq) - if err != nil { - return nil, err - } - - if len(searchResult.Entries) > 0 { - break - } - } - - if len(searchResult.Entries) == 0 { - return nil, ErrInvalidCredentials - } - - if len(searchResult.Entries) > 1 { - return nil, errors.New("Ldap search matched more than one entry, please review your filter setting") - } - +// requestMemberOf use this function when POSIX LDAP schema does not support memberOf, so it manually search the groups +func (server *Server) requestMemberOf(searchResult *ldap.SearchResult) ([]string, error) { var memberOf []string - if auth.server.GroupSearchFilter == "" { - memberOf = getLdapAttrArray(auth.server.Attr.MemberOf, searchResult) - } else { - // If we are using a POSIX LDAP schema it won't support memberOf, so we manually search the groups - var groupSearchResult *LDAP.SearchResult - for _, groupSearchBase := range auth.server.GroupSearchBaseDNs { - var filter_replace string - if auth.server.GroupSearchFilterUserAttribute == "" { - filter_replace = getLdapAttr(auth.server.Attr.Username, searchResult) - } else { - filter_replace = getLdapAttr(auth.server.GroupSearchFilterUserAttribute, searchResult) - } - filter := strings.Replace( - auth.server.GroupSearchFilter, "%s", - LDAP.EscapeFilter(filter_replace), - -1, - ) - - auth.log.Info("Searching for user's groups", "filter", filter) - - // support old way of reading settings - groupIdAttribute := auth.server.Attr.MemberOf - // but prefer dn attribute if default settings are used - if groupIdAttribute == "" || groupIdAttribute == "memberOf" { - groupIdAttribute = "dn" - } - - groupSearchReq := LDAP.SearchRequest{ - BaseDN: groupSearchBase, - Scope: 
LDAP.ScopeWholeSubtree, - DerefAliases: LDAP.NeverDerefAliases, - Attributes: []string{groupIdAttribute}, - Filter: filter, - } - - groupSearchResult, err = auth.conn.Search(&groupSearchReq) - if err != nil { - return nil, err - } - - if len(groupSearchResult.Entries) > 0 { - for i := range groupSearchResult.Entries { - memberOf = append(memberOf, getLdapAttrN(groupIdAttribute, groupSearchResult, i)) - } - break - } + for _, groupSearchBase := range server.Config.GroupSearchBaseDNs { + var filterReplace string + if server.Config.GroupSearchFilterUserAttribute == "" { + filterReplace = getLDAPAttr(server.Config.Attr.Username, searchResult) + } else { + filterReplace = getLDAPAttr(server.Config.GroupSearchFilterUserAttribute, searchResult) } - } - - return &UserInfo{ - DN: searchResult.Entries[0].DN, - LastName: getLdapAttr(auth.server.Attr.Surname, searchResult), - FirstName: getLdapAttr(auth.server.Attr.Name, searchResult), - Username: getLdapAttr(auth.server.Attr.Username, searchResult), - Email: getLdapAttr(auth.server.Attr.Email, searchResult), - MemberOf: memberOf, - }, nil -} - -func (ldap *Auth) Users() ([]*UserInfo, error) { - var result *LDAP.SearchResult - var err error - server := ldap.server - if err := ldap.Dial(); err != nil { - return nil, err - } - defer ldap.conn.Close() - - for _, base := range server.SearchBaseDNs { - attributes := make([]string, 0) - inputs := server.Attr - attributes = appendIfNotEmpty( - attributes, - inputs.Username, - inputs.Surname, - inputs.Email, - inputs.Name, - inputs.MemberOf, + filter := strings.Replace( + server.Config.GroupSearchFilter, "%s", + ldap.EscapeFilter(filterReplace), + -1, ) - req := LDAP.SearchRequest{ - BaseDN: base, - Scope: LDAP.ScopeWholeSubtree, - DerefAliases: LDAP.NeverDerefAliases, - Attributes: attributes, + server.log.Info("Searching for user's groups", "filter", filter) + + // support old way of reading settings + groupIDAttribute := server.Config.Attr.MemberOf + // but prefer dn attribute if 
default settings are used + if groupIDAttribute == "" || groupIDAttribute == "memberOf" { + groupIDAttribute = "dn" + } - // Doing a star here to get all the users in one go - Filter: strings.Replace(server.SearchFilter, "%s", "*", -1), + groupSearchReq := ldap.SearchRequest{ + BaseDN: groupSearchBase, + Scope: ldap.ScopeWholeSubtree, + DerefAliases: ldap.NeverDerefAliases, + Attributes: []string{groupIDAttribute}, + Filter: filter, } - result, err = ldap.conn.Search(&req) + groupSearchResult, err := server.Connection.Search(&groupSearchReq) if err != nil { return nil, err } - if len(result.Entries) > 0 { + if len(groupSearchResult.Entries) > 0 { + for i := range groupSearchResult.Entries { + memberOf = append(memberOf, getLDAPAttrN(groupIDAttribute, groupSearchResult, i)) + } break } } - return ldap.serializeUsers(result), nil + return memberOf, nil } -func (ldap *Auth) serializeUsers(users *LDAP.SearchResult) []*UserInfo { - var serialized []*UserInfo +// serializeUsers serializes the users +// from LDAP result to ExternalInfo struct +func (server *Server) serializeUsers( + users *ldap.SearchResult, +) ([]*models.ExternalUserInfo, error) { + var serialized []*models.ExternalUserInfo for index := range users.Entries { - serialize := &UserInfo{ - DN: getLdapAttrN( + memberOf, err := server.getMemberOf(users) + if err != nil { + return nil, err + } + + userInfo := &UserInfo{ + DN: getLDAPAttrN( "dn", users, index, ), - LastName: getLdapAttrN( - ldap.server.Attr.Surname, - users, - index, - ), - FirstName: getLdapAttrN( - ldap.server.Attr.Name, + LastName: getLDAPAttrN( + server.Config.Attr.Surname, users, index, ), - Username: getLdapAttrN( - ldap.server.Attr.Username, + FirstName: getLDAPAttrN( + server.Config.Attr.Name, users, index, ), - Email: getLdapAttrN( - ldap.server.Attr.Email, + Username: getLDAPAttrN( + server.Config.Attr.Username, users, index, ), - MemberOf: getLdapAttrArrayN( - ldap.server.Attr.MemberOf, + Email: getLDAPAttrN( + 
server.Config.Attr.Email, users, index, ), + MemberOf: memberOf, } - serialized = append(serialized, serialize) + serialized = append( + serialized, + server.buildGrafanaUser(userInfo), + ) + } + + return serialized, nil +} + +// getMemberOf finds memberOf property or request it +func (server *Server) getMemberOf(search *ldap.SearchResult) ( + []string, error, +) { + if server.Config.GroupSearchFilter == "" { + memberOf := getLDAPAttrArray(server.Config.Attr.MemberOf, search) + + return memberOf, nil + } + + memberOf, err := server.requestMemberOf(search) + if err != nil { + return nil, err } - return serialized + return memberOf, nil } func appendIfNotEmpty(slice []string, values ...string) []string { @@ -527,11 +519,11 @@ func appendIfNotEmpty(slice []string, values ...string) []string { return slice } -func getLdapAttr(name string, result *LDAP.SearchResult) string { - return getLdapAttrN(name, result, 0) +func getLDAPAttr(name string, result *ldap.SearchResult) string { + return getLDAPAttrN(name, result, 0) } -func getLdapAttrN(name string, result *LDAP.SearchResult, n int) string { +func getLDAPAttrN(name string, result *ldap.SearchResult, n int) string { if strings.ToLower(name) == "dn" { return result.Entries[n].DN } @@ -545,11 +537,11 @@ func getLdapAttrN(name string, result *LDAP.SearchResult, n int) string { return "" } -func getLdapAttrArray(name string, result *LDAP.SearchResult) []string { - return getLdapAttrArrayN(name, result, 0) +func getLDAPAttrArray(name string, result *ldap.SearchResult) []string { + return getLDAPAttrArrayN(name, result, 0) } -func getLdapAttrArrayN(name string, result *LDAP.SearchResult, n int) []string { +func getLDAPAttrArrayN(name string, result *ldap.SearchResult, n int) []string { for _, attr := range result.Entries[n].Attributes { if attr.Name == name { return attr.Values diff --git a/pkg/services/ldap/ldap_helpers_test.go b/pkg/services/ldap/ldap_helpers_test.go new file mode 100644 index 000000000000..48e6bce8b5ba --- 
/dev/null +++ b/pkg/services/ldap/ldap_helpers_test.go @@ -0,0 +1,140 @@ +package ldap + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" + "gopkg.in/ldap.v3" + + "github.com/grafana/grafana/pkg/infra/log" +) + +func TestLDAPHelpers(t *testing.T) { + Convey("serializeUsers()", t, func() { + Convey("simple case", func() { + server := &Server{ + Config: &ServerConfig{ + Attr: AttributeMap{ + Username: "username", + Name: "name", + MemberOf: "memberof", + Email: "email", + }, + SearchBaseDNs: []string{"BaseDNHere"}, + }, + Connection: &MockConnection{}, + log: log.New("test-logger"), + } + + entry := ldap.Entry{ + DN: "dn", Attributes: []*ldap.EntryAttribute{ + {Name: "username", Values: []string{"roelgerrits"}}, + {Name: "surname", Values: []string{"Gerrits"}}, + {Name: "email", Values: []string{"roel@test.com"}}, + {Name: "name", Values: []string{"Roel"}}, + {Name: "memberof", Values: []string{"admins"}}, + }} + users := &ldap.SearchResult{Entries: []*ldap.Entry{&entry}} + + result, err := server.serializeUsers(users) + + So(err, ShouldBeNil) + So(result[0].Login, ShouldEqual, "roelgerrits") + So(result[0].Email, ShouldEqual, "roel@test.com") + So(result[0].Groups, ShouldContain, "admins") + }) + + Convey("without lastname", func() { + server := &Server{ + Config: &ServerConfig{ + Attr: AttributeMap{ + Username: "username", + Name: "name", + MemberOf: "memberof", + Email: "email", + }, + SearchBaseDNs: []string{"BaseDNHere"}, + }, + Connection: &MockConnection{}, + log: log.New("test-logger"), + } + + entry := ldap.Entry{ + DN: "dn", Attributes: []*ldap.EntryAttribute{ + {Name: "username", Values: []string{"roelgerrits"}}, + {Name: "email", Values: []string{"roel@test.com"}}, + {Name: "name", Values: []string{"Roel"}}, + {Name: "memberof", Values: []string{"admins"}}, + }} + users := &ldap.SearchResult{Entries: []*ldap.Entry{&entry}} + + result, err := server.serializeUsers(users) + + So(err, ShouldBeNil) + So(result[0].Name, 
ShouldEqual, "Roel") + }) + }) + + Convey("serverBind()", t, func() { + Convey("Given bind dn and password configured", func() { + connection := &MockConnection{} + var actualUsername, actualPassword string + connection.bindProvider = func(username, password string) error { + actualUsername = username + actualPassword = password + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "o=users,dc=grafana,dc=org", + BindPassword: "bindpwd", + }, + } + err := server.serverBind() + So(err, ShouldBeNil) + So(actualUsername, ShouldEqual, "o=users,dc=grafana,dc=org") + So(actualPassword, ShouldEqual, "bindpwd") + }) + + Convey("Given bind dn configured", func() { + connection := &MockConnection{} + unauthenticatedBindWasCalled := false + var actualUsername string + connection.unauthenticatedBindProvider = func(username string) error { + unauthenticatedBindWasCalled = true + actualUsername = username + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "o=users,dc=grafana,dc=org", + }, + } + err := server.serverBind() + So(err, ShouldBeNil) + So(unauthenticatedBindWasCalled, ShouldBeTrue) + So(actualUsername, ShouldEqual, "o=users,dc=grafana,dc=org") + }) + + Convey("Given empty bind dn and password", func() { + connection := &MockConnection{} + unauthenticatedBindWasCalled := false + var actualUsername string + connection.unauthenticatedBindProvider = func(username string) error { + unauthenticatedBindWasCalled = true + actualUsername = username + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{}, + } + err := server.serverBind() + So(err, ShouldBeNil) + So(unauthenticatedBindWasCalled, ShouldBeTrue) + So(actualUsername, ShouldBeEmpty) + }) + }) +} diff --git a/pkg/services/ldap/ldap_login_test.go b/pkg/services/ldap/ldap_login_test.go index b8dd502667ea..573a9a560e84 100644 --- a/pkg/services/ldap/ldap_login_test.go +++ 
b/pkg/services/ldap/ldap_login_test.go @@ -7,23 +7,94 @@ import ( "gopkg.in/ldap.v3" "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/user" ) -func TestLdapLogin(t *testing.T) { - Convey("Login using ldap", t, func() { - AuthScenario("When login with invalid credentials", func(scenario *scenarioContext) { - conn := &mockLdapConn{} +func TestLDAPLogin(t *testing.T) { + Convey("Login()", t, func() { + serverScenario("When user is log in and updated", func(sc *scenarioContext) { + // arrange + mockConnection := &MockConnection{} + + server := &Server{ + Config: &ServerConfig{ + Host: "", + RootCACert: "", + Groups: []*GroupToOrgRole{ + {GroupDN: "*", OrgRole: "Admin"}, + }, + Attr: AttributeMap{ + Username: "username", + Surname: "surname", + Email: "email", + Name: "name", + MemberOf: "memberof", + }, + SearchBaseDNs: []string{"BaseDNHere"}, + }, + Connection: mockConnection, + log: log.New("test-logger"), + } + + entry := ldap.Entry{ + DN: "dn", Attributes: []*ldap.EntryAttribute{ + {Name: "username", Values: []string{"roelgerrits"}}, + {Name: "surname", Values: []string{"Gerrits"}}, + {Name: "email", Values: []string{"roel@test.com"}}, + {Name: "name", Values: []string{"Roel"}}, + {Name: "memberof", Values: []string{"admins"}}, + }} + result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} + mockConnection.setSearchResult(&result) + + query := &models.LoginUserQuery{ + Username: "roelgerrits", + } + + sc.userQueryReturns(&models.User{ + Id: 1, + Email: "roel@test.net", + Name: "Roel Gerrits", + Login: "roelgerrits", + }) + sc.userOrgsQueryReturns([]*models.UserOrgDTO{}) + + // act + extUser, _ := server.Login(query) + userInfo, err := user.Upsert(&user.UpsertArgs{ + SignupAllowed: true, + ExternalUser: extUser, + }) + + // assert + + // Check absence of the error + So(err, ShouldBeNil) + + // User should be searched in ldap + So(mockConnection.SearchCalled, 
ShouldBeTrue) + + // Info should be updated (email differs) + So(userInfo.Email, ShouldEqual, "roel@test.com") + + // User should have admin privileges + So(sc.addOrgUserCmd.Role, ShouldEqual, "Admin") + }) + + serverScenario("When login with invalid credentials", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{} result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} - conn.setSearchResult(&result) + connection.setSearchResult(&result) - conn.bindProvider = func(username, password string) error { + connection.bindProvider = func(username, password string) error { return &ldap.Error{ ResultCode: 49, } } - auth := &Auth{ - server: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -31,19 +102,19 @@ func TestLdapLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - conn: conn, - log: log.New("test-logger"), + Connection: connection, + log: log.New("test-logger"), } - err := auth.Login(scenario.loginUserQuery) + _, err := server.Login(scenario.loginUserQuery) Convey("it should return invalid credentials error", func() { So(err, ShouldEqual, ErrInvalidCredentials) }) }) - AuthScenario("When login with valid credentials", func(scenario *scenarioContext) { - conn := &mockLdapConn{} + serverScenario("When login with valid credentials", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{ DN: "dn", Attributes: []*ldap.EntryAttribute{ {Name: "username", Values: []string{"markelog"}}, @@ -54,13 +125,13 @@ func TestLdapLogin(t *testing.T) { }, } result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} - conn.setSearchResult(&result) + connection.setSearchResult(&result) - conn.bindProvider = func(username, password string) error { + connection.bindProvider = func(username, password string) error { return nil } - auth := &Auth{ - server: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Attr: 
AttributeMap{ Username: "username", Name: "name", @@ -68,18 +139,110 @@ func TestLdapLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - conn: conn, - log: log.New("test-logger"), + Connection: connection, + log: log.New("test-logger"), } - err := auth.Login(scenario.loginUserQuery) + resp, err := server.Login(scenario.loginUserQuery) - Convey("it should not return error", func() { - So(err, ShouldBeNil) + So(err, ShouldBeNil) + So(resp.Login, ShouldEqual, "markelog") + }) + + serverScenario("When user not found in LDAP, but exist in Grafana", func(scenario *scenarioContext) { + connection := &MockConnection{} + result := ldap.SearchResult{Entries: []*ldap.Entry{}} + connection.setSearchResult(&result) + + externalUser := &models.ExternalUserInfo{UserId: 42, IsDisabled: false} + scenario.getExternalUserInfoByLoginQueryReturns(externalUser) + + connection.bindProvider = func(username, password string) error { + return nil + } + server := &Server{ + Config: &ServerConfig{ + SearchBaseDNs: []string{"BaseDNHere"}, + }, + Connection: connection, + log: log.New("test-logger"), + } + + _, err := server.Login(scenario.loginUserQuery) + + Convey("it should disable user", func() { + So(scenario.disableExternalUserCalled, ShouldBeTrue) + So(scenario.disableUserCmd.IsDisabled, ShouldBeTrue) + So(scenario.disableUserCmd.UserId, ShouldEqual, 42) + }) + + Convey("it should return invalid credentials error", func() { + So(err, ShouldEqual, ErrInvalidCredentials) + }) + }) + + serverScenario("When user not found in LDAP, and disabled in Grafana already", func(scenario *scenarioContext) { + connection := &MockConnection{} + result := ldap.SearchResult{Entries: []*ldap.Entry{}} + connection.setSearchResult(&result) + + externalUser := &models.ExternalUserInfo{UserId: 42, IsDisabled: true} + scenario.getExternalUserInfoByLoginQueryReturns(externalUser) + + connection.bindProvider = func(username, password string) error { + return nil + } + server := &Server{ + Config: 
&ServerConfig{ + SearchBaseDNs: []string{"BaseDNHere"}, + }, + Connection: connection, + log: log.New("test-logger"), + } + + _, err := server.Login(scenario.loginUserQuery) + + Convey("it should't call disable function", func() { + So(scenario.disableExternalUserCalled, ShouldBeFalse) + }) + + Convey("it should return invalid credentials error", func() { + So(err, ShouldEqual, ErrInvalidCredentials) }) + }) + + serverScenario("When user found in LDAP, and disabled in Grafana", func(scenario *scenarioContext) { + connection := &MockConnection{} + entry := ldap.Entry{} + result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} + connection.setSearchResult(&result) + scenario.userQueryReturns(&models.User{Id: 42, IsDisabled: true}) - Convey("it should get user", func() { - So(scenario.loginUserQuery.User.Login, ShouldEqual, "markelog") + connection.bindProvider = func(username, password string) error { + return nil + } + server := &Server{ + Config: &ServerConfig{ + SearchBaseDNs: []string{"BaseDNHere"}, + }, + Connection: connection, + log: log.New("test-logger"), + } + + extUser, _ := server.Login(scenario.loginUserQuery) + _, err := user.Upsert(&user.UpsertArgs{ + SignupAllowed: true, + ExternalUser: extUser, + }) + + Convey("it should re-enable user", func() { + So(scenario.disableExternalUserCalled, ShouldBeTrue) + So(scenario.disableUserCmd.IsDisabled, ShouldBeFalse) + So(scenario.disableUserCmd.UserId, ShouldEqual, 42) + }) + + Convey("it should not return error", func() { + So(err, ShouldBeNil) }) }) }) diff --git a/pkg/services/ldap/ldap_test.go b/pkg/services/ldap/ldap_test.go index 4da041ae1642..98b15ec44576 100644 --- a/pkg/services/ldap/ldap_test.go +++ b/pkg/services/ldap/ldap_test.go @@ -1,496 +1,118 @@ package ldap import ( - "context" "testing" . 
"github.com/smartystreets/goconvey/convey" - "gopkg.in/ldap.v3" + ldap "gopkg.in/ldap.v3" - "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/infra/log" - m "github.com/grafana/grafana/pkg/models" ) -func TestAuth(t *testing.T) { - Convey("initialBind", t, func() { - Convey("Given bind dn and password configured", func() { - conn := &mockLdapConn{} - var actualUsername, actualPassword string - conn.bindProvider = func(username, password string) error { - actualUsername = username - actualPassword = password - return nil +func TestPublicAPI(t *testing.T) { + Convey("Users()", t, func() { + Convey("find one user", func() { + MockConnection := &MockConnection{} + entry := ldap.Entry{ + DN: "dn", Attributes: []*ldap.EntryAttribute{ + {Name: "username", Values: []string{"roelgerrits"}}, + {Name: "surname", Values: []string{"Gerrits"}}, + {Name: "email", Values: []string{"roel@test.com"}}, + {Name: "name", Values: []string{"Roel"}}, + {Name: "memberof", Values: []string{"admins"}}, + }} + result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} + MockConnection.setSearchResult(&result) + + // Set up attribute map without surname and email + server := &Server{ + Config: &ServerConfig{ + Attr: AttributeMap{ + Username: "username", + Name: "name", + MemberOf: "memberof", + }, + SearchBaseDNs: []string{"BaseDNHere"}, + }, + Connection: MockConnection, + log: log.New("test-logger"), } - Auth := &Auth{ - conn: conn, - server: &ServerConfig{ - BindDN: "cn=%s,o=users,dc=grafana,dc=org", - BindPassword: "bindpwd", - }, - } - err := Auth.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(Auth.requireSecondBind, ShouldBeTrue) - So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") - So(actualPassword, ShouldEqual, "bindpwd") - }) - Convey("Given bind dn configured", func() { - conn := &mockLdapConn{} - var actualUsername, actualPassword string - conn.bindProvider = func(username, password string) error { - 
actualUsername = username - actualPassword = password - return nil - } - Auth := &Auth{ - conn: conn, - server: &ServerConfig{ - BindDN: "cn=%s,o=users,dc=grafana,dc=org", - }, - } - err := Auth.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(Auth.requireSecondBind, ShouldBeFalse) - So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") - So(actualPassword, ShouldEqual, "pwd") - }) + searchResult, err := server.Users([]string{"roelgerrits"}) - Convey("Given empty bind dn and password", func() { - conn := &mockLdapConn{} - unauthenticatedBindWasCalled := false - var actualUsername string - conn.unauthenticatedBindProvider = func(username string) error { - unauthenticatedBindWasCalled = true - actualUsername = username - return nil - } - Auth := &Auth{ - conn: conn, - server: &ServerConfig{}, - } - err := Auth.initialBind("user", "pwd") So(err, ShouldBeNil) - So(Auth.requireSecondBind, ShouldBeTrue) - So(unauthenticatedBindWasCalled, ShouldBeTrue) - So(actualUsername, ShouldBeEmpty) + So(searchResult, ShouldNotBeNil) + + // User should be searched in ldap + So(MockConnection.SearchCalled, ShouldBeTrue) + + // No empty attributes should be added to the search request + So(len(MockConnection.SearchAttributes), ShouldEqual, 3) }) }) - Convey("serverBind", t, func() { + Convey("InitialBind", t, func() { Convey("Given bind dn and password configured", func() { - conn := &mockLdapConn{} + connection := &MockConnection{} var actualUsername, actualPassword string - conn.bindProvider = func(username, password string) error { + connection.bindProvider = func(username, password string) error { actualUsername = username actualPassword = password return nil } - Auth := &Auth{ - conn: conn, - server: &ServerConfig{ - BindDN: "o=users,dc=grafana,dc=org", + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "cn=%s,o=users,dc=grafana,dc=org", BindPassword: "bindpwd", }, } - err := Auth.serverBind() + err := server.InitialBind("user", 
"pwd") So(err, ShouldBeNil) - So(actualUsername, ShouldEqual, "o=users,dc=grafana,dc=org") + So(server.requireSecondBind, ShouldBeTrue) + So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") So(actualPassword, ShouldEqual, "bindpwd") }) Convey("Given bind dn configured", func() { - conn := &mockLdapConn{} - unauthenticatedBindWasCalled := false - var actualUsername string - conn.unauthenticatedBindProvider = func(username string) error { - unauthenticatedBindWasCalled = true + connection := &MockConnection{} + var actualUsername, actualPassword string + connection.bindProvider = func(username, password string) error { actualUsername = username + actualPassword = password return nil } - Auth := &Auth{ - conn: conn, - server: &ServerConfig{ - BindDN: "o=users,dc=grafana,dc=org", + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "cn=%s,o=users,dc=grafana,dc=org", }, } - err := Auth.serverBind() + err := server.InitialBind("user", "pwd") So(err, ShouldBeNil) - So(unauthenticatedBindWasCalled, ShouldBeTrue) - So(actualUsername, ShouldEqual, "o=users,dc=grafana,dc=org") + So(server.requireSecondBind, ShouldBeFalse) + So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") + So(actualPassword, ShouldEqual, "pwd") }) Convey("Given empty bind dn and password", func() { - conn := &mockLdapConn{} + connection := &MockConnection{} unauthenticatedBindWasCalled := false var actualUsername string - conn.unauthenticatedBindProvider = func(username string) error { + connection.unauthenticatedBindProvider = func(username string) error { unauthenticatedBindWasCalled = true actualUsername = username return nil } - Auth := &Auth{ - conn: conn, - server: &ServerConfig{}, + server := &Server{ + Connection: connection, + Config: &ServerConfig{}, } - err := Auth.serverBind() + err := server.InitialBind("user", "pwd") So(err, ShouldBeNil) + So(server.requireSecondBind, ShouldBeTrue) So(unauthenticatedBindWasCalled, ShouldBeTrue) 
So(actualUsername, ShouldBeEmpty) }) }) - - Convey("When translating ldap user to grafana user", t, func() { - - var user1 = &m.User{} - - bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.UpsertUserCommand) error { - cmd.Result = user1 - cmd.Result.Login = "torkelo" - return nil - }) - - Convey("Given no ldap group map match", func() { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{{}}, - }) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{}) - - So(err, ShouldEqual, ErrInvalidCredentials) - }) - - AuthScenario("Given wildcard group match", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "*", OrgRole: "Admin"}, - }, - }) - - sc.userQueryReturns(user1) - - result, err := Auth.GetGrafanaUserFor(nil, &UserInfo{}) - So(err, ShouldBeNil) - So(result, ShouldEqual, user1) - }) - - AuthScenario("Given exact group match", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=users", OrgRole: "Admin"}, - }, - }) - - sc.userQueryReturns(user1) - - result, err := Auth.GetGrafanaUserFor(nil, &UserInfo{MemberOf: []string{"cn=users"}}) - So(err, ShouldBeNil) - So(result, ShouldEqual, user1) - }) - - AuthScenario("Given group match with different case", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=users", OrgRole: "Admin"}, - }, - }) - - sc.userQueryReturns(user1) - - result, err := Auth.GetGrafanaUserFor(nil, &UserInfo{MemberOf: []string{"CN=users"}}) - So(err, ShouldBeNil) - So(result, ShouldEqual, user1) - }) - - AuthScenario("Given no existing grafana user", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=admin", OrgRole: "Admin"}, - {GroupDN: "cn=editor", OrgRole: "Editor"}, - {GroupDN: "*", OrgRole: "Viewer"}, - }, - }) - - sc.userQueryReturns(nil) - - result, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - DN: "torkelo", - Username: "torkelo", - 
Email: "my@email.com", - MemberOf: []string{"cn=editor"}, - }) - - So(err, ShouldBeNil) - - Convey("Should return new user", func() { - So(result.Login, ShouldEqual, "torkelo") - }) - - Convey("Should set isGrafanaAdmin to false by default", func() { - So(result.IsAdmin, ShouldBeFalse) - }) - - }) - - }) - - Convey("When syncing ldap groups to grafana org roles", t, func() { - AuthScenario("given no current user orgs", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=users", OrgRole: "Admin"}, - }, - }) - - sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - MemberOf: []string{"cn=users"}, - }) - - Convey("Should create new org user", func() { - So(err, ShouldBeNil) - So(sc.addOrgUserCmd, ShouldNotBeNil) - So(sc.addOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN) - }) - }) - - AuthScenario("given different current org role", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=users", OrgId: 1, OrgRole: "Admin"}, - }, - }) - - sc.userOrgsQueryReturns([]*m.UserOrgDTO{{OrgId: 1, Role: m.ROLE_EDITOR}}) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - MemberOf: []string{"cn=users"}, - }) - - Convey("Should update org role", func() { - So(err, ShouldBeNil) - So(sc.updateOrgUserCmd, ShouldNotBeNil) - So(sc.updateOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN) - So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) - }) - }) - - AuthScenario("given current org role is removed in ldap", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=users", OrgId: 2, OrgRole: "Admin"}, - }, - }) - - sc.userOrgsQueryReturns([]*m.UserOrgDTO{ - {OrgId: 1, Role: m.ROLE_EDITOR}, - {OrgId: 2, Role: m.ROLE_EDITOR}, - }) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - MemberOf: []string{"cn=users"}, - }) - - Convey("Should remove org role", func() { - So(err, ShouldBeNil) - So(sc.removeOrgUserCmd, 
ShouldNotBeNil) - So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 2) - }) - }) - - AuthScenario("given org role is updated in config", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=admin", OrgId: 1, OrgRole: "Admin"}, - {GroupDN: "cn=users", OrgId: 1, OrgRole: "Viewer"}, - }, - }) - - sc.userOrgsQueryReturns([]*m.UserOrgDTO{{OrgId: 1, Role: m.ROLE_EDITOR}}) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - MemberOf: []string{"cn=users"}, - }) - - Convey("Should update org role", func() { - So(err, ShouldBeNil) - So(sc.removeOrgUserCmd, ShouldBeNil) - So(sc.updateOrgUserCmd, ShouldNotBeNil) - So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) - }) - }) - - AuthScenario("given multiple matching ldap groups", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=admins", OrgId: 1, OrgRole: "Admin"}, - {GroupDN: "*", OrgId: 1, OrgRole: "Viewer"}, - }, - }) - - sc.userOrgsQueryReturns([]*m.UserOrgDTO{{OrgId: 1, Role: m.ROLE_ADMIN}}) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - MemberOf: []string{"cn=admins"}, - }) - - Convey("Should take first match, and ignore subsequent matches", func() { - So(err, ShouldBeNil) - So(sc.updateOrgUserCmd, ShouldBeNil) - So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) - }) - }) - - AuthScenario("given multiple matching ldap groups and no existing groups", func(sc *scenarioContext) { - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=admins", OrgId: 1, OrgRole: "Admin"}, - {GroupDN: "*", OrgId: 1, OrgRole: "Viewer"}, - }, - }) - - sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - MemberOf: []string{"cn=admins"}, - }) - - Convey("Should take first match, and ignore subsequent matches", func() { - So(err, ShouldBeNil) - So(sc.addOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN) - So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) - }) - - Convey("Should not update permissions unless 
specified", func() { - So(err, ShouldBeNil) - So(sc.updateUserPermissionsCmd, ShouldBeNil) - }) - }) - - AuthScenario("given ldap groups with grafana_admin=true", func(sc *scenarioContext) { - trueVal := true - - Auth := New(&ServerConfig{ - Groups: []*GroupToOrgRole{ - {GroupDN: "cn=admins", OrgId: 1, OrgRole: "Admin", IsGrafanaAdmin: &trueVal}, - }, - }) - - sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) - _, err := Auth.GetGrafanaUserFor(nil, &UserInfo{ - MemberOf: []string{"cn=admins"}, - }) - - Convey("Should create user with admin set to true", func() { - So(err, ShouldBeNil) - So(sc.updateUserPermissionsCmd.IsGrafanaAdmin, ShouldBeTrue) - }) - }) - }) - - Convey("When calling SyncUser", t, func() { - mockLdapConnection := &mockLdapConn{} - - auth := &Auth{ - server: &ServerConfig{ - Host: "", - RootCACert: "", - Groups: []*GroupToOrgRole{ - {GroupDN: "*", OrgRole: "Admin"}, - }, - Attr: AttributeMap{ - Username: "username", - Surname: "surname", - Email: "email", - Name: "name", - MemberOf: "memberof", - }, - SearchBaseDNs: []string{"BaseDNHere"}, - }, - conn: mockLdapConnection, - log: log.New("test-logger"), - } - - dialCalled := false - dial = func(network, addr string) (IConnection, error) { - dialCalled = true - return mockLdapConnection, nil - } - - entry := ldap.Entry{ - DN: "dn", Attributes: []*ldap.EntryAttribute{ - {Name: "username", Values: []string{"roelgerrits"}}, - {Name: "surname", Values: []string{"Gerrits"}}, - {Name: "email", Values: []string{"roel@test.com"}}, - {Name: "name", Values: []string{"Roel"}}, - {Name: "memberof", Values: []string{"admins"}}, - }} - result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} - mockLdapConnection.setSearchResult(&result) - - AuthScenario("When ldapUser found call syncInfo and orgRoles", func(sc *scenarioContext) { - // arrange - query := &m.LoginUserQuery{ - Username: "roelgerrits", - } - - hookDial = nil - - sc.userQueryReturns(&m.User{ - Id: 1, - Email: "roel@test.net", - Name: "Roel Gerrits", - 
Login: "roelgerrits", - }) - sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) - - // act - syncErrResult := auth.SyncUser(query) - - // assert - So(dialCalled, ShouldBeTrue) - So(syncErrResult, ShouldBeNil) - // User should be searched in ldap - So(mockLdapConnection.searchCalled, ShouldBeTrue) - // Info should be updated (email differs) - So(sc.updateUserCmd.Email, ShouldEqual, "roel@test.com") - // User should have admin privileges - So(sc.addOrgUserCmd.UserId, ShouldEqual, 1) - So(sc.addOrgUserCmd.Role, ShouldEqual, "Admin") - }) - }) - - Convey("When searching for a user and not all five attributes are mapped", t, func() { - mockLdapConnection := &mockLdapConn{} - entry := ldap.Entry{ - DN: "dn", Attributes: []*ldap.EntryAttribute{ - {Name: "username", Values: []string{"roelgerrits"}}, - {Name: "surname", Values: []string{"Gerrits"}}, - {Name: "email", Values: []string{"roel@test.com"}}, - {Name: "name", Values: []string{"Roel"}}, - {Name: "memberof", Values: []string{"admins"}}, - }} - result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} - mockLdapConnection.setSearchResult(&result) - - // Set up attribute map without surname and email - Auth := &Auth{ - server: &ServerConfig{ - Attr: AttributeMap{ - Username: "username", - Name: "name", - MemberOf: "memberof", - }, - SearchBaseDNs: []string{"BaseDNHere"}, - }, - conn: mockLdapConnection, - log: log.New("test-logger"), - } - - searchResult, err := Auth.searchForUser("roelgerrits") - - So(err, ShouldBeNil) - So(searchResult, ShouldNotBeNil) - - // User should be searched in ldap - So(mockLdapConnection.searchCalled, ShouldBeTrue) - - // No empty attributes should be added to the search request - So(len(mockLdapConnection.searchAttributes), ShouldEqual, 3) - }) } diff --git a/pkg/services/ldap/settings.go b/pkg/services/ldap/settings.go index 0a0f66d9d734..df63f10a6fa6 100644 --- a/pkg/services/ldap/settings.go +++ b/pkg/services/ldap/settings.go @@ -13,10 +13,12 @@ import ( 
"github.com/grafana/grafana/pkg/util/errutil" ) +// Config holds list of connections to LDAP type Config struct { Servers []*ServerConfig `toml:"servers"` } +// ServerConfig holds connection data to LDAP type ServerConfig struct { Host string `toml:"host"` Port int `toml:"port"` @@ -63,7 +65,7 @@ var loadingMutex = &sync.Mutex{} // IsEnabled checks if ldap is enabled func IsEnabled() bool { - return setting.LdapEnabled + return setting.LDAPEnabled } // ReloadConfig reads the config from the disc and caches it. @@ -76,7 +78,7 @@ func ReloadConfig() error { defer loadingMutex.Unlock() var err error - config, err = readConfig(setting.LdapConfigFile) + config, err = readConfig(setting.LDAPConfigFile) return err } @@ -96,7 +98,7 @@ func GetConfig() (*Config, error) { defer loadingMutex.Unlock() var err error - config, err = readConfig(setting.LdapConfigFile) + config, err = readConfig(setting.LDAPConfigFile) return config, err } @@ -104,15 +106,15 @@ func GetConfig() (*Config, error) { func readConfig(configFile string) (*Config, error) { result := &Config{} - logger.Info("Ldap enabled, reading config file", "file", configFile) + logger.Info("LDAP enabled, reading config file", "file", configFile) _, err := toml.DecodeFile(configFile, result) if err != nil { - return nil, errutil.Wrap("Failed to load ldap config file", err) + return nil, errutil.Wrap("Failed to load LDAP config file", err) } if len(result.Servers) == 0 { - return nil, xerrors.New("ldap enabled but no ldap servers defined in config file") + return nil, xerrors.New("LDAP enabled but no LDAP servers defined in config file") } // set default org id diff --git a/pkg/services/ldap/test.go b/pkg/services/ldap/test.go index 98d169b9a1ad..6319cddd2807 100644 --- a/pkg/services/ldap/test.go +++ b/pkg/services/ldap/test.go @@ -12,15 +12,24 @@ import ( "github.com/grafana/grafana/pkg/services/login" ) -type mockLdapConn struct { - result *ldap.SearchResult - searchCalled bool - searchAttributes 
[]string +// MockConnection struct for testing +type MockConnection struct { + SearchResult *ldap.SearchResult + SearchCalled bool + SearchAttributes []string + + AddParams *ldap.AddRequest + AddCalled bool + + DelParams *ldap.DelRequest + DelCalled bool + bindProvider func(username, password string) error unauthenticatedBindProvider func(username string) error } -func (c *mockLdapConn) Bind(username, password string) error { +// Bind mocks Bind connection function +func (c *MockConnection) Bind(username, password string) error { if c.bindProvider != nil { return c.bindProvider(username, password) } @@ -28,7 +37,8 @@ func (c *mockLdapConn) Bind(username, password string) error { return nil } -func (c *mockLdapConn) UnauthenticatedBind(username string) error { +// UnauthenticatedBind mocks UnauthenticatedBind connection function +func (c *MockConnection) UnauthenticatedBind(username string) error { if c.unauthenticatedBindProvider != nil { return c.unauthenticatedBindProvider(username) } @@ -36,23 +46,40 @@ func (c *mockLdapConn) UnauthenticatedBind(username string) error { return nil } -func (c *mockLdapConn) Close() {} +// Close mocks Close connection function +func (c *MockConnection) Close() {} + +func (c *MockConnection) setSearchResult(result *ldap.SearchResult) { + c.SearchResult = result +} + +// Search mocks Search connection function +func (c *MockConnection) Search(sr *ldap.SearchRequest) (*ldap.SearchResult, error) { + c.SearchCalled = true + c.SearchAttributes = sr.Attributes + return c.SearchResult, nil +} -func (c *mockLdapConn) setSearchResult(result *ldap.SearchResult) { - c.result = result +// Add mocks Add connection function +func (c *MockConnection) Add(request *ldap.AddRequest) error { + c.AddCalled = true + c.AddParams = request + return nil } -func (c *mockLdapConn) Search(sr *ldap.SearchRequest) (*ldap.SearchResult, error) { - c.searchCalled = true - c.searchAttributes = sr.Attributes - return c.result, nil +// Del mocks Del connection 
function +func (c *MockConnection) Del(request *ldap.DelRequest) error { + c.DelCalled = true + c.DelParams = request + return nil } -func (c *mockLdapConn) StartTLS(*tls.Config) error { +// StartTLS mocks StartTLS connection function +func (c *MockConnection) StartTLS(*tls.Config) error { return nil } -func AuthScenario(desc string, fn scenarioFunc) { +func serverScenario(desc string, fn scenarioFunc) { Convey(desc, func() { defer bus.ClearBusHandlers() @@ -64,10 +91,6 @@ func AuthScenario(desc string, fn scenarioFunc) { }, } - hookDial = func(auth *Auth) error { - return nil - } - loginService := &login.LoginService{ Bus: bus.GetBus(), } @@ -100,6 +123,18 @@ func AuthScenario(desc string, fn scenarioFunc) { return nil }) + bus.AddHandler("test", func(cmd *models.GetExternalUserInfoByLoginQuery) error { + sc.getExternalUserInfoByLoginQuery = cmd + sc.getExternalUserInfoByLoginQuery.Result = &models.ExternalUserInfo{UserId: 42, IsDisabled: false} + return nil + }) + + bus.AddHandler("test", func(cmd *models.DisableUserCommand) error { + sc.disableExternalUserCalled = true + sc.disableUserCmd = cmd + return nil + }) + bus.AddHandler("test", func(cmd *models.AddOrgUserCommand) error { sc.addOrgUserCmd = cmd return nil @@ -130,16 +165,19 @@ func AuthScenario(desc string, fn scenarioFunc) { } type scenarioContext struct { - loginUserQuery *models.LoginUserQuery - getUserByAuthInfoQuery *models.GetUserByAuthInfoQuery - getUserOrgListQuery *models.GetUserOrgListQuery - createUserCmd *models.CreateUserCommand - addOrgUserCmd *models.AddOrgUserCommand - updateOrgUserCmd *models.UpdateOrgUserCommand - removeOrgUserCmd *models.RemoveOrgUserCommand - updateUserCmd *models.UpdateUserCommand - setUsingOrgCmd *models.SetUsingOrgCommand - updateUserPermissionsCmd *models.UpdateUserPermissionsCommand + loginUserQuery *models.LoginUserQuery + getUserByAuthInfoQuery *models.GetUserByAuthInfoQuery + getExternalUserInfoByLoginQuery *models.GetExternalUserInfoByLoginQuery + 
getUserOrgListQuery *models.GetUserOrgListQuery + createUserCmd *models.CreateUserCommand + disableUserCmd *models.DisableUserCommand + addOrgUserCmd *models.AddOrgUserCommand + updateOrgUserCmd *models.UpdateOrgUserCommand + removeOrgUserCmd *models.RemoveOrgUserCommand + updateUserCmd *models.UpdateUserCommand + setUsingOrgCmd *models.SetUsingOrgCommand + updateUserPermissionsCmd *models.UpdateUserPermissionsCommand + disableExternalUserCalled bool } func (sc *scenarioContext) userQueryReturns(user *models.User) { @@ -162,4 +200,15 @@ func (sc *scenarioContext) userOrgsQueryReturns(orgs []*models.UserOrgDTO) { }) } +func (sc *scenarioContext) getExternalUserInfoByLoginQueryReturns(externalUser *models.ExternalUserInfo) { + bus.AddHandler("test", func(cmd *models.GetExternalUserInfoByLoginQuery) error { + sc.getExternalUserInfoByLoginQuery = cmd + sc.getExternalUserInfoByLoginQuery.Result = &models.ExternalUserInfo{ + UserId: externalUser.UserId, + IsDisabled: externalUser.IsDisabled, + } + return nil + }) +} + type scenarioFunc func(c *scenarioContext) diff --git a/pkg/services/login/login.go b/pkg/services/login/login.go index 791b34e0b30a..cf7b8dd92c55 100644 --- a/pkg/services/login/login.go +++ b/pkg/services/login/login.go @@ -3,7 +3,7 @@ package login import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/infra/log" - m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/services/quota" ) @@ -27,10 +27,10 @@ func (ls *LoginService) Init() error { return nil } -func (ls *LoginService) UpsertUser(cmd *m.UpsertUserCommand) error { +func (ls *LoginService) UpsertUser(cmd *models.UpsertUserCommand) error { extUser := cmd.ExternalUser - userQuery := &m.GetUserByAuthInfoQuery{ + userQuery := &models.GetUserByAuthInfoQuery{ AuthModule: extUser.AuthModule, AuthId: extUser.AuthId, UserId: 
extUser.UserId, @@ -39,7 +39,7 @@ func (ls *LoginService) UpsertUser(cmd *m.UpsertUserCommand) error { } err := bus.Dispatch(userQuery) - if err != m.ErrUserNotFound && err != nil { + if err != models.ErrUserNotFound && err != nil { return err } @@ -64,7 +64,7 @@ func (ls *LoginService) UpsertUser(cmd *m.UpsertUserCommand) error { } if extUser.AuthModule != "" { - cmd2 := &m.SetAuthInfoCommand{ + cmd2 := &models.SetAuthInfoCommand{ UserId: cmd.Result.Id, AuthModule: extUser.AuthModule, AuthId: extUser.AuthId, @@ -90,6 +90,13 @@ func (ls *LoginService) UpsertUser(cmd *m.UpsertUserCommand) error { return err } } + + if extUser.AuthModule == models.AuthModuleLDAP && userQuery.Result.IsDisabled { + // Re-enable user when it found in LDAP + if err := ls.Bus.Dispatch(&models.DisableUserCommand{UserId: cmd.Result.Id, IsDisabled: false}); err != nil { + return err + } + } } err = syncOrgRoles(cmd.Result, extUser) @@ -100,12 +107,12 @@ func (ls *LoginService) UpsertUser(cmd *m.UpsertUserCommand) error { // Sync isGrafanaAdmin permission if extUser.IsGrafanaAdmin != nil && *extUser.IsGrafanaAdmin != cmd.Result.IsAdmin { - if err := ls.Bus.Dispatch(&m.UpdateUserPermissionsCommand{UserId: cmd.Result.Id, IsGrafanaAdmin: *extUser.IsGrafanaAdmin}); err != nil { + if err := ls.Bus.Dispatch(&models.UpdateUserPermissionsCommand{UserId: cmd.Result.Id, IsGrafanaAdmin: *extUser.IsGrafanaAdmin}); err != nil { return err } } - err = ls.Bus.Dispatch(&m.SyncTeamsCommand{ + err = ls.Bus.Dispatch(&models.SyncTeamsCommand{ User: cmd.Result, ExternalUser: extUser, }) @@ -117,8 +124,8 @@ func (ls *LoginService) UpsertUser(cmd *m.UpsertUserCommand) error { return err } -func createUser(extUser *m.ExternalUserInfo) (*m.User, error) { - cmd := &m.CreateUserCommand{ +func createUser(extUser *models.ExternalUserInfo) (*models.User, error) { + cmd := &models.CreateUserCommand{ Login: extUser.Login, Email: extUser.Email, Name: extUser.Name, @@ -132,9 +139,9 @@ func createUser(extUser 
*m.ExternalUserInfo) (*m.User, error) { return &cmd.Result, nil } -func updateUser(user *m.User, extUser *m.ExternalUserInfo) error { +func updateUser(user *models.User, extUser *models.ExternalUserInfo) error { // sync user info - updateCmd := &m.UpdateUserCommand{ + updateCmd := &models.UpdateUserCommand{ UserId: user.Id, } @@ -165,8 +172,8 @@ func updateUser(user *m.User, extUser *m.ExternalUserInfo) error { return bus.Dispatch(updateCmd) } -func updateUserAuth(user *m.User, extUser *m.ExternalUserInfo) error { - updateCmd := &m.UpdateAuthInfoCommand{ +func updateUserAuth(user *models.User, extUser *models.ExternalUserInfo) error { + updateCmd := &models.UpdateAuthInfoCommand{ AuthModule: extUser.AuthModule, AuthId: extUser.AuthId, UserId: user.Id, @@ -177,13 +184,13 @@ func updateUserAuth(user *m.User, extUser *m.ExternalUserInfo) error { return bus.Dispatch(updateCmd) } -func syncOrgRoles(user *m.User, extUser *m.ExternalUserInfo) error { +func syncOrgRoles(user *models.User, extUser *models.ExternalUserInfo) error { // don't sync org roles if none are specified if len(extUser.OrgRoles) == 0 { return nil } - orgsQuery := &m.GetUserOrgListQuery{UserId: user.Id} + orgsQuery := &models.GetUserOrgListQuery{UserId: user.Id} if err := bus.Dispatch(orgsQuery); err != nil { return err } @@ -199,7 +206,7 @@ func syncOrgRoles(user *m.User, extUser *m.ExternalUserInfo) error { deleteOrgIds = append(deleteOrgIds, org.OrgId) } else if extUser.OrgRoles[org.OrgId] != org.Role { // update role - cmd := &m.UpdateOrgUserCommand{OrgId: org.OrgId, UserId: user.Id, Role: extUser.OrgRoles[org.OrgId]} + cmd := &models.UpdateOrgUserCommand{OrgId: org.OrgId, UserId: user.Id, Role: extUser.OrgRoles[org.OrgId]} if err := bus.Dispatch(cmd); err != nil { return err } @@ -213,16 +220,16 @@ func syncOrgRoles(user *m.User, extUser *m.ExternalUserInfo) error { } // add role - cmd := &m.AddOrgUserCommand{UserId: user.Id, Role: orgRole, OrgId: orgId} + cmd := &models.AddOrgUserCommand{UserId: 
user.Id, Role: orgRole, OrgId: orgId} err := bus.Dispatch(cmd) - if err != nil && err != m.ErrOrgNotFound { + if err != nil && err != models.ErrOrgNotFound { return err } } // delete any removed org roles for _, orgId := range deleteOrgIds { - cmd := &m.RemoveOrgUserCommand{OrgId: orgId, UserId: user.Id} + cmd := &models.RemoveOrgUserCommand{OrgId: orgId, UserId: user.Id} if err := bus.Dispatch(cmd); err != nil { return err } @@ -235,7 +242,7 @@ func syncOrgRoles(user *m.User, extUser *m.ExternalUserInfo) error { break } - return bus.Dispatch(&m.SetUsingOrgCommand{ + return bus.Dispatch(&models.SetUsingOrgCommand{ UserId: user.Id, OrgId: user.OrgId, }) diff --git a/pkg/services/multildap/multildap.go b/pkg/services/multildap/multildap.go new file mode 100644 index 000000000000..6c2baf1671af --- /dev/null +++ b/pkg/services/multildap/multildap.go @@ -0,0 +1,152 @@ +package multildap + +import ( + "errors" + + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/ldap" +) + +// GetConfig gets LDAP config +var GetConfig = ldap.GetConfig + +// IsEnabled checks if LDAP is enabled +var IsEnabled = ldap.IsEnabled + +// ErrInvalidCredentials is returned if username and password do not match +var ErrInvalidCredentials = ldap.ErrInvalidCredentials + +// ErrNoLDAPServers is returned when there is no LDAP servers specified +var ErrNoLDAPServers = errors.New("No LDAP servers are configured") + +// ErrDidNotFindUser if request for user is unsuccessful +var ErrDidNotFindUser = errors.New("Did not find a user") + +// IMultiLDAP is interface for MultiLDAP +type IMultiLDAP interface { + Login(query *models.LoginUserQuery) ( + *models.ExternalUserInfo, error, + ) + + Users(logins []string) ( + []*models.ExternalUserInfo, error, + ) + + User(login string) ( + *models.ExternalUserInfo, error, + ) +} + +// MultiLDAP is basic struct of LDAP authorization +type MultiLDAP struct { + configs []*ldap.ServerConfig +} + +// New creates the new 
LDAP auth +func New(configs []*ldap.ServerConfig) IMultiLDAP { + return &MultiLDAP{ + configs: configs, + } +} + +// Login tries to log in the user in multiples LDAP +func (multiples *MultiLDAP) Login(query *models.LoginUserQuery) ( + *models.ExternalUserInfo, error, +) { + if len(multiples.configs) == 0 { + return nil, ErrNoLDAPServers + } + + for _, config := range multiples.configs { + server := ldap.New(config) + + if err := server.Dial(); err != nil { + return nil, err + } + + defer server.Close() + + user, err := server.Login(query) + + if user != nil { + return user, nil + } + + // Continue if we couldn't find the user + if err == ErrInvalidCredentials { + continue + } + + if err != nil { + return nil, err + } + + return user, nil + } + + // Return invalid credentials if we couldn't find the user anywhere + return nil, ErrInvalidCredentials +} + +// User gets a user by login +func (multiples *MultiLDAP) User(login string) ( + *models.ExternalUserInfo, + error, +) { + + if len(multiples.configs) == 0 { + return nil, ErrNoLDAPServers + } + + search := []string{login} + for _, config := range multiples.configs { + server := ldap.New(config) + + if err := server.Dial(); err != nil { + return nil, err + } + + defer server.Close() + + users, err := server.Users(search) + if err != nil { + return nil, err + } + + if len(users) != 0 { + return users[0], nil + } + } + + return nil, ErrDidNotFindUser +} + +// Users gets users from multiple LDAP servers +func (multiples *MultiLDAP) Users(logins []string) ( + []*models.ExternalUserInfo, + error, +) { + var result []*models.ExternalUserInfo + + if len(multiples.configs) == 0 { + return nil, ErrNoLDAPServers + } + + for _, config := range multiples.configs { + server := ldap.New(config) + + if err := server.Dial(); err != nil { + return nil, err + } + + defer server.Close() + + users, err := server.Users(logins) + if err != nil { + return nil, err + } + result = append(result, users...) 
+ } + + return result, nil +} diff --git a/pkg/services/provisioning/notifiers/config_reader_test.go b/pkg/services/provisioning/notifiers/config_reader_test.go index e2ffb5aa75f8..e09531774252 100644 --- a/pkg/services/provisioning/notifiers/config_reader_test.go +++ b/pkg/services/provisioning/notifiers/config_reader_test.go @@ -66,6 +66,8 @@ func TestNotificationAsConfig(t *testing.T) { So(nt.Settings, ShouldResemble, map[string]interface{}{ "recipient": "XXX", "token": "xoxb", "uploadImage": true, "url": "https://slack.com", }) + So(nt.SendReminder, ShouldBeTrue) + So(nt.Frequency, ShouldEqual, "1h") nt = nts[1] So(nt.Name, ShouldEqual, "another-not-default-notification") diff --git a/pkg/services/provisioning/notifiers/testdata/test-configs/correct-properties/correct-properties.yaml b/pkg/services/provisioning/notifiers/testdata/test-configs/correct-properties/correct-properties.yaml index 1d846f64473b..5c71e0229335 100644 --- a/pkg/services/provisioning/notifiers/testdata/test-configs/correct-properties/correct-properties.yaml +++ b/pkg/services/provisioning/notifiers/testdata/test-configs/correct-properties/correct-properties.yaml @@ -3,8 +3,9 @@ notifiers: type: slack uid: notifier1 org_id: 2 - uid: "notifier1" is_default: true + send_reminder: true + frequency: 1h settings: recipient: "XXX" token: "xoxb" diff --git a/pkg/services/sqlstore/login_attempt.go b/pkg/services/sqlstore/login_attempt.go index fe77dd7e9146..a9adbca5bfed 100644 --- a/pkg/services/sqlstore/login_attempt.go +++ b/pkg/services/sqlstore/login_attempt.go @@ -43,7 +43,7 @@ func DeleteOldLoginAttempts(cmd *m.DeleteOldLoginAttemptsCommand) error { if err != nil { return err } - + // nolint: gosimple if result == nil || len(result) == 0 || result[0] == nil { return nil } diff --git a/pkg/services/sqlstore/migrations/user_mig.go b/pkg/services/sqlstore/migrations/user_mig.go index e273cb7d5424..8202dfc1cbbc 100644 --- a/pkg/services/sqlstore/migrations/user_mig.go +++ 
b/pkg/services/sqlstore/migrations/user_mig.go @@ -116,6 +116,12 @@ func addUserMigrations(mg *Migrator) { // Adds salt & rands for old users who used ldap or oauth mg.AddMigration("Add missing user data", &AddMissingUserSaltAndRandsMigration{}) + + // is_disabled indicates whether user disabled or not. Disabled user should not be able to log in. + // This field used in couple with LDAP auth to disable users removed from LDAP rather than delete it immediately. + mg.AddMigration("Add is_disabled column to user", NewAddColumnMigration(userV2, &Column{ + Name: "is_disabled", Type: DB_Bool, Nullable: false, Default: "0", + })) } type AddMissingUserSaltAndRandsMigration struct { diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index 44d0f545bfcc..675af5f02bb3 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -8,7 +8,6 @@ import ( "path" "path/filepath" "strings" - "testing" "time" "github.com/go-sql-driver/mysql" @@ -38,6 +37,11 @@ var ( const ContextSessionName = "db-session" func init() { + // This change will make xorm use an empty default schema for postgres and + // by that mimic the functionality of how it was functioning before + // xorm's changes above. 
+ xorm.DefaultPostgresSchema = "" + registry.Register(®istry.Descriptor{ Name: "SqlStore", Instance: &SqlStore{}, @@ -280,7 +284,14 @@ func (ss *SqlStore) readConfig() { ss.dbCfg.CacheMode = sec.Key("cache_mode").MustString("private") } -func InitTestDB(t *testing.T) *SqlStore { +// Interface of arguments for testing db +type ITestDB interface { + Helper() + Fatalf(format string, args ...interface{}) +} + +// InitTestDB initiliaze test DB +func InitTestDB(t ITestDB) *SqlStore { t.Helper() sqlstore := &SqlStore{} sqlstore.skipEnsureAdmin = true diff --git a/pkg/services/sqlstore/transactions.go b/pkg/services/sqlstore/transactions.go index 0ecb0938d4f7..9b744fd32884 100644 --- a/pkg/services/sqlstore/transactions.go +++ b/pkg/services/sqlstore/transactions.go @@ -12,7 +12,7 @@ import ( // WithTransactionalDbSession calls the callback with an session within a transaction func (ss *SqlStore) WithTransactionalDbSession(ctx context.Context, callback dbTransactionFunc) error { - return inTransactionWithRetryCtx(ss.engine, ctx, callback, 0) + return inTransactionWithRetryCtx(ctx, ss.engine, callback, 0) } func (ss *SqlStore) InTransaction(ctx context.Context, fn func(ctx context.Context) error) error { @@ -20,17 +20,17 @@ func (ss *SqlStore) InTransaction(ctx context.Context, fn func(ctx context.Conte } func (ss *SqlStore) inTransactionWithRetry(ctx context.Context, fn func(ctx context.Context) error, retry int) error { - return inTransactionWithRetryCtx(ss.engine, ctx, func(sess *DBSession) error { + return inTransactionWithRetryCtx(ctx, ss.engine, func(sess *DBSession) error { withValue := context.WithValue(ctx, ContextSessionName, sess) return fn(withValue) }, retry) } func inTransactionWithRetry(callback dbTransactionFunc, retry int) error { - return inTransactionWithRetryCtx(x, context.Background(), callback, retry) + return inTransactionWithRetryCtx(context.Background(), x, callback, retry) } -func inTransactionWithRetryCtx(engine *xorm.Engine, ctx context.Context, 
callback dbTransactionFunc, retry int) error { +func inTransactionWithRetryCtx(ctx context.Context, engine *xorm.Engine, callback dbTransactionFunc, retry int) error { sess, err := startSession(ctx, engine, true) if err != nil { return err @@ -40,12 +40,12 @@ func inTransactionWithRetryCtx(engine *xorm.Engine, ctx context.Context, callbac err = callback(sess) - // special handling of database locked errors for sqlite, then we can retry 3 times + // special handling of database locked errors for sqlite, then we can retry 5 times if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 { - if sqlError.Code == sqlite3.ErrLocked { + if sqlError.Code == sqlite3.ErrLocked || sqlError.Code == sqlite3.ErrBusy { sess.Rollback() time.Sleep(time.Millisecond * time.Duration(10)) - sqlog.Info("Database table locked, sleeping then retrying", "retry", retry) + sqlog.Info("Database locked, sleeping then retrying", "error", err, "retry", retry) return inTransactionWithRetry(callback, retry+1) } } @@ -73,5 +73,5 @@ func inTransaction(callback dbTransactionFunc) error { } func inTransactionCtx(ctx context.Context, callback dbTransactionFunc) error { - return inTransactionWithRetryCtx(x, ctx, callback, 0) + return inTransactionWithRetryCtx(ctx, x, callback, 0) } diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 311081af4f8e..641fc5f1344f 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -27,6 +27,8 @@ func (ss *SqlStore) addUserQueryAndCommandHandlers() { bus.AddHandler("sql", GetUserProfile) bus.AddHandler("sql", SearchUsers) bus.AddHandler("sql", GetUserOrgList) + bus.AddHandler("sql", DisableUser) + bus.AddHandler("sql", BatchDisableUsers) bus.AddHandler("sql", DeleteUser) bus.AddHandler("sql", UpdateUserPermissions) bus.AddHandler("sql", SetUserHelpFlag) @@ -326,6 +328,7 @@ func GetUserProfile(query *m.GetUserProfileQuery) error { Login: user.Login, Theme: user.Theme, IsGrafanaAdmin: user.IsAdmin, + IsDisabled: 
user.IsDisabled, OrgId: user.OrgId, } @@ -450,7 +453,7 @@ func SearchUsers(query *m.SearchUsersQuery) error { offset := query.Limit * (query.Page - 1) sess.Limit(query.Limit, offset) - sess.Cols("id", "email", "name", "login", "is_admin", "last_seen_at") + sess.Cols("id", "email", "name", "login", "is_admin", "is_disabled", "last_seen_at") if err := sess.Find(&query.Result.Users); err != nil { return err } @@ -473,6 +476,43 @@ func SearchUsers(query *m.SearchUsersQuery) error { return err } +func DisableUser(cmd *m.DisableUserCommand) error { + user := m.User{} + sess := x.Table("user") + sess.ID(cmd.UserId).Get(&user) + + user.IsDisabled = cmd.IsDisabled + sess.UseBool("is_disabled") + + _, err := sess.ID(cmd.UserId).Update(&user) + return err +} + +func BatchDisableUsers(cmd *m.BatchDisableUsersCommand) error { + return inTransaction(func(sess *DBSession) error { + userIds := cmd.UserIds + + if len(userIds) == 0 { + return nil + } + + user_id_params := strings.Repeat(",?", len(userIds)-1) + disableSQL := "UPDATE " + dialect.Quote("user") + " SET is_disabled=? WHERE Id IN (?" + user_id_params + ")" + + disableParams := []interface{}{disableSQL, cmd.IsDisabled} + for _, v := range userIds { + disableParams = append(disableParams, v) + } + + _, err := sess.Exec(disableParams...) 
+ if err != nil { + return err + } + + return nil + }) +} + func DeleteUser(cmd *m.DeleteUserCommand) error { return inTransaction(func(sess *DBSession) error { return deleteUserInTransaction(sess, cmd) diff --git a/pkg/services/sqlstore/user_auth.go b/pkg/services/sqlstore/user_auth.go index fd8ec3d057f6..19287b8a8668 100644 --- a/pkg/services/sqlstore/user_auth.go +++ b/pkg/services/sqlstore/user_auth.go @@ -5,7 +5,7 @@ import ( "time" "github.com/grafana/grafana/pkg/bus" - m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) @@ -14,17 +14,18 @@ var getTime = time.Now func init() { bus.AddHandler("sql", GetUserByAuthInfo) + bus.AddHandler("sql", GetExternalUserInfoByLogin) bus.AddHandler("sql", GetAuthInfo) bus.AddHandler("sql", SetAuthInfo) bus.AddHandler("sql", UpdateAuthInfo) bus.AddHandler("sql", DeleteAuthInfo) } -func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { - user := &m.User{} +func GetUserByAuthInfo(query *models.GetUserByAuthInfoQuery) error { + user := &models.User{} has := false var err error - authQuery := &m.GetAuthInfoQuery{} + authQuery := &models.GetAuthInfoQuery{} // Try to find the user by auth module and id first if query.AuthModule != "" && query.AuthId != "" { @@ -32,14 +33,14 @@ func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { authQuery.AuthId = query.AuthId err = GetAuthInfo(authQuery) - if err != m.ErrUserNotFound { + if err != models.ErrUserNotFound { if err != nil { return err } // if user id was specified and doesn't match the user_auth entry, remove it if query.UserId != 0 && query.UserId != authQuery.Result.UserId { - err = DeleteAuthInfo(&m.DeleteAuthInfoCommand{ + err = DeleteAuthInfo(&models.DeleteAuthInfoCommand{ UserAuth: authQuery.Result, }) if err != nil { @@ -55,7 +56,7 @@ func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { if 
!has { // if the user has been deleted then remove the entry - err = DeleteAuthInfo(&m.DeleteAuthInfoCommand{ + err = DeleteAuthInfo(&models.DeleteAuthInfoCommand{ UserAuth: authQuery.Result, }) if err != nil { @@ -78,7 +79,7 @@ func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { // If not found, try to find the user by email address if !has && query.Email != "" { - user = &m.User{Email: query.Email} + user = &models.User{Email: query.Email} has, err = x.Get(user) if err != nil { return err @@ -87,7 +88,7 @@ func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { // If not found, try to find the user by login if !has && query.Login != "" { - user = &m.User{Login: query.Login} + user = &models.User{Login: query.Login} has, err = x.Get(user) if err != nil { return err @@ -96,12 +97,12 @@ func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { // No user found if !has { - return m.ErrUserNotFound + return models.ErrUserNotFound } // create authInfo record to link accounts if authQuery.Result == nil && query.AuthModule != "" { - cmd2 := &m.SetAuthInfoCommand{ + cmd2 := &models.SetAuthInfoCommand{ UserId: user.Id, AuthModule: query.AuthModule, AuthId: query.AuthId, @@ -115,8 +116,32 @@ func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { return nil } -func GetAuthInfo(query *m.GetAuthInfoQuery) error { - userAuth := &m.UserAuth{ +func GetExternalUserInfoByLogin(query *models.GetExternalUserInfoByLoginQuery) error { + userQuery := models.GetUserByLoginQuery{LoginOrEmail: query.LoginOrEmail} + err := bus.Dispatch(&userQuery) + if err != nil { + return err + } + + authInfoQuery := &models.GetAuthInfoQuery{UserId: userQuery.Result.Id} + if err := bus.Dispatch(authInfoQuery); err != nil { + return err + } + + query.Result = &models.ExternalUserInfo{ + UserId: userQuery.Result.Id, + Login: userQuery.Result.Login, + Email: userQuery.Result.Email, + Name: userQuery.Result.Name, + IsDisabled: userQuery.Result.IsDisabled, + AuthModule: 
authInfoQuery.Result.AuthModule, + AuthId: authInfoQuery.Result.AuthId, + } + return nil +} + +func GetAuthInfo(query *models.GetAuthInfoQuery) error { + userAuth := &models.UserAuth{ UserId: query.UserId, AuthModule: query.AuthModule, AuthId: query.AuthId, @@ -126,7 +151,7 @@ func GetAuthInfo(query *m.GetAuthInfoQuery) error { return err } if !has { - return m.ErrUserNotFound + return models.ErrUserNotFound } secretAccessToken, err := decodeAndDecrypt(userAuth.OAuthAccessToken) @@ -149,9 +174,9 @@ func GetAuthInfo(query *m.GetAuthInfoQuery) error { return nil } -func SetAuthInfo(cmd *m.SetAuthInfoCommand) error { +func SetAuthInfo(cmd *models.SetAuthInfoCommand) error { return inTransaction(func(sess *DBSession) error { - authUser := &m.UserAuth{ + authUser := &models.UserAuth{ UserId: cmd.UserId, AuthModule: cmd.AuthModule, AuthId: cmd.AuthId, @@ -183,9 +208,9 @@ func SetAuthInfo(cmd *m.SetAuthInfoCommand) error { }) } -func UpdateAuthInfo(cmd *m.UpdateAuthInfoCommand) error { +func UpdateAuthInfo(cmd *models.UpdateAuthInfoCommand) error { return inTransaction(func(sess *DBSession) error { - authUser := &m.UserAuth{ + authUser := &models.UserAuth{ UserId: cmd.UserId, AuthModule: cmd.AuthModule, AuthId: cmd.AuthId, @@ -212,7 +237,7 @@ func UpdateAuthInfo(cmd *m.UpdateAuthInfoCommand) error { authUser.OAuthExpiry = cmd.OAuthToken.Expiry } - cond := &m.UserAuth{ + cond := &models.UserAuth{ UserId: cmd.UserId, AuthModule: cmd.AuthModule, } @@ -222,7 +247,7 @@ func UpdateAuthInfo(cmd *m.UpdateAuthInfoCommand) error { }) } -func DeleteAuthInfo(cmd *m.DeleteAuthInfoCommand) error { +func DeleteAuthInfo(cmd *models.DeleteAuthInfoCommand) error { return inTransaction(func(sess *DBSession) error { _, err := sess.Delete(cmd.UserAuth) return err diff --git a/pkg/services/sqlstore/user_test.go b/pkg/services/sqlstore/user_test.go index 84640687ed9f..e5807ea7bf57 100644 --- a/pkg/services/sqlstore/user_test.go +++ b/pkg/services/sqlstore/user_test.go @@ -175,6 +175,40 @@ func 
TestUserDataAccess(t *testing.T) { So(found, ShouldBeTrue) }) }) + + Convey("When batch disabling users", func() { + userIdsToDisable := []int64{} + for i := 0; i < 3; i++ { + userIdsToDisable = append(userIdsToDisable, users[i].Id) + } + disableCmd := m.BatchDisableUsersCommand{UserIds: userIdsToDisable, IsDisabled: true} + + err = BatchDisableUsers(&disableCmd) + So(err, ShouldBeNil) + + Convey("Should disable all provided users", func() { + query := m.SearchUsersQuery{} + err = SearchUsers(&query) + + So(query.Result.TotalCount, ShouldEqual, 5) + for _, user := range query.Result.Users { + shouldBeDisabled := false + + // Check if user id is in the userIdsToDisable list + for _, disabledUserId := range userIdsToDisable { + if user.Id == disabledUserId { + So(user.IsDisabled, ShouldBeTrue) + shouldBeDisabled = true + } + } + + // Otherwise user shouldn't be disabled + if !shouldBeDisabled { + So(user.IsDisabled, ShouldBeFalse) + } + } + }) + }) }) Convey("Given one grafana admin user", func() { diff --git a/pkg/services/user/user.go b/pkg/services/user/user.go new file mode 100644 index 000000000000..94762c811b08 --- /dev/null +++ b/pkg/services/user/user.go @@ -0,0 +1,39 @@ +package user + +import ( + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" +) + +// UpsertArgs are object for Upsert method +type UpsertArgs struct { + ReqContext *models.ReqContext + ExternalUser *models.ExternalUserInfo + SignupAllowed bool +} + +// Upsert add/update grafana user +func Upsert(args *UpsertArgs) (*models.User, error) { + query := &models.UpsertUserCommand{ + ReqContext: args.ReqContext, + ExternalUser: args.ExternalUser, + SignupAllowed: args.SignupAllowed, + } + err := bus.Dispatch(query) + if err != nil { + return nil, err + } + + return query.Result, nil +} + +// Get the users +func Get( + query *models.SearchUsersQuery, +) ([]*models.UserSearchHitDTO, error) { + if err := bus.Dispatch(query); err != nil { + return nil, err 
+ } + + return query.Result.Users, nil +} diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 192f300021b8..a6c07d232e10 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -47,10 +47,11 @@ var ( var ( // App settings. - Env = DEV - AppUrl string - AppSubUrl string - InstanceName string + Env = DEV + AppUrl string + AppSubUrl string + ServeFromSubPath bool + InstanceName string // build BuildVersion string @@ -138,7 +139,7 @@ var ( AuthProxyHeaderName string AuthProxyHeaderProperty string AuthProxyAutoSignUp bool - AuthProxyLdapSyncTtl int + AuthProxyLDAPSyncTtl int AuthProxyWhitelist string AuthProxyHeaders map[string]string @@ -165,11 +166,11 @@ var ( GoogleTagManagerId string // LDAP - LdapEnabled bool - LdapConfigFile string - LdapSyncCron string - LdapAllowSignup bool - LdapActiveSyncEnabled bool + LDAPEnabled bool + LDAPConfigFile string + LDAPSyncCron string + LDAPAllowSignup bool + LDAPActiveSyncEnabled bool // QUOTA Quota QuotaSettings @@ -205,8 +206,9 @@ type Cfg struct { Logger log.Logger // HTTP Server Settings - AppUrl string - AppSubUrl string + AppUrl string + AppSubUrl string + ServeFromSubPath bool // Paths ProvisioningPath string @@ -610,8 +612,11 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { if err != nil { return err } + ServeFromSubPath = server.Key("serve_from_sub_path").MustBool(false) + cfg.AppUrl = AppUrl cfg.AppSubUrl = AppSubUrl + cfg.ServeFromSubPath = ServeFromSubPath Protocol = HTTP protocolStr, err := valueAsString(server, "protocol", "http") @@ -805,6 +810,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { // auth proxy authProxy := iniFile.Section("auth.proxy") AuthProxyEnabled = authProxy.Key("enabled").MustBool(false) + AuthProxyHeaderName, err = valueAsString(authProxy, "header_name", "") if err != nil { return err @@ -814,7 +820,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { return err } AuthProxyAutoSignUp = authProxy.Key("auto_sign_up").MustBool(true) - 
AuthProxyLdapSyncTtl = authProxy.Key("ldap_sync_ttl").MustInt() + AuthProxyLDAPSyncTtl = authProxy.Key("ldap_sync_ttl").MustInt() AuthProxyWhitelist, err = valueAsString(authProxy, "whitelist", "") if err != nil { return err @@ -977,11 +983,11 @@ type RemoteCacheOptions struct { func (cfg *Cfg) readLDAPConfig() { ldapSec := cfg.Raw.Section("auth.ldap") - LdapConfigFile = ldapSec.Key("config_file").String() - LdapSyncCron = ldapSec.Key("sync_cron").String() - LdapEnabled = ldapSec.Key("enabled").MustBool(false) - LdapActiveSyncEnabled = ldapSec.Key("active_sync_enabled").MustBool(false) - LdapAllowSignup = ldapSec.Key("allow_sign_up").MustBool(true) + LDAPConfigFile = ldapSec.Key("config_file").String() + LDAPSyncCron = ldapSec.Key("sync_cron").String() + LDAPEnabled = ldapSec.Key("enabled").MustBool(false) + LDAPActiveSyncEnabled = ldapSec.Key("active_sync_enabled").MustBool(false) + LDAPAllowSignup = ldapSec.Key("allow_sign_up").MustBool(true) } func (cfg *Cfg) readSessionConfig() { diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go index cae8d8bfb73b..abbf6fd69b53 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -85,14 +85,17 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * azlog.Debug("AzureMonitor", "target", azureMonitorTarget) urlComponents := map[string]string{} + urlComponents["subscription"] = fmt.Sprintf("%v", query.Model.Get("subscription").MustString()) urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"]) urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"]) urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"]) ub := urlBuilder{ - ResourceGroup: urlComponents["resourceGroup"], - MetricDefinition: urlComponents["metricDefinition"], - ResourceName: urlComponents["resourceName"], + 
DefaultSubscription: query.DataSource.JsonData.Get("subscriptionId").MustString(), + Subscription: urlComponents["subscription"], + ResourceGroup: urlComponents["resourceGroup"], + MetricDefinition: urlComponents["metricDefinition"], + ResourceName: urlComponents["resourceName"], } azureURL := ub.Build() @@ -199,8 +202,7 @@ func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *mode } cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor") - subscriptionID := dsInfo.JsonData.Get("subscriptionId").MustString() - proxyPass := fmt.Sprintf("%s/subscriptions/%s", cloudName, subscriptionID) + proxyPass := fmt.Sprintf("%s/subscriptions", cloudName) u, _ := url.Parse(dsInfo.Url) u.Path = path.Join(u.Path, "render") diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go index 39cfe3f76713..94c2aef6c03a 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go @@ -9,6 +9,7 @@ import ( "time" "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" . 
"github.com/smartystreets/goconvey/convey" @@ -27,7 +28,13 @@ func TestAzureMonitorDatasource(t *testing.T) { }, Queries: []*tsdb.Query{ { + DataSource: &models.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "subscriptionId": "default-subscription", + }), + }, Model: simplejson.NewFromAny(map[string]interface{}{ + "subscription": "12345678-aaaa-bbbb-cccc-123456789abc", "azureMonitor": map[string]interface{}{ "timeGrain": "PT1M", "aggregation": "Average", @@ -49,7 +56,7 @@ func TestAzureMonitorDatasource(t *testing.T) { So(len(queries), ShouldEqual, 1) So(queries[0].RefID, ShouldEqual, "A") - So(queries[0].URL, ShouldEqual, "resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics") + So(queries[0].URL, ShouldEqual, "12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics") So(queries[0].Target, ShouldEqual, "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z") So(len(queries[0].Params), ShouldEqual, 5) So(queries[0].Params["timespan"][0], ShouldEqual, "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z") diff --git a/pkg/tsdb/azuremonitor/url-builder.go b/pkg/tsdb/azuremonitor/url-builder.go index c252048f5172..445f815e0cd6 100644 --- a/pkg/tsdb/azuremonitor/url-builder.go +++ b/pkg/tsdb/azuremonitor/url-builder.go @@ -7,22 +7,30 @@ import ( // urlBuilder builds the URL for calling the Azure Monitor API type urlBuilder struct { - ResourceGroup string - MetricDefinition string - ResourceName string + DefaultSubscription string + Subscription string + ResourceGroup string + MetricDefinition string + ResourceName string } // Build checks the metric definition property to see which form of the url // should be returned func (ub *urlBuilder) Build() string { + subscription := ub.Subscription + + if 
ub.Subscription == "" { + subscription = ub.DefaultSubscription + } + if strings.Count(ub.MetricDefinition, "/") > 1 { rn := strings.Split(ub.ResourceName, "/") lastIndex := strings.LastIndex(ub.MetricDefinition, "/") service := ub.MetricDefinition[lastIndex+1:] md := ub.MetricDefinition[0:lastIndex] - return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, md, rn[0], service, rn[1]) + return fmt.Sprintf("%s/resourceGroups/%s/providers/%s/%s/%s/%s/providers/microsoft.insights/metrics", subscription, ub.ResourceGroup, md, rn[0], service, rn[1]) } - return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, ub.MetricDefinition, ub.ResourceName) + return fmt.Sprintf("%s/resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", subscription, ub.ResourceGroup, ub.MetricDefinition, ub.ResourceName) } diff --git a/pkg/tsdb/azuremonitor/url-builder_test.go b/pkg/tsdb/azuremonitor/url-builder_test.go index 85c4f81bc835..2af784a7554a 100644 --- a/pkg/tsdb/azuremonitor/url-builder_test.go +++ b/pkg/tsdb/azuremonitor/url-builder_test.go @@ -11,35 +11,51 @@ func TestURLBuilder(t *testing.T) { Convey("when metric definition is in the short form", func() { ub := &urlBuilder{ - ResourceGroup: "rg", - MetricDefinition: "Microsoft.Compute/virtualMachines", - ResourceName: "rn", + DefaultSubscription: "default-sub", + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Compute/virtualMachines", + ResourceName: "rn", } url := ub.Build() - So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics") + So(url, ShouldEqual, "default-sub/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics") + }) + + Convey("when metric definition is in the short form and a subscription is defined", func() { + ub := &urlBuilder{ + DefaultSubscription: "default-sub", + 
Subscription: "specified-sub", + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Compute/virtualMachines", + ResourceName: "rn", + } + + url := ub.Build() + So(url, ShouldEqual, "specified-sub/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics") }) Convey("when metric definition is Microsoft.Storage/storageAccounts/blobServices", func() { ub := &urlBuilder{ - ResourceGroup: "rg", - MetricDefinition: "Microsoft.Storage/storageAccounts/blobServices", - ResourceName: "rn1/default", + DefaultSubscription: "default-sub", + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Storage/storageAccounts/blobServices", + ResourceName: "rn1/default", } url := ub.Build() - So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/blobServices/default/providers/microsoft.insights/metrics") + So(url, ShouldEqual, "default-sub/resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/blobServices/default/providers/microsoft.insights/metrics") }) Convey("when metric definition is Microsoft.Storage/storageAccounts/fileServices", func() { ub := &urlBuilder{ - ResourceGroup: "rg", - MetricDefinition: "Microsoft.Storage/storageAccounts/fileServices", - ResourceName: "rn1/default", + DefaultSubscription: "default-sub", + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Storage/storageAccounts/fileServices", + ResourceName: "rn1/default", } url := ub.Build() - So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/fileServices/default/providers/microsoft.insights/metrics") + So(url, ShouldEqual, "default-sub/resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/fileServices/default/providers/microsoft.insights/metrics") }) }) } diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index ec3f103a4ef2..c0794863efe5 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -6,6 +6,7 @@ import ( "strings" "time" + 
"github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -74,7 +75,7 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'"`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'"`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -109,7 +110,7 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index bbd928b05631..8cd83d2f9991 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -4,8 +4,8 @@ import ( "fmt" "regexp" "strings" - "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -69,7 +69,7 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'"`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'"`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -104,7 +104,7 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := 
gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 2efba13d31ac..f7a194e63cd1 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -95,7 +96,7 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -139,7 +140,7 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/tsdb/testdata/scenarios.go b/pkg/tsdb/testdata/scenarios.go index 0a521894f430..3e7aa0541a8a 100644 --- a/pkg/tsdb/testdata/scenarios.go +++ b/pkg/tsdb/testdata/scenarios.go @@ -2,6 +2,7 @@ package testdata import ( "encoding/json" + "fmt" "math" "math/rand" "strconv" @@ -261,6 +262,84 @@ func init() { return queryRes }, }) + + registerScenario(&Scenario{ + Id: "logs", + Name: "Logs", + + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + from := context.TimeRange.GetFromAsMsEpoch() + to := context.TimeRange.GetToAsMsEpoch() + lines := query.Model.Get("lines").MustInt64(10) + includeLevelColumn := 
query.Model.Get("levelColumn").MustBool(false) + + logLevelGenerator := newRandomStringProvider([]string{ + "emerg", + "alert", + "crit", + "critical", + "warn", + "warning", + "err", + "eror", + "error", + "info", + "notice", + "dbug", + "debug", + "trace", + "", + }) + containerIDGenerator := newRandomStringProvider([]string{ + "f36a9eaa6d34310686f2b851655212023a216de955cbcc764210cefa71179b1a", + "5a354a630364f3742c602f315132e16def594fe68b1e4a195b2fce628e24c97a", + }) + hostnameGenerator := newRandomStringProvider([]string{ + "srv-001", + "srv-002", + }) + + table := tsdb.Table{ + Columns: []tsdb.TableColumn{ + {Text: "time"}, + {Text: "message"}, + {Text: "container_id"}, + {Text: "hostname"}, + }, + Rows: []tsdb.RowValues{}, + } + + if includeLevelColumn { + table.Columns = append(table.Columns, tsdb.TableColumn{Text: "level"}) + } + + for i := int64(0); i < lines && to > from; i++ { + row := tsdb.RowValues{float64(to)} + + logLevel := logLevelGenerator.Next() + timeFormatted := time.Unix(to/1000, 0).Format(time.RFC3339) + lvlString := "" + if !includeLevelColumn { + lvlString = fmt.Sprintf("lvl=%s ", logLevel) + } + + row = append(row, fmt.Sprintf("t=%s %smsg=\"Request Completed\" logger=context userId=1 orgId=1 uname=admin method=GET path=/api/datasources/proxy/152/api/prom/label status=502 remote_addr=[::1] time_ms=1 size=0 referer=\"http://localhost:3000/explore?left=%%5B%%22now-6h%%22,%%22now%%22,%%22Prometheus%%202.x%%22,%%7B%%7D,%%7B%%22ui%%22:%%5Btrue,true,true,%%22none%%22%%5D%%7D%%5D\"", timeFormatted, lvlString)) + row = append(row, containerIDGenerator.Next()) + row = append(row, hostnameGenerator.Next()) + + if includeLevelColumn { + row = append(row, logLevel) + } + + table.Rows = append(table.Rows, row) + to -= query.IntervalMs + } + + queryRes := tsdb.NewQueryResult() + queryRes.Tables = append(queryRes.Tables, &table) + return queryRes + }, + }) } func getRandomWalk(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { diff --git 
a/pkg/tsdb/testdata/utils.go b/pkg/tsdb/testdata/utils.go new file mode 100644 index 000000000000..85c02e8a296d --- /dev/null +++ b/pkg/tsdb/testdata/utils.go @@ -0,0 +1,22 @@ +package testdata + +import ( + "math/rand" + "time" +) + +type randomStringProvider struct { + r *rand.Rand + data []string +} + +func newRandomStringProvider(data []string) *randomStringProvider { + return &randomStringProvider{ + r: rand.New(rand.NewSource(time.Now().UnixNano())), + data: data, + } +} + +func (p *randomStringProvider) Next() string { + return p.data[p.r.Int31n(int32(len(p.data)))] +} diff --git a/pkg/util/strings.go b/pkg/util/strings.go index 9eaa141edbfb..9ce5d03e126c 100644 --- a/pkg/util/strings.go +++ b/pkg/util/strings.go @@ -4,6 +4,7 @@ import ( "fmt" "math" "regexp" + "strings" "time" ) @@ -66,3 +67,19 @@ func GetAgeString(t time.Time) string { return "< 1m" } + +// ToCamelCase changes kebab case, snake case or mixed strings to camel case. See unit test for examples. +func ToCamelCase(str string) string { + var finalParts []string + parts := strings.Split(str, "_") + + for _, part := range parts { + finalParts = append(finalParts, strings.Split(part, "-")...) 
+ } + + for index, part := range finalParts[1:] { + finalParts[index+1] = strings.Title(part) + } + + return strings.Join(finalParts, "") +} diff --git a/pkg/util/strings_test.go b/pkg/util/strings_test.go index 0cc1905baff8..4bc52ee75217 100644 --- a/pkg/util/strings_test.go +++ b/pkg/util/strings_test.go @@ -37,3 +37,12 @@ func TestDateAge(t *testing.T) { So(GetAgeString(time.Now().Add(-time.Hour*24*409)), ShouldEqual, "1y") }) } + +func TestToCamelCase(t *testing.T) { + Convey("ToCamelCase", t, func() { + So(ToCamelCase("kebab-case-string"), ShouldEqual, "kebabCaseString") + So(ToCamelCase("snake_case_string"), ShouldEqual, "snakeCaseString") + So(ToCamelCase("mixed-case_string"), ShouldEqual, "mixedCaseString") + So(ToCamelCase("alreadyCamelCase"), ShouldEqual, "alreadyCamelCase") + }) +} diff --git a/public/app/core/actions/application.ts b/public/app/core/actions/application.ts new file mode 100644 index 000000000000..9bde989e8ca6 --- /dev/null +++ b/public/app/core/actions/application.ts @@ -0,0 +1,3 @@ +import { noPayloadActionCreatorFactory } from 'app/core/redux'; + +export const toggleLogActions = noPayloadActionCreatorFactory('TOGGLE_LOG_ACTIONS').create(); diff --git a/public/app/core/components/PluginHelp/PluginHelp.tsx b/public/app/core/components/PluginHelp/PluginHelp.tsx index 677fb254314e..40aed4a6c0c8 100644 --- a/public/app/core/components/PluginHelp/PluginHelp.tsx +++ b/public/app/core/components/PluginHelp/PluginHelp.tsx @@ -1,7 +1,7 @@ import React, { PureComponent } from 'react'; // @ts-ignore import Remarkable from 'remarkable'; -import { getBackendSrv } from '../../services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; interface Props { plugin: { diff --git a/public/app/core/components/SharedPreferences/SharedPreferences.tsx b/public/app/core/components/SharedPreferences/SharedPreferences.tsx index 3b804ba47051..b6d19f1f8af2 100644 --- a/public/app/core/components/SharedPreferences/SharedPreferences.tsx +++ 
b/public/app/core/components/SharedPreferences/SharedPreferences.tsx @@ -1,9 +1,9 @@ import React, { PureComponent } from 'react'; import { FormLabel, Select } from '@grafana/ui'; -import { getBackendSrv, BackendSrv } from 'app/core/services/backend_srv'; import { DashboardSearchHit, DashboardSearchHitType } from 'app/types'; +import { getBackendSrv } from 'app/core/services/backend_srv'; export interface Props { resourceUri: string; @@ -25,7 +25,7 @@ const timezones = [ ]; export class SharedPreferences extends PureComponent { - backendSrv: BackendSrv = getBackendSrv(); + backendSrv = getBackendSrv(); constructor(props: Props) { super(props); diff --git a/public/app/core/components/help/help.ts b/public/app/core/components/help/help.ts index 7ef54339f497..63be172a5deb 100644 --- a/public/app/core/components/help/help.ts +++ b/public/app/core/components/help/help.ts @@ -13,8 +13,6 @@ export class HelpCtrl { { keys: ['g', 'h'], description: 'Go to Home Dashboard' }, { keys: ['g', 'p'], description: 'Go to Profile' }, { keys: ['s', 'o'], description: 'Open search' }, - { keys: ['s', 's'], description: 'Open search with starred filter' }, - { keys: ['s', 't'], description: 'Open search in tags view' }, { keys: ['esc'], description: 'Exit edit/setting views' }, ], Dashboard: [ diff --git a/public/app/core/components/search/search.ts b/public/app/core/components/search/search.ts index 8728cbb1f348..fbeb90922986 100644 --- a/public/app/core/components/search/search.ts +++ b/public/app/core/components/search/search.ts @@ -38,6 +38,10 @@ interface SelectedIndicies { folderIndex?: number; } +interface OpenSearchParams { + query?: string; +} + export class SearchCtrl { isOpen: boolean; query: SearchQuery; @@ -94,7 +98,7 @@ export class SearchCtrl { appEvents.emit('search-query'); } - openSearch(evt: any, payload: any) { + openSearch(payload: OpenSearchParams = {}) { if (this.isOpen) { this.closeSearch(); return; @@ -105,19 +109,16 @@ export class SearchCtrl { 
this.selectedIndex = -1; this.results = []; this.query = { - query: evt ? `${evt.query} ` : '', - parsedQuery: this.queryParser.parse(evt && evt.query), + query: payload.query ? `${payload.query} ` : '', + parsedQuery: this.queryParser.parse(payload.query), tags: [], starred: false, }; + this.currentSearchId = 0; this.ignoreClose = true; this.isLoading = true; - if (payload && payload.starred) { - this.query.starred = true; - } - this.$timeout(() => { this.ignoreClose = false; this.giveSearchFocus = true; diff --git a/public/app/core/logs_model.ts b/public/app/core/logs_model.ts index fd7e5c638a94..5fe95a182d07 100644 --- a/public/app/core/logs_model.ts +++ b/public/app/core/logs_model.ts @@ -13,6 +13,14 @@ import { toLegacyResponseData, FieldCache, FieldType, + getLogLevelFromKey, + LogRowModel, + LogsModel, + LogsMetaItem, + LogsMetaKind, + LogsParser, + LogLabelStatsModel, + LogsDedupStrategy, } from '@grafana/ui'; import { getThemeColor } from 'app/core/utils/colors'; import { hasAnsiCodes } from 'app/core/utils/text'; @@ -28,95 +36,12 @@ export const LogLevelColor = { [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'), }; -export interface LogSearchMatch { - start: number; - length: number; - text: string; -} - -export interface LogRowModel { - duplicates?: number; - entry: string; - hasAnsi: boolean; - labels: Labels; - logLevel: LogLevel; - raw: string; - searchWords?: string[]; - timestamp: string; // ISO with nanosec precision - timeFromNow: string; - timeEpochMs: number; - timeLocal: string; - uniqueLabels?: Labels; -} - -export interface LogLabelStatsModel { - active?: boolean; - count: number; - proportion: number; - value: string; -} - -export enum LogsMetaKind { - Number, - String, - LabelsMap, -} - -export interface LogsMetaItem { - label: string; - value: string | number | Labels; - kind: LogsMetaKind; -} - -export interface LogsModel { - hasUniqueLabels: boolean; - meta?: LogsMetaItem[]; - rows: LogRowModel[]; - series?: TimeSeries[]; -} - 
export enum LogsDedupDescription { none = 'No de-duplication', exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.', numbers = 'De-duplication of successive lines that are identical when ignoring numbers, e.g., IP addresses, latencies.', signature = 'De-duplication of successive lines that have identical punctuation and whitespace.', } - -export enum LogsDedupStrategy { - none = 'none', - exact = 'exact', - numbers = 'numbers', - signature = 'signature', -} - -export interface LogsParser { - /** - * Value-agnostic matcher for a field label. - * Used to filter rows, and first capture group contains the value. - */ - buildMatcher: (label: string) => RegExp; - - /** - * Returns all parsable substrings from a line, used for highlighting - */ - getFields: (line: string) => string[]; - - /** - * Gets the label name from a parsable substring of a line - */ - getLabelFromField: (field: string) => string; - - /** - * Gets the label value from a parsable substring of a line - */ - getValueFromField: (field: string) => string; - /** - * Function to verify if this is a valid parser for the given line. - * The parser accepts the line unless it returns undefined. 
- */ - test: (line: string) => any; -} - const LOGFMT_REGEXP = /(?:^|\s)(\w+)=("[^"]*"|\S+)/; export const LogsParsers: { [name: string]: LogsParser } = { @@ -444,7 +369,17 @@ export function processLogSeriesRow( const timeEpochMs = time.valueOf(); const timeFromNow = time.fromNow(); const timeLocal = time.format('YYYY-MM-DD HH:mm:ss'); - const logLevel = getLogLevel(message); + + let logLevel = LogLevel.unknown; + const logLevelField = fieldCache.getFieldByName('level'); + + if (logLevelField) { + logLevel = getLogLevelFromKey(row[logLevelField.index]); + } else if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) { + logLevel = getLogLevelFromKey(series.labels['level']); + } else { + logLevel = getLogLevel(message); + } const hasAnsi = hasAnsiCodes(message); const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : []; diff --git a/public/app/core/middlewares/application.ts b/public/app/core/middlewares/application.ts new file mode 100644 index 000000000000..3ca9768d626f --- /dev/null +++ b/public/app/core/middlewares/application.ts @@ -0,0 +1,27 @@ +import { Store, Dispatch } from 'redux'; +import { StoreState } from 'app/types/store'; +import { ActionOf } from '../redux/actionCreatorFactory'; +import { toggleLogActions } from '../actions/application'; + +export const toggleLogActionsMiddleware = (store: Store) => (next: Dispatch) => (action: ActionOf) => { + const isLogActionsAction = action.type === toggleLogActions.type; + if (isLogActionsAction) { + return next(action); + } + + const logActionsTrue = + window && window.location && window.location.search && window.location.search.indexOf('logActions=true') !== -1; + const logActionsFalse = + window && window.location && window.location.search && window.location.search.indexOf('logActions=false') !== -1; + const logActions = store.getState().application.logActions; + + if (logActionsTrue && !logActions) { + store.dispatch(toggleLogActions()); + } + + if 
(logActionsFalse && logActions) { + store.dispatch(toggleLogActions()); + } + + return next(action); +}; diff --git a/public/app/core/reducers/application.ts b/public/app/core/reducers/application.ts new file mode 100644 index 000000000000..458f49316191 --- /dev/null +++ b/public/app/core/reducers/application.ts @@ -0,0 +1,17 @@ +import { ApplicationState } from 'app/types/application'; +import { reducerFactory } from 'app/core/redux'; +import { toggleLogActions } from '../actions/application'; + +export const initialState: ApplicationState = { + logActions: false, +}; + +export const applicationReducer = reducerFactory(initialState) + .addMapper({ + filter: toggleLogActions, + mapper: (state): ApplicationState => ({ + ...state, + logActions: !state.logActions, + }), + }) + .create(); diff --git a/public/app/core/reducers/index.ts b/public/app/core/reducers/index.ts index 1c8670ed0d6c..cc0c950ec4a0 100644 --- a/public/app/core/reducers/index.ts +++ b/public/app/core/reducers/index.ts @@ -1,9 +1,11 @@ import { navIndexReducer as navIndex } from './navModel'; import { locationReducer as location } from './location'; import { appNotificationsReducer as appNotifications } from './appNotification'; +import { applicationReducer as application } from './application'; export default { navIndex, location, appNotifications, + application, }; diff --git a/public/app/core/services/AngularLoader.ts b/public/app/core/services/AngularLoader.ts index 817e9c9f3985..ea4487ca2967 100644 --- a/public/app/core/services/AngularLoader.ts +++ b/public/app/core/services/AngularLoader.ts @@ -2,13 +2,9 @@ import angular from 'angular'; import coreModule from 'app/core/core_module'; import _ from 'lodash'; -export interface AngularComponent { - destroy(): void; - digest(): void; - getScope(): any; -} +import { AngularComponent, AngularLoader } from '@grafana/runtime'; -export class AngularLoader { +export class AngularLoaderClass implements AngularLoader { /** @ngInject */ constructor(private 
$compile: any, private $rootScope: any) {} @@ -38,15 +34,4 @@ export class AngularLoader { } } -coreModule.service('angularLoader', AngularLoader); - -let angularLoaderInstance: AngularLoader; - -export function setAngularLoader(pl: AngularLoader) { - angularLoaderInstance = pl; -} - -// away to access it from react -export function getAngularLoader(): AngularLoader { - return angularLoaderInstance; -} +coreModule.service('angularLoader', AngularLoaderClass); diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index e14b5f57b288..0f099c93d767 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -7,8 +7,9 @@ import { DashboardModel } from 'app/features/dashboard/state/DashboardModel'; import { DashboardSearchHit } from 'app/types/search'; import { ContextSrv } from './context_srv'; import { FolderInfo, DashboardDTO } from 'app/types'; +import { BackendSrv as BackendService, getBackendSrv as getBackendService, BackendSrvRequest } from '@grafana/runtime'; -export class BackendSrv { +export class BackendSrv implements BackendService { private inFlightRequests: { [key: string]: Array> } = {}; private HTTP_REQUEST_CANCELED = -1; private noBackendCache: boolean; @@ -83,7 +84,7 @@ export class BackendSrv { throw data; } - request(options: any) { + request(options: BackendSrvRequest) { options.retry = options.retry || 0; const requestIsLocal = !options.url.match(/^http/); const firstAttempt = options.retry === 0; @@ -385,16 +386,7 @@ export class BackendSrv { coreModule.service('backendSrv', BackendSrv); -// -// Code below is to expore the service to react components -// - -let singletonInstance: BackendSrv; - -export function setBackendSrv(instance: BackendSrv) { - singletonInstance = instance; -} - +// Used for testing and things that really need BackendSrv export function getBackendSrv(): BackendSrv { - return singletonInstance; + return getBackendService() as BackendSrv; } diff 
--git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index 6fb50dd4ec29..0a4bd1c028ca 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -43,21 +43,11 @@ export class KeybindingSrv { this.bind('g h', this.goToHome); this.bind('g a', this.openAlerting); this.bind('g p', this.goToProfile); - this.bind('s s', this.openSearchStarred); this.bind('s o', this.openSearch); - this.bind('s t', this.openSearchTags); this.bind('f', this.openSearch); this.bindGlobal('esc', this.exit); } - openSearchStarred() { - appEvents.emit('show-dash-search', { starred: true }); - } - - openSearchTags() { - appEvents.emit('show-dash-search', { tagsMode: true }); - } - openSearch() { appEvents.emit('show-dash-search'); } diff --git a/public/app/core/specs/file_export.test.ts b/public/app/core/specs/file_export.test.ts index 9e2ff0a7ce16..ab254a94f2b4 100644 --- a/public/app/core/specs/file_export.test.ts +++ b/public/app/core/specs/file_export.test.ts @@ -92,6 +92,7 @@ describe('file_export', () => { [0x123, 'some string with \n in the middle', 10.01, false], [0b1011, 'some string with ; in the middle', -12.34, true], [123, 'some string with ;; in the middle', -12.34, true], + [1234, '=a bogus formula ', '-and another', '+another', '@ref'], ], }; @@ -108,7 +109,8 @@ describe('file_export', () => { '501;"some string with "" at the end""";0.01;false\r\n' + '291;"some string with \n in the middle";10.01;false\r\n' + '11;"some string with ; in the middle";-12.34;true\r\n' + - '123;"some string with ;; in the middle";-12.34;true'; + '123;"some string with ;; in the middle";-12.34;true\r\n' + + '1234;"\'=a bogus formula";"\'-and another";"\'+another";"\'@ref"'; expect(returnedText).toBe(expectedText); }); diff --git a/public/app/core/specs/logs_model.test.ts b/public/app/core/specs/logs_model.test.ts index c30a3ebfa391..c83f0ce6c1c0 100644 --- a/public/app/core/specs/logs_model.test.ts +++ 
b/public/app/core/specs/logs_model.test.ts @@ -1,15 +1,12 @@ +import { SeriesData, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/ui'; import { + dedupLogRows, calculateFieldStats, calculateLogsLabelStats, - dedupLogRows, getParser, - LogsDedupStrategy, - LogsModel, LogsParsers, seriesDataToLogsModel, - LogsMetaKind, } from '../logs_model'; -import { SeriesData, FieldType } from '@grafana/ui'; describe('dedupLogRows()', () => { test('should return rows as is when dedup is set to none', () => { @@ -463,8 +460,12 @@ describe('seriesDataToLogsModel', () => { name: 'message', type: FieldType.string, }, + { + name: 'level', + type: FieldType.string, + }, ], - rows: [['1970-01-01T00:00:01Z', 'WARN boooo']], + rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']], }, ]; const logsModel = seriesDataToLogsModel(series, 0); @@ -473,7 +474,7 @@ describe('seriesDataToLogsModel', () => { { entry: 'WARN boooo', labels: undefined, - logLevel: 'warning', + logLevel: LogLevel.debug, uniqueLabels: {}, }, ]); @@ -485,6 +486,7 @@ describe('seriesDataToLogsModel', () => { labels: { foo: 'bar', baz: '1', + level: 'dbug', }, fields: [ { @@ -503,6 +505,7 @@ describe('seriesDataToLogsModel', () => { labels: { foo: 'bar', baz: '2', + level: 'err', }, fields: [ { @@ -524,19 +527,19 @@ describe('seriesDataToLogsModel', () => { { entry: 'INFO 2', labels: { foo: 'bar', baz: '2' }, - logLevel: 'info', + logLevel: LogLevel.error, uniqueLabels: { baz: '2' }, }, { entry: 'WARN boooo', labels: { foo: 'bar', baz: '1' }, - logLevel: 'warning', + logLevel: LogLevel.debug, uniqueLabels: { baz: '1' }, }, { entry: 'INFO 1', labels: { foo: 'bar', baz: '2' }, - logLevel: 'info', + logLevel: LogLevel.error, uniqueLabels: { baz: '2' }, }, ]); diff --git a/public/app/core/time_series2.ts b/public/app/core/time_series2.ts index 05815ab7ab38..d7a57b77afc9 100644 --- a/public/app/core/time_series2.ts +++ b/public/app/core/time_series2.ts @@ -329,7 +329,7 @@ export default class 
TimeSeries { isMsResolutionNeeded() { for (let i = 0; i < this.datapoints.length; i++) { - if (this.datapoints[i][1] !== null) { + if (this.datapoints[i][1] !== null && this.datapoints[i][1] !== undefined) { const timestamp = this.datapoints[i][1].toString(); if (timestamp.length === 13 && timestamp % 1000 !== 0) { return true; diff --git a/public/app/core/utils/explore.test.ts b/public/app/core/utils/explore.test.ts index 3a0752d2a5ed..9e11fddd6299 100644 --- a/public/app/core/utils/explore.test.ts +++ b/public/app/core/utils/explore.test.ts @@ -12,8 +12,7 @@ import { } from './explore'; import { ExploreUrlState } from 'app/types/explore'; import store from 'app/core/store'; -import { LogsDedupStrategy } from 'app/core/logs_model'; -import { DataQueryError } from '@grafana/ui'; +import { DataQueryError, LogsDedupStrategy } from '@grafana/ui'; const DEFAULT_EXPLORE_STATE: ExploreUrlState = { datasource: null, diff --git a/public/app/core/utils/explore.ts b/public/app/core/utils/explore.ts index 7a19fd5a822d..4a4697d7d0a2 100644 --- a/public/app/core/utils/explore.ts +++ b/public/app/core/utils/explore.ts @@ -1,40 +1,35 @@ // Libraries import _ from 'lodash'; +import { from } from 'rxjs'; +import { toUtc } from '@grafana/ui/src/utils/moment_wrapper'; +import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; // Services & Utils import * as dateMath from '@grafana/ui/src/utils/datemath'; import { renderUrl } from 'app/core/utils/url'; import kbn from 'app/core/utils/kbn'; import store from 'app/core/store'; -import TableModel, { mergeTablesIntoModel } from 'app/core/table_model'; import { getNextRefIdChar } from './query'; // Types import { - colors, TimeRange, RawTimeRange, TimeZone, IntervalValues, DataQuery, DataSourceApi, - toSeriesData, - guessFieldTypes, TimeFragment, DataQueryError, + LogRowModel, + LogsModel, + LogsDedupStrategy, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, } from '@grafana/ui'; -import TimeSeries 
from 'app/core/time_series2'; -import { - ExploreUrlState, - HistoryItem, - QueryTransaction, - ResultType, - QueryIntervals, - QueryOptions, - ResultGetter, -} from 'app/types/explore'; -import { LogsDedupStrategy, seriesDataToLogsModel } from 'app/core/logs_model'; -import { toUtc } from '@grafana/ui/src/utils/moment_wrapper'; +import { ExploreUrlState, HistoryItem, QueryTransaction, QueryIntervals, QueryOptions } from 'app/types/explore'; +import { config } from '../config'; export const DEFAULT_RANGE = { from: 'now-6h', @@ -112,7 +107,6 @@ export async function getExploreUrl( export function buildQueryTransaction( queries: DataQuery[], - resultType: ResultType, queryOptions: QueryOptions, range: TimeRange, queryIntervals: QueryIntervals, @@ -133,7 +127,7 @@ export function buildQueryTransaction( // Using `format` here because it relates to the view panel that the request is for. // However, some datasources don't use `panelId + query.refId`, but only `panelId`. // Therefore panel id has to be unique. - const panelId = `${queryOptions.format}-${key}`; + const panelId = `${key}`; const options = { interval, @@ -152,7 +146,6 @@ export function buildQueryTransaction( return { queries, options, - resultType, scanning, id: generateKey(), // reusing for unique ID done: false, @@ -324,28 +317,6 @@ export function hasNonEmptyQuery(queries: TQuery ); } -export function calculateResultsFromQueryTransactions(result: any, resultType: ResultType, graphInterval: number) { - const flattenedResult: any[] = _.flatten(result); - const graphResult = resultType === 'Graph' && result ? result : null; - const tableResult = - resultType === 'Table' && result - ? mergeTablesIntoModel( - new TableModel(), - ...flattenedResult.filter((r: any) => r.columns && r.rows).map((r: any) => r as TableModel) - ) - : mergeTablesIntoModel(new TableModel()); - const logsResult = - resultType === 'Logs' && result - ? 
seriesDataToLogsModel(flattenedResult.map(r => guessFieldTypes(toSeriesData(r))), graphInterval) - : null; - - return { - graphResult, - tableResult, - logsResult, - }; -} - export function getIntervals(range: TimeRange, lowLimit: string, resolution: number): IntervalValues { if (!resolution) { return { interval: '1s', intervalMs: 1000 }; @@ -354,37 +325,6 @@ export function getIntervals(range: TimeRange, lowLimit: string, resolution: num return kbn.calculateInterval(range, resolution, lowLimit); } -export const makeTimeSeriesList: ResultGetter = (dataList, transaction, allTransactions) => { - // Prevent multiple Graph transactions to have the same colors - let colorIndexOffset = 0; - for (const other of allTransactions) { - // Only need to consider transactions that came before the current one - if (other === transaction) { - break; - } - // Count timeseries of previous query results - if (other.resultType === 'Graph' && other.done) { - colorIndexOffset += other.result.length; - } - } - - return dataList.map((seriesData, index: number) => { - const datapoints = seriesData.datapoints || []; - const alias = seriesData.target; - const colorIndex = (colorIndexOffset + index) % colors.length; - const color = colors[colorIndex]; - - const series = new TimeSeries({ - datapoints, - alias, - color, - unit: seriesData.unit, - }); - - return series; - }); -}; - /** * Update the query history. 
Side-effect: store history in local storage */ @@ -529,3 +469,53 @@ export const getRefIds = (value: any): string[] => { return _.uniq(_.flatten(refIds)); }; + +const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => { + if (a.timeEpochMs < b.timeEpochMs) { + return -1; + } + + if (a.timeEpochMs > b.timeEpochMs) { + return 1; + } + + return 0; +}; + +const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => { + if (a.timeEpochMs > b.timeEpochMs) { + return -1; + } + + if (a.timeEpochMs < b.timeEpochMs) { + return 1; + } + + return 0; +}; + +export const sortLogsResult = (logsResult: LogsModel, refreshInterval: string) => { + const rows = logsResult ? logsResult.rows : []; + const live = isLive(refreshInterval); + live ? rows.sort(sortInAscendingOrder) : rows.sort(sortInDescendingOrder); + const result: LogsModel = logsResult ? { ...logsResult, rows } : { hasUniqueLabels: false, rows }; + + return result; +}; + +export const convertToWebSocketUrl = (url: string) => { + const protocol = window.location.protocol === 'https:' ? 
'wss://' : 'ws://'; + let backend = `${protocol}${window.location.host}${config.appSubUrl}`; + if (backend.endsWith('/')) { + backend = backend.slice(0, backend.length - 1); + } + return `${backend}${url}`; +}; + +export const getQueryResponse = ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver +) => { + return from(datasourceInstance.query(options, observer)); +}; diff --git a/public/app/core/utils/file_export.ts b/public/app/core/utils/file_export.ts index 6d341b5582e2..ae8d0ad06dea 100644 --- a/public/app/core/utils/file_export.ts +++ b/public/app/core/utils/file_export.ts @@ -17,7 +17,11 @@ function csvEscaped(text) { return text; } - return text.split(QUOTE).join(QUOTE + QUOTE); + return text + .split(QUOTE) + .join(QUOTE + QUOTE) + .replace(/^([-+=@])/, "'$1") + .replace(/\s+$/, ''); } const domParser = new DOMParser(); diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index d747fa37f579..1a1cf6f56ba1 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -1,4 +1,4 @@ -import _ from 'lodash'; +import { has } from 'lodash'; import { getValueFormat, getValueFormatterIndex, getValueFormats, stringToJsRegex } from '@grafana/ui'; import deprecationWarning from '@grafana/ui/src/utils/deprecationWarning'; @@ -133,7 +133,7 @@ kbn.secondsToHms = seconds => { }; kbn.secondsToHhmmss = seconds => { - const strings = []; + const strings: string[] = []; const numhours = Math.floor(seconds / 3600); const numminutes = Math.floor((seconds % 3600) / 60); const numseconds = Math.floor((seconds % 3600) % 60); @@ -193,7 +193,7 @@ kbn.calculateInterval = (range, resolution, lowLimitInterval) => { kbn.describe_interval = str => { const matches = str.match(kbn.interval_regex); - if (!matches || !_.has(kbn.intervals_in_seconds, matches[2])) { + if (!matches || !has(kbn.intervals_in_seconds, matches[2])) { throw new Error('Invalid interval string, expecting a number followed by one of 
"Mwdhmsy"'); } else { return { diff --git a/public/app/features/admin/state/apis.ts b/public/app/features/admin/state/apis.ts index 05321c6e7148..1166fa4dc011 100644 --- a/public/app/features/admin/state/apis.ts +++ b/public/app/features/admin/state/apis.ts @@ -1,4 +1,4 @@ -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; export interface ServerStat { name: string; diff --git a/public/app/features/alerting/AlertTab.tsx b/public/app/features/alerting/AlertTab.tsx index c7d1a8e058d9..2f293010b907 100644 --- a/public/app/features/alerting/AlertTab.tsx +++ b/public/app/features/alerting/AlertTab.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Services & Utils -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import appEvents from 'app/core/app_events'; // Components diff --git a/public/app/features/alerting/StateHistory.tsx b/public/app/features/alerting/StateHistory.tsx index c0c804c8bd1e..2a114ec00d10 100644 --- a/public/app/features/alerting/StateHistory.tsx +++ b/public/app/features/alerting/StateHistory.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; import alertDef from './state/alertDef'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { DashboardModel } from '../dashboard/state/DashboardModel'; import appEvents from '../../core/app_events'; diff --git a/public/app/features/alerting/TestRuleResult.tsx b/public/app/features/alerting/TestRuleResult.tsx index e8f0551d7073..509ea1721cbd 100644 --- a/public/app/features/alerting/TestRuleResult.tsx +++ b/public/app/features/alerting/TestRuleResult.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; import { JSONFormatter } from 'app/core/components/JSONFormatter/JSONFormatter'; -import { getBackendSrv } from 
'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { DashboardModel } from '../dashboard/state/DashboardModel'; import { LoadingPlaceholder } from '@grafana/ui/src'; diff --git a/public/app/features/alerting/state/actions.ts b/public/app/features/alerting/state/actions.ts index 5ec84fe051d4..3ca51d521344 100644 --- a/public/app/features/alerting/state/actions.ts +++ b/public/app/features/alerting/state/actions.ts @@ -1,4 +1,4 @@ -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { AlertRuleDTO, StoreState } from 'app/types'; import { ThunkAction } from 'redux-thunk'; diff --git a/public/app/features/dashboard/components/DashNav/DashNav.tsx b/public/app/features/dashboard/components/DashNav/DashNav.tsx index d366c5f18317..8db88e9ba55e 100644 --- a/public/app/features/dashboard/components/DashNav/DashNav.tsx +++ b/public/app/features/dashboard/components/DashNav/DashNav.tsx @@ -3,7 +3,7 @@ import React, { PureComponent } from 'react'; import { connect } from 'react-redux'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { appEvents } from 'app/core/app_events'; import { PlaylistSrv } from 'app/features/playlist/playlist_srv'; @@ -60,17 +60,14 @@ export class DashNav extends PureComponent { } } - onOpenSearch = () => { - const { dashboard } = this.props; - const haveFolder = dashboard.meta.folderId > 0; - appEvents.emit( - 'show-dash-search', - haveFolder - ? { - query: 'folder:current', - } - : null - ); + onDahboardNameClick = () => { + appEvents.emit('show-dash-search'); + }; + + onFolderNameClick = () => { + appEvents.emit('show-dash-search', { + query: 'folder:current', + }); }; onClose = () => { @@ -148,11 +145,20 @@ export class DashNav extends PureComponent { return ( <> {this.isSettings &&  / Settings}
diff --git a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx index a043bc3e0daf..b724b89d9425 100644 --- a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx +++ b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; // Types import { DashboardModel } from '../../state/DashboardModel'; diff --git a/public/app/features/dashboard/components/SubMenu/SubMenu.tsx b/public/app/features/dashboard/components/SubMenu/SubMenu.tsx index bb18481d51a7..6f2a60f624ef 100644 --- a/public/app/features/dashboard/components/SubMenu/SubMenu.tsx +++ b/public/app/features/dashboard/components/SubMenu/SubMenu.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; // Types import { DashboardModel } from '../../state/DashboardModel'; diff --git a/public/app/features/dashboard/dashgrid/DashboardPanel.tsx b/public/app/features/dashboard/dashgrid/DashboardPanel.tsx index e076ee5093cd..72977e7ebc15 100644 --- a/public/app/features/dashboard/dashgrid/DashboardPanel.tsx +++ b/public/app/features/dashboard/dashgrid/DashboardPanel.tsx @@ -3,7 +3,7 @@ import React, { PureComponent } from 'react'; import classNames from 'classnames'; // Utils & Services -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { importPanelPlugin } from 'app/features/plugins/plugin_loader'; // Components diff --git 
a/public/app/features/dashboard/dashgrid/PanelChrome.tsx b/public/app/features/dashboard/dashgrid/PanelChrome.tsx index bbebbbf2f65c..485c409220c6 100644 --- a/public/app/features/dashboard/dashgrid/PanelChrome.tsx +++ b/public/app/features/dashboard/dashgrid/PanelChrome.tsx @@ -253,7 +253,8 @@ export class PanelChrome extends PureComponent { id={panel.id} data={data} timeRange={data.request ? data.request.range : this.timeSrv.timeRange()} - options={panel.getOptions(plugin.defaults)} + options={panel.getOptions()} + transparent={panel.transparent} width={width - theme.panelPadding * 2} height={innerPanelHeight} renderCounter={renderCounter} diff --git a/public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderCorner.tsx b/public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderCorner.tsx index 12f3b95ca21f..9d057d5338ea 100644 --- a/public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderCorner.tsx +++ b/public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderCorner.tsx @@ -48,7 +48,7 @@ export class PanelHeaderCorner extends Component { const remarkableInterpolatedMarkdown = new Remarkable().render(interpolatedMarkdown); return ( -
+
{panel.links && panel.links.length > 0 && (
    @@ -71,7 +71,7 @@ export class PanelHeaderCorner extends Component { renderCornerType(infoMode: InfoMode, content: string | JSX.Element) { const theme = infoMode === InfoMode.Error ? 'error' : 'info'; return ( - +
    @@ -91,7 +91,7 @@ export class PanelHeaderCorner extends Component { return this.renderCornerType(infoMode, this.props.error); } - if (infoMode === InfoMode.Info) { + if (infoMode === InfoMode.Info || infoMode === InfoMode.Links) { return this.renderCornerType(infoMode, this.getInfoContent()); } diff --git a/public/app/features/dashboard/panel_editor/GeneralTab.tsx b/public/app/features/dashboard/panel_editor/GeneralTab.tsx index 01a6e39cedba..ddbbb0d88798 100644 --- a/public/app/features/dashboard/panel_editor/GeneralTab.tsx +++ b/public/app/features/dashboard/panel_editor/GeneralTab.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { EditorTabBody } from './EditorTabBody'; import { PanelModel } from '../state/PanelModel'; diff --git a/public/app/features/dashboard/panel_editor/PanelEditor.tsx b/public/app/features/dashboard/panel_editor/PanelEditor.tsx index 722b211e4ef1..dde5f8440c17 100644 --- a/public/app/features/dashboard/panel_editor/PanelEditor.tsx +++ b/public/app/features/dashboard/panel_editor/PanelEditor.tsx @@ -9,7 +9,7 @@ import { AlertTab } from '../../alerting/AlertTab'; import config from 'app/core/config'; import { store } from 'app/store/store'; import { updateLocation } from 'app/core/actions'; -import { AngularComponent } from 'app/core/services/AngularLoader'; +import { AngularComponent } from '@grafana/runtime'; import { PanelModel } from '../state/PanelModel'; import { DashboardModel } from '../state/DashboardModel'; diff --git a/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx b/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx index 8b5f6b964f24..ca66d84ad784 100644 --- a/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx +++ b/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx @@ -5,7 +5,7 @@ import _ from 
'lodash'; // Utils & Services import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { Emitter } from 'app/core/utils/emitter'; import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; diff --git a/public/app/features/dashboard/panel_editor/VisualizationTab.tsx b/public/app/features/dashboard/panel_editor/VisualizationTab.tsx index eb7bad652424..f67532dd3980 100644 --- a/public/app/features/dashboard/panel_editor/VisualizationTab.tsx +++ b/public/app/features/dashboard/panel_editor/VisualizationTab.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { connectWithStore } from 'app/core/utils/connectWithReduxStore'; import { StoreState } from 'app/types'; import { updateLocation } from 'app/core/actions'; @@ -53,8 +53,8 @@ export class VisualizationTab extends PureComponent { } getReactPanelOptions = () => { - const { panel, plugin } = this.props; - return panel.getOptions(plugin.defaults); + const { panel } = this.props; + return panel.getOptions(); }; renderPanelOptions() { diff --git a/public/app/features/dashboard/state/PanelModel.test.ts b/public/app/features/dashboard/state/PanelModel.test.ts index f4d9e8d1a667..3dcb5f5cb066 100644 --- a/public/app/features/dashboard/state/PanelModel.test.ts +++ b/public/app/features/dashboard/state/PanelModel.test.ts @@ -7,45 +7,70 @@ describe('PanelModel', () => { describe('when creating new panel model', () => { let model; let modelJson; + let persistedOptionsMock; + const defaultOptionsMock = { + fieldOptions: { + thresholds: [ + { + color: '#F2495C', + index: 1, + value: 50, + }, + { + color: '#73BF69', + index: 0, + value: null, + }, 
+ ], + }, + showThresholds: true, + }; beforeEach(() => { + persistedOptionsMock = { + fieldOptions: { + thresholds: [ + { + color: '#F2495C', + index: 1, + value: 50, + }, + { + color: '#73BF69', + index: 0, + value: null, + }, + ], + }, + }; + modelJson = { type: 'table', showColumns: true, targets: [{ refId: 'A' }, { noRefId: true }], - options: { - fieldOptions: { - thresholds: [ - { - color: '#F2495C', - index: 1, - value: 50, - }, - { - color: '#73BF69', - index: 0, - value: null, - }, - ], - }, - }, + options: persistedOptionsMock, }; + model = new PanelModel(modelJson); - model.pluginLoaded( - getPanelPlugin( - { - id: 'table', - }, - null, // react - TablePanelCtrl // angular - ) + const panelPlugin = getPanelPlugin( + { + id: 'table', + }, + null, // react + TablePanelCtrl // angular ); + panelPlugin.setDefaults(defaultOptionsMock); + model.pluginLoaded(panelPlugin); }); it('should apply defaults', () => { expect(model.gridPos.h).toBe(3); }); + it('should apply option defaults', () => { + expect(model.getOptions().showThresholds).toBeTruthy(); + }); + it('should set model props on instance', () => { expect(model.showColumns).toBe(true); }); @@ -89,11 +114,22 @@ describe('PanelModel', () => { }); describe('when changing panel type', () => { + const newPanelPluginDefaults = { + showThresholdLabels: false, + }; + beforeEach(() => { - model.changePlugin(getPanelPlugin({ id: 'graph' })); + const newPlugin = getPanelPlugin({ id: 'graph' }); + newPlugin.setDefaults(newPanelPluginDefaults); + model.changePlugin(newPlugin); model.alert = { id: 2 }; }); + it('should apply next panel option defaults', () => { + expect(model.getOptions().showThresholdLabels).toBeFalsy(); + expect(model.getOptions().showThresholds).toBeUndefined(); + }); + it('should remove table properties but keep core props', () => { expect(model.showColumns).toBe(undefined); }); @@ -153,19 +189,5 @@ describe('PanelModel', () => { expect(panelQueryRunner).toBe(sameQueryRunner); }); }); - - 
describe('get panel options', () => { - it('should apply defaults', () => { - model.options = { existingProp: 10 }; - const options = model.getOptions({ - defaultProp: true, - existingProp: 0, - }); - - expect(options.defaultProp).toBe(true); - expect(options.existingProp).toBe(10); - expect(model.options).toBe(options); - }); - }); }); }); diff --git a/public/app/features/dashboard/state/PanelModel.ts b/public/app/features/dashboard/state/PanelModel.ts index 156ae6dfa720..044d097e86ad 100644 --- a/public/app/features/dashboard/state/PanelModel.ts +++ b/public/app/features/dashboard/state/PanelModel.ts @@ -157,8 +157,8 @@ export class PanelModel { } } - getOptions(panelDefaults: any) { - return _.defaultsDeep(this.options || {}, panelDefaults); + getOptions() { + return this.options; } updateOptions(options: object) { @@ -179,7 +179,6 @@ export class PanelModel { model[property] = _.cloneDeep(this[property]); } - return model; } @@ -247,9 +246,18 @@ export class PanelModel { }); } + private applyPluginOptionDefaults(plugin: PanelPlugin) { + if (plugin.angularConfigCtrl) { + return; + } + this.options = _.defaultsDeep({}, this.options || {}, plugin.defaults); + } + pluginLoaded(plugin: PanelPlugin) { this.plugin = plugin; + this.applyPluginOptionDefaults(plugin); + if (plugin.panel && plugin.onPanelMigration) { const version = getPluginVersion(plugin); if (version !== this.pluginVersion) { @@ -284,7 +292,7 @@ export class PanelModel { // switch this.type = pluginId; this.plugin = newPlugin; - + this.applyPluginOptionDefaults(newPlugin); // Let panel plugins inspect options from previous panel and keep any that it can use if (newPlugin.onPanelTypeChanged) { this.options = this.options || {}; diff --git a/public/app/features/dashboard/state/PanelQueryRunner.ts b/public/app/features/dashboard/state/PanelQueryRunner.ts index 00f6e3a00f02..b75a538f496e 100644 --- a/public/app/features/dashboard/state/PanelQueryRunner.ts +++ 
b/public/app/features/dashboard/state/PanelQueryRunner.ts @@ -108,9 +108,6 @@ export class PanelQueryRunner { delayStateNotification, } = options; - // filter out hidden queries & deep clone them - const clonedAndFilteredQueries = cloneDeep(queries.filter(q => !q.hide)); - const request: DataQueryRequest = { requestId: getNextRequestId(), timezone, @@ -120,7 +117,7 @@ export class PanelQueryRunner { timeInfo, interval: '', intervalMs: 0, - targets: clonedAndFilteredQueries, + targets: cloneDeep(queries), maxDataPoints: maxDataPoints || widthPixels, scopedVars: scopedVars || {}, cacheTimeout, @@ -135,6 +132,10 @@ export class PanelQueryRunner { try { const ds = await getDataSource(datasource, request.scopedVars); + if (ds.meta && !ds.meta.hiddenQueries) { + request.targets = request.targets.filter(q => !q.hide); + } + // Attach the datasource name to each query request.targets = request.targets.map(query => { if (!query.datasource) { diff --git a/public/app/features/dashboard/state/actions.ts b/public/app/features/dashboard/state/actions.ts index 50f645095755..7b01975e29d3 100644 --- a/public/app/features/dashboard/state/actions.ts +++ b/public/app/features/dashboard/state/actions.ts @@ -1,5 +1,5 @@ // Services & Utils -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { actionCreatorFactory, noPayloadActionCreatorFactory } from 'app/core/redux'; import { createSuccessNotification } from 'app/core/copy/appNotification'; diff --git a/public/app/features/datasources/NewDataSourcePage.tsx b/public/app/features/datasources/NewDataSourcePage.tsx index 6420225f2afe..bd16ba6e8840 100644 --- a/public/app/features/datasources/NewDataSourcePage.tsx +++ b/public/app/features/datasources/NewDataSourcePage.tsx @@ -6,7 +6,7 @@ import { StoreState } from 'app/types'; import { addDataSource, loadDataSourceTypes, setDataSourceTypeSearchQuery } from './state/actions'; import { getDataSourceTypes } from 
'./state/selectors'; import { FilterInput } from 'app/core/components/FilterInput/FilterInput'; -import { NavModel, DataSourcePluginMeta, List } from '@grafana/ui'; +import { NavModel, DataSourcePluginMeta, List, PluginType } from '@grafana/ui'; export interface Props { navModel: NavModel; @@ -43,6 +43,7 @@ class NewDataSourcePage extends PureComponent { loki: 90, mysql: 80, postgres: 79, + gcloud: -1, }; componentDidMount() { @@ -114,6 +115,8 @@ class NewDataSourcePage extends PureComponent { {} as DataSourceCategories ); + categories['cloud'].push(getGrafanaCloudPhantomPlugin()); + return ( <> {this.categoryInfoList.map(category => ( @@ -174,7 +177,9 @@ interface DataSourceTypeCardProps { } const DataSourceTypeCard: FC = props => { - const { plugin, onClick, onLearnMoreClick } = props; + const { plugin, onLearnMoreClick } = props; + const canSelect = plugin.id !== 'gcloud'; + const onClick = canSelect ? props.onClick : () => {}; // find first plugin info link const learnMoreLink = plugin.info.links && plugin.info.links.length > 0 ? plugin.info.links[0].url : null; @@ -188,16 +193,45 @@ const DataSourceTypeCard: FC = props => {
    {learnMoreLink && ( - - Learn more + + Learn more )} - + {canSelect && }
); }; +function getGrafanaCloudPhantomPlugin(): DataSourcePluginMeta { + return { + id: 'gcloud', + name: 'Grafana Cloud', + type: PluginType.datasource, + module: '', + baseUrl: '', + info: { + description: 'Hosted Graphite, Prometheus and Loki', + logos: { small: 'public/img/grafana_icon.svg', large: 'asd' }, + author: { name: 'Grafana Labs' }, + links: [ + { + url: 'https://grafana.com/cloud', + name: 'Learn more', + }, + ], + screenshots: [], + updated: '2019-05-10', + version: '1.0.0', + }, + }; +} + export function getNavModel(): NavModel { const main = { icon: 'gicon gicon-add-datasources', diff --git a/public/app/features/datasources/settings/DataSourceSettingsPage.test.tsx b/public/app/features/datasources/settings/DataSourceSettingsPage.test.tsx index 6f9939e882b9..af3b4d9e786f 100644 --- a/public/app/features/datasources/settings/DataSourceSettingsPage.test.tsx +++ b/public/app/features/datasources/settings/DataSourceSettingsPage.test.tsx @@ -8,6 +8,12 @@ import { setDataSourceName, setIsDefault } from '../state/actions'; const pluginMock = new DataSourcePlugin({} as DataSourceConstructor); +jest.mock('app/features/plugins/plugin_loader', () => { + return { + importDataSourcePlugin: () => Promise.resolve(pluginMock), + }; +}); + const setup = (propOverrides?: object) => { const props: Props = { navModel: {} as NavModel, diff --git a/public/app/features/datasources/settings/DataSourceSettingsPage.tsx b/public/app/features/datasources/settings/DataSourceSettingsPage.tsx index 5c31b946149c..30d4d6ea38ca 100644 --- a/public/app/features/datasources/settings/DataSourceSettingsPage.tsx +++ b/public/app/features/datasources/settings/DataSourceSettingsPage.tsx @@ -276,7 +276,7 @@ export class DataSourceSettingsPage extends PureComponent {
{testingMessage && ( -
+
{testingStatus === 'error' ? ( @@ -285,7 +285,9 @@ export class DataSourceSettingsPage extends PureComponent { )}
-
{testingMessage}
+
+ {testingMessage} +
)} diff --git a/public/app/features/datasources/settings/PluginSettings.tsx b/public/app/features/datasources/settings/PluginSettings.tsx index a7462cbb45c7..58da3cc55f49 100644 --- a/public/app/features/datasources/settings/PluginSettings.tsx +++ b/public/app/features/datasources/settings/PluginSettings.tsx @@ -8,7 +8,7 @@ import { DataQuery, DataSourceJsonData, } from '@grafana/ui'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; export type GenericDataSourcePlugin = DataSourcePlugin>; diff --git a/public/app/features/datasources/state/actions.ts b/public/app/features/datasources/state/actions.ts index a09289500693..9fb003bc0c4c 100644 --- a/public/app/features/datasources/state/actions.ts +++ b/public/app/features/datasources/state/actions.ts @@ -1,6 +1,6 @@ import { ThunkAction } from 'redux-thunk'; import config from '../../../core/config'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { LayoutMode } from 'app/core/components/LayoutSelector/LayoutSelector'; import { updateLocation, updateNavIndex, UpdateNavIndexAction } from 'app/core/actions'; diff --git a/public/app/features/explore/ElapsedTime.tsx b/public/app/features/explore/ElapsedTime.tsx index a2d941515cd6..7f649c497035 100644 --- a/public/app/features/explore/ElapsedTime.tsx +++ b/public/app/features/explore/ElapsedTime.tsx @@ -1,8 +1,20 @@ import React, { PureComponent } from 'react'; +import { toDuration } from '@grafana/ui/src/utils/moment_wrapper'; const INTERVAL = 150; -export default class ElapsedTime extends PureComponent { +export interface Props { + time?: number; + renderCount?: number; + className?: string; + humanize?: boolean; +} + +export interface State { + elapsed: number; +} + +export default class ElapsedTime extends 
PureComponent { offset: number; timer: number; @@ -21,12 +33,17 @@ export default class ElapsedTime extends PureComponent { this.setState({ elapsed }); }; - componentWillReceiveProps(nextProps) { + componentWillReceiveProps(nextProps: Props) { if (nextProps.time) { clearInterval(this.timer); } else if (this.props.time) { this.start(); } + + if (nextProps.renderCount) { + clearInterval(this.timer); + this.start(); + } } componentDidMount() { @@ -39,8 +56,16 @@ export default class ElapsedTime extends PureComponent { render() { const { elapsed } = this.state; - const { className, time } = this.props; + const { className, time, humanize } = this.props; const value = (time || elapsed) / 1000; - return {value.toFixed(1)}s; + let displayValue = `${value.toFixed(1)}s`; + if (humanize) { + const duration = toDuration(elapsed); + const hours = duration.hours(); + const minutes = duration.minutes(); + const seconds = duration.seconds(); + displayValue = hours ? `${hours}h ${minutes}m ${seconds}s` : minutes ? 
` ${minutes}m ${seconds}s` : `${seconds}s`; + } + return {displayValue}; } } diff --git a/public/app/features/explore/Explore.tsx b/public/app/features/explore/Explore.tsx index 21e047399cd6..8028e8362d79 100644 --- a/public/app/features/explore/Explore.tsx +++ b/public/app/features/explore/Explore.tsx @@ -51,11 +51,11 @@ import { } from 'app/core/utils/explore'; import { Emitter } from 'app/core/utils/emitter'; import { ExploreToolbar } from './ExploreToolbar'; -import { scanStopAction } from './state/actionTypes'; import { NoDataSourceCallToAction } from './NoDataSourceCallToAction'; import { FadeIn } from 'app/core/components/Animations/FadeIn'; import { getTimeZone } from '../profile/state/selectors'; import { ErrorContainer } from './ErrorContainer'; +import { scanStopAction } from './state/actionTypes'; interface ExploreProps { StartPage?: ComponentClass; @@ -87,6 +87,7 @@ interface ExploreProps { initialUI: ExploreUIState; queryErrors: DataQueryError[]; mode: ExploreMode; + isLive: boolean; } /** @@ -315,6 +316,7 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps) { update, queryErrors, mode, + isLive, } = item; const { datasource, queries, range: urlRange, ui } = (urlState || {}) as ExploreUrlState; @@ -340,6 +342,7 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps) { initialUI, queryErrors, mode, + isLive, }; } diff --git a/public/app/features/explore/ExploreToolbar.tsx b/public/app/features/explore/ExploreToolbar.tsx index f37a2e391ce4..9d3cb9841208 100644 --- a/public/app/features/explore/ExploreToolbar.tsx +++ b/public/app/features/explore/ExploreToolbar.tsx @@ -10,6 +10,7 @@ import { TimeZone, TimeRange, SelectOptionItem, + LoadingState, } from '@grafana/ui'; import { DataSourcePicker } from 'app/core/components/Select/DataSourcePicker'; import { StoreState } from 'app/types/store'; @@ -39,15 +40,20 @@ const createResponsiveButton = (options: { buttonClassName?: string; iconClassName?: string; iconSide?: 
IconSide; + disabled?: boolean; }) => { const defaultOptions = { iconSide: IconSide.left, }; const props = { ...options, defaultOptions }; - const { title, onClick, buttonClassName, iconClassName, splitted, iconSide } = props; + const { title, onClick, buttonClassName, iconClassName, splitted, iconSide, disabled } = props; return ( -
) : null}
- - - + {!isLive && ( + + + + )} {refreshInterval && }
@@ -227,7 +241,8 @@ export class UnConnectedExploreToolbar extends PureComponent { title: 'Run Query', onClick: this.onRunQuery, buttonClassName: 'navbar-button--secondary', - iconClassName: loading ? 'fa fa-spinner fa-fw fa-spin run-icon' : 'fa fa-level-down fa-fw run-icon', + iconClassName: + loading && !isLive ? 'fa fa-spinner fa-fw fa-spin run-icon' : 'fa fa-level-down fa-fw run-icon', iconSide: IconSide.right, })}
@@ -247,16 +262,17 @@ const mapStateToProps = (state: StoreState, { exploreId }: OwnProps): StateProps exploreDatasources, range, refreshInterval, - graphIsLoading, - logIsLoading, - tableIsLoading, + loadingState, supportedModes, mode, + isLive, } = exploreItem; const selectedDatasource = datasourceInstance ? exploreDatasources.find(datasource => datasource.name === datasourceInstance.name) : undefined; - const loading = graphIsLoading || logIsLoading || tableIsLoading; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; + const hasLiveOption = + datasourceInstance && datasourceInstance.meta && datasourceInstance.meta.streaming ? true : false; const supportedModeOptions: Array> = []; let selectedModeOption = null; @@ -296,6 +312,8 @@ const mapStateToProps = (state: StoreState, { exploreId }: OwnProps): StateProps refreshInterval, supportedModeOptions, selectedModeOption, + hasLiveOption, + isLive, }; }; diff --git a/public/app/features/explore/GraphContainer.tsx b/public/app/features/explore/GraphContainer.tsx index 7033473a33b3..6d1bb6c4e387 100644 --- a/public/app/features/explore/GraphContainer.tsx +++ b/public/app/features/explore/GraphContainer.tsx @@ -1,7 +1,7 @@ import React, { PureComponent } from 'react'; import { hot } from 'react-hot-loader'; import { connect } from 'react-redux'; -import { TimeRange, TimeZone, AbsoluteTimeRange } from '@grafana/ui'; +import { TimeRange, TimeZone, AbsoluteTimeRange, LoadingState } from '@grafana/ui'; import { ExploreId, ExploreItemState } from 'app/types/explore'; import { StoreState } from 'app/types'; @@ -46,22 +46,20 @@ export class GraphContainer extends PureComponent { const graphHeight = showingGraph && showingTable ? 
200 : 400; const timeRange = { from: range.from.valueOf(), to: range.to.valueOf() }; - if (!graphResult) { - return null; - } - return ( - - + + {graphResult && ( + + )} ); } @@ -71,8 +69,8 @@ function mapStateToProps(state: StoreState, { exploreId }) { const explore = state.explore; const { split } = explore; const item: ExploreItemState = explore[exploreId]; - const { graphResult, graphIsLoading, range, showingGraph, showingTable } = item; - const loading = graphIsLoading; + const { graphResult, loadingState, range, showingGraph, showingTable } = item; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; return { graphResult, loading, range, showingGraph, showingTable, split, timeZone: getTimeZone(state.user) }; } diff --git a/public/app/features/explore/LiveLogs.tsx b/public/app/features/explore/LiveLogs.tsx new file mode 100644 index 000000000000..040beae2aec8 --- /dev/null +++ b/public/app/features/explore/LiveLogs.tsx @@ -0,0 +1,125 @@ +import React, { PureComponent } from 'react'; +import { css, cx } from 'emotion'; +import { + Themeable, + withTheme, + GrafanaTheme, + selectThemeVariant, + LinkButton, + LogsModel, + LogRowModel, +} from '@grafana/ui'; + +import ElapsedTime from './ElapsedTime'; +import { ButtonSize, ButtonVariant } from '@grafana/ui/src/components/Button/AbstractButton'; + +const getStyles = (theme: GrafanaTheme) => ({ + logsRowsLive: css` + label: logs-rows-live; + display: flex; + flex-flow: column nowrap; + height: 65vh; + overflow-y: auto; + :first-child { + margin-top: auto !important; + } + `, + logsRowFresh: css` + label: logs-row-fresh; + color: ${theme.colors.text}; + background-color: ${selectThemeVariant({ light: theme.colors.gray6, dark: theme.colors.gray1 }, theme.type)}; + `, + logsRowOld: css` + label: logs-row-old; + opacity: 0.8; + `, + logsRowsIndicator: css` + font-size: ${theme.typography.size.md}; + padding: ${theme.spacing.sm} 0; + display: flex; + align-items: center; + 
`, +}); + +export interface Props extends Themeable { + logsResult?: LogsModel; + stopLive: () => void; +} + +export interface State { + renderCount: number; +} + +class LiveLogs extends PureComponent { + private liveEndDiv: HTMLDivElement = null; + + constructor(props: Props) { + super(props); + this.state = { renderCount: 0 }; + } + + componentDidUpdate(prevProps: Props) { + const prevRows: LogRowModel[] = prevProps.logsResult ? prevProps.logsResult.rows : []; + const rows: LogRowModel[] = this.props.logsResult ? this.props.logsResult.rows : []; + + if (prevRows !== rows) { + this.setState({ + renderCount: this.state.renderCount + 1, + }); + } + + if (this.liveEndDiv) { + this.liveEndDiv.scrollIntoView(false); + } + } + + render() { + const { theme } = this.props; + const { renderCount } = this.state; + const styles = getStyles(theme); + const rowsToRender: LogRowModel[] = this.props.logsResult ? this.props.logsResult.rows : []; + + return ( + <> +
+ {rowsToRender.map((row: any, index) => { + return ( +
+
+ {row.timeLocal} +
+
{row.entry}
+
+ ); + })} +
{ + this.liveEndDiv = element; + if (this.liveEndDiv) { + this.liveEndDiv.scrollIntoView(false); + } + }} + /> +
+
+ + Last line received: ago + + + Stop Live + +
+ + ); + } +} + +export const LiveLogsWithTheme = withTheme(LiveLogs); diff --git a/public/app/features/explore/LogLabel.tsx b/public/app/features/explore/LogLabel.tsx index b4570f10c826..1794d60b6891 100644 --- a/public/app/features/explore/LogLabel.tsx +++ b/public/app/features/explore/LogLabel.tsx @@ -1,7 +1,8 @@ import React, { PureComponent } from 'react'; -import { calculateLogsLabelStats, LogLabelStatsModel, LogRowModel } from 'app/core/logs_model'; import { LogLabelStats } from './LogLabelStats'; +import { LogRowModel, LogLabelStatsModel } from '@grafana/ui'; +import { calculateLogsLabelStats } from 'app/core/logs_model'; interface Props { getRows?: () => LogRowModel[]; diff --git a/public/app/features/explore/LogLabelStats.tsx b/public/app/features/explore/LogLabelStats.tsx index 466cc050e43b..07f33d8aff71 100644 --- a/public/app/features/explore/LogLabelStats.tsx +++ b/public/app/features/explore/LogLabelStats.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; import classnames from 'classnames'; -import { LogLabelStatsModel } from 'app/core/logs_model'; +import { LogLabelStatsModel } from '@grafana/ui'; function LogLabelStatsRow(logLabelStatsModel: LogLabelStatsModel) { const { active, count, proportion, value } = logLabelStatsModel; diff --git a/public/app/features/explore/LogLabels.tsx b/public/app/features/explore/LogLabels.tsx index f89836055c53..9dffca38a66a 100644 --- a/public/app/features/explore/LogLabels.tsx +++ b/public/app/features/explore/LogLabels.tsx @@ -1,8 +1,7 @@ import React, { PureComponent } from 'react'; -import { LogRowModel } from 'app/core/logs_model'; import { LogLabel } from './LogLabel'; -import { Labels } from '@grafana/ui'; +import { Labels, LogRowModel } from '@grafana/ui'; interface Props { getRows?: () => LogRowModel[]; diff --git a/public/app/features/explore/LogRow.tsx b/public/app/features/explore/LogRow.tsx index 8e3de04749b8..ad2aced592f4 100644 --- a/public/app/features/explore/LogRow.tsx +++ 
b/public/app/features/explore/LogRow.tsx @@ -3,11 +3,29 @@ import _ from 'lodash'; import Highlighter from 'react-highlight-words'; import classnames from 'classnames'; -import { LogRowModel, LogLabelStatsModel, LogsParser, calculateFieldStats, getParser } from 'app/core/logs_model'; +import { calculateFieldStats, getParser } from 'app/core/logs_model'; import { LogLabels } from './LogLabels'; import { findHighlightChunksInText } from 'app/core/utils/text'; import { LogLabelStats } from './LogLabelStats'; import { LogMessageAnsi } from './LogMessageAnsi'; +import { css, cx } from 'emotion'; +import { + LogRowContextProvider, + LogRowContextRows, + HasMoreContextRows, + LogRowContextQueryErrors, +} from './LogRowContextProvider'; +import { + ThemeContext, + selectThemeVariant, + GrafanaTheme, + DataQueryResponse, + LogRowModel, + LogLabelStatsModel, + LogsParser, +} from '@grafana/ui'; +import { LogRowContext } from './LogRowContext'; +import tinycolor from 'tinycolor2'; interface Props { highlighterExpressions?: string[]; @@ -18,6 +36,9 @@ interface Props { showUtc: boolean; getRows: () => LogRowModel[]; onClickLabel?: (label: string, value: string) => void; + onContextClick?: () => void; + getRowContext?: (row: LogRowModel, options?: any) => Promise; + className?: string; } interface State { @@ -29,6 +50,7 @@ interface State { parser?: LogsParser; parsedFieldHighlights: string[]; showFieldStats: boolean; + showContext: boolean; } /** @@ -44,6 +66,32 @@ const FieldHighlight = onClick => props => { ); }; +const logRowStyles = css` + position: relative; + /* z-index: 0; */ + /* outline: none; */ +`; + +const getLogRowWithContextStyles = (theme: GrafanaTheme, state: State) => { + const outlineColor = selectThemeVariant( + { + light: theme.colors.white, + dark: theme.colors.black, + }, + theme.type + ); + + return { + row: css` + z-index: 1; + outline: 9999px solid + ${tinycolor(outlineColor) + .setAlpha(0.7) + .toRgbString()}; + `, + }; +}; + /** * Renders a log line. 
* @@ -63,6 +111,7 @@ export class LogRow extends PureComponent { parser: undefined, parsedFieldHighlights: [], showFieldStats: false, + showContext: false, }; componentWillUnmount() { @@ -89,11 +138,21 @@ export class LogRow extends PureComponent { }; onMouseOverMessage = () => { + if (this.state.showContext || this.isTextSelected()) { + // When showing context we don't want to the LogRow rerender as it will mess up state of context block + // making the "after" context to be scrolled to the top, what is desired only on open + // The log row message needs to be refactored to separate component that encapsulates parsing and parsed message state + return; + } // Don't parse right away, user might move along this.mouseMessageTimer = setTimeout(this.parseMessage, 500); }; onMouseOutMessage = () => { + if (this.state.showContext) { + // See comment in onMouseOverMessage method + return; + } clearTimeout(this.mouseMessageTimer); this.setState({ parsed: false }); }; @@ -110,7 +169,39 @@ export class LogRow extends PureComponent { } }; - render() { + isTextSelected() { + if (!window.getSelection) { + return false; + } + + const selection = window.getSelection(); + + if (!selection) { + return false; + } + + return selection.anchorNode !== null && selection.isCollapsed === false; + } + + toggleContext = () => { + this.setState(state => { + return { + showContext: !state.showContext, + }; + }); + }; + + onContextToggle = (e: React.SyntheticEvent) => { + e.stopPropagation(); + this.toggleContext(); + }; + + renderLogRow( + context?: LogRowContextRows, + errors?: LogRowContextQueryErrors, + hasMoreContextRows?: HasMoreContextRows, + updateLimit?: () => void + ) { const { getRows, highlighterExpressions, @@ -129,6 +220,7 @@ export class LogRow extends PureComponent { parsed, parsedFieldHighlights, showFieldStats, + showContext, } = this.state; const { entry, hasAnsi, raw } = row; const previewHighlights = highlighterExpressions && !_.isEqual(highlighterExpressions, 
row.searchWords); @@ -139,59 +231,131 @@ export class LogRow extends PureComponent { }); return ( -
- {showDuplicates && ( -
{row.duplicates > 0 ? `${row.duplicates + 1}x` : null}
- )} -
- {showUtc && ( -
- {row.timestamp} -
- )} - {showLocalTime && ( -
- {row.timeLocal} -
- )} - {showLabels && ( -
- -
- )} -
- {parsed && ( - - )} - {!parsed && needsHighlighter && ( - - )} - {hasAnsi && !parsed && !needsHighlighter && } - {!hasAnsi && !parsed && !needsHighlighter && entry} - {showFieldStats && ( -
- + + {theme => { + const styles = this.state.showContext + ? cx(logRowStyles, getLogRowWithContextStyles(theme, this.state).row) + : logRowStyles; + return ( +
+ {showDuplicates && ( +
{row.duplicates > 0 ? `${row.duplicates + 1}x` : null}
+ )} +
+ {showUtc && ( +
+ {row.timestamp} +
+ )} + {showLocalTime && ( +
+ {row.timeLocal} +
+ )} + {showLabels && ( +
+ +
+ )} +
+
+ {showContext && context && ( + { + if (updateLimit) { + updateLimit(); + } + }} + /> + )} + + {parsed && ( + + )} + {!parsed && needsHighlighter && ( + + )} + {hasAnsi && !parsed && !needsHighlighter && } + {!hasAnsi && !parsed && !needsHighlighter && entry} + {showFieldStats && ( +
+ +
+ )} +
+ {row.searchWords && row.searchWords.length > 0 && ( + + {showContext ? 'Hide' : 'Show'} context + + )} +
+
- )} -
-
+ ); + }} + ); } + + render() { + const { showContext } = this.state; + + if (showContext) { + return ( + <> + + {({ result, errors, hasMoreContextRows, updateLimit }) => { + return <>{this.renderLogRow(result, errors, hasMoreContextRows, updateLimit)}; + }} + + + ); + } + + return this.renderLogRow(); + } } diff --git a/public/app/features/explore/LogRowContext.tsx b/public/app/features/explore/LogRowContext.tsx new file mode 100644 index 000000000000..da9c3ec48123 --- /dev/null +++ b/public/app/features/explore/LogRowContext.tsx @@ -0,0 +1,239 @@ +import React, { useContext, useRef, useState, useLayoutEffect } from 'react'; +import { + ThemeContext, + List, + GrafanaTheme, + selectThemeVariant, + ClickOutsideWrapper, + CustomScrollbar, + DataQueryError, + LogRowModel, +} from '@grafana/ui'; +import { css, cx } from 'emotion'; +import { LogRowContextRows, HasMoreContextRows, LogRowContextQueryErrors } from './LogRowContextProvider'; +import { Alert } from './Error'; + +interface LogRowContextProps { + row: LogRowModel; + context: LogRowContextRows; + errors?: LogRowContextQueryErrors; + hasMoreContextRows: HasMoreContextRows; + onOutsideClick: () => void; + onLoadMoreContext: () => void; +} + +const getLogRowContextStyles = (theme: GrafanaTheme) => { + const gradientTop = selectThemeVariant( + { + light: theme.colors.white, + dark: theme.colors.dark1, + }, + theme.type + ); + const gradientBottom = selectThemeVariant( + { + light: theme.colors.gray7, + dark: theme.colors.dark2, + }, + theme.type + ); + + const boxShadowColor = selectThemeVariant( + { + light: theme.colors.gray5, + dark: theme.colors.black, + }, + theme.type + ); + const borderColor = selectThemeVariant( + { + light: theme.colors.gray5, + dark: theme.colors.dark9, + }, + theme.type + ); + + return { + commonStyles: css` + position: absolute; + width: calc(100% + 20px); + left: -10px; + height: 250px; + z-index: 2; + overflow: hidden; + background: ${theme.colors.pageBg}; + background: 
linear-gradient(180deg, ${gradientTop} 0%, ${gradientBottom} 104.25%); + box-shadow: 0px 2px 4px ${boxShadowColor}, 0px 0px 2px ${boxShadowColor}; + border: 1px solid ${borderColor}; + border-radius: ${theme.border.radius.md}; + `, + header: css` + height: 30px; + padding: 0 10px; + display: flex; + align-items: center; + background: ${borderColor}; + `, + logs: css` + height: 220px; + padding: 10px; + `, + }; +}; + +interface LogRowContextGroupHeaderProps { + row: LogRowModel; + rows: Array; + onLoadMoreContext: () => void; + shouldScrollToBottom?: boolean; + canLoadMoreRows?: boolean; +} +interface LogRowContextGroupProps extends LogRowContextGroupHeaderProps { + rows: Array; + className: string; + error?: string; +} + +const LogRowContextGroupHeader: React.FunctionComponent = ({ + row, + rows, + onLoadMoreContext, + canLoadMoreRows, +}) => { + const theme = useContext(ThemeContext); + const { header } = getLogRowContextStyles(theme); + + // Filtering out the original row from the context. + // Loki requires a rowTimestamp+1ns for the following logs to be queried. + // We don't to ns-precision calculations in Loki log row context retrieval, hence the filtering here + // Also see: https://github.com/grafana/loki/issues/597 + const logRowsToRender = rows.filter(contextRow => contextRow !== row.raw); + + return ( +
+ + Found {logRowsToRender.length} rows. + + {(rows.length >= 10 || (rows.length > 10 && rows.length % 10 !== 0)) && canLoadMoreRows && ( + onLoadMoreContext()} + > + Load 10 more + + )} +
+ ); +}; + +const LogRowContextGroup: React.FunctionComponent = ({ + row, + rows, + error, + className, + shouldScrollToBottom, + canLoadMoreRows, + onLoadMoreContext, +}) => { + const theme = useContext(ThemeContext); + const { commonStyles, logs } = getLogRowContextStyles(theme); + const [scrollTop, setScrollTop] = useState(0); + const listContainerRef = useRef(); + + useLayoutEffect(() => { + if (shouldScrollToBottom && listContainerRef.current) { + setScrollTop(listContainerRef.current.offsetHeight); + } + }); + + const headerProps = { + row, + rows, + onLoadMoreContext, + canLoadMoreRows, + }; + + return ( +
+ {/* When displaying "after" context */} + {shouldScrollToBottom && !error && } +
+ +
+ {!error && ( + { + return ( +
+ {item} +
+ ); + }} + /> + )} + {error && } +
+
+
+ {/* When displaying "before" context */} + {!shouldScrollToBottom && !error && } +
+ ); +}; + +export const LogRowContext: React.FunctionComponent = ({ + row, + context, + errors, + onOutsideClick, + onLoadMoreContext, + hasMoreContextRows, +}) => { + return ( + +
+ {context.after && ( + + )} + + {context.before && ( + + )} +
+
+ ); +}; diff --git a/public/app/features/explore/LogRowContextProvider.tsx b/public/app/features/explore/LogRowContextProvider.tsx new file mode 100644 index 000000000000..a43d982e2b48 --- /dev/null +++ b/public/app/features/explore/LogRowContextProvider.tsx @@ -0,0 +1,115 @@ +import { DataQueryResponse, DataQueryError, LogRowModel } from '@grafana/ui'; +import { useState, useEffect } from 'react'; +import flatten from 'lodash/flatten'; +import useAsync from 'react-use/lib/useAsync'; + +export interface LogRowContextRows { + before?: string[]; + after?: string[]; +} +export interface LogRowContextQueryErrors { + before?: string; + after?: string; +} + +export interface HasMoreContextRows { + before: boolean; + after: boolean; +} + +interface LogRowContextProviderProps { + row: LogRowModel; + getRowContext: (row: LogRowModel, options?: any) => Promise; + children: (props: { + result: LogRowContextRows; + errors: LogRowContextQueryErrors; + hasMoreContextRows: HasMoreContextRows; + updateLimit: () => void; + }) => JSX.Element; +} + +export const LogRowContextProvider: React.FunctionComponent = ({ + getRowContext, + row, + children, +}) => { + const [limit, setLimit] = useState(10); + const [result, setResult] = useState<{ + data: string[][]; + errors: string[]; + }>(null); + const [hasMoreContextRows, setHasMoreContextRows] = useState({ + before: true, + after: true, + }); + + const { value } = useAsync(async () => { + const promises = [ + getRowContext(row, { + limit, + }), + getRowContext(row, { + limit, + direction: 'FORWARD', + }), + ]; + + const results: Array = await Promise.all(promises.map(p => p.catch(e => e))); + + return { + data: results.map(result => { + if ((result as DataQueryResponse).data) { + return (result as DataQueryResponse).data.map(series => { + return series.rows.map(row => row[1]); + }); + } else { + return []; + } + }), + errors: results.map(result => { + if ((result as DataQueryError).message) { + return (result as 
DataQueryError).message; + } else { + return null; + } + }), + }; + }, [limit]); + + useEffect(() => { + if (value) { + setResult(currentResult => { + let hasMoreLogsBefore = true, + hasMoreLogsAfter = true; + + if (currentResult && currentResult.data[0].length === value.data[0].length) { + hasMoreLogsBefore = false; + } + + if (currentResult && currentResult.data[1].length === value.data[1].length) { + hasMoreLogsAfter = false; + } + + setHasMoreContextRows({ + before: hasMoreLogsBefore, + after: hasMoreLogsAfter, + }); + + return value; + }); + } + }, [value]); + + return children({ + result: { + before: result ? flatten(result.data[0]) : [], + after: result ? flatten(result.data[1]) : [], + }, + errors: { + before: result ? result.errors[0] : null, + after: result ? result.errors[1] : null, + }, + hasMoreContextRows, + updateLimit: () => setLimit(limit + 10), + }); +}; diff --git a/public/app/features/explore/Logs.tsx b/public/app/features/explore/Logs.tsx index 6603ac0330dc..b6ee5a7f7312 100644 --- a/public/app/features/explore/Logs.tsx +++ b/public/app/features/explore/Logs.tsx @@ -2,16 +2,26 @@ import _ from 'lodash'; import React, { PureComponent } from 'react'; import * as rangeUtil from '@grafana/ui/src/utils/rangeutil'; -import { RawTimeRange, Switch, LogLevel, TimeZone, TimeRange, AbsoluteTimeRange } from '@grafana/ui'; +import { + RawTimeRange, + Switch, + LogLevel, + TimeZone, + TimeRange, + AbsoluteTimeRange, + LogsMetaKind, + LogsModel, + LogsDedupStrategy, + LogRowModel, +} from '@grafana/ui'; import TimeSeries from 'app/core/time_series2'; -import { LogsDedupDescription, LogsDedupStrategy, LogsModel, LogsMetaKind } from 'app/core/logs_model'; - import ToggleButtonGroup, { ToggleButton } from 'app/core/components/ToggleButtonGroup/ToggleButtonGroup'; import Graph from './Graph'; import { LogLabels } from './LogLabels'; import { LogRow } from './LogRow'; +import { LogsDedupDescription } from 'app/core/logs_model'; const PREVIEW_LIMIT = 100; @@ -60,6 
+70,7 @@ interface Props { onStopScanning?: () => void; onDedupStrategyChange: (dedupStrategy: LogsDedupStrategy) => void; onToggleLogLevel: (hiddenLogLevels: Set) => void; + getRowContext?: (row: LogRowModel, options?: any) => Promise; } interface State { @@ -174,7 +185,7 @@ export default class Logs extends PureComponent { const hasLabel = hasData && dedupedData.hasUniqueLabels; const dedupCount = dedupedData.rows.reduce((sum, row) => sum + row.duplicates, 0); const showDuplicates = dedupStrategy !== LogsDedupStrategy.none && dedupCount > 0; - const meta = [...data.meta]; + const meta = data.meta ? [...data.meta] : []; if (dedupStrategy !== LogsDedupStrategy.none) { meta.push({ @@ -192,7 +203,9 @@ export default class Logs extends PureComponent { // React profiler becomes unusable if we pass all rows to all rows and their labels, using getter instead const getRows = () => processedRows; - const timeSeries = data.series.map(series => new TimeSeries(series)); + const timeSeries = data.series + ? 
data.series.map(series => new TimeSeries(series)) + : [new TimeSeries({ datapoints: [] })]; const absRange = { from: range.from.valueOf(), to: range.to.valueOf(), @@ -252,6 +265,7 @@ export default class Logs extends PureComponent { { ; width: number; changeTime: typeof changeTime; + isLive: boolean; + stopLive: typeof changeRefreshIntervalAction; } export class LogsContainer extends PureComponent { @@ -48,8 +64,9 @@ export class LogsContainer extends PureComponent { changeTime(exploreId, range); }; - onClickLogsButton = () => { - this.props.toggleLogs(this.props.exploreId, this.props.showingLogs); + onStopLive = () => { + const { exploreId } = this.props; + this.props.stopLive({ exploreId, refreshInterval: offOption.value }); }; handleDedupStrategyChange = (dedupStrategy: LogsDedupStrategy) => { @@ -64,6 +81,16 @@ export class LogsContainer extends PureComponent { }); }; + getLogRowContext = async (row: LogRowModel, options?: any) => { + const { datasourceInstance } = this.props; + + if (datasourceInstance) { + return datasourceInstance.getLogRowContext(row, options); + } + + return []; + }; + render() { const { exploreId, @@ -76,15 +103,23 @@ export class LogsContainer extends PureComponent { onStopScanning, range, timeZone, - showingLogs, scanning, scanRange, width, hiddenLogLevels, + isLive, } = this.props; + if (isLive) { + return ( + + + + ); + } + return ( - + { scanRange={scanRange} width={width} hiddenLogLevels={hiddenLogLevels} + getRowContext={this.getLogRowContext} /> ); @@ -113,9 +149,18 @@ export class LogsContainer extends PureComponent { function mapStateToProps(state: StoreState, { exploreId }) { const explore = state.explore; const item: ExploreItemState = explore[exploreId]; - const { logsHighlighterExpressions, logsResult, logIsLoading, scanning, scanRange, range } = item; - const loading = logIsLoading; - const { showingLogs, dedupStrategy } = exploreItemUIStateSelector(item); + const { + logsHighlighterExpressions, + logsResult, + 
loadingState, + scanning, + scanRange, + range, + datasourceInstance, + isLive, + } = item; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; + const { dedupStrategy } = exploreItemUIStateSelector(item); const hiddenLogLevels = new Set(item.hiddenLogLevels); const dedupedResult = deduplicatedLogsSelector(item); const timeZone = getTimeZone(state.user); @@ -126,20 +171,21 @@ function mapStateToProps(state: StoreState, { exploreId }) { logsResult, scanning, scanRange, - showingLogs, range, timeZone, dedupStrategy, hiddenLogLevels, dedupedResult, + datasourceInstance, + isLive, }; } const mapDispatchToProps = { - toggleLogs, changeDedupStrategy, toggleLogLevelAction, changeTime, + stopLive: changeRefreshIntervalAction, }; export default hot(module)( diff --git a/public/app/features/explore/Panel.tsx b/public/app/features/explore/Panel.tsx index dc75cb0ecca2..841a19dba2a6 100644 --- a/public/app/features/explore/Panel.tsx +++ b/public/app/features/explore/Panel.tsx @@ -4,18 +4,27 @@ interface Props { isOpen: boolean; label: string; loading?: boolean; - onToggle: (isOpen: boolean) => void; + collapsible?: boolean; + onToggle?: (isOpen: boolean) => void; } export default class Panel extends PureComponent { - onClickToggle = () => this.props.onToggle(!this.props.isOpen); + onClickToggle = () => { + const { onToggle, isOpen } = this.props; + if (onToggle) { + onToggle(!isOpen); + } + }; render() { - const { isOpen, loading } = this.props; + const { isOpen, loading, collapsible } = this.props; + const panelClass = collapsible + ? 'explore-panel explore-panel--collapsible panel-container' + : 'explore-panel panel-container'; const iconClass = isOpen ? 'fa fa-caret-up' : 'fa fa-caret-down'; const loaderClass = loading ? 'explore-panel__loader explore-panel__loader--active' : 'explore-panel__loader'; return ( -
+
diff --git a/public/app/features/explore/QueryEditor.tsx b/public/app/features/explore/QueryEditor.tsx index 54927b8cc91b..d29e8a0e8925 100644 --- a/public/app/features/explore/QueryEditor.tsx +++ b/public/app/features/explore/QueryEditor.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Services -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; // Types @@ -35,7 +35,7 @@ export default class QueryEditor extends PureComponent { const loader = getAngularLoader(); const template = ' '; - const target = { ...initialQuery }; + const target = { datasource: datasource.name, ...initialQuery }; const scopeProps = { ctrl: { datasource, diff --git a/public/app/features/explore/QueryRow.tsx b/public/app/features/explore/QueryRow.tsx index 2a0429dbd971..49880c11230a 100644 --- a/public/app/features/explore/QueryRow.tsx +++ b/public/app/features/explore/QueryRow.tsx @@ -20,7 +20,6 @@ import { QueryFixAction, DataSourceStatus, PanelData, - LoadingState, DataQueryError, } from '@grafana/ui'; import { HistoryItem, ExploreItemState, ExploreId } from 'app/types/explore'; @@ -180,9 +179,7 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps) range, datasourceError, graphResult, - graphIsLoading, - tableIsLoading, - logIsLoading, + loadingState, latency, queryErrors, } = item; @@ -190,15 +187,9 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps) const datasourceStatus = datasourceError ? DataSourceStatus.Disconnected : DataSourceStatus.Connected; const error = queryErrors.filter(queryError => queryError.refId === query.refId)[0]; const series = graphResult ? graphResult : []; // TODO: use SeriesData - const queryResponseState = - graphIsLoading || tableIsLoading || logIsLoading - ? LoadingState.Loading - : error - ? 
LoadingState.Error - : LoadingState.Done; const queryResponse: PanelData = { series, - state: queryResponseState, + state: loadingState, error, }; diff --git a/public/app/features/explore/TableContainer.tsx b/public/app/features/explore/TableContainer.tsx index 78f190a05cb8..ea227e78b976 100644 --- a/public/app/features/explore/TableContainer.tsx +++ b/public/app/features/explore/TableContainer.tsx @@ -9,6 +9,7 @@ import { toggleTable } from './state/actions'; import Table from './Table'; import Panel from './Panel'; import TableModel from 'app/core/table_model'; +import { LoadingState } from '@grafana/ui'; interface TableContainerProps { exploreId: ExploreId; @@ -27,13 +28,9 @@ export class TableContainer extends PureComponent { render() { const { loading, onClickCell, showingTable, tableResult } = this.props; - if (!tableResult) { - return null; - } - return ( - -
+ + {tableResult &&
} ); } @@ -42,8 +39,11 @@ export class TableContainer extends PureComponent { function mapStateToProps(state: StoreState, { exploreId }) { const explore = state.explore; const item: ExploreItemState = explore[exploreId]; - const { tableIsLoading, showingTable, tableResult } = item; - const loading = tableIsLoading; + const { loadingState, showingTable, tableResult } = item; + const loading = + tableResult && tableResult.rows.length > 0 + ? false + : loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; return { loading, showingTable, tableResult }; } diff --git a/public/app/features/explore/state/actionTypes.ts b/public/app/features/explore/state/actionTypes.ts index 225a672ae2e5..68b9ac604eb1 100644 --- a/public/app/features/explore/state/actionTypes.ts +++ b/public/app/features/explore/state/actionTypes.ts @@ -9,18 +9,23 @@ import { LogLevel, TimeRange, DataQueryError, + SeriesData, + LogsModel, + TimeSeries, + DataQueryResponseData, + LoadingState, } from '@grafana/ui/src/types'; import { ExploreId, ExploreItemState, HistoryItem, RangeScanner, - ResultType, - QueryTransaction, ExploreUIState, ExploreMode, + QueryOptions, } from 'app/types/explore'; import { actionCreatorFactory, noPayloadActionCreatorFactory, ActionOf } from 'app/core/redux/actionCreatorFactory'; +import TableModel from 'app/core/table_model'; /** Higher order actions * @@ -142,21 +147,19 @@ export interface ModifyQueriesPayload { export interface QueryFailurePayload { exploreId: ExploreId; response: DataQueryError; - resultType: ResultType; } export interface QueryStartPayload { exploreId: ExploreId; - resultType: ResultType; - rowIndex: number; - transaction: QueryTransaction; } export interface QuerySuccessPayload { exploreId: ExploreId; - result: any; - resultType: ResultType; latency: number; + loadingState: LoadingState; + graphResult: TimeSeries[]; + tableResult: TableModel; + logsResult: LogsModel; } export interface HistoryUpdatedPayload { @@ -204,10 +207,6 
@@ export interface ToggleGraphPayload { exploreId: ExploreId; } -export interface ToggleLogsPayload { - exploreId: ExploreId; -} - export interface UpdateUIStatePayload extends Partial { exploreId: ExploreId; } @@ -234,6 +233,7 @@ export interface LoadExploreDataSourcesPayload { export interface RunQueriesPayload { exploreId: ExploreId; + range: TimeRange; } export interface ResetQueryErrorPayload { @@ -241,6 +241,41 @@ export interface ResetQueryErrorPayload { refIds: string[]; } +export interface SetUrlReplacedPayload { + exploreId: ExploreId; +} + +export interface ProcessQueryErrorsPayload { + exploreId: ExploreId; + response: any; + datasourceId: string; +} + +export interface ProcessQueryResultsPayload { + exploreId: ExploreId; + latency: number; + datasourceId: string; + loadingState: LoadingState; + series?: DataQueryResponseData[]; + delta?: SeriesData[]; +} + +export interface RunQueriesBatchPayload { + exploreId: ExploreId; + queryOptions: QueryOptions; +} + +export interface LimitMessageRatePayload { + series: SeriesData[]; + exploreId: ExploreId; + datasourceId: string; +} + +export interface ChangeRangePayload { + exploreId: ExploreId; + range: TimeRange; +} + /** * Adds a query row after the row with the given index. */ @@ -336,13 +371,6 @@ export const modifyQueriesAction = actionCreatorFactory('e */ export const queryFailureAction = actionCreatorFactory('explore/QUERY_FAILURE').create(); -/** - * Start a query transaction for the given result type. - * @param exploreId Explore area - * @param transaction Query options and `done` status. - * @param resultType Associate the transaction with a result viewer, e.g., Graph - * @param rowIndex Index is used to associate latency for this transaction with a query row - */ export const queryStartAction = actionCreatorFactory('explore/QUERY_START').create(); /** @@ -395,6 +423,7 @@ export const splitCloseAction = actionCreatorFactory('e * The copy keeps all query modifications but wipes the query results. 
*/ export const splitOpenAction = actionCreatorFactory('explore/SPLIT_OPEN').create(); + export const stateSaveAction = noPayloadActionCreatorFactory('explore/STATE_SAVE').create(); /** @@ -412,11 +441,6 @@ export const toggleTableAction = actionCreatorFactory('explo */ export const toggleGraphAction = actionCreatorFactory('explore/TOGGLE_GRAPH').create(); -/** - * Expand/collapse the logs result viewer. When collapsed, log queries won't be run. - */ -export const toggleLogsAction = actionCreatorFactory('explore/TOGGLE_LOGS').create(); - /** * Updates datasource instance before datasouce loading has started */ @@ -448,6 +472,24 @@ export const historyUpdatedAction = actionCreatorFactory( export const resetQueryErrorAction = actionCreatorFactory('explore/RESET_QUERY_ERROR').create(); +export const setUrlReplacedAction = actionCreatorFactory('explore/SET_URL_REPLACED').create(); + +export const processQueryErrorsAction = actionCreatorFactory( + 'explore/PROCESS_QUERY_ERRORS' +).create(); + +export const processQueryResultsAction = actionCreatorFactory( + 'explore/PROCESS_QUERY_RESULTS' +).create(); + +export const runQueriesBatchAction = actionCreatorFactory('explore/RUN_QUERIES_BATCH').create(); + +export const limitMessageRatePayloadAction = actionCreatorFactory( + 'explore/LIMIT_MESSAGE_RATE_PAYLOAD' +).create(); + +export const changeRangeAction = actionCreatorFactory('explore/CHANGE_RANGE').create(); + export type HigherOrderAction = | ActionOf | SplitOpenAction diff --git a/public/app/features/explore/state/actions.test.ts b/public/app/features/explore/state/actions.test.ts index 0bae4dc1d958..ba096602c050 100644 --- a/public/app/features/explore/state/actions.test.ts +++ b/public/app/features/explore/state/actions.test.ts @@ -1,7 +1,6 @@ import { refreshExplore, testDatasource, loadDatasource } from './actions'; import { ExploreId, ExploreUrlState, ExploreUpdateState } from 'app/types'; import { thunkTester } from 'test/core/thunk/thunkTester'; -import { 
LogsDedupStrategy } from 'app/core/logs_model'; import { initializeExploreAction, InitializeExplorePayload, @@ -18,7 +17,7 @@ import { Emitter } from 'app/core/core'; import { ActionOf } from 'app/core/redux/actionCreatorFactory'; import { makeInitialUpdateState } from './reducers'; import { DataQuery } from '@grafana/ui/src/types/datasource'; -import { DefaultTimeZone, RawTimeRange } from '@grafana/ui'; +import { DefaultTimeZone, RawTimeRange, LogsDedupStrategy } from '@grafana/ui'; import { toUtc } from '@grafana/ui/src/utils/moment_wrapper'; jest.mock('app/features/plugins/datasource_srv', () => ({ diff --git a/public/app/features/explore/state/actions.ts b/public/app/features/explore/state/actions.ts index 310f310e6710..4f95744eb479 100644 --- a/public/app/features/explore/state/actions.ts +++ b/public/app/features/explore/state/actions.ts @@ -7,25 +7,14 @@ import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { Emitter } from 'app/core/core'; import { LAST_USED_DATASOURCE_KEY, - clearQueryKeys, ensureQueries, generateEmptyQuery, - hasNonEmptyQuery, - makeTimeSeriesList, - updateHistory, - buildQueryTransaction, - serializeStateToUrlParam, parseUrlState, getTimeRange, getTimeRangeFromUrl, generateNewKeyAndAddRefIdIfMissing, - instanceOfDataQueryError, - getRefIds, } from 'app/core/utils/explore'; -// Actions -import { updateLocation } from 'app/core/actions'; - // Types import { ThunkResult } from 'app/types'; import { @@ -34,18 +23,9 @@ import { DataQuery, DataSourceSelectItem, QueryFixAction, - TimeRange, -} from '@grafana/ui/src/types'; -import { - ExploreId, - ExploreUrlState, - RangeScanner, - ResultType, - QueryOptions, - ExploreUIState, - QueryTransaction, - ExploreMode, -} from 'app/types/explore'; + LogsDedupStrategy, +} from '@grafana/ui'; +import { ExploreId, RangeScanner, ExploreUIState, QueryTransaction, ExploreMode } from 'app/types/explore'; import { updateDatasourceInstanceAction, changeQueryAction, @@ -54,7 +34,6 @@ 
import { changeSizeAction, ChangeSizePayload, changeTimeAction, - scanStopAction, clearQueriesAction, initializeExploreAction, loadDatasourceMissingAction, @@ -63,36 +42,29 @@ import { LoadDatasourceReadyPayload, loadDatasourceReadyAction, modifyQueriesAction, - queryFailureAction, - querySuccessAction, - scanRangeAction, scanStartAction, setQueriesAction, splitCloseAction, splitOpenAction, addQueryRowAction, toggleGraphAction, - toggleLogsAction, toggleTableAction, ToggleGraphPayload, - ToggleLogsPayload, ToggleTablePayload, updateUIStateAction, - runQueriesAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, loadExploreDatasources, - queryStartAction, - historyUpdatedAction, - resetQueryErrorAction, changeModeAction, + scanStopAction, + scanRangeAction, + runQueriesAction, + stateSaveAction, } from './actionTypes'; import { ActionOf, ActionCreator } from 'app/core/redux/actionCreatorFactory'; -import { LogsDedupStrategy } from 'app/core/logs_model'; import { getTimeZone } from 'app/features/profile/state/selectors'; -import { isDateTime } from '@grafana/ui/src/utils/moment_wrapper'; -import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState'; +import { offOption } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; /** * Updates UI state and save it to the URL @@ -100,7 +72,7 @@ import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState'; const updateExploreUIState = (exploreId: ExploreId, uiStateFragment: Partial): ThunkResult => { return dispatch => { dispatch(updateUIStateAction({ exploreId, ...uiStateFragment })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; }; @@ -119,7 +91,7 @@ export function addQueryRow(exploreId: ExploreId, index: number): ThunkResult { +export function changeDatasource(exploreId: ExploreId, datasource: string): ThunkResult { return async (dispatch, getState) => { let newDataSourceInstance: DataSourceApi = null; @@ -136,8 +108,12 
@@ export function changeDatasource(exploreId: ExploreId, datasource: string, repla dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: newDataSourceInstance })); + if (getState().explore[exploreId].isLive) { + dispatch(changeRefreshInterval(exploreId, offOption.value)); + } + await dispatch(loadDatasource(exploreId, newDataSourceInstance)); - dispatch(runQueries(exploreId, false, replaceUrl)); + dispatch(runQueries(exploreId)); }; } @@ -216,7 +192,7 @@ export function clearQueries(exploreId: ExploreId): ThunkResult { return dispatch => { dispatch(scanStopAction({ exploreId })); dispatch(clearQueriesAction({ exploreId })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; } @@ -243,7 +219,7 @@ export function loadExploreDatasourcesAndSetDatasource( dispatch(loadExploreDatasources({ exploreId, exploreDatasources })); if (exploreDatasources.length >= 1) { - dispatch(changeDatasource(exploreId, datasourceName, true)); + dispatch(changeDatasource(exploreId, datasourceName)); } else { dispatch(loadDatasourceMissingAction({ exploreId })); } @@ -420,188 +396,17 @@ export function modifyQueries( }; } -export function processQueryErrors( - exploreId: ExploreId, - response: any, - resultType: ResultType, - datasourceId: string -): ThunkResult { - return (dispatch, getState) => { - const { datasourceInstance } = getState().explore[exploreId]; - - if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { - // Navigated away, queries did not matter - return; - } - - console.error(response); // To help finding problems with query syntax - - if (!instanceOfDataQueryError(response)) { - response = toDataQueryError(response); - } - - dispatch( - queryFailureAction({ - exploreId, - response, - resultType, - }) - ); - }; -} - -/** - * @param exploreId Explore area - * @param response Response from `datasourceInstance.query()` - * @param latency Duration between request and response - * @param resultType The type of result - * @param 
datasourceId Origin datasource instance, used to discard results if current datasource is different - */ -export function processQueryResults( - exploreId: ExploreId, - response: any, - latency: number, - resultType: ResultType, - datasourceId: string -): ThunkResult { - return (dispatch, getState) => { - const { datasourceInstance, scanning, scanner } = getState().explore[exploreId]; - - // If datasource already changed, results do not matter - if (datasourceInstance.meta.id !== datasourceId) { - return; - } - - const series: any[] = response.data; - const refIds = getRefIds(series); - - // Clears any previous errors that now have a successful query, important so Angular editors are updated correctly - dispatch( - resetQueryErrorAction({ - exploreId, - refIds, - }) - ); - - const resultGetter = - resultType === 'Graph' ? makeTimeSeriesList : resultType === 'Table' ? (data: any[]) => data : null; - const result = resultGetter ? resultGetter(series, null, []) : series; - - dispatch( - querySuccessAction({ - exploreId, - result, - resultType, - latency, - }) - ); - - // Keep scanning for results if this was the last scanning transaction - if (scanning) { - if (_.size(result) === 0) { - const range = scanner(); - dispatch(scanRangeAction({ exploreId, range })); - } else { - // We can stop scanning if we have a result - dispatch(scanStopAction({ exploreId })); - } - } - }; -} - /** * Main action to run queries and dispatches sub-actions based on which result viewers are active */ -export function runQueries(exploreId: ExploreId, ignoreUIState = false, replaceUrl = false): ThunkResult { +export function runQueries(exploreId: ExploreId): ThunkResult { return (dispatch, getState) => { - const { - datasourceInstance, - queries, - showingLogs, - showingGraph, - showingTable, - datasourceError, - containerWidth, - mode, - } = getState().explore[exploreId]; - - if (datasourceError) { - // let's not run any queries if data source is in a faulty state - return; - } - - if 
(!hasNonEmptyQuery(queries)) { - dispatch(clearQueriesAction({ exploreId })); - dispatch(stateSave(replaceUrl)); // Remember to save to state and update location - return; - } - - // Some datasource's query builders allow per-query interval limits, - // but we're using the datasource interval limit for now - const interval = datasourceInstance.interval; - - dispatch(runQueriesAction({ exploreId })); - // Keep table queries first since they need to return quickly - if ((ignoreUIState || showingTable) && mode === ExploreMode.Metrics) { - dispatch( - runQueriesForType(exploreId, 'Table', { - interval, - format: 'table', - instant: true, - valueWithRefId: true, - }) - ); - } - if ((ignoreUIState || showingGraph) && mode === ExploreMode.Metrics) { - dispatch( - runQueriesForType(exploreId, 'Graph', { - interval, - format: 'time_series', - instant: false, - maxDataPoints: containerWidth, - }) - ); - } - if ((ignoreUIState || showingLogs) && mode === ExploreMode.Logs) { - dispatch(runQueriesForType(exploreId, 'Logs', { interval, format: 'logs' })); - } + const { range } = getState().explore[exploreId]; - dispatch(stateSave(replaceUrl)); - }; -} + const timeZone = getTimeZone(getState().user); + const updatedRange = getTimeRange(timeZone, range.raw); -/** - * Helper action to build a query transaction object and handing the query to the datasource. - * @param exploreId Explore area - * @param resultType Result viewer that will be associated with this query result - * @param queryOptions Query options as required by the datasource's `query()` function. - * @param resultGetter Optional result extractor, e.g., if the result is a list and you only need the first element. 
- */ -function runQueriesForType( - exploreId: ExploreId, - resultType: ResultType, - queryOptions: QueryOptions -): ThunkResult { - return async (dispatch, getState) => { - const { datasourceInstance, eventBridge, queries, queryIntervals, range, scanning, history } = getState().explore[ - exploreId - ]; - const datasourceId = datasourceInstance.meta.id; - const transaction = buildQueryTransaction(queries, resultType, queryOptions, range, queryIntervals, scanning); - dispatch(queryStartAction({ exploreId, resultType, rowIndex: 0, transaction })); - try { - const now = Date.now(); - const response = await datasourceInstance.query(transaction.options); - eventBridge.emit('data-received', response.data || []); - const latency = Date.now() - now; - // Side-effect: Saving history in localstorage - const nextHistory = updateHistory(history, datasourceId, queries); - dispatch(historyUpdatedAction({ exploreId, history: nextHistory })); - dispatch(processQueryResults(exploreId, response, latency, resultType, datasourceId)); - } catch (err) { - eventBridge.emit('data-error', err); - dispatch(processQueryErrors(exploreId, err, resultType, datasourceId)); - } + dispatch(runQueriesAction({ exploreId, range: updatedRange })); }; } @@ -641,7 +446,7 @@ export function setQueries(exploreId: ExploreId, rawQueries: DataQuery[]): Thunk export function splitClose(itemId: ExploreId): ThunkResult { return dispatch => { dispatch(splitCloseAction({ itemId })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; } @@ -665,64 +470,7 @@ export function splitOpen(): ThunkResult { urlState, }; dispatch(splitOpenAction({ itemState })); - dispatch(stateSave()); - }; -} - -const toRawTimeRange = (range: TimeRange): RawTimeRange => { - let from = range.raw.from; - if (isDateTime(from)) { - from = from.valueOf().toString(10); - } - - let to = range.raw.to; - if (isDateTime(to)) { - to = to.valueOf().toString(10); - } - - return { - from, - to, - }; -}; - -/** - * Saves Explore state to URL 
using the `left` and `right` parameters. - * If split view is not active, `right` will not be set. - */ -export function stateSave(replaceUrl = false): ThunkResult { - return (dispatch, getState) => { - const { left, right, split } = getState().explore; - const urlStates: { [index: string]: string } = {}; - const leftUrlState: ExploreUrlState = { - datasource: left.datasourceInstance.name, - queries: left.queries.map(clearQueryKeys), - range: toRawTimeRange(left.range), - ui: { - showingGraph: left.showingGraph, - showingLogs: left.showingLogs, - showingTable: left.showingTable, - dedupStrategy: left.dedupStrategy, - }, - }; - urlStates.left = serializeStateToUrlParam(leftUrlState, true); - if (split) { - const rightUrlState: ExploreUrlState = { - datasource: right.datasourceInstance.name, - queries: right.queries.map(clearQueryKeys), - range: toRawTimeRange(right.range), - ui: { - showingGraph: right.showingGraph, - showingLogs: right.showingLogs, - showingTable: right.showingTable, - dedupStrategy: right.dedupStrategy, - }, - }; - - urlStates.right = serializeStateToUrlParam(rightUrlState, true); - } - - dispatch(updateLocation({ query: urlStates, replace: replaceUrl })); + dispatch(stateSaveAction()); }; } @@ -731,10 +479,7 @@ export function stateSave(replaceUrl = false): ThunkResult { * queries won't be run */ const togglePanelActionCreator = ( - actionCreator: - | ActionCreator - | ActionCreator - | ActionCreator + actionCreator: ActionCreator | ActionCreator ) => (exploreId: ExploreId, isPanelVisible: boolean): ThunkResult => { return dispatch => { let uiFragmentStateUpdate: Partial; @@ -744,9 +489,6 @@ const togglePanelActionCreator = ( case toggleGraphAction.type: uiFragmentStateUpdate = { showingGraph: !isPanelVisible }; break; - case toggleLogsAction.type: - uiFragmentStateUpdate = { showingLogs: !isPanelVisible }; - break; case toggleTableAction.type: uiFragmentStateUpdate = { showingTable: !isPanelVisible }; break; @@ -766,11 +508,6 @@ const 
togglePanelActionCreator = ( */ export const toggleGraph = togglePanelActionCreator(toggleGraphAction); -/** - * Expand/collapse the logs result viewer. When collapsed, log queries won't be run. - */ -export const toggleLogs = togglePanelActionCreator(toggleLogsAction); - /** * Expand/collapse the table result viewer. When collapsed, table queries won't be run. */ diff --git a/public/app/features/explore/state/epics/limitMessageRateEpic.ts b/public/app/features/explore/state/epics/limitMessageRateEpic.ts new file mode 100644 index 000000000000..620137069687 --- /dev/null +++ b/public/app/features/explore/state/epics/limitMessageRateEpic.ts @@ -0,0 +1,25 @@ +import { Epic } from 'redux-observable'; +import { map, throttleTime } from 'rxjs/operators'; +import { LoadingState } from '@grafana/ui'; + +import { StoreState } from 'app/types'; +import { ActionOf } from '../../../../core/redux/actionCreatorFactory'; +import { limitMessageRatePayloadAction, LimitMessageRatePayload, processQueryResultsAction } from '../actionTypes'; +import { EpicDependencies } from 'app/store/configureStore'; + +export const limitMessageRateEpic: Epic, ActionOf, StoreState, EpicDependencies> = action$ => { + return action$.ofType(limitMessageRatePayloadAction.type).pipe( + throttleTime(1), + map((action: ActionOf) => { + const { exploreId, series, datasourceId } = action.payload; + return processQueryResultsAction({ + exploreId, + latency: 0, + datasourceId, + loadingState: LoadingState.Streaming, + series: null, + delta: series, + }); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts b/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts new file mode 100644 index 000000000000..7cdaca78f7d0 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts @@ -0,0 +1,67 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import 
{ processQueryErrorsAction, queryFailureAction } from '../actionTypes'; +import { processQueryErrorsEpic } from './processQueryErrorsEpic'; + +describe('processQueryErrorsEpic', () => { + let originalConsoleError = console.error; + + beforeEach(() => { + originalConsoleError = console.error; + console.error = jest.fn(); + }); + + afterEach(() => { + console.error = originalConsoleError; + }); + + describe('when processQueryErrorsAction is dispatched', () => { + describe('and datasourceInstance is the same', () => { + describe('and the response is not cancelled', () => { + it('then queryFailureAction is dispatched', () => { + const { datasourceId, exploreId, state, eventBridge } = mockExploreState(); + const response = { message: 'Something went terribly wrong!' }; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response })) + .thenResultingActionsEqual(queryFailureAction({ exploreId, response })); + + expect(console.error).toBeCalledTimes(1); + expect(console.error).toBeCalledWith(response); + expect(eventBridge.emit).toBeCalledTimes(1); + expect(eventBridge.emit).toBeCalledWith('data-error', response); + }); + }); + + describe('and the response is cancelled', () => { + it('then no actions are dispatched', () => { + const { datasourceId, exploreId, state, eventBridge } = mockExploreState(); + const response = { cancelled: true, message: 'Something went terribly wrong!' 
}; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response })) + .thenNoActionsWhereDispatched(); + + expect(console.error).not.toBeCalled(); + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); + + describe('and datasourceInstance is not the same', () => { + describe('and the response is not cancelled', () => { + it('then no actions are dispatched', () => { + const { exploreId, state, eventBridge } = mockExploreState(); + const response = { message: 'Something went terribly wrong!' }; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId: 'other id', response })) + .thenNoActionsWhereDispatched(); + + expect(console.error).not.toBeCalled(); + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/processQueryErrorsEpic.ts b/public/app/features/explore/state/epics/processQueryErrorsEpic.ts new file mode 100644 index 000000000000..ea029186dc89 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryErrorsEpic.ts @@ -0,0 +1,40 @@ +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { NEVER, of } from 'rxjs'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { instanceOfDataQueryError } from 'app/core/utils/explore'; +import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState'; +import { processQueryErrorsAction, ProcessQueryErrorsPayload, queryFailureAction } from '../actionTypes'; + +export const processQueryErrorsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(processQueryErrorsAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, datasourceId } = action.payload; + let { response } = action.payload; + const { datasourceInstance, eventBridge } 
= state$.value.explore[exploreId]; + + if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { + // Navigated away, queries did not matter + return NEVER; + } + + // For Angular editors + eventBridge.emit('data-error', response); + + console.error(response); // To help finding problems with query syntax + + if (!instanceOfDataQueryError(response)) { + response = toDataQueryError(response); + } + + return of( + queryFailureAction({ + exploreId, + response, + }) + ); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts b/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts new file mode 100644 index 000000000000..c5da93081aa5 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts @@ -0,0 +1,119 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { + processQueryResultsAction, + resetQueryErrorAction, + querySuccessAction, + scanStopAction, + scanRangeAction, +} from '../actionTypes'; +import { SeriesData, LoadingState } from '@grafana/ui'; +import { processQueryResultsEpic } from './processQueryResultsEpic'; +import TableModel from 'app/core/table_model'; + +const testContext = () => { + const serieA: SeriesData = { + fields: [], + refId: 'A', + rows: [], + }; + const serieB: SeriesData = { + fields: [], + refId: 'B', + rows: [], + }; + const series = [serieA, serieB]; + const latency = 0; + const loadingState = LoadingState.Done; + + return { + latency, + series, + loadingState, + }; +}; + +describe('processQueryResultsEpic', () => { + describe('when processQueryResultsAction is dispatched', () => { + describe('and datasourceInstance is the same', () => { + describe('and explore is not scanning', () => { + it('then resetQueryErrorAction and querySuccessAction are dispatched and eventBridge emits correct message', () => { + const { datasourceId, exploreId, state, 
eventBridge } = mockExploreState(); + const { latency, series, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency }) + ) + .thenResultingActionsEqual( + resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }) + ); + + expect(eventBridge.emit).toBeCalledTimes(1); + expect(eventBridge.emit).toBeCalledWith('data-received', series); + }); + }); + + describe('and explore is scanning', () => { + describe('and we have a result', () => { + it('then correct actions are dispatched', () => { + const { datasourceId, exploreId, state } = mockExploreState({ scanning: true }); + const { latency, series, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency }) + ) + .thenResultingActionsEqual( + resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }), + scanStopAction({ exploreId }) + ); + }); + }); + + describe('and we do not have a result', () => { + it('then correct actions are dispatched', () => { + const { datasourceId, exploreId, state, scanner } = mockExploreState({ scanning: true }); + const { latency, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series: [], latency }) + ) + .thenResultingActionsEqual( + 
resetQueryErrorAction({ exploreId, refIds: [] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }), + scanRangeAction({ exploreId, range: scanner() }) + ); + }); + }); + }); + }); + + describe('and datasourceInstance is not the same', () => { + it('then no actions are dispatched and eventBridge does not emit message', () => { + const { exploreId, state, eventBridge } = mockExploreState(); + const { series, loadingState } = testContext(); + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId: 'other id', loadingState, series, latency: 0 }) + ) + .thenNoActionsWhereDispatched(); + + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/processQueryResultsEpic.ts b/public/app/features/explore/state/epics/processQueryResultsEpic.ts new file mode 100644 index 000000000000..76e767c36a09 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryResultsEpic.ts @@ -0,0 +1,76 @@ +import _ from 'lodash'; +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { NEVER } from 'rxjs'; +import { LoadingState } from '@grafana/ui'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { getRefIds } from 'app/core/utils/explore'; +import { + processQueryResultsAction, + ProcessQueryResultsPayload, + querySuccessAction, + scanRangeAction, + resetQueryErrorAction, + scanStopAction, +} from '../actionTypes'; +import { ResultProcessor } from '../../utils/ResultProcessor'; + +export const processQueryResultsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(processQueryResultsAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, datasourceId, latency, loadingState, series, delta } = action.payload; + const { datasourceInstance, 
scanning, scanner, eventBridge } = state$.value.explore[exploreId]; + + // If datasource already changed, results do not matter + if (datasourceInstance.meta.id !== datasourceId) { + return NEVER; + } + + const result = series || delta || []; + const replacePreviousResults = loadingState === LoadingState.Done && series && !delta ? true : false; + const resultProcessor = new ResultProcessor(state$.value.explore[exploreId], replacePreviousResults, result); + const graphResult = resultProcessor.getGraphResult(); + const tableResult = resultProcessor.getTableResult(); + const logsResult = resultProcessor.getLogsResult(); + const refIds = getRefIds(result); + const actions: Array> = []; + + // For Angular editors + eventBridge.emit('data-received', resultProcessor.getRawData()); + + // Clears any previous errors that now have a successful query, important so Angular editors are updated correctly + actions.push( + resetQueryErrorAction({ + exploreId, + refIds, + }) + ); + + actions.push( + querySuccessAction({ + exploreId, + latency, + loadingState, + graphResult, + tableResult, + logsResult, + }) + ); + + // Keep scanning for results if this was the last scanning transaction + if (scanning) { + if (_.size(result) === 0) { + const range = scanner(); + actions.push(scanRangeAction({ exploreId, range })); + } else { + // We can stop scanning if we have a result + actions.push(scanStopAction({ exploreId })); + } + } + + return actions; + }) + ); +}; diff --git a/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts b/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts new file mode 100644 index 000000000000..6ddada2bc32a --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts @@ -0,0 +1,421 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { runQueriesBatchEpic } from './runQueriesBatchEpic'; +import { + runQueriesBatchAction, + 
queryStartAction, + historyUpdatedAction, + processQueryResultsAction, + processQueryErrorsAction, + limitMessageRatePayloadAction, + resetExploreAction, + updateDatasourceInstanceAction, + changeRefreshIntervalAction, + clearQueriesAction, + stateSaveAction, +} from '../actionTypes'; +import { LoadingState, DataQueryRequest, SeriesData, FieldType } from '@grafana/ui'; + +const testContext = () => { + const series: SeriesData[] = [ + { + fields: [ + { + name: 'Value', + }, + { + name: 'Time', + type: FieldType.time, + unit: 'dateTimeAsIso', + }, + ], + rows: [], + refId: 'A', + }, + ]; + const response = { data: series }; + + return { + response, + series, + }; +}; + +describe('runQueriesBatchEpic', () => { + let originalDateNow = Date.now; + beforeEach(() => { + originalDateNow = Date.now; + Date.now = () => 1337; + }); + + afterEach(() => { + Date.now = originalDateNow; + }); + + describe('when runQueriesBatchAction is dispatched', () => { + describe('and query targets are not live', () => { + describe('and query is successful', () => { + it('then correct actions are dispatched', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + }); + }); + + describe('and query is not successful', () => { + it('then correct actions are dispatched', () => { + const error = { + message: 'Error parsing line x', + }; + const { exploreId, state, datasourceId } = mockExploreState(); + + epicTester(runQueriesBatchEpic, 
state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryThrowsError(error) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + processQueryErrorsAction({ exploreId, response: error, datasourceId }) + ); + }); + }); + }); + + describe('and query targets are live', () => { + describe('and state equals Streaming', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + const serieA = { + fields: [], + rows: [], + refId: 'A', + }; + const serieB = { + fields: [], + rows: [], + refId: 'B', + }; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Streaming, + delta: [serieA], + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Streaming, + delta: [serieB], + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + limitMessageRatePayloadAction({ exploreId, series: [serieA], datasourceId }), + limitMessageRatePayloadAction({ exploreId, series: [serieB], datasourceId }) + ); + }); + }); + + describe('and state equals Error', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + const error = { message: 'Something went really wrong!' 
}; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Error, + error, + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + processQueryErrorsAction({ exploreId, response: error, datasourceId }) + ); + }); + }); + + describe('and state equals Done', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId, history } = mockExploreState(); + const unsubscribe = jest.fn(); + const serieA = { + fields: [], + rows: [], + refId: 'A', + }; + const serieB = { + fields: [], + rows: [], + refId: 'B', + }; + const delta = [serieA, serieB]; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Done, + series: null, + delta, + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta, + series: null, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + }); + }); + }); + + describe('and another runQueriesBatchAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) // first observable + ) + 
.whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched( + // second observable and unsubscribes the first observable + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 800 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), // output from first observable + historyUpdatedAction({ exploreId, history }), // output from first observable + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction(), + // output from first observable + queryStartAction({ exploreId }), // output from second observable + historyUpdatedAction({ exploreId, history }), // output from second observable + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + // output from second observable + ); + + expect(unsubscribe).toBeCalledTimes(1); // first unsubscribe should be called but not second as that isn't unsubscribed + }); + }); + + describe('and resetExploreAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + 
.whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(resetExploreAction()) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and updateDatasourceInstanceAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId, datasourceInstance } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(updateDatasourceInstanceAction({ exploreId, datasourceInstance })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and changeRefreshIntervalAction is dispatched', () => { + it('then the 
observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: '' })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and clearQueriesAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(clearQueriesAction({ exploreId })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + 
.thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/runQueriesBatchEpic.ts b/public/app/features/explore/state/epics/runQueriesBatchEpic.ts new file mode 100644 index 000000000000..8e2642f193f8 --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesBatchEpic.ts @@ -0,0 +1,220 @@ +import { Epic } from 'redux-observable'; +import { Observable, Subject } from 'rxjs'; +import { mergeMap, catchError, takeUntil, filter } from 'rxjs/operators'; +import _, { isString } from 'lodash'; +import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; +import { DataStreamState, LoadingState, DataQueryResponse, SeriesData, DataQueryResponseData } from '@grafana/ui'; +import * as dateMath from '@grafana/ui/src/utils/datemath'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { buildQueryTransaction, updateHistory } from 'app/core/utils/explore'; +import { + clearQueriesAction, + historyUpdatedAction, + resetExploreAction, + updateDatasourceInstanceAction, + changeRefreshIntervalAction, + processQueryErrorsAction, + processQueryResultsAction, + runQueriesBatchAction, + RunQueriesBatchPayload, + queryStartAction, + limitMessageRatePayloadAction, + stateSaveAction, + changeRangeAction, +} from '../actionTypes'; +import { ExploreId, ExploreItemState } from 'app/types'; + +const publishActions = (outerObservable: Subject, actions: Array>) => { + for (const action of actions) { + outerObservable.next(action); + } +}; + +interface ProcessResponseConfig { + exploreId: ExploreId; + exploreItemState: ExploreItemState; + datasourceId: string; + now: 
number; + loadingState: LoadingState; + series?: DataQueryResponseData[]; + delta?: SeriesData[]; +} + +const processResponse = (config: ProcessResponseConfig) => { + const { exploreId, exploreItemState, datasourceId, now, loadingState, series, delta } = config; + const { queries, history } = exploreItemState; + const latency = Date.now() - now; + + // Side-effect: Saving history in localstorage + const nextHistory = updateHistory(history, datasourceId, queries); + return [ + historyUpdatedAction({ exploreId, history: nextHistory }), + processQueryResultsAction({ exploreId, latency, datasourceId, loadingState, series, delta }), + stateSaveAction(), + ]; +}; + +interface ProcessErrorConfig { + exploreId: ExploreId; + datasourceId: string; + error: any; +} + +const processError = (config: ProcessErrorConfig) => { + const { exploreId, datasourceId, error } = config; + + return [processQueryErrorsAction({ exploreId, response: error, datasourceId })]; +}; + +export const runQueriesBatchEpic: Epic, ActionOf, StoreState> = ( + action$, + state$, + { getQueryResponse } +) => { + return action$.ofType(runQueriesBatchAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, queryOptions } = action.payload; + const exploreItemState = state$.value.explore[exploreId]; + const { datasourceInstance, queries, queryIntervals, range, scanning } = exploreItemState; + + // Create an observable per run queries action + // Within the observable create two subscriptions + // First subscription: 'querySubscription' subscribes to the call to query method on datasourceinstance + // Second subscription: 'streamSubscription' subscribes to events from the query methods observer callback + const observable: Observable> = Observable.create((outerObservable: Subject) => { + const datasourceId = datasourceInstance.meta.id; + const transaction = buildQueryTransaction(queries, queryOptions, range, queryIntervals, scanning); + outerObservable.next(queryStartAction({ exploreId })); + 
+ const now = Date.now(); + let datasourceUnsubscribe: Function = null; + const streamHandler = new Subject(); + const observer = (event: DataStreamState) => { + datasourceUnsubscribe = event.unsubscribe; + if (!streamHandler.closed) { + // their might be a race condition when unsubscribing + streamHandler.next(event); + } + }; + + // observer subscription, handles datasourceInstance.query observer events and pushes that forward + const streamSubscription = streamHandler.subscribe({ + next: event => { + const { state, error, series, delta } = event; + if (!series && !delta && !error) { + return; + } + + if (state === LoadingState.Error) { + const actions = processError({ exploreId, datasourceId, error }); + publishActions(outerObservable, actions); + } + + if (state === LoadingState.Streaming) { + if (event.request && event.request.range) { + let newRange = event.request.range; + if (isString(newRange.raw.from)) { + newRange = { + from: dateMath.parse(newRange.raw.from, false), + to: dateMath.parse(newRange.raw.to, true), + raw: newRange.raw, + }; + } + outerObservable.next(changeRangeAction({ exploreId, range: newRange })); + } + outerObservable.next( + limitMessageRatePayloadAction({ + exploreId, + series: delta, + datasourceId, + }) + ); + } + + if (state === LoadingState.Done || state === LoadingState.Loading) { + const actions = processResponse({ + exploreId, + exploreItemState, + datasourceId, + now, + loadingState: state, + series: null, + delta, + }); + publishActions(outerObservable, actions); + } + }, + }); + + // query subscription, handles datasourceInstance.query response and pushes that forward + const querySubscription = getQueryResponse(datasourceInstance, transaction.options, observer) + .pipe( + mergeMap((response: DataQueryResponse) => { + return processResponse({ + exploreId, + exploreItemState, + datasourceId, + now, + loadingState: LoadingState.Done, + series: response && response.data ? 
response.data : [], + delta: null, + }); + }), + catchError(error => { + return processError({ exploreId, datasourceId, error }); + }) + ) + .subscribe({ next: (action: ActionOf) => outerObservable.next(action) }); + + // this unsubscribe method will be called when any of the takeUntil actions below happen + const unsubscribe = () => { + if (datasourceUnsubscribe) { + datasourceUnsubscribe(); + } + querySubscription.unsubscribe(); + streamSubscription.unsubscribe(); + streamHandler.unsubscribe(); + outerObservable.unsubscribe(); + }; + + return unsubscribe; + }); + + return observable.pipe( + takeUntil( + action$ + .ofType( + runQueriesBatchAction.type, + resetExploreAction.type, + updateDatasourceInstanceAction.type, + changeRefreshIntervalAction.type, + clearQueriesAction.type + ) + .pipe( + filter(action => { + if (action.type === resetExploreAction.type) { + return true; // stops all subscriptions if user navigates away + } + + if (action.type === updateDatasourceInstanceAction.type && action.payload.exploreId === exploreId) { + return true; // stops subscriptions if user changes data source + } + + if (action.type === changeRefreshIntervalAction.type && action.payload.exploreId === exploreId) { + return !isLive(action.payload.refreshInterval); // stops subscriptions if user changes refresh interval away from 'Live' + } + + if (action.type === clearQueriesAction.type && action.payload.exploreId === exploreId) { + return true; // stops subscriptions if user clears all queries + } + + return action.payload.exploreId === exploreId; + }) + ) + ) + ); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/runQueriesEpic.test.ts b/public/app/features/explore/state/epics/runQueriesEpic.test.ts new file mode 100644 index 000000000000..87b1f86513f1 --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesEpic.test.ts @@ -0,0 +1,71 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 
'test/core/redux/epicTester'; +import { runQueriesAction, stateSaveAction, runQueriesBatchAction, clearQueriesAction } from '../actionTypes'; +import { runQueriesEpic } from './runQueriesEpic'; + +describe('runQueriesEpic', () => { + describe('when runQueriesAction is dispatched', () => { + describe('and there is no datasourceError', () => { + describe('and we have non empty queries', () => { + describe('and explore is not live', () => { + it('then runQueriesBatchAction and stateSaveAction are dispatched', () => { + const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }]; + const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({ queries }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenResultingActionsEqual( + runQueriesBatchAction({ + exploreId, + queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: false }, + }) + ); + }); + }); + + describe('and explore is live', () => { + it('then runQueriesBatchAction and stateSaveAction are dispatched', () => { + const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }]; + const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({ + queries, + isLive: true, + streaming: true, + }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenResultingActionsEqual( + runQueriesBatchAction({ + exploreId, + queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: true }, + }) + ); + }); + }); + }); + + describe('and we have no queries', () => { + it('then clearQueriesAction and stateSaveAction are dispatched', () => { + const queries = []; + const { exploreId, state } = mockExploreState({ queries }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + 
.thenResultingActionsEqual(clearQueriesAction({ exploreId }), stateSaveAction()); + }); + }); + }); + + describe('and there is a datasourceError', () => { + it('then no actions are dispatched', () => { + const { exploreId, state } = mockExploreState({ + datasourceError: { message: 'Some error' }, + }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenNoActionsWhereDispatched(); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/runQueriesEpic.ts b/public/app/features/explore/state/epics/runQueriesEpic.ts new file mode 100644 index 000000000000..2102c11b103c --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesEpic.ts @@ -0,0 +1,39 @@ +import { Epic } from 'redux-observable'; +import { NEVER } from 'rxjs'; +import { mergeMap } from 'rxjs/operators'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { hasNonEmptyQuery } from 'app/core/utils/explore'; +import { + clearQueriesAction, + runQueriesAction, + RunQueriesPayload, + runQueriesBatchAction, + stateSaveAction, +} from '../actionTypes'; + +export const runQueriesEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(runQueriesAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId } = action.payload; + const { datasourceInstance, queries, datasourceError, containerWidth, isLive } = state$.value.explore[exploreId]; + + if (datasourceError) { + // let's not run any queries if data source is in a faulty state + return NEVER; + } + + if (!hasNonEmptyQuery(queries)) { + return [clearQueriesAction({ exploreId }), stateSaveAction()]; // Remember to save to state and update location + } + + // Some datasource's query builders allow per-query interval limits, + // but we're using the datasource interval limit for now + const interval = datasourceInstance.interval; + const live = isLive; + + return 
[runQueriesBatchAction({ exploreId, queryOptions: { interval, maxDataPoints: containerWidth, live } })]; + }) + ); +}; diff --git a/public/app/features/explore/state/epics/stateSaveEpic.test.ts b/public/app/features/explore/state/epics/stateSaveEpic.test.ts new file mode 100644 index 000000000000..bee12ad92a9e --- /dev/null +++ b/public/app/features/explore/state/epics/stateSaveEpic.test.ts @@ -0,0 +1,61 @@ +import { epicTester } from 'test/core/redux/epicTester'; +import { stateSaveEpic } from './stateSaveEpic'; +import { stateSaveAction, setUrlReplacedAction } from '../actionTypes'; +import { updateLocation } from 'app/core/actions/location'; +import { mockExploreState } from 'test/mocks/mockExploreState'; + +describe('stateSaveEpic', () => { + describe('when stateSaveAction is dispatched', () => { + describe('and there is a left state', () => { + describe('and no split', () => { + it('then the correct actions are dispatched', () => { + const { exploreId, state } = mockExploreState(); + + epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { left: '["now-6h","now","test",{"ui":[true,true,true,null]}]' }, + replace: true, + }), + setUrlReplacedAction({ exploreId }) + ); + }); + }); + + describe('and explore is splitted', () => { + it('then the correct actions are dispatched', () => { + const { exploreId, state } = mockExploreState({ split: true }); + + epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { + left: '["now-6h","now","test",{"ui":[true,true,true,null]}]', + right: '["now-6h","now","test",{"ui":[true,true,true,null]}]', + }, + replace: true, + }), + setUrlReplacedAction({ exploreId }) + ); + }); + }); + }); + + describe('and urlReplaced is true', () => { + it('then setUrlReplacedAction should not be dispatched', () => { + const { state } = mockExploreState({ urlReplaced: true }); + + 
epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { left: '["now-6h","now","test",{"ui":[true,true,true,null]}]' }, + replace: false, + }) + ); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/stateSaveEpic.ts b/public/app/features/explore/state/epics/stateSaveEpic.ts new file mode 100644 index 000000000000..107f1de547b4 --- /dev/null +++ b/public/app/features/explore/state/epics/stateSaveEpic.ts @@ -0,0 +1,72 @@ +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { RawTimeRange, TimeRange } from '@grafana/ui/src/types/time'; +import { isDateTime } from '@grafana/ui/src/utils/moment_wrapper'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { ExploreUrlState, ExploreId } from 'app/types/explore'; +import { clearQueryKeys, serializeStateToUrlParam } from 'app/core/utils/explore'; +import { updateLocation } from 'app/core/actions/location'; +import { setUrlReplacedAction, stateSaveAction } from '../actionTypes'; + +const toRawTimeRange = (range: TimeRange): RawTimeRange => { + let from = range.raw.from; + if (isDateTime(from)) { + from = from.valueOf().toString(10); + } + + let to = range.raw.to; + if (isDateTime(to)) { + to = to.valueOf().toString(10); + } + + return { + from, + to, + }; +}; + +export const stateSaveEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(stateSaveAction.type).pipe( + mergeMap(() => { + const { left, right, split } = state$.value.explore; + const replace = left && left.urlReplaced === false; + const urlStates: { [index: string]: string } = {}; + const leftUrlState: ExploreUrlState = { + datasource: left.datasourceInstance.name, + queries: left.queries.map(clearQueryKeys), + range: toRawTimeRange(left.range), + ui: { + showingGraph: left.showingGraph, + showingLogs: true, + 
showingTable: left.showingTable, + dedupStrategy: left.dedupStrategy, + }, + }; + urlStates.left = serializeStateToUrlParam(leftUrlState, true); + if (split) { + const rightUrlState: ExploreUrlState = { + datasource: right.datasourceInstance.name, + queries: right.queries.map(clearQueryKeys), + range: toRawTimeRange(right.range), + ui: { + showingGraph: right.showingGraph, + showingLogs: true, + showingTable: right.showingTable, + dedupStrategy: right.dedupStrategy, + }, + }; + + urlStates.right = serializeStateToUrlParam(rightUrlState, true); + } + + const actions: Array> = [updateLocation({ query: urlStates, replace })]; + if (replace) { + actions.push(setUrlReplacedAction({ exploreId: ExploreId.left })); + } + + return actions; + }) + ); +}; diff --git a/public/app/features/explore/state/reducers.test.ts b/public/app/features/explore/state/reducers.test.ts index c5ee8dbb7798..1f553313f807 100644 --- a/public/app/features/explore/state/reducers.test.ts +++ b/public/app/features/explore/state/reducers.test.ts @@ -4,20 +4,19 @@ import { exploreReducer, makeInitialUpdateState, initialExploreState, + DEFAULT_RANGE, } from './reducers'; import { ExploreId, ExploreItemState, ExploreUrlState, ExploreState, - QueryTransaction, RangeScanner, ExploreMode, } from 'app/types/explore'; import { reducerTester } from 'test/core/redux/reducerTester'; import { scanStartAction, - scanStopAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, @@ -25,18 +24,19 @@ import { splitOpenAction, splitCloseAction, changeModeAction, + scanStopAction, + runQueriesAction, } from './actionTypes'; import { Reducer } from 'redux'; import { ActionOf } from 'app/core/redux/actionCreatorFactory'; import { updateLocation } from 'app/core/actions/location'; -import { LogsDedupStrategy, LogsModel } from 'app/core/logs_model'; import { serializeStateToUrlParam } from 'app/core/utils/explore'; import TableModel from 'app/core/table_model'; -import { DataSourceApi, 
DataQuery } from '@grafana/ui'; +import { DataSourceApi, DataQuery, LogsModel, LogsDedupStrategy, LoadingState, dateTime } from '@grafana/ui'; describe('Explore item reducer', () => { describe('scanning', () => { - test('should start scanning', () => { + it('should start scanning', () => { const scanner = jest.fn(); const initalState = { ...makeExploreItemState(), @@ -53,7 +53,7 @@ describe('Explore item reducer', () => { scanner, }); }); - test('should stop scanning', () => { + it('should stop scanning', () => { const scanner = jest.fn(); const initalState = { ...makeExploreItemState(), @@ -96,7 +96,6 @@ describe('Explore item reducer', () => { describe('when testDataSourceFailureAction is dispatched', () => { it('then it should set correct state', () => { const error = 'some error'; - const queryTransactions: QueryTransaction[] = []; const initalState: Partial = { datasourceError: null, graphResult: [], @@ -111,7 +110,6 @@ describe('Explore item reducer', () => { }; const expectedState = { datasourceError: error, - queryTransactions, graphResult: undefined as any[], tableResult: undefined as TableModel, logsResult: undefined as LogsModel, @@ -144,9 +142,8 @@ describe('Explore item reducer', () => { const StartPage = {}; const datasourceInstance = { meta: { - metrics: {}, - logs: {}, - tables: {}, + metrics: true, + logs: true, }, components: { ExploreStartPage: StartPage, @@ -156,9 +153,6 @@ describe('Explore item reducer', () => { const queryKeys: string[] = []; const initalState: Partial = { datasourceInstance: null, - supportsGraph: false, - supportsLogs: false, - supportsTable: false, StartPage: null, showingStartPage: false, queries, @@ -166,15 +160,15 @@ describe('Explore item reducer', () => { }; const expectedState = { datasourceInstance, - supportsGraph: true, - supportsLogs: true, - supportsTable: true, StartPage, showingStartPage: true, queries, queryKeys, supportedModes: [ExploreMode.Metrics, ExploreMode.Logs], mode: ExploreMode.Metrics, + 
loadingState: LoadingState.NotStarted, + latency: 0, + queryErrors: [], }; reducerTester() @@ -185,6 +179,34 @@ describe('Explore item reducer', () => { }); }); }); + + describe('run queries', () => { + describe('when runQueriesAction is dispatched', () => { + it('then it should set correct state', () => { + const initalState: Partial = { + showingStartPage: true, + range: null, + }; + const expectedState = { + queryIntervals: { + interval: '1s', + intervalMs: 1000, + }, + showingStartPage: false, + range: { + from: dateTime(), + to: dateTime(), + raw: DEFAULT_RANGE, + }, + }; + + reducerTester() + .givenReducer(itemReducer, initalState) + .whenActionIsDispatched(runQueriesAction({ exploreId: ExploreId.left, range: expectedState.range })) + .thenStateShouldEqual(expectedState); + }); + }); + }); }); export const setup = (urlStateOverrides?: any) => { @@ -529,46 +551,8 @@ describe('Explore reducer', () => { }); }); - describe('and refreshInterval differs', () => { - it('then it should return update refreshInterval', () => { - const { initalState, serializedUrlState } = setup(); - const expectedState = { - ...initalState, - left: { - ...initalState.left, - update: { - ...initalState.left.update, - refreshInterval: true, - }, - }, - }; - const stateWithDifferentDataSource = { - ...initalState, - left: { - ...initalState.left, - urlState: { - ...initalState.left.urlState, - refreshInterval: '5s', - }, - }, - }; - - reducerTester() - .givenReducer(exploreReducer, stateWithDifferentDataSource) - .whenActionIsDispatched( - updateLocation({ - query: { - left: serializedUrlState, - }, - path: '/explore', - }) - ) - .thenStateShouldEqual(expectedState); - }); - }); - describe('and nothing differs', () => { - fit('then it should return update ui', () => { + it('then it should return update ui', () => { const { initalState, serializedUrlState } = setup(); const expectedState = { ...initalState }; diff --git a/public/app/features/explore/state/reducers.ts 
b/public/app/features/explore/state/reducers.ts index f0847360cbe3..67775b9626bd 100644 --- a/public/app/features/explore/state/reducers.ts +++ b/public/app/features/explore/state/reducers.ts @@ -1,15 +1,15 @@ import _ from 'lodash'; import { - calculateResultsFromQueryTransactions, getIntervals, ensureQueries, getQueryKeys, parseUrlState, DEFAULT_UI_STATE, generateNewKeyAndAddRefIdIfMissing, + sortLogsResult, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, ExploreId, ExploreUpdateState, ExploreMode } from 'app/types/explore'; -import { DataQuery } from '@grafana/ui/src/types'; +import { DataQuery, LoadingState } from '@grafana/ui'; import { HigherOrderAction, ActionTypes, @@ -19,10 +19,17 @@ import { splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, - runQueriesAction, historyUpdatedAction, - resetQueryErrorAction, changeModeAction, + queryFailureAction, + setUrlReplacedAction, + querySuccessAction, + scanRangeAction, + scanStopAction, + resetQueryErrorAction, + queryStartAction, + runQueriesAction, + changeRangeAction, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { @@ -39,13 +46,8 @@ import { loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, - queryFailureAction, - queryStartAction, - querySuccessAction, removeQueryRowAction, - scanRangeAction, scanStartAction, - scanStopAction, setQueriesAction, toggleTableAction, queriesImportedAction, @@ -55,6 +57,7 @@ import { import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; import TableModel from 'app/core/table_model'; +import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; export const DEFAULT_RANGE = { from: 'now-6h', @@ -95,14 +98,8 @@ export const makeExploreItemState = (): ExploreItemState => ({ scanning: false, scanRange: null, showingGraph: true, - showingLogs: true, showingTable: true, - graphIsLoading: false, - logIsLoading: false, 
- tableIsLoading: false, - supportsGraph: null, - supportsLogs: null, - supportsTable: null, + loadingState: LoadingState.NotStarted, queryKeys: [], urlState: null, update: makeInitialUpdateState(), @@ -110,6 +107,8 @@ export const makeExploreItemState = (): ExploreItemState => ({ latency: 0, supportedModes: [], mode: null, + isLive: false, + urlReplaced: false, }); /** @@ -185,9 +184,15 @@ export const itemReducer = reducerFactory({} as ExploreItemSta filter: changeRefreshIntervalAction, mapper: (state, action): ExploreItemState => { const { refreshInterval } = action.payload; + const live = isLive(refreshInterval); + const logsResult = sortLogsResult(state.logsResult, refreshInterval); + return { ...state, - refreshInterval: refreshInterval, + refreshInterval, + loadingState: live ? LoadingState.Streaming : LoadingState.NotStarted, + isLive: live, + logsResult, }; }, }) @@ -234,7 +239,6 @@ export const itemReducer = reducerFactory({} as ExploreItemSta // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; - const supportsTable = datasourceInstance.meta.tables; let mode = ExploreMode.Metrics; const supportedModes: ExploreMode[] = []; @@ -259,12 +263,7 @@ export const itemReducer = reducerFactory({} as ExploreItemSta datasourceInstance, queryErrors: [], latency: 0, - graphIsLoading: false, - logIsLoading: false, - tableIsLoading: false, - supportsGraph, - supportsLogs, - supportsTable, + loadingState: LoadingState.NotStarted, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), @@ -341,37 +340,29 @@ export const itemReducer = reducerFactory({} as ExploreItemSta .addMapper({ filter: queryFailureAction, mapper: (state, action): ExploreItemState => { - const { resultType, response } = action.payload; + const { response } = action.payload; const queryErrors = state.queryErrors.concat(response); return { ...state, - graphResult: resultType === 
'Graph' ? null : state.graphResult, - tableResult: resultType === 'Table' ? null : state.tableResult, - logsResult: resultType === 'Logs' ? null : state.logsResult, + graphResult: null, + tableResult: null, + logsResult: null, latency: 0, queryErrors, - showingStartPage: false, - graphIsLoading: resultType === 'Graph' ? false : state.graphIsLoading, - logIsLoading: resultType === 'Logs' ? false : state.logIsLoading, - tableIsLoading: resultType === 'Table' ? false : state.tableIsLoading, + loadingState: LoadingState.Error, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryStartAction, - mapper: (state, action): ExploreItemState => { - const { resultType } = action.payload; - + mapper: (state): ExploreItemState => { return { ...state, queryErrors: [], latency: 0, - graphIsLoading: resultType === 'Graph' ? true : state.graphIsLoading, - logIsLoading: resultType === 'Logs' ? true : state.logIsLoading, - tableIsLoading: resultType === 'Table' ? true : state.tableIsLoading, - showingStartPage: false, + loadingState: LoadingState.Loading, update: makeInitialUpdateState(), }; }, @@ -379,19 +370,15 @@ export const itemReducer = reducerFactory({} as ExploreItemSta .addMapper({ filter: querySuccessAction, mapper: (state, action): ExploreItemState => { - const { queryIntervals } = state; - const { result, resultType, latency } = action.payload; - const results = calculateResultsFromQueryTransactions(result, resultType, queryIntervals.intervalMs); + const { latency, loadingState, graphResult, tableResult, logsResult } = action.payload; return { ...state, - graphResult: resultType === 'Graph' ? results.graphResult : state.graphResult, - tableResult: resultType === 'Table' ? results.tableResult : state.tableResult, - logsResult: resultType === 'Logs' ? 
results.logsResult : state.logsResult, + loadingState, + graphResult, + tableResult, + logsResult, latency, - graphIsLoading: false, - logIsLoading: false, - tableIsLoading: false, showingStartPage: false, update: makeInitialUpdateState(), }; @@ -533,8 +520,9 @@ export const itemReducer = reducerFactory({} as ExploreItemSta }) .addMapper({ filter: runQueriesAction, - mapper: (state): ExploreItemState => { - const { range, datasourceInstance, containerWidth } = state; + mapper: (state, action): ExploreItemState => { + const { range } = action.payload; + const { datasourceInstance, containerWidth } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; @@ -542,7 +530,9 @@ export const itemReducer = reducerFactory({} as ExploreItemSta const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, + range, queryIntervals, + showingStartPage: false, }; }, }) @@ -573,6 +563,24 @@ export const itemReducer = reducerFactory({} as ExploreItemSta }; }, }) + .addMapper({ + filter: setUrlReplacedAction, + mapper: (state): ExploreItemState => { + return { + ...state, + urlReplaced: true, + }; + }, + }) + .addMapper({ + filter: changeRangeAction, + mapper: (state, action): ExploreItemState => { + return { + ...state, + range: action.payload.range, + }; + }, + }) .create(); export const updateChildRefreshState = ( diff --git a/public/app/features/explore/state/selectors.test.ts b/public/app/features/explore/state/selectors.test.ts index 3a1fa5102ae3..52f8d27811dc 100644 --- a/public/app/features/explore/state/selectors.test.ts +++ b/public/app/features/explore/state/selectors.test.ts @@ -1,5 +1,5 @@ import { deduplicatedLogsSelector } from './selectors'; -import { LogsDedupStrategy } from 'app/core/logs_model'; +import { LogsDedupStrategy } from '@grafana/ui'; import { ExploreItemState } from 'app/types'; const state = { diff --git a/public/app/features/explore/state/selectors.ts 
b/public/app/features/explore/state/selectors.ts index fff52651646c..6925e706d4f4 100644 --- a/public/app/features/explore/state/selectors.ts +++ b/public/app/features/explore/state/selectors.ts @@ -3,10 +3,9 @@ import { ExploreItemState } from 'app/types'; import { filterLogLevels, dedupLogRows } from 'app/core/logs_model'; export const exploreItemUIStateSelector = (itemState: ExploreItemState) => { - const { showingGraph, showingLogs, showingTable, showingStartPage, dedupStrategy } = itemState; + const { showingGraph, showingTable, showingStartPage, dedupStrategy } = itemState; return { showingGraph, - showingLogs, showingTable, showingStartPage, dedupStrategy, diff --git a/public/app/features/explore/utils/ResultProcessor.test.ts b/public/app/features/explore/utils/ResultProcessor.test.ts new file mode 100644 index 000000000000..4979afa538cb --- /dev/null +++ b/public/app/features/explore/utils/ResultProcessor.test.ts @@ -0,0 +1,453 @@ +jest.mock('@grafana/ui/src/utils/moment_wrapper', () => ({ + dateTime: (ts: any) => { + return { + valueOf: () => ts, + fromNow: () => 'fromNow() jest mocked', + format: (fmt: string) => 'format() jest mocked', + }; + }, +})); + +import { ResultProcessor } from './ResultProcessor'; +import { ExploreItemState, ExploreMode } from 'app/types/explore'; +import TableModel from 'app/core/table_model'; +import { toFixed } from '@grafana/ui'; + +const testContext = (options: any = {}) => { + const response = [ + { + target: 'A-series', + alias: 'A-series', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + refId: 'A', + }, + { + columns: [ + { + text: 'Time', + }, + { + text: 'Message', + }, + { + text: 'Description', + }, + { + text: 'Value', + }, + ], + rows: [ + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + refId: 'B', + }, + ]; + const defaultOptions = { + mode: ExploreMode.Metrics, + replacePreviousResults: true, + 
result: { data: response }, + graphResult: [], + tableResult: new TableModel(), + logsResult: { hasUniqueLabels: false, rows: [] }, + }; + const combinedOptions = { ...defaultOptions, ...options }; + const state = ({ + mode: combinedOptions.mode, + graphResult: combinedOptions.graphResult, + tableResult: combinedOptions.tableResult, + logsResult: combinedOptions.logsResult, + queryIntervals: { intervalMs: 10 }, + } as any) as ExploreItemState; + const resultProcessor = new ResultProcessor(state, combinedOptions.replacePreviousResults, combinedOptions.result); + + return { + result: combinedOptions.result, + resultProcessor, + }; +}; + +describe('ResultProcessor', () => { + describe('constructed without result', () => { + describe('when calling getRawData', () => { + it('then it should return an empty array', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual([]); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return an empty array', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return an empty TableModel', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual(new TableModel()); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return null', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getLogsResult(); + + expect(theResult).toBeNull(); + }); + }); + }); + + describe('constructed with a result that is a DataQueryResponse', () => { + describe('when calling getRawData', () => { + it('then it should return result.data', () => { + const { result, 
resultProcessor } = testContext(); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual(result.data); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return correct graph result', () => { + const { resultProcessor } = testContext(); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return correct table result', () => { + const { resultProcessor } = testContext(); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual({ + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + type: 'table', + }); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return correct logs result', () => { + const { resultProcessor } = testContext({ mode: ExploreMode.Logs, observerResponse: null }); + const theResult = resultProcessor.getLogsResult(); + + expect(theResult).toEqual({ + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a message', + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + hasAnsi: false, + labels: undefined, + logLevel: 
'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + meta: undefined, + refId: 'A', + target: 'A-series', + unit: undefined, + }, + ], + }); + }); + }); + }); + + describe('constructed with result that is a DataQueryResponse and merging with previous results', () => { + describe('when calling getRawData', () => { + it('then it should return result.data', () => { + const { result, resultProcessor } = testContext(); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual(result.data); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return correct graph result', () => { + const { resultProcessor } = testContext({ + replacePreviousResults: false, + graphResult: [ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[19.91264531864214, 1558038518831], [20.35179822906545, 1558038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ], + }); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [ + [19.91264531864214, 1558038518831], + [20.35179822906545, 1558038519831], + [39.91264531864214, 1559038518831], + [40.35179822906545, 1559038519831], + ], + unit: undefined, + valueFormater: toFixed, + }, + ]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return correct 
table result', () => { + const { resultProcessor } = testContext({ + replacePreviousResults: false, + tableResult: { + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1], + [1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1], + ], + type: 'table', + }, + }); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual({ + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1], + [1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1], + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + type: 'table', + }); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return correct logs result', () => { + const { resultProcessor } = testContext({ + mode: ExploreMode.Logs, + replacePreviousResults: false, + logsResult: { + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a previous message 1', + fresh: true, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 1', + searchWords: [], + timeEpochMs: 1558038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a previous message 2', + fresh: true, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 2', + searchWords: [], + timeEpochMs: 1558038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 
'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[37.91264531864214, 1558038518831], [38.35179822906545, 1558038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ], + }, + }); + const theResult = resultProcessor.getLogsResult(); + const expected = { + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a previous message 1', + fresh: false, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 1', + searchWords: [], + timeEpochMs: 1558038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a previous message 2', + fresh: false, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 2', + searchWords: [], + timeEpochMs: 1558038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038518831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + fresh: true, + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + fresh: true, + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + 
legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [ + [37.91264531864214, 1558038518831], + [38.35179822906545, 1558038519831], + [39.91264531864214, 1559038518831], + [40.35179822906545, 1559038519831], + ], + unit: undefined, + valueFormater: toFixed, + }, + ], + }; + + expect(theResult).toEqual(expected); + }); + }); + }); +}); diff --git a/public/app/features/explore/utils/ResultProcessor.ts b/public/app/features/explore/utils/ResultProcessor.ts new file mode 100644 index 000000000000..2521c4914f8e --- /dev/null +++ b/public/app/features/explore/utils/ResultProcessor.ts @@ -0,0 +1,176 @@ +import { + DataQueryResponse, + TableData, + isTableData, + LogsModel, + toSeriesData, + guessFieldTypes, + DataQueryResponseData, + TimeSeries, +} from '@grafana/ui'; + +import { ExploreItemState, ExploreMode } from 'app/types/explore'; +import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState'; +import TableModel, { mergeTablesIntoModel } from 'app/core/table_model'; +import { sortLogsResult } from 'app/core/utils/explore'; +import { seriesDataToLogsModel } from 'app/core/logs_model'; +import { default as TimeSeries2 } from 'app/core/time_series2'; +import { DataProcessor } from 'app/plugins/panel/graph/data_processor'; + +export class ResultProcessor { + private rawData: DataQueryResponseData[] = []; + private metrics: TimeSeries[] = []; + private tables: TableData[] = []; + + constructor( + private state: ExploreItemState, + private replacePreviousResults: boolean, + result?: DataQueryResponse | DataQueryResponseData[] + ) { + if (result && result.hasOwnProperty('data')) { + this.rawData = (result as DataQueryResponse).data; + } else { + this.rawData = (result as DataQueryResponseData[]) || []; + } + + if (this.state.mode !== ExploreMode.Metrics) { + return; + } + + for (let index = 0; index < this.rawData.length; index++) { + const res: any = this.rawData[index]; + const isTable = isTableData(res); + if (isTable) { + 
this.tables.push(res); + } else { + this.metrics.push(res); + } + } + } + + getRawData = (): any[] => { + return this.rawData; + }; + + getGraphResult = (): TimeSeries[] => { + if (this.state.mode !== ExploreMode.Metrics) { + return []; + } + + const newResults = this.makeTimeSeriesList(this.metrics); + return this.mergeGraphResults(newResults, this.state.graphResult); + }; + + getTableResult = (): TableModel => { + if (this.state.mode !== ExploreMode.Metrics) { + return new TableModel(); + } + + const prevTableResults = this.state.tableResult || []; + const tablesToMerge = this.replacePreviousResults ? this.tables : [].concat(prevTableResults, this.tables); + + return mergeTablesIntoModel(new TableModel(), ...tablesToMerge); + }; + + getLogsResult = (): LogsModel => { + if (this.state.mode !== ExploreMode.Logs) { + return null; + } + const graphInterval = this.state.queryIntervals.intervalMs; + const seriesData = this.rawData.map(result => guessFieldTypes(toSeriesData(result))); + const newResults = this.rawData ? 
seriesDataToLogsModel(seriesData, graphInterval) : null; + + if (this.replacePreviousResults) { + return newResults; + } + + const prevLogsResult: LogsModel = this.state.logsResult || { hasUniqueLabels: false, rows: [] }; + const sortedLogResult = sortLogsResult(prevLogsResult, this.state.refreshInterval); + const rowsInState = sortedLogResult.rows; + const seriesInState = sortedLogResult.series || []; + + const processedRows = []; + for (const row of rowsInState) { + processedRows.push({ ...row, fresh: false }); + } + for (const row of newResults.rows) { + processedRows.push({ ...row, fresh: true }); + } + + const processedSeries = this.mergeGraphResults(newResults.series, seriesInState); + + const slice = -1000; + const rows = processedRows.slice(slice); + const series = processedSeries.slice(slice); + + return { ...newResults, rows, series }; + }; + + private makeTimeSeriesList = (rawData: any[]) => { + const dataList = getProcessedSeriesData(rawData); + const dataProcessor = new DataProcessor({ xaxis: {}, aliasColors: [] }); // Hack before we use GraphSeriesXY instead + const timeSeries = dataProcessor.getSeriesList({ dataList }); + + return (timeSeries as any) as TimeSeries[]; // Hack before we use GraphSeriesXY instead + }; + + private isSameTimeSeries = (a: TimeSeries | TimeSeries2, b: TimeSeries | TimeSeries2) => { + if (a.hasOwnProperty('id') && b.hasOwnProperty('id')) { + if (a['id'] !== undefined && b['id'] !== undefined && a['id'] === b['id']) { + return true; + } + } + + if (a.hasOwnProperty('alias') && b.hasOwnProperty('alias')) { + if (a['alias'] !== undefined && b['alias'] !== undefined && a['alias'] === b['alias']) { + return true; + } + } + + return false; + }; + + private mergeGraphResults = ( + newResults: TimeSeries[] | TimeSeries2[], + prevResults: TimeSeries[] | TimeSeries2[] + ): TimeSeries[] => { + if (!prevResults || prevResults.length === 0 || this.replacePreviousResults) { + return (newResults as any) as TimeSeries[]; // Hack before we 
use GraphSeriesXY instead + } + + const results: TimeSeries[] = prevResults.slice() as TimeSeries[]; + + // update existing results + for (let index = 0; index < results.length; index++) { + const prevResult = results[index]; + for (const newResult of newResults) { + const isSame = this.isSameTimeSeries(prevResult, newResult); + + if (isSame) { + prevResult.datapoints = prevResult.datapoints.concat(newResult.datapoints); + break; + } + } + } + + // add new results + for (const newResult of newResults) { + let isNew = true; + for (const prevResult of results) { + const isSame = this.isSameTimeSeries(prevResult, newResult); + if (isSame) { + isNew = false; + break; + } + } + + if (isNew) { + const timeSeries2Result = new TimeSeries2({ ...newResult }); + + const result = (timeSeries2Result as any) as TimeSeries; // Hack before we use GraphSeriesXY instead + results.push(result); + } + } + return results; + }; +} diff --git a/public/app/features/org/state/actions.ts b/public/app/features/org/state/actions.ts index fc8742d12226..214674783cef 100644 --- a/public/app/features/org/state/actions.ts +++ b/public/app/features/org/state/actions.ts @@ -1,5 +1,5 @@ import { Organization, ThunkResult } from 'app/types'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; export enum ActionTypes { LoadOrganization = 'LOAD_ORGANIZATION', diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index 6143a5e182ba..c1c45f8acc40 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -5,11 +5,12 @@ import coreModule from 'app/core/core_module'; // Services & Utils import config from 'app/core/config'; import { importDataSourcePlugin } from './plugin_loader'; +import { DataSourceSrv as DataSourceService, getDataSourceSrv as getDataSourceService } from '@grafana/runtime'; // Types import { DataSourceApi, 
DataSourceSelectItem, ScopedVars } from '@grafana/ui/src/types'; -export class DatasourceSrv { +export class DatasourceSrv implements DataSourceService { datasources: { [name: string]: DataSourceApi }; /** @ngInject */ @@ -61,9 +62,13 @@ export class DatasourceSrv { return; } - const instance: DataSourceApi = this.$injector.instantiate(dsPlugin.DataSourceClass, { - instanceSettings: dsConfig, - }); + // If there is only one constructor argument it is instanceSettings + const useAngular = dsPlugin.DataSourceClass.length !== 1; + const instance: DataSourceApi = useAngular + ? this.$injector.instantiate(dsPlugin.DataSourceClass, { + instanceSettings: dsConfig, + }) + : new dsPlugin.DataSourceClass(dsConfig); instance.components = dsPlugin.components; instance.meta = dsConfig.meta; @@ -171,14 +176,8 @@ export class DatasourceSrv { } } -let singleton: DatasourceSrv; - -export function setDatasourceSrv(srv: DatasourceSrv) { - singleton = srv; -} - export function getDatasourceSrv(): DatasourceSrv { - return singleton; + return getDataSourceService() as DatasourceSrv; } coreModule.service('datasourceSrv', DatasourceSrv); diff --git a/public/app/features/plugins/plugin_loader.test.ts b/public/app/features/plugins/plugin_loader.test.ts new file mode 100644 index 000000000000..845a5dde62aa --- /dev/null +++ b/public/app/features/plugins/plugin_loader.test.ts @@ -0,0 +1,68 @@ +// Use the real plugin_loader (stubbed by default) +jest.unmock('app/features/plugins/plugin_loader'); + +(global as any).ace = { + define: jest.fn(), +}; + +jest.mock('app/core/core', () => { + return { + coreModule: { + directive: jest.fn(), + }, + }; +}); + +/* tslint:disable:import-blacklist */ +import System from 'systemjs/dist/system.js'; + +import { AppPluginMeta, PluginMetaInfo, PluginType, AppPlugin } from '@grafana/ui'; +import { importAppPlugin } from './plugin_loader'; + +class MyCustomApp extends AppPlugin { + initWasCalled = false; + calledTwice = false; + + init(meta: AppPluginMeta) { + 
this.initWasCalled = true; + this.calledTwice = this.meta === meta; + } +} + +describe('Load App', () => { + const app = new MyCustomApp(); + const modulePath = 'my/custom/plugin/module'; + + beforeAll(() => { + System.set(modulePath, System.newModule({ plugin: app })); + }); + + afterAll(() => { + System.delete(modulePath); + }); + + it('should call init and set meta', async () => { + const meta: AppPluginMeta = { + id: 'test-app', + module: modulePath, + baseUrl: 'xxx', + info: {} as PluginMetaInfo, + type: PluginType.app, + name: 'test', + }; + + // Check that we mocked the import OK + const m = await System.import(modulePath); + expect(m.plugin).toBe(app); + + const loaded = await importAppPlugin(meta); + expect(loaded).toBe(app); + expect(app.meta).toBe(meta); + expect(app.initWasCalled).toBeTruthy(); + expect(app.calledTwice).toBeFalsy(); + + const again = await importAppPlugin(meta); + expect(again).toBe(app); + expect(app.calledTwice).toBeTruthy(); + }); +}); diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index 2ee31a7b6aa2..74986466a49a 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -29,6 +29,7 @@ import impressionSrv from 'app/core/services/impression_srv'; import builtInPlugins from './built_in_plugins'; import * as d3 from 'd3'; import * as grafanaUI from '@grafana/ui'; +import * as grafanaRT from '@grafana/runtime'; // rxjs import { Observable, Subject } from 'rxjs'; @@ -68,6 +69,7 @@ function exposeToPlugin(name: string, component: any) { } exposeToPlugin('@grafana/ui', grafanaUI); +exposeToPlugin('@grafana/runtime', grafanaRT); exposeToPlugin('lodash', _); exposeToPlugin('moment', moment); exposeToPlugin('jquery', jquery); @@ -75,6 +77,10 @@ exposeToPlugin('angular', angular); exposeToPlugin('d3', d3); exposeToPlugin('rxjs/Subject', Subject); exposeToPlugin('rxjs/Observable', Observable); +exposeToPlugin('rxjs', { + Subject: 
Subject, + Observable: Observable, +}); // Experimental modules exposeToPlugin('prismjs', prismjs); @@ -84,12 +90,6 @@ exposeToPlugin('slate-plain-serializer', slatePlain); exposeToPlugin('react', react); exposeToPlugin('react-dom', reactDom); -// backward compatible path -exposeToPlugin('vendor/npm/rxjs/Rx', { - Subject: Subject, - Observable: Observable, -}); - exposeToPlugin('app/features/dashboard/impression_store', { impressions: impressionSrv, __esModule: true, @@ -183,8 +183,9 @@ export function importDataSourcePlugin(meta: DataSourcePluginMeta): Promise { return importPluginModule(meta.module).then(pluginExports => { const plugin = pluginExports.plugin ? (pluginExports.plugin as AppPlugin) : new AppPlugin(); - plugin.meta = meta; plugin.setComponentsFromLegacyExports(pluginExports); + plugin.init(meta); + plugin.meta = meta; return plugin; }); } diff --git a/public/app/features/plugins/state/actions.ts b/public/app/features/plugins/state/actions.ts index 9a1dbde7bffc..da0e14717633 100644 --- a/public/app/features/plugins/state/actions.ts +++ b/public/app/features/plugins/state/actions.ts @@ -1,6 +1,6 @@ import { StoreState } from 'app/types'; import { ThunkAction } from 'redux-thunk'; -import { getBackendSrv } from '../../../core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector'; import { PluginDashboard } from '../../../types/plugins'; import { PluginMeta } from '@grafana/ui'; diff --git a/public/app/features/plugins/wrappers/AppConfigWrapper.tsx b/public/app/features/plugins/wrappers/AppConfigWrapper.tsx index de6c670679d6..eb9afa9cf679 100644 --- a/public/app/features/plugins/wrappers/AppConfigWrapper.tsx +++ b/public/app/features/plugins/wrappers/AppConfigWrapper.tsx @@ -5,7 +5,7 @@ import extend from 'lodash/extend'; import { PluginMeta, AppPlugin, Button } from '@grafana/ui'; -import { AngularComponent, getAngularLoader } from 
'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { getBackendSrv } from 'app/core/services/backend_srv'; import { ButtonVariant } from '@grafana/ui/src/components/Button/AbstractButton'; import { css } from 'emotion'; diff --git a/public/app/features/teams/state/actions.ts b/public/app/features/teams/state/actions.ts index e2582839233f..cd369b86e922 100644 --- a/public/app/features/teams/state/actions.ts +++ b/public/app/features/teams/state/actions.ts @@ -1,5 +1,5 @@ import { ThunkAction } from 'redux-thunk'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { StoreState, Team, TeamGroup, TeamMember } from 'app/types'; import { updateNavIndex, UpdateNavIndexAction } from 'app/core/actions'; import { buildNavModel } from './navModel'; diff --git a/public/app/features/users/state/actions.ts b/public/app/features/users/state/actions.ts index 5c50aa290965..3d69e6638596 100644 --- a/public/app/features/users/state/actions.ts +++ b/public/app/features/users/state/actions.ts @@ -1,6 +1,6 @@ import { ThunkAction } from 'redux-thunk'; import { StoreState } from '../../../types'; -import { getBackendSrv } from '../../../core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { Invitee, OrgUser } from 'app/types'; export enum ActionTypes { diff --git a/public/app/plugins/app/example-app/ExampleRootPage.tsx b/public/app/plugins/app/example-app/ExampleRootPage.tsx index 488d3b511a11..565a9fa37d45 100644 --- a/public/app/plugins/app/example-app/ExampleRootPage.tsx +++ b/public/app/plugins/app/example-app/ExampleRootPage.tsx @@ -10,7 +10,7 @@ const TAB_ID_A = 'A'; const TAB_ID_B = 'B'; const TAB_ID_C = 'C'; -export class ExampleRootPage extends PureComponent { +export class ExampleRootPage extends PureComponent { constructor(props: Props) { super(props); } @@ -79,7 +79,7 @@ export class ExampleRootPage extends 
PureComponent { } render() { - const { path, query } = this.props; + const { path, query, meta } = this.props; return (
@@ -96,6 +96,7 @@ export class ExampleRootPage extends PureComponent { ZZZ +
{JSON.stringify(meta.jsonData)}
); } diff --git a/public/app/plugins/app/example-app/module.ts b/public/app/plugins/app/example-app/module.ts index f82f7faec08b..8b7ea7b42f47 100644 --- a/public/app/plugins/app/example-app/module.ts +++ b/public/app/plugins/app/example-app/module.ts @@ -5,6 +5,7 @@ import { AppPlugin } from '@grafana/ui'; import { ExamplePage1 } from './config/ExamplePage1'; import { ExamplePage2 } from './config/ExamplePage2'; import { ExampleRootPage } from './ExampleRootPage'; +import { ExampleAppSettings } from './types'; // Legacy exports just for testing export { @@ -12,7 +13,7 @@ export { AngularExamplePageCtrl, // Must match `pages.component` in plugin.json }; -export const plugin = new AppPlugin() +export const plugin = new AppPlugin() .setRootPage(ExampleRootPage) .addConfigPage({ title: 'Page 1', diff --git a/public/app/plugins/app/example-app/types.ts b/public/app/plugins/app/example-app/types.ts new file mode 100644 index 000000000000..c3c5bad5e767 --- /dev/null +++ b/public/app/plugins/app/example-app/types.ts @@ -0,0 +1,4 @@ +export interface ExampleAppSettings { + customText?: string; + customCheckbox?: boolean; +} diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index bd2bd67248ca..5a92122f221c 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -149,12 +149,14 @@ export default class CloudWatchDatasource extends DataSourceApi if (res.results) { for (const query of request.queries) { const queryRes = res.results[query.refId]; - for (const series of queryRes.series) { - const s = { target: series.name, datapoints: series.points } as any; - if (queryRes.meta.unit) { - s.unit = queryRes.meta.unit; + if (queryRes) { + for (const series of queryRes.series) { + const s = { target: series.name, datapoints: series.points } as any; + if (queryRes.meta.unit) { + s.unit = queryRes.meta.unit; + } + data.push(s); } - 
data.push(s); } } } diff --git a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html index 1b252e1dda3e..ce272bb4d64a 100644 --- a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html +++ b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html @@ -1,8 +1,18 @@ +
+
+ + +
+ +
+
+
+
+
-
diff --git a/public/app/plugins/datasource/cloudwatch/plugin.json b/public/app/plugins/datasource/cloudwatch/plugin.json index 212bb20a059f..2e71355975ce 100644 --- a/public/app/plugins/datasource/cloudwatch/plugin.json +++ b/public/app/plugins/datasource/cloudwatch/plugin.json @@ -4,6 +4,7 @@ "id": "cloudwatch", "category": "cloud", + "hiddenQueries": true, "metrics": true, "alerting": true, "annotations": true, diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts index 5bf7dfb19eb5..172aa5ee077a 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts @@ -649,6 +649,16 @@ export const grafanaMacros = [ display: '$__timeFilter()', hint: 'Macro that uses the selected timerange in Grafana to filter the query.', }, + { + text: '$__timeTo', + display: '$__timeTo()', + hint: 'Returns the From datetime from the Grafana picker. Example: datetime(2018-06-05T20:09:58.907Z).', + }, + { + text: '$__timeFrom', + display: '$__timeFrom()', + hint: 'Returns the From datetime from the Grafana picker. 
Example: datetime(2018-06-05T18:09:58.907Z).', + }, { text: '$__escapeMulti', display: '$__escapeMulti()', diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts index fab268a34401..186c78743f8c 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts @@ -90,27 +90,27 @@ describe('LogAnalyticsDatasource', () => { }); }); - describe('when using $__from and $__to is in the query and range is until now', () => { + describe('when using $__timeFrom and $__timeTo is in the query and range is until now', () => { beforeEach(() => { - builder.rawQueryString = 'query=Tablename | where myTime >= $__from and myTime <= $__to'; + builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()'; }); - it('should replace $__from and $__to with a datetime and the now() function', () => { + it('should replace $__timeFrom and $__timeTo with a datetime and the now() function', () => { const query = builder.generate().uriString; expect(query).toContain('where%20myTime%20%3E%3D%20datetime('); - expect(query).toContain('myTime%20%3C%3D%20now()'); + expect(query).toContain('myTime%20%3C%3D%20datetime('); }); }); - describe('when using $__from and $__to is in the query and range is a specific interval', () => { + describe('when using $__timeFrom and $__timeTo is in the query and range is a specific interval', () => { beforeEach(() => { - builder.rawQueryString = 'query=Tablename | where myTime >= $__from and myTime <= $__to'; + builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()'; builder.options.range.to = dateTime().subtract(1, 'hour'); builder.options.rangeRaw.to = 
'now-1h'; }); - it('should replace $__from and $__to with datetimes', () => { + it('should replace $__timeFrom and $__timeTo with datetimes', () => { const query = builder.generate().uriString; expect(query).toContain('where%20myTime%20%3E%3D%20datetime('); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts index afb64da8f4c6..ad72c4eb2eb9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts @@ -21,12 +21,16 @@ export default class LogAnalyticsQuerystringBuilder { if (p1 === 'timeFilter') { return this.getTimeFilter(p2, this.options); } + if (p1 === 'timeFrom') { + return this.getFrom(this.options); + } + if (p1 === 'timeTo') { + return this.getUntil(this.options); + } return match; }); queryString = queryString.replace(/\$__interval/gi, this.options.interval); - queryString = queryString.replace(/\$__from/gi, this.getFrom(this.options)); - queryString = queryString.replace(/\$__to/gi, this.getUntil(this.options)); } const rawQuery = queryString; queryString = encodeURIComponent(queryString); @@ -44,7 +48,10 @@ export default class LogAnalyticsQuerystringBuilder { getUntil(options) { if (options.rangeRaw.to === 'now') { - return 'now()'; + const now = Date.now(); + return `datetime(${dateTime(now) + .startOf('minute') + .toISOString()})`; } else { const until = options.range.to; return `datetime(${dateTime(until) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html index a5b2b2adc5be..7a855a10b44b 100644 --- 
a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html @@ -67,8 +67,8 @@ - $__timeFilter(datetimeColumn) -> datetimeColumn ≥ datetime(2018-06-05T18:09:58.907Z) and datetimeColumn ≤ datetime(2018-06-05T20:09:58.907Z) Or build your own conditionals using these built-in variables which just return the values: - - $__from -> datetime(2018-06-05T18:09:58.907Z) - - $__to -> datetime(2018-06-05T20:09:58.907Z) + - $__timeFrom -> datetime(2018-06-05T18:09:58.907Z) + - $__timeTo -> datetime(2018-06-05T20:09:58.907Z) - $__interval -> 5m
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html index 6761502dd2a2..1c2b14f366ed 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html @@ -10,7 +10,7 @@
+ get-options="ctrl.getSubscriptions()" on-change="ctrl.onSubscriptionChange()" css-class="min-width-12">
@@ -189,13 +189,13 @@ If using the All option, then check the Include All Option checkbox and in the Custom all value field type in: all. If All is chosen -> 1 == 1 Or build your own conditionals using these built-in variables which just return the values: - - $__from -> datetime(2018-06-05T18:09:58.907Z) - - $__to -> datetime(2018-06-05T20:09:58.907Z) + - $__timeFrom -> datetime(2018-06-05T18:09:58.907Z) + - $__timeTo -> datetime(2018-06-05T20:09:58.907Z) - $__interval -> 5m Examples: - ¡ where $__timeFilter - - | where TimeGenerated ≥ $__from and TimeGenerated ≤ $__to + - | where TimeGenerated ≥ $__timeFrom and TimeGenerated ≤ $__timeTo - | summarize count() by Category, bin(TimeGenerated, $__interval)
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.test.ts index 015ba0df9f60..cbdd3f54aba9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.test.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.test.ts @@ -189,11 +189,19 @@ describe('AzureMonitorQueryCtrl', () => { }; beforeEach(() => { + queryCtrl.target.subscription = 'sub1'; queryCtrl.target.azureMonitor.resourceGroup = 'test'; queryCtrl.target.azureMonitor.metricDefinition = 'Microsoft.Compute/virtualMachines'; queryCtrl.target.azureMonitor.resourceName = 'test'; queryCtrl.target.azureMonitor.metricName = 'Percentage CPU'; - queryCtrl.datasource.getMetricMetadata = function(resourceGroup, metricDefinition, resourceName, metricName) { + queryCtrl.datasource.getMetricMetadata = function( + subscription, + resourceGroup, + metricDefinition, + resourceName, + metricName + ) { + expect(subscription).toBe('sub1'); expect(resourceGroup).toBe('test'); expect(metricDefinition).toBe('Microsoft.Compute/virtualMachines'); expect(resourceName).toBe('test'); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts index 3d896f27a0a1..9fc12e9b9169 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts @@ -110,6 +110,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { this.migrateTimeGrains(); + this.migrateToFromTimes(); + this.panelCtrl.events.on('data-received', this.onDataReceived.bind(this), $scope); this.panelCtrl.events.on('data-error', this.onDataError.bind(this), $scope); this.resultFormats = [{ text: 'Time series', value: 'time_series' }, { text: 'Table', value: 'table' }]; @@ -171,6 +173,11 @@ 
export class AzureMonitorQueryCtrl extends QueryCtrl { } } + migrateToFromTimes() { + this.target.azureLogAnalytics.query = this.target.azureLogAnalytics.query.replace(/\$__from\s/gi, '$__timeFrom() '); + this.target.azureLogAnalytics.query = this.target.azureLogAnalytics.query.replace(/\$__to\s/gi, '$__timeTo() '); + } + replace(variable: string) { return this.templateSrv.replace(variable, this.panelCtrl.panel.scopedVars); } @@ -197,6 +204,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { if (!this.target.subscription && this.subscriptions.length > 0) { this.target.subscription = this.subscriptions[0].value; } + + return this.subscriptions; }); } @@ -204,6 +213,18 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { if (this.target.queryType === 'Azure Log Analytics') { return this.getWorkspaces(); } + + if (this.target.queryType === 'Azure Monitor') { + this.target.azureMonitor.resourceGroup = this.defaultDropdownValue; + this.target.azureMonitor.metricDefinition = this.defaultDropdownValue; + this.target.azureMonitor.resourceName = this.defaultDropdownValue; + this.target.azureMonitor.metricName = this.defaultDropdownValue; + this.target.azureMonitor.aggregation = ''; + this.target.azureMonitor.timeGrains = []; + this.target.azureMonitor.timeGrain = ''; + this.target.azureMonitor.dimensions = []; + this.target.azureMonitor.dimension = ''; + } } /* Azure Monitor Section */ @@ -282,6 +303,9 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { this.target.azureMonitor.metricDefinition = this.defaultDropdownValue; this.target.azureMonitor.resourceName = this.defaultDropdownValue; this.target.azureMonitor.metricName = this.defaultDropdownValue; + this.target.azureMonitor.aggregation = ''; + this.target.azureMonitor.timeGrains = []; + this.target.azureMonitor.timeGrain = ''; this.target.azureMonitor.dimensions = []; this.target.azureMonitor.dimension = ''; } @@ -289,12 +313,18 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { 
onMetricDefinitionChange() { this.target.azureMonitor.resourceName = this.defaultDropdownValue; this.target.azureMonitor.metricName = this.defaultDropdownValue; + this.target.azureMonitor.aggregation = ''; + this.target.azureMonitor.timeGrains = []; + this.target.azureMonitor.timeGrain = ''; this.target.azureMonitor.dimensions = []; this.target.azureMonitor.dimension = ''; } onResourceNameChange() { this.target.azureMonitor.metricName = this.defaultDropdownValue; + this.target.azureMonitor.aggregation = ''; + this.target.azureMonitor.timeGrains = []; + this.target.azureMonitor.timeGrain = ''; this.target.azureMonitor.dimensions = []; this.target.azureMonitor.dimension = ''; } @@ -306,6 +336,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { return this.datasource .getMetricMetadata( + this.replace(this.target.subscription), this.replace(this.target.azureMonitor.resourceGroup), this.replace(this.target.azureMonitor.metricDefinition), this.replace(this.target.azureMonitor.resourceName), @@ -315,6 +346,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { this.target.azureMonitor.aggOptions = metadata.supportedAggTypes || [metadata.primaryAggType]; this.target.azureMonitor.aggregation = metadata.primaryAggType; this.target.azureMonitor.timeGrains = [{ text: 'auto', value: 'auto' }].concat(metadata.supportedTimeGrains); + this.target.azureMonitor.timeGrain = 'auto'; this.target.azureMonitor.dimensions = metadata.dimensions; if (metadata.dimensions.length > 0) { diff --git a/public/app/plugins/datasource/grafana/partials/annotations.editor.html b/public/app/plugins/datasource/grafana/partials/annotations.editor.html index e5a67d6a7dc7..c1164f7f8c74 100644 --- a/public/app/plugins/datasource/grafana/partials/annotations.editor.html +++ b/public/app/plugins/datasource/grafana/partials/annotations.editor.html @@ -7,7 +7,7 @@
  • Dashboard: This will fetch annotation and alert state changes for whole dashboard and show them only on the event's originating panel.
  • -
  • All: This will fetch any annotation events that match the tags filter.
  • +
  • Tags: This will fetch any annotation events that match the tags filter.
@@ -32,10 +32,19 @@ label-class="width-9" checked="ctrl.annotation.matchAny" on-change="ctrl.refresh()" - tooltip="By default Grafana will only show annotation that matches all tags in the query. Enabling this will make Grafana return any annotation with the tags you specify."> + tooltip="By default Grafana only shows annotations that match all tags in the query. Enabling this returns annotations that match any of the tags in the query.">
- Tags + + Tags + + A tag entered here as 'foo' will match +
    +
  • annotation tags 'foo'
  • +
  • annotation key-value tags formatted as 'foo:bar'
  • +
+
+
diff --git a/public/app/plugins/datasource/graphite/plugin.json b/public/app/plugins/datasource/graphite/plugin.json index f66c7fb202f6..a1cc0335b68e 100644 --- a/public/app/plugins/datasource/graphite/plugin.json +++ b/public/app/plugins/datasource/graphite/plugin.json @@ -6,10 +6,10 @@ "includes": [{ "type": "dashboard", "name": "Graphite Carbon Metrics", "path": "dashboards/carbon_metrics.json" }], + "hiddenQueries": true, "metrics": true, "alerting": true, "annotations": true, - "tables": false, "queryOptions": { "maxDataPoints": true, diff --git a/public/app/plugins/datasource/influxdb/influx_query.ts b/public/app/plugins/datasource/influxdb/influx_query.ts index 3ee9d703c54b..a655705ffcb6 100644 --- a/public/app/plugins/datasource/influxdb/influx_query.ts +++ b/public/app/plugins/datasource/influxdb/influx_query.ts @@ -146,7 +146,7 @@ export default class InfluxQuery { value = this.templateSrv.replace(value, this.scopedVars); } if (operator !== '>' && operator !== '<') { - value = "'" + value.replace(/\\/g, '\\\\') + "'"; + value = "'" + value.replace(/\\/g, '\\\\').replace(/\'/g, "\\'") + "'"; } } else if (interpolate) { value = this.templateSrv.replace(value, this.scopedVars, 'regex'); diff --git a/public/app/plugins/datasource/influxdb/plugin.json b/public/app/plugins/datasource/influxdb/plugin.json index fa660ee12329..785706dfc401 100644 --- a/public/app/plugins/datasource/influxdb/plugin.json +++ b/public/app/plugins/datasource/influxdb/plugin.json @@ -8,7 +8,6 @@ "metrics": true, "annotations": true, "alerting": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts index f8e65c21f2d2..ad76ea5309c5 100644 --- a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts +++ b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts @@ -139,6 +139,26 @@ describe('InfluxQuery', () => { }); 
}); + describe('field name with single quote should be escaped and', () => { + it('should generate correct query', () => { + const query = new InfluxQuery( + { + measurement: 'cpu', + groupBy: [{ type: 'time', params: ['auto'] }], + tags: [{ key: 'name', value: "Let's encrypt." }, { key: 'hostname', value: 'server2', condition: 'OR' }], + }, + templateSrv, + {} + ); + + const queryText = query.render(); + expect(queryText).toBe( + 'SELECT mean("value") FROM "cpu" WHERE ("name" = \'Let\\\'s encrypt.\' OR "hostname" = \'server2\') AND ' + + '$timeFilter GROUP BY time($__interval)' + ); + }); + }); + describe('query with value condition', () => { it('should not quote value', () => { const query = new InfluxQuery( diff --git a/public/app/plugins/datasource/input/plugin.json b/public/app/plugins/datasource/input/plugin.json index 91782a348065..dbfa0ad489a4 100644 --- a/public/app/plugins/datasource/input/plugin.json +++ b/public/app/plugins/datasource/input/plugin.json @@ -8,7 +8,6 @@ "alerting": false, "annotations": false, "logs": false, - "explore": false, "info": { "description": "Data source that supports manual table & CSV input", diff --git a/public/app/plugins/datasource/loki/datasource.ts b/public/app/plugins/datasource/loki/datasource.ts index 04f846584911..b689d02ba135 100644 --- a/public/app/plugins/datasource/loki/datasource.ts +++ b/public/app/plugins/datasource/loki/datasource.ts @@ -1,5 +1,8 @@ // Libraries import _ from 'lodash'; +import { Subscription, of } from 'rxjs'; +import { webSocket } from 'rxjs/webSocket'; +import { catchError, map } from 'rxjs/operators'; // Services & Utils import * as dateMath from '@grafana/ui/src/utils/datemath'; @@ -16,11 +19,15 @@ import { DataSourceApi, DataSourceInstanceSettings, DataQueryError, -} from '@grafana/ui/src/types'; + LogRowModel, + DataStreamObserver, + LoadingState, + DataStreamState, +} from '@grafana/ui'; import { LokiQuery, LokiOptions } from './types'; import { BackendSrv } from 
'app/core/services/backend_srv'; import { TemplateSrv } from 'app/features/templating/template_srv'; -import { safeStringifyValue } from 'app/core/utils/explore'; +import { safeStringifyValue, convertToWebSocketUrl } from 'app/core/utils/explore'; export const DEFAULT_MAX_LINES = 1000; @@ -40,7 +47,13 @@ function serializeParams(data: any) { .join('&'); } +interface LokiContextQueryOptions { + direction?: 'BACKWARD' | 'FORWARD'; + limit?: number; +} + export class LokiDatasource extends DataSourceApi { + private subscriptions: { [key: string]: Subscription } = null; languageProvider: LanguageProvider; maxLines: number; @@ -54,6 +67,7 @@ export class LokiDatasource extends DataSourceApi { this.languageProvider = new LanguageProvider(this); const settingsData = instanceSettings.jsonData || {}; this.maxLines = parseInt(settingsData.maxLines, 10) || DEFAULT_MAX_LINES; + this.subscriptions = {}; } _request(apiUrl: string, data?, options?: any) { @@ -67,6 +81,21 @@ export class LokiDatasource extends DataSourceApi { return this.backendSrv.datasourceRequest(req); } + prepareLiveTarget(target: LokiQuery, options: DataQueryRequest) { + const interpolated = this.templateSrv.replace(target.expr); + const { query, regexp } = parseQuery(interpolated); + const refId = target.refId; + const baseUrl = this.instanceSettings.url; + const params = serializeParams({ query, regexp }); + const url = convertToWebSocketUrl(`${baseUrl}/api/prom/tail?${params}`); + return { + query, + regexp, + url, + refId, + }; + } + prepareQueryTarget(target: LokiQuery, options: DataQueryRequest) { const interpolated = this.templateSrv.replace(target.expr); const { query, regexp } = parseQuery(interpolated); @@ -84,9 +113,106 @@ export class LokiDatasource extends DataSourceApi { }; } - async query(options: DataQueryRequest) { + unsubscribe = (refId: string) => { + const subscription = this.subscriptions[refId]; + if (subscription && !subscription.closed) { + subscription.unsubscribe(); + delete 
this.subscriptions[refId]; + } + }; + + processError = (err: any, target: any): DataQueryError => { + const error: DataQueryError = { + message: 'Unknown error during query transaction. Please check JS console logs.', + refId: target.refId, + }; + + if (err.data) { + if (typeof err.data === 'string') { + error.message = err.data; + } else if (err.data.error) { + error.message = safeStringifyValue(err.data.error); + } + } else if (err.message) { + error.message = err.message; + } else if (typeof err === 'string') { + error.message = err; + } + + error.status = err.status; + error.statusText = err.statusText; + + return error; + }; + + processResult = (data: any, target: any): SeriesData[] => { + const series: SeriesData[] = []; + + if (Object.keys(data).length === 0) { + return series; + } + + if (!data.streams) { + return [{ ...logStreamToSeriesData(data), refId: target.refId }]; + } + + for (const stream of data.streams || []) { + const seriesData = logStreamToSeriesData(stream); + seriesData.refId = target.refId; + seriesData.meta = { + searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)), + limit: this.maxLines, + }; + series.push(seriesData); + } + + return series; + }; + + runLiveQueries = (options: DataQueryRequest, observer?: DataStreamObserver) => { + const liveTargets = options.targets + .filter(target => target.expr && !target.hide && target.live) + .map(target => this.prepareLiveTarget(target, options)); + + for (const liveTarget of liveTargets) { + const subscription = webSocket(liveTarget.url) + .pipe( + map((results: any[]) => { + const delta = this.processResult(results, liveTarget); + const state: DataStreamState = { + key: `loki-${liveTarget.refId}`, + request: options, + state: LoadingState.Streaming, + delta, + unsubscribe: () => this.unsubscribe(liveTarget.refId), + }; + + return state; + }), + catchError(err => { + const error = this.processError(err, liveTarget); + const state: DataStreamState = { + key: 
`loki-${liveTarget.refId}`, + request: options, + state: LoadingState.Error, + error, + unsubscribe: () => this.unsubscribe(liveTarget.refId), + }; + + return of(state); + }) + ) + .subscribe({ + next: state => observer(state), + }); + + this.subscriptions[liveTarget.refId] = subscription; + } + }; + + runQueries = async (options: DataQueryRequest) => { const queryTargets = options.targets - .filter(target => target.expr && !target.hide) + .filter(target => target.expr && !target.hide && !target.live) .map(target => this.prepareQueryTarget(target, options)); if (queryTargets.length === 0) { @@ -99,53 +225,29 @@ export class LokiDatasource extends DataSourceApi { return err; } - const error: DataQueryError = { - message: 'Unknown error during query transaction. Please check JS console logs.', - refId: target.refId, - }; - - if (err.data) { - if (typeof err.data === 'string') { - error.message = err.data; - } else if (err.data.error) { - error.message = safeStringifyValue(err.data.error); - } - } else if (err.message) { - error.message = err.message; - } else if (typeof err === 'string') { - error.message = err; - } - - error.status = err.status; - error.statusText = err.statusText; - + const error: DataQueryError = this.processError(err, target); throw error; }) ); return Promise.all(queries).then((results: any[]) => { - const series: Array = []; + let series: SeriesData[] = []; for (let i = 0; i < results.length; i++) { const result = results[i]; if (result.data) { - const refId = queryTargets[i].refId; - for (const stream of result.data.streams || []) { - const seriesData = logStreamToSeriesData(stream); - seriesData.refId = refId; - seriesData.meta = { - searchWords: getHighlighterExpressionsFromQuery( - formatQuery(queryTargets[i].query, queryTargets[i].regexp) - ), - limit: this.maxLines, - }; - series.push(seriesData); - } + series = series.concat(this.processResult(result.data, queryTargets[i])); } } return { data: series }; }); + }; + + async query(options: 
DataQueryRequest, observer?: DataStreamObserver) { + this.runLiveQueries(options, observer); + + return this.runQueries(options); } async importQueries(queries: LokiQuery[], originMeta: PluginMeta): Promise { @@ -187,6 +289,72 @@ export class LokiDatasource extends DataSourceApi { return Math.ceil(date.valueOf() * 1e6); } + prepareLogRowContextQueryTarget = (row: LogRowModel, limit: number, direction: 'BACKWARD' | 'FORWARD') => { + const query = Object.keys(row.labels) + .map(label => { + return `${label}="${row.labels[label]}"`; + }) + .join(','); + const contextTimeBuffer = 2 * 60 * 60 * 1000 * 1e6; // 2h buffer + const timeEpochNs = row.timeEpochMs * 1e6; + + const commontTargetOptons = { + limit, + query: `{${query}}`, + direction, + }; + + if (direction === 'BACKWARD') { + return { + ...commontTargetOptons, + start: timeEpochNs - contextTimeBuffer, + end: timeEpochNs, + direction, + }; + } else { + return { + ...commontTargetOptons, + start: timeEpochNs, // TODO: We should add 1ns here for the original row not no be included in the result + end: timeEpochNs + contextTimeBuffer, + }; + } + }; + + getLogRowContext = async (row: LogRowModel, options?: LokiContextQueryOptions) => { + const target = this.prepareLogRowContextQueryTarget( + row, + (options && options.limit) || 10, + (options && options.direction) || 'BACKWARD' + ); + const series: SeriesData[] = []; + + try { + const result = await this._request('/api/prom/query', target); + if (result.data) { + for (const stream of result.data.streams || []) { + const seriesData = logStreamToSeriesData(stream); + series.push(seriesData); + } + } + if (options && options.direction === 'FORWARD') { + if (series[0] && series[0].rows) { + series[0].rows.reverse(); + } + } + + return { + data: series, + }; + } catch (e) { + const error: DataQueryError = { + message: 'Error during context query. 
Please check JS console logs.', + status: e.status, + statusText: e.statusText, + }; + throw error; + } + }; + testDatasource() { return this._request('/api/prom/label') .then(res => { diff --git a/public/app/plugins/datasource/loki/language_provider.ts b/public/app/plugins/datasource/loki/language_provider.ts index 64bf876f2c77..ff187bd88420 100644 --- a/public/app/plugins/datasource/loki/language_provider.ts +++ b/public/app/plugins/datasource/loki/language_provider.ts @@ -16,6 +16,7 @@ import { } from 'app/types/explore'; import { LokiQuery } from './types'; import { dateTime } from '@grafana/ui/src/utils/moment_wrapper'; +import { PromQuery } from '../prometheus/types'; const DEFAULT_KEYS = ['job', 'namespace']; const EMPTY_SELECTOR = '{}'; @@ -168,8 +169,9 @@ export default class LokiLanguageProvider extends LanguageProvider { return Promise.all( queries.map(async query => { const expr = await this.importPrometheusQuery(query.expr); + const { context, ...rest } = query as PromQuery; return { - ...query, + ...rest, expr, }; }) diff --git a/public/app/plugins/datasource/loki/plugin.json b/public/app/plugins/datasource/loki/plugin.json index cd14a7fe48ad..ca630b56bc73 100644 --- a/public/app/plugins/datasource/loki/plugin.json +++ b/public/app/plugins/datasource/loki/plugin.json @@ -8,8 +8,7 @@ "alerting": false, "annotations": false, "logs": true, - "explore": true, - "tables": false, + "streaming": true, "info": { "description": "Like Prometheus but for logs. 
OSS logging solution from Grafana Labs", diff --git a/public/app/plugins/datasource/loki/types.ts b/public/app/plugins/datasource/loki/types.ts index 4c973f8a79ed..e733c3b47cb6 100644 --- a/public/app/plugins/datasource/loki/types.ts +++ b/public/app/plugins/datasource/loki/types.ts @@ -2,6 +2,9 @@ import { DataQuery, Labels, DataSourceJsonData } from '@grafana/ui/src/types'; export interface LokiQuery extends DataQuery { expr: string; + live?: boolean; + query?: string; + regexp?: string; } export interface LokiOptions extends DataSourceJsonData { diff --git a/public/app/plugins/datasource/mssql/plugin.json b/public/app/plugins/datasource/mssql/plugin.json index b3269b91100e..ef280e9209ee 100644 --- a/public/app/plugins/datasource/mssql/plugin.json +++ b/public/app/plugins/datasource/mssql/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/mysql/plugin.json b/public/app/plugins/datasource/mysql/plugin.json index 49d1996332fa..be0714560927 100644 --- a/public/app/plugins/datasource/mysql/plugin.json +++ b/public/app/plugins/datasource/mysql/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/opentsdb/plugin.json b/public/app/plugins/datasource/opentsdb/plugin.json index e7cae327c5b5..a19916482369 100644 --- a/public/app/plugins/datasource/opentsdb/plugin.json +++ b/public/app/plugins/datasource/opentsdb/plugin.json @@ -8,7 +8,6 @@ "defaultMatchFormat": "pipe", "annotations": true, "alerting": true, - "tables": false, "info": { "description": "Open source time series database", diff --git a/public/app/plugins/datasource/postgres/plugin.json b/public/app/plugins/datasource/postgres/plugin.json index 994578a7f2c8..ce72d3b0f2f8 100644 --- a/public/app/plugins/datasource/postgres/plugin.json +++ 
b/public/app/plugins/datasource/postgres/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx index 14d03df6d388..c432e9d58b4b 100644 --- a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx +++ b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx @@ -223,7 +223,7 @@ class PromQueryField extends React.PureComponent { type: string; @@ -83,7 +87,7 @@ export class PrometheusDatasource extends DataSourceApi } } - _request(url, data?, options?: any) { + _request(url: string, data?: any, options?: any) { options = _.defaults(options || {}, { url: this.url + url, method: this.httpMethod, @@ -119,11 +123,11 @@ export class PrometheusDatasource extends DataSourceApi } // Use this for tab completion features, wont publish response to other components - metadataRequest(url) { + metadataRequest(url: string) { return this._request(url, null, { method: 'GET', silent: true }); } - interpolateQueryExpr(value, variable, defaultFormatFn) { + interpolateQueryExpr(value: any, variable: any, defaultFormatFn: any) { // if no multi or include all do not regexEscape if (!variable.multi && !variable.includeAll) { return prometheusRegularEscape(value); @@ -141,34 +145,132 @@ export class PrometheusDatasource extends DataSourceApi return this.templateSrv.variableExists(target.expr); } - query(options: DataQueryRequest): Promise<{ data: any }> { - const start = this.getPrometheusTime(options.range.from, false); - const end = this.getPrometheusTime(options.range.to, true); + processResult = (response: any, query: PromQueryRequest, target: PromQuery, responseListLength: number) => { + // Keeping original start/end for transformers + const transformerOptions = { + format: target.format, + step: query.step, + 
legendFormat: target.legendFormat, + start: query.start, + end: query.end, + query: query.expr, + responseListLength, + refId: target.refId, + valueWithRefId: target.valueWithRefId, + }; + const series = this.resultTransformer.transform(response, transformerOptions); - const queries = []; - const activeTargets = []; + return series; + }; - options = _.clone(options); + runObserverQueries = ( + options: DataQueryRequest, + observer: DataStreamObserver, + queries: PromQueryRequest[], + activeTargets: PromQuery[], + end: number + ) => { + for (let index = 0; index < queries.length; index++) { + const query = queries[index]; + const target = activeTargets[index]; + let observable: Observable = null; + + if (query.instant) { + observable = from(this.performInstantQuery(query, end)); + } else { + observable = from(this.performTimeSeriesQuery(query, query.start, query.end)); + } + + observable + .pipe( + single(), // unsubscribes automatically after first result + filter((response: any) => (response.cancelled ? 
false : true)), + map((response: any) => { + return this.processResult(response, query, target, queries.length); + }) + ) + .subscribe({ + next: series => { + if (query.instant) { + observer({ + key: `prometheus-${target.refId}`, + state: LoadingState.Loading, + request: options, + series: null, + delta: series, + unsubscribe: () => undefined, + }); + } else { + observer({ + key: `prometheus-${target.refId}`, + state: LoadingState.Done, + request: options, + series: null, + delta: series, + unsubscribe: () => undefined, + }); + } + }, + }); + } + }; + + prepareTargets = (options: DataQueryRequest, start: number, end: number) => { + const queries: PromQueryRequest[] = []; + const activeTargets: PromQuery[] = []; for (const target of options.targets) { if (!target.expr || target.hide) { continue; } + if (target.context === 'explore') { + target.format = 'time_series'; + target.instant = false; + const instantTarget: any = _.cloneDeep(target); + instantTarget.format = 'table'; + instantTarget.instant = true; + instantTarget.valueWithRefId = true; + delete instantTarget.maxDataPoints; + instantTarget.requestId += '_instant'; + instantTarget.refId += '_instant'; + activeTargets.push(instantTarget); + queries.push(this.createQuery(instantTarget, options, start, end)); + } + activeTargets.push(target); queries.push(this.createQuery(target, options, start, end)); } + return { + queries, + activeTargets, + }; + }; + + query(options: DataQueryRequest, observer?: DataStreamObserver): Promise<{ data: any }> { + const start = this.getPrometheusTime(options.range.from, false); + const end = this.getPrometheusTime(options.range.to, true); + + options = _.clone(options); + const { queries, activeTargets } = this.prepareTargets(options, start, end); + // No valid targets, return the empty result to save a round trip. 
if (_.isEmpty(queries)) { return this.$q.when({ data: [] }) as Promise<{ data: any }>; } + if (observer && options.targets.filter(target => target.context === 'explore').length === options.targets.length) { + // using observer to make the instant query return immediately + this.runObserverQueries(options, observer, queries, activeTargets, end); + return this.$q.when({ data: [] }) as Promise<{ data: any }>; + } + const allQueryPromise = _.map(queries, query => { - if (!query.instant) { - return this.performTimeSeriesQuery(query, query.start, query.end); - } else { + if (query.instant) { return this.performInstantQuery(query, end); + } else { + return this.performTimeSeriesQuery(query, query.start, query.end); } }); @@ -180,19 +282,10 @@ export class PrometheusDatasource extends DataSourceApi return; } - // Keeping original start/end for transformers - const transformerOptions = { - format: activeTargets[index].format, - step: queries[index].step, - legendFormat: activeTargets[index].legendFormat, - start: queries[index].start, - end: queries[index].end, - query: queries[index].expr, - responseListLength: responseList.length, - refId: activeTargets[index].refId, - valueWithRefId: activeTargets[index].valueWithRefId, - }; - const series = this.resultTransformer.transform(response, transformerOptions); + const target = activeTargets[index]; + const query = queries[index]; + const series = this.processResult(response, query, target, queries.length); + result = [...result, ...series]; }); @@ -202,10 +295,16 @@ export class PrometheusDatasource extends DataSourceApi return allPromise as Promise<{ data: any }>; } - createQuery(target, options, start, end) { - const query: any = { + createQuery(target: PromQuery, options: DataQueryRequest, start: number, end: number) { + const query: PromQueryRequest = { hinting: target.hinting, instant: target.instant, + step: 0, + expr: '', + requestId: '', + refId: '', + start: 0, + end: 0, }; const range = Math.ceil(end - start); @@ 
-398,7 +497,7 @@ export class PrometheusDatasource extends DataSourceApi }; // Unsetting min interval for accurate event resolution const minStep = '1s'; - const query = this.createQuery({ expr, interval: minStep }, queryOptions, start, end); + const query = this.createQuery({ expr, interval: minStep, refId: 'X' }, queryOptions, start, end); const self = this; return this.performTimeSeriesQuery(query, query.start, query.end).then(results => { diff --git a/public/app/plugins/datasource/prometheus/plugin.json b/public/app/plugins/datasource/prometheus/plugin.json index fb9ebbb52b10..ba1144549489 100644 --- a/public/app/plugins/datasource/prometheus/plugin.json +++ b/public/app/plugins/datasource/prometheus/plugin.json @@ -24,8 +24,6 @@ "metrics": true, "alerting": true, "annotations": true, - "explore": true, - "tables": true, "queryOptions": { "minInterval": true }, diff --git a/public/app/plugins/datasource/prometheus/specs/completer.test.ts b/public/app/plugins/datasource/prometheus/specs/completer.test.ts index 2580b87f6d7f..8a7b3b8c7c33 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.test.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.test.ts @@ -7,7 +7,7 @@ import { TemplateSrv } from 'app/features/templating/template_srv'; import { TimeSrv } from 'app/features/dashboard/services/TimeSrv'; import { IQService } from 'angular'; jest.mock('../datasource'); -jest.mock('app/core/services/backend_srv'); +jest.mock('@grafana/ui'); describe('Prometheus editor completer', () => { function getSessionStub(data) { diff --git a/public/app/plugins/datasource/prometheus/types.ts b/public/app/plugins/datasource/prometheus/types.ts index e83029df8356..a256f289cfe2 100644 --- a/public/app/plugins/datasource/prometheus/types.ts +++ b/public/app/plugins/datasource/prometheus/types.ts @@ -2,6 +2,14 @@ import { DataQuery, DataSourceJsonData } from '@grafana/ui/src/types'; export interface PromQuery extends DataQuery { expr: string; + 
context?: 'explore' | 'panel'; + format?: string; + instant?: boolean; + hinting?: boolean; + interval?: string; + intervalFactor?: number; + legendFormat?: string; + valueWithRefId?: boolean; } export interface PromOptions extends DataSourceJsonData { @@ -10,3 +18,10 @@ export interface PromOptions extends DataSourceJsonData { httpMethod: string; directUrl: string; } + +export interface PromQueryRequest extends PromQuery { + step?: number; + requestId?: string; + start: number; + end: number; +} diff --git a/public/app/plugins/datasource/stackdriver/components/Filter.tsx b/public/app/plugins/datasource/stackdriver/components/Filter.tsx index 6c63f1ed8913..08134789d3da 100644 --- a/public/app/plugins/datasource/stackdriver/components/Filter.tsx +++ b/public/app/plugins/datasource/stackdriver/components/Filter.tsx @@ -3,7 +3,7 @@ import _ from 'lodash'; import appEvents from 'app/core/app_events'; import { QueryMeta } from '../types'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { TemplateSrv } from 'app/features/templating/template_srv'; import StackdriverDatasource from '../datasource'; import '../query_filter_ctrl'; diff --git a/public/app/plugins/datasource/stackdriver/plugin.json b/public/app/plugins/datasource/stackdriver/plugin.json index 620a7b1c8cea..20cac315400a 100644 --- a/public/app/plugins/datasource/stackdriver/plugin.json +++ b/public/app/plugins/datasource/stackdriver/plugin.json @@ -7,7 +7,6 @@ "metrics": true, "alerting": true, "annotations": true, - "tables": false, "queryOptions": { "maxDataPoints": true, "cacheTimeout": true diff --git a/public/app/plugins/datasource/testdata/QueryEditor.tsx b/public/app/plugins/datasource/testdata/QueryEditor.tsx index f14d976ca384..324848400ffa 100644 --- a/public/app/plugins/datasource/testdata/QueryEditor.tsx +++ b/public/app/plugins/datasource/testdata/QueryEditor.tsx @@ -3,7 +3,7 @@ 
import React, { PureComponent } from 'react'; import _ from 'lodash'; // Services & Utils -import { getBackendSrv, BackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; // Components import { FormLabel, Select, SelectOptionItem } from '@grafana/ui'; @@ -21,7 +21,7 @@ interface State { type Props = QueryEditorProps; export class QueryEditor extends PureComponent { - backendSrv: BackendSrv = getBackendSrv(); + backendSrv = getBackendSrv(); state: State = { scenarioList: [], diff --git a/public/app/plugins/datasource/testdata/datasource.ts b/public/app/plugins/datasource/testdata/datasource.ts index 0f69028f7848..c3b706153b3c 100644 --- a/public/app/plugins/datasource/testdata/datasource.ts +++ b/public/app/plugins/datasource/testdata/datasource.ts @@ -32,10 +32,11 @@ export class TestDataDatasource extends DataSourceApi { scenarioId: item.scenarioId, intervalMs: options.intervalMs, maxDataPoints: options.maxDataPoints, + datasourceId: this.id, stringInput: item.stringInput, points: item.points, alias: item.alias, - datasourceId: this.id, + ...item, }; }); diff --git a/public/app/plugins/datasource/testdata/partials/query.editor.html b/public/app/plugins/datasource/testdata/partials/query.editor.html index e982ce9fb61f..e70abb4d96de 100644 --- a/public/app/plugins/datasource/testdata/partials/query.editor.html +++ b/public/app/plugins/datasource/testdata/partials/query.editor.html @@ -40,8 +40,8 @@
-
-
-
-
+ +
+
+ + +
+
+ +
+
diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index d0fa3c3183eb..3272323c3540 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -11,6 +11,8 @@ import TimeSeries from 'app/core/time_series2'; import { MetricsPanelCtrl } from 'app/plugins/sdk'; import { GrafanaThemeType, getValueFormat, getColorFromHexRgbOrName, isTableData } from '@grafana/ui'; +const BASE_FONT_SIZE = 38; + class SingleStatCtrl extends MetricsPanelCtrl { static templateUrl = 'module.html'; @@ -384,10 +386,11 @@ class SingleStatCtrl extends MetricsPanelCtrl { return valueString; } - function getSpan(className, fontSize, applyColoring, value) { + function getSpan(className, fontSizePercent, applyColoring, value) { value = $sanitize(templateSrv.replace(value, data.scopedVars)); value = applyColoring ? applyColoringThresholds(value) : value; - return '' + value + ''; + const pixelSize = (parseInt(fontSizePercent, 10) / 100) * BASE_FONT_SIZE; + return '' + value + ''; } function getBigValueHtml() { diff --git a/public/app/plugins/panel/table/specs/transformers.test.ts b/public/app/plugins/panel/table/specs/transformers.test.ts index 49926aa00a8f..c82c9dd6cd09 100644 --- a/public/app/plugins/panel/table/specs/transformers.test.ts +++ b/public/app/plugins/panel/table/specs/transformers.test.ts @@ -108,7 +108,6 @@ describe('when transforming time series table', () => { { type: 'foo', columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value' }], - rows: [[time, 'Label Value 1', 42]], }, ]; diff --git a/public/app/plugins/panel/table/transformers.ts b/public/app/plugins/panel/table/transformers.ts index dac63a935a09..ab3daf6c7be2 100644 --- a/public/app/plugins/panel/table/transformers.ts +++ b/public/app/plugins/panel/table/transformers.ts @@ -158,9 +158,8 @@ transformers['table'] = { if (!data || data.length === 0) { return; } - - const noTableIndex = _.findIndex(data, d 
=> d.type !== 'table'); - if (noTableIndex > -1) { + const noTableIndex = _.findIndex(data, d => 'columns' in d && 'rows' in d); + if (noTableIndex < 0) { throw { message: `Result of query #${String.fromCharCode( 65 + noTableIndex diff --git a/public/app/routes/GrafanaCtrl.ts b/public/app/routes/GrafanaCtrl.ts index a37222091d05..c3c5b71ca68e 100644 --- a/public/app/routes/GrafanaCtrl.ts +++ b/public/app/routes/GrafanaCtrl.ts @@ -5,15 +5,15 @@ import Drop from 'tether-drop'; // Utils and servies import { colors } from '@grafana/ui'; +import { setBackendSrv, BackendSrv, setDataSourceSrv } from '@grafana/runtime'; import config from 'app/core/config'; import coreModule from 'app/core/core_module'; import { profiler } from 'app/core/profiler'; import appEvents from 'app/core/app_events'; -import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { TimeSrv, setTimeSrv } from 'app/features/dashboard/services/TimeSrv'; -import { DatasourceSrv, setDatasourceSrv } from 'app/features/plugins/datasource_srv'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; import { KeybindingSrv, setKeybindingSrv } from 'app/core/services/keybindingSrv'; -import { AngularLoader, setAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularLoader, setAngularLoader } from '@grafana/runtime'; import { configureStore } from 'app/store/configureStore'; // Types @@ -37,7 +37,7 @@ export class GrafanaCtrl { // make angular loader service available to react components setAngularLoader(angularLoader); setBackendSrv(backendSrv); - setDatasourceSrv(datasourceSrv); + setDataSourceSrv(datasourceSrv); setTimeSrv(timeSrv); setKeybindingSrv(keybindingSrv); configureStore(); diff --git a/public/app/store/configureStore.ts b/public/app/store/configureStore.ts index 2638587e96d1..63d8eaaf718d 100644 --- a/public/app/store/configureStore.ts +++ b/public/app/store/configureStore.ts @@ -1,6 +1,7 @@ import { createStore, applyMiddleware, compose, 
combineReducers } from 'redux'; import thunk from 'redux-thunk'; -// import { createLogger } from 'redux-logger'; +import { combineEpics, createEpicMiddleware } from 'redux-observable'; +import { createLogger } from 'redux-logger'; import sharedReducers from 'app/core/reducers'; import alertingReducers from 'app/features/alerting/state/reducers'; import teamsReducers from 'app/features/teams/state/reducers'; @@ -14,6 +15,24 @@ import usersReducers from 'app/features/users/state/reducers'; import userReducers from 'app/features/profile/state/reducers'; import organizationReducers from 'app/features/org/state/reducers'; import { setStore } from './store'; +import { limitMessageRateEpic } from 'app/features/explore/state/epics/limitMessageRateEpic'; +import { stateSaveEpic } from 'app/features/explore/state/epics/stateSaveEpic'; +import { processQueryResultsEpic } from 'app/features/explore/state/epics/processQueryResultsEpic'; +import { processQueryErrorsEpic } from 'app/features/explore/state/epics/processQueryErrorsEpic'; +import { runQueriesEpic } from 'app/features/explore/state/epics/runQueriesEpic'; +import { runQueriesBatchEpic } from 'app/features/explore/state/epics/runQueriesBatchEpic'; +import { + DataSourceApi, + DataQueryResponse, + DataQuery, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, +} from '@grafana/ui'; +import { Observable } from 'rxjs'; +import { getQueryResponse } from 'app/core/utils/explore'; +import { StoreState } from 'app/types/store'; +import { toggleLogActionsMiddleware } from 'app/core/middlewares/application'; const rootReducers = { ...sharedReducers, @@ -34,15 +53,42 @@ export function addRootReducer(reducers) { Object.assign(rootReducers, ...reducers); } +export const rootEpic: any = combineEpics( + limitMessageRateEpic, + stateSaveEpic, + runQueriesEpic, + runQueriesBatchEpic, + processQueryResultsEpic, + processQueryErrorsEpic +); + +export interface EpicDependencies { + getQueryResponse: ( + datasourceInstance: 
DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver + ) => Observable; +} + +const dependencies: EpicDependencies = { + getQueryResponse, +}; + +const epicMiddleware = createEpicMiddleware({ dependencies }); + export function configureStore() { const composeEnhancers = (window as any).__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; - const rootReducer = combineReducers(rootReducers); + const logger = createLogger({ + predicate: (getState: () => StoreState) => { + return getState().application.logActions; + }, + }); + const storeEnhancers = + process.env.NODE_ENV !== 'production' + ? applyMiddleware(toggleLogActionsMiddleware, thunk, epicMiddleware, logger) + : applyMiddleware(thunk, epicMiddleware); - if (process.env.NODE_ENV !== 'production') { - // DEV builds we had the logger middleware - setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk)))); - } else { - setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk)))); - } + setStore(createStore(rootReducer, {}, composeEnhancers(storeEnhancers))); + epicMiddleware.run(rootEpic); } diff --git a/public/app/types/application.ts b/public/app/types/application.ts new file mode 100644 index 000000000000..d4562d68c448 --- /dev/null +++ b/public/app/types/application.ts @@ -0,0 +1,3 @@ +export interface ApplicationState { + logActions: boolean; +} diff --git a/public/app/types/explore.ts b/public/app/types/explore.ts index a828cbf9d3ad..98d137f1e7a5 100644 --- a/public/app/types/explore.ts +++ b/public/app/types/explore.ts @@ -3,7 +3,6 @@ import { Value } from 'slate'; import { RawTimeRange, DataQuery, - DataQueryResponseData, DataSourceSelectItem, DataSourceApi, QueryHint, @@ -11,10 +10,12 @@ import { LogLevel, TimeRange, DataQueryError, + LogsModel, + LogsDedupStrategy, + LoadingState, } from '@grafana/ui'; -import { Emitter, TimeSeries } from 'app/core/core'; -import { LogsModel, LogsDedupStrategy } from 'app/core/logs_model'; +import { Emitter } 
from 'app/core/core'; import TableModel from 'app/core/table_model'; export enum ExploreMode { @@ -179,6 +180,7 @@ export interface ExploreItemState { * Log query result to be displayed in the logs result viewer. */ logsResult?: LogsModel; + /** * Query intervals for graph queries to determine how many datapoints to return. * Needs to be updated when `datasourceInstance` or `containerWidth` is changed. @@ -204,10 +206,6 @@ export interface ExploreItemState { * True if graph result viewer is expanded. Query runs will contain graph queries. */ showingGraph: boolean; - /** - * True if logs result viewer is expanded. Query runs will contain logs queries. - */ - showingLogs: boolean; /** * True StartPage needs to be shown. Typically set to `false` once queries have been run. */ @@ -216,22 +214,8 @@ export interface ExploreItemState { * True if table result viewer is expanded. Query runs will contain table queries. */ showingTable: boolean; - /** - * True if `datasourceInstance` supports graph queries. - */ - supportsGraph: boolean | null; - /** - * True if `datasourceInstance` supports logs queries. - */ - supportsLogs: boolean | null; - /** - * True if `datasourceInstance` supports table queries. - */ - supportsTable: boolean | null; - graphIsLoading: boolean; - logIsLoading: boolean; - tableIsLoading: boolean; + loadingState: LoadingState; /** * Table model that combines all query table results into a single table. 
*/ @@ -262,9 +246,13 @@ export interface ExploreItemState { update: ExploreUpdateState; queryErrors: DataQueryError[]; + latency: number; supportedModes: ExploreMode[]; mode: ExploreMode; + + isLive: boolean; + urlReplaced: boolean; } export interface ExploreUpdateState { @@ -325,11 +313,8 @@ export interface QueryIntervals { export interface QueryOptions { interval: string; - format: string; - hinting?: boolean; - instant?: boolean; - valueWithRefId?: boolean; maxDataPoints?: number; + live?: boolean; } export interface QueryTransaction { @@ -341,23 +326,14 @@ export interface QueryTransaction { options: any; queries: DataQuery[]; result?: any; // Table model / Timeseries[] / Logs - resultType: ResultType; scanning?: boolean; } export type RangeScanner = () => RawTimeRange; -export type ResultGetter = ( - result: DataQueryResponseData, - transaction: QueryTransaction, - allTransactions: QueryTransaction[] -) => TimeSeries; - export interface TextMatch { text: string; start: number; length: number; end: number; } - -export type ResultType = 'Graph' | 'Logs' | 'Table'; diff --git a/public/app/types/store.ts b/public/app/types/store.ts index 975cd40ae71c..66a3db1a3cb9 100644 --- a/public/app/types/store.ts +++ b/public/app/types/store.ts @@ -13,6 +13,7 @@ import { OrganizationState } from './organization'; import { AppNotificationsState } from './appNotifications'; import { PluginsState } from './plugins'; import { NavIndex } from '@grafana/ui'; +import { ApplicationState } from './application'; export interface StoreState { navIndex: NavIndex; @@ -29,6 +30,7 @@ export interface StoreState { appNotifications: AppNotificationsState; user: UserState; plugins: PluginsState; + application: ApplicationState; } /* diff --git a/public/e2e-test/core/images.ts b/public/e2e-test/core/images.ts index eb4ca3538d23..2897ba8aa2df 100644 --- a/public/e2e-test/core/images.ts +++ b/public/e2e-test/core/images.ts @@ -23,8 +23,21 @@ export const compareScreenShots = async (fileName: 
string) => return; } - expect(screenShotFromTest.width).toEqual(screenShotFromTruth.width); - expect(screenShotFromTest.height).toEqual(screenShotFromTruth.height); + if (screenShotFromTest.width !== screenShotFromTruth.width) { + throw new Error( + `The screenshot:[${fileName}] taken during the test has a width:[${ + screenShotFromTest.width + }] that differs from the expected: [${screenShotFromTruth.width}].` + ); + } + + if (screenShotFromTest.height !== screenShotFromTruth.height) { + throw new Error( + `The screenshot:[${fileName}] taken during the test has a width:[${ + screenShotFromTest.height + }] that differs from the expected: [${screenShotFromTruth.height}].` + ); + } const diff = new PNG({ width: screenShotFromTest.width, height: screenShotFromTruth.height }); const numDiffPixels = pixelmatch( @@ -36,7 +49,27 @@ export const compareScreenShots = async (fileName: string) => { threshold: 0.1 } ); - expect(numDiffPixels).toBe(0); + if (numDiffPixels !== 0) { + const localMessage = `\nCompare the output from expected:[${constants.screenShotsTruthDir}] with outcome:[${ + constants.screenShotsOutputDir + }]`; + const circleCIMessage = '\nCheck the Artifacts tab in the CircleCi build output for the actual screenshots.'; + const checkMessage = process.env.CIRCLE_SHA1 ? 
circleCIMessage : localMessage; + let msg = `\nThe screenshot:[${ + constants.screenShotsOutputDir + }/${fileName}.png] taken during the test differs by:[${numDiffPixels}] pixels from the expected.`; + msg += '\n'; + msg += checkMessage; + msg += '\n'; + msg += '\n If the difference between expected and outcome is NOT acceptable then do the following:'; + msg += '\n - Check the code for changes that causes this difference, fix that and retry.'; + msg += '\n'; + msg += '\n If the difference between expected and outcome is acceptable then do the following:'; + msg += '\n - Replace the expected image with the outcome and retry.'; + msg += '\n'; + throw new Error(msg); + } + resolve(); }; diff --git a/public/e2e-test/install/install.ts b/public/e2e-test/install/install.ts index fa71acfb7851..61bfca0bfca1 100644 --- a/public/e2e-test/install/install.ts +++ b/public/e2e-test/install/install.ts @@ -11,7 +11,9 @@ export const downloadBrowserIfNeeded = async (): Promise => { console.log('Did not find any local revisions for browser, downloading latest this might take a while.'); await browserFetcher.download(constants.chromiumRevision, (downloaded, total) => { - console.log(`Downloaded ${downloaded}bytes of ${total}bytes.`); + if (downloaded === total) { + console.log('Chromium successfully downloaded'); + } }); }; diff --git a/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png b/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png index 8ea1294d4d6c..832163502300 100644 Binary files a/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png and b/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png differ diff --git a/public/sass/components/_add_data_source.scss b/public/sass/components/_add_data_source.scss index c14455d35c14..9e1bcb6ed778 100644 --- a/public/sass/components/_add_data_source.scss +++ b/public/sass/components/_add_data_source.scss @@ -77,6 +77,10 @@ } } +.add-datasource-item-actions__btn-icon { + margin-left: $space-sm; +} + 
.add-data-source-more { text-align: center; margin: $space-xl; diff --git a/public/sass/components/_buttons.scss b/public/sass/components/_buttons.scss index 1a9936bceab0..254ac5906abb 100644 --- a/public/sass/components/_buttons.scss +++ b/public/sass/components/_buttons.scss @@ -70,6 +70,7 @@ @include button-size($btn-padding-y-lg, $space-lg, $font-size-lg, $border-radius-sm); font-weight: normal; height: $height-lg; + .gicon { //font-size: 31px; margin-right: $space-sm; diff --git a/public/sass/components/_navbar.scss b/public/sass/components/_navbar.scss index 8c950f728590..67071bb61c86 100644 --- a/public/sass/components/_navbar.scss +++ b/public/sass/components/_navbar.scss @@ -67,11 +67,6 @@ min-height: $navbarHeight; line-height: $navbarHeight; - .fa-caret-down { - font-size: 60%; - padding-left: 6px; - } - .gicon { top: -2px; position: relative; @@ -85,17 +80,32 @@ display: inline-block; } } +} - &--folder { - color: $text-color-weak; - display: none; +.navbar-page-btn__folder { + color: $text-color-weak; + display: none; - @include media-breakpoint-up(lg) { - display: inline-block; - } + @include media-breakpoint-up(lg) { + display: inline-block; } } +// element is needed here to override font-awesome specificity +i.navbar-page-btn__folder-icon { + font-size: $font-size-sm; + color: $text-color-weak; + padding: 0 $space-sm; + position: relative; + top: -1px; +} + +// element is needed here to override font-awesome specificity +i.navbar-page-btn__search { + font-size: $font-size-xs; + padding: 0 $space-xs; +} + .navbar-buttons { // height: $navbarHeight; display: flex; diff --git a/public/sass/components/_panel_header.scss b/public/sass/components/_panel_header.scss index 40afc2cd3aae..d9882d213511 100644 --- a/public/sass/components/_panel_header.scss +++ b/public/sass/components/_panel_header.scss @@ -167,6 +167,15 @@ $panel-header-no-title-zindex: 1; } } +.panel-info-content { + a { + color: $white; + &:hover { + color: darken($white, 10%); + } + } +} + 
.panel-time-info { font-weight: $font-weight-semi-bold; float: right; diff --git a/public/sass/components/_panel_logs.scss b/public/sass/components/_panel_logs.scss index 8007e77a81d8..3c6ffd83a6fe 100644 --- a/public/sass/components/_panel_logs.scss +++ b/public/sass/components/_panel_logs.scss @@ -73,6 +73,7 @@ $column-horizontal-spacing: 10px; padding-right: $column-horizontal-spacing; border-top: 1px solid transparent; border-bottom: 1px solid transparent; + height: 100%; } &:hover { diff --git a/public/sass/components/_panel_singlestat.scss b/public/sass/components/_panel_singlestat.scss index 7854ac2093fa..df8cbd0c037b 100644 --- a/public/sass/components/_panel_singlestat.scss +++ b/public/sass/components/_panel_singlestat.scss @@ -6,17 +6,13 @@ } .singlestat-panel-value-container { - // line-height 0 is imporant here as the font-size is on this - // level but overriden one level deeper and but the line-height: is still - // based on the base font size on this level. Using line-height: 0 fixes that - line-height: 0; display: table-cell; vertical-align: middle; text-align: center; position: relative; z-index: 1; font-weight: $font-weight-semi-bold; - font-size: 38px; + line-height: 1; } // Helps diff --git a/public/sass/pages/_explore.scss b/public/sass/pages/_explore.scss index d77992038327..c06af5864c76 100644 --- a/public/sass/pages/_explore.scss +++ b/public/sass/pages/_explore.scss @@ -22,6 +22,11 @@ .ds-picker { min-width: 200px; max-width: 200px; + + .gf-form-select-box__img-value { + max-width: 150px; + overflow: hidden; + } } } @@ -31,20 +36,24 @@ flex-flow: row wrap; justify-content: flex-start; height: auto; - padding: 0 $dashboard-padding 0 50px; + padding: 0 $dashboard-padding; border-bottom: 1px solid #0000; transition-duration: 0.35s; transition-timing-function: ease-in-out; transition-property: box-shadow, border-bottom; - - @include media-breakpoint-up(md) { - padding-left: $dashboard-padding; - } } .explore-toolbar-item { position: relative; 
align-self: center; + + &:first-child { + padding-left: 34px; + + @include media-breakpoint-up(md) { + padding-left: 0; + } + } } .explore-toolbar.splitted { @@ -134,7 +143,13 @@ } .explore { + display: flex; flex: 1 1 auto; + flex-direction: column; +} + +.explore.explore-live { + flex-direction: column-reverse; } .explore + .explore { @@ -142,9 +157,16 @@ } .explore-container { + display: flex; + flex: 1 1 auto; + flex-direction: column; padding: $dashboard-padding; } +.explore-container.explore-live { + flex-direction: column-reverse; +} + .explore-wrapper { display: flex; @@ -164,7 +186,7 @@ .explore-panel__header { padding: $space-sm $space-md 0 $space-md; display: flex; - cursor: pointer; + cursor: inherit; transition: all 0.1s linear; } @@ -176,9 +198,20 @@ } .explore-panel__header-buttons { - margin-right: $space-sm; - font-size: $font-size-lg; - line-height: $font-size-h6; + display: none; +} + +.explore-panel--collapsible { + .explore-panel__header { + cursor: pointer; + } + + .explore-panel__header-buttons { + margin-right: $space-sm; + font-size: $font-size-lg; + line-height: $font-size-h6; + display: inherit; + } } .time-series-disclaimer { diff --git a/public/test/core/redux/epicTester.ts b/public/test/core/redux/epicTester.ts new file mode 100644 index 000000000000..88638f556c68 --- /dev/null +++ b/public/test/core/redux/epicTester.ts @@ -0,0 +1,91 @@ +import { Epic, ActionsObservable, StateObservable } from 'redux-observable'; +import { Subject } from 'rxjs'; +import { + DataSourceApi, + DataQuery, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, + DataQueryResponse, + DataStreamState, +} from '@grafana/ui'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { EpicDependencies } from 'app/store/configureStore'; + +export const epicTester = ( + epic: Epic, ActionOf, StoreState, EpicDependencies>, + state?: Partial +) => { + const resultingActions: Array> = []; + 
const action$ = new Subject>(); + const state$ = new Subject(); + const actionObservable$ = new ActionsObservable(action$); + const stateObservable$ = new StateObservable(state$, (state as StoreState) || ({} as StoreState)); + const queryResponse$ = new Subject(); + const observer$ = new Subject(); + const getQueryResponse = ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver + ) => { + if (observer) { + observer$.subscribe({ next: event => observer(event) }); + } + return queryResponse$; + }; + + const dependencies: EpicDependencies = { + getQueryResponse, + }; + + epic(actionObservable$, stateObservable$, dependencies).subscribe({ next: action => resultingActions.push(action) }); + + const whenActionIsDispatched = (action: ActionOf) => { + action$.next(action); + + return instance; + }; + + const whenQueryReceivesResponse = (response: DataQueryResponse) => { + queryResponse$.next(response); + + return instance; + }; + + const whenQueryThrowsError = (error: any) => { + queryResponse$.error(error); + + return instance; + }; + + const whenQueryObserverReceivesEvent = (event: DataStreamState) => { + observer$.next(event); + + return instance; + }; + + const thenResultingActionsEqual = (...actions: Array>) => { + expect(actions).toEqual(resultingActions); + + return instance; + }; + + const thenNoActionsWhereDispatched = () => { + expect(resultingActions).toEqual([]); + + return instance; + }; + + const instance = { + whenActionIsDispatched, + whenQueryReceivesResponse, + whenQueryThrowsError, + whenQueryObserverReceivesEvent, + thenResultingActionsEqual, + thenNoActionsWhereDispatched, + }; + + return instance; +}; diff --git a/public/test/mocks/mockExploreState.ts b/public/test/mocks/mockExploreState.ts new file mode 100644 index 000000000000..981f1fb2dbe4 --- /dev/null +++ b/public/test/mocks/mockExploreState.ts @@ -0,0 +1,86 @@ +import { DataSourceApi } from '@grafana/ui/src/types/datasource'; + +import { ExploreId, 
ExploreItemState, ExploreState } from 'app/types/explore'; +import { makeExploreItemState } from 'app/features/explore/state/reducers'; +import { StoreState } from 'app/types'; + +export const mockExploreState = (options: any = {}) => { + const isLive = options.isLive || false; + const history = []; + const eventBridge = { + emit: jest.fn(), + }; + const streaming = options.streaming || undefined; + const datasourceInterval = options.datasourceInterval || ''; + const refreshInterval = options.refreshInterval || ''; + const containerWidth = options.containerWidth || 1980; + const queries = options.queries || []; + const datasourceError = options.datasourceError || null; + const scanner = options.scanner || jest.fn(); + const scanning = options.scanning || false; + const datasourceId = options.datasourceId || '1337'; + const exploreId = ExploreId.left; + const datasourceInstance: DataSourceApi = options.datasourceInstance || { + id: 1337, + query: jest.fn(), + name: 'test', + testDatasource: jest.fn(), + meta: { + id: datasourceId, + streaming, + }, + interval: datasourceInterval, + }; + const urlReplaced = options.urlReplaced || false; + const left: ExploreItemState = options.left || { + ...makeExploreItemState(), + containerWidth, + datasourceError, + datasourceInstance, + eventBridge, + history, + isLive, + queries, + refreshInterval, + scanner, + scanning, + urlReplaced, + }; + const right: ExploreItemState = options.right || { + ...makeExploreItemState(), + containerWidth, + datasourceError, + datasourceInstance, + eventBridge, + history, + isLive, + queries, + refreshInterval, + scanner, + scanning, + urlReplaced, + }; + const split: boolean = options.split || false; + const explore: ExploreState = { + left, + right, + split, + }; + const state: Partial = { + explore, + }; + + return { + containerWidth, + datasourceId, + datasourceInstance, + datasourceInterval, + eventBridge, + exploreId, + history, + queries, + refreshInterval, + state, + scanner, + }; +}; 
diff --git a/scripts/backend-lint.sh b/scripts/backend-lint.sh index b447071efa0e..09b035bff6ee 100755 --- a/scripts/backend-lint.sh +++ b/scripts/backend-lint.sh @@ -27,6 +27,7 @@ exit_if_fail gometalinter --enable-gc --vendor --deadline 10m --disable-all \ exit_if_fail golangci-lint run --deadline 10m --disable-all \ --enable=deadcode\ --enable=gofmt\ + --enable=gosimple\ --enable=ineffassign\ --enable=structcheck\ --enable=unconvert\ @@ -35,4 +36,5 @@ exit_if_fail golangci-lint run --deadline 10m --disable-all \ exit_if_fail go vet ./pkg/... exit_if_fail make revive +exit_if_fail make revive-alerting exit_if_fail make gosec diff --git a/scripts/build/ci-msi-build/ci-msi-build-oss.sh b/scripts/build/ci-msi-build/ci-msi-build-oss.sh index 74ba4b001a3f..02f015dae234 100755 --- a/scripts/build/ci-msi-build/ci-msi-build-oss.sh +++ b/scripts/build/ci-msi-build/ci-msi-build-oss.sh @@ -24,5 +24,11 @@ python3 generator/build.py "$@" chmod a+x /tmp/scratch/*.msi echo "MSI: Copy to $WORKING_DIRECTORY/dist" cp /tmp/scratch/*.msi $WORKING_DIRECTORY/dist +echo "MSI: Generate SHA256" +MSI_FILE=`ls $WORKING_DIRECTORY/dist/*.msi` +SHA256SUM=`sha256sum $MSI_FILE | cut -f1 -d' '` +echo $SHA256SUM > $MSI_FILE.sha256 +echo "MSI: SHA256 file content:" +cat $MSI_FILE.sha256 echo "MSI: contents of $WORKING_DIRECTORY/dist" ls -al $WORKING_DIRECTORY/dist diff --git a/scripts/cli/tasks/cherrypick.ts b/scripts/cli/tasks/cherrypick.ts index 543b46a85869..ac92f223a7eb 100644 --- a/scripts/cli/tasks/cherrypick.ts +++ b/scripts/cli/tasks/cherrypick.ts @@ -7,6 +7,10 @@ const cherryPickRunner: TaskRunner = async () => { let client = axios.create({ baseURL: 'https://api.github.com/repos/grafana/grafana', timeout: 10000, + // auth: { + // username: '', + // password: '', + // }, }); const res = await client.get('/issues', { @@ -16,22 +20,32 @@ const cherryPickRunner: TaskRunner = async () => { }, }); - // sort by closed date + // sort by closed date ASC res.data.sort(function(a, b) { - 
return new Date(b.closed_at).getTime() - new Date(a.closed_at).getTime(); + return new Date(a.closed_at).getTime() - new Date(b.closed_at).getTime(); }); + let commands = ''; + + console.log('--------------------------------------------------------------------'); + console.log('Printing PRs with cherry-pick-needed, in ASC merge date order'); + console.log('--------------------------------------------------------------------'); + for (const item of res.data) { if (!item.milestone) { console.log(item.number + ' missing milestone!'); continue; } - console.log(`${item.title} (${item.number}) closed_at ${item.closed_at}`); - console.log(`\tURL: ${item.closed_at} ${item.html_url}`); const issueDetails = await client.get(item.pull_request.url); - console.log(`\tMerge sha: ${issueDetails.data.merge_commit_sha}`); + console.log(`* ${item.title}, (#${item.number}), merge-sha: ${issueDetails.data.merge_commit_sha}`); + commands += `git cherry-pick -x ${issueDetails.data.merge_commit_sha}\n`; } + + console.log('--------------------------------------------------------------------'); + console.log('Commands (in order of how they should be executed)'); + console.log('--------------------------------------------------------------------'); + console.log(commands); }; export const cherryPickTask = new Task(); diff --git a/scripts/cli/tasks/grafanaui.build.ts b/scripts/cli/tasks/grafanaui.build.ts index 6fce809bef91..1a48bb1a7243 100644 --- a/scripts/cli/tasks/grafanaui.build.ts +++ b/scripts/cli/tasks/grafanaui.build.ts @@ -7,7 +7,7 @@ import { Task, TaskRunner } from './task'; let distDir, cwd; -const clean = useSpinner('Cleaning', async () => await execa('npm', ['run', 'clean'])); +export const clean = useSpinner('Cleaning', async () => await execa('npm', ['run', 'clean'])); const compile = useSpinner('Compiling sources', () => execa('tsc', ['-p', './tsconfig.build.json'])); diff --git a/scripts/cli/tasks/grafanaui.release.ts b/scripts/cli/tasks/grafanaui.release.ts index 
f0e53a4e7bad..03bcdd0219e5 100644 --- a/scripts/cli/tasks/grafanaui.release.ts +++ b/scripts/cli/tasks/grafanaui.release.ts @@ -1,11 +1,11 @@ import execa from 'execa'; import { execTask } from '../utils/execTask'; -import { changeCwdToGrafanaUiDist, changeCwdToGrafanaUi } from '../utils/cwd'; +import { changeCwdToGrafanaUiDist, changeCwdToGrafanaUi, restoreCwd } from '../utils/cwd'; import semver from 'semver'; import inquirer from 'inquirer'; import chalk from 'chalk'; import { useSpinner } from '../utils/useSpinner'; -import { savePackage, buildTask } from './grafanaui.build'; +import { savePackage, buildTask, clean } from './grafanaui.build'; import { TaskRunner, Task } from './task'; type VersionBumpType = 'prerelease' | 'patch' | 'minor' | 'major'; @@ -81,12 +81,6 @@ const bumpVersion = (version: string) => const publishPackage = (name: string, version: string) => useSpinner(`Publishing ${name} @ ${version} to npm registry...`, async () => { changeCwdToGrafanaUiDist(); - console.log(chalk.yellowBright.bold(`\nReview dist package.json before proceeding!\n`)); - const { confirmed } = await promptConfirm('Are you ready to publish to npm?'); - - if (!confirmed) { - process.exit(); - } await execa('npm', ['publish', '--access', 'public']); })(); @@ -111,13 +105,18 @@ const releaseTaskRunner: TaskRunner = async ({ usePackageJsonVersion, createVersionCommit, }) => { - await runChecksAndTests(); + changeCwdToGrafanaUi(); + await clean(); // Clean previous build if exists + restoreCwd(); + if (publishToNpm) { // TODO: Ensure release branch // When need to update this when we star keeping @grafana/ui releases in sync with core await ensureMasterBranch(); } + runChecksAndTests(); + await execTask(buildTask)(); let releaseConfirmed = false; @@ -169,6 +168,13 @@ const releaseTaskRunner: TaskRunner = async ({ } if (publishToNpm) { + console.log(chalk.yellowBright.bold(`\nReview dist package.json before proceeding!\n`)); + const { confirmed } = await promptConfirm('Are you 
ready to publish to npm?'); + + if (!confirmed) { + process.exit(); + } + await publishPackage(pkg.name, nextVersion); console.log(chalk.green(`\nVersion ${nextVersion} of ${pkg.name} succesfully released!`)); console.log(chalk.yellow(`\nUpdated @grafana/ui/package.json with version bump created.`)); diff --git a/scripts/go/configs/revive.toml b/scripts/go/configs/revive.toml index 2d4410ee5489..a40486c31333 100644 --- a/scripts/go/configs/revive.toml +++ b/scripts/go/configs/revive.toml @@ -1,7 +1,7 @@ ignoreGeneratedHeader = false severity = "error" confidence = 0.8 -errorCode = 0 +errorCode = 1 [rule.context-as-argument] [rule.error-return] diff --git a/scripts/grunt/default_task.js b/scripts/grunt/default_task.js index 95a2522ccfc5..f910941d630d 100644 --- a/scripts/grunt/default_task.js +++ b/scripts/grunt/default_task.js @@ -34,7 +34,8 @@ module.exports = function(grunt) { grunt.registerTask('no-only-tests', function() { var files = grunt.file.expand( 'public/**/*@(_specs|.test).@(ts|js|tsx|jsx)', - 'packages/grafana-ui/**/*@(_specs|.test).@(ts|js|tsx|jsx)' + 'packages/grafana-ui/**/*@(_specs|.test).@(ts|js|tsx|jsx)', + 'packages/grafana-runtime/**/*@(_specs|.test).@(ts|js|tsx|jsx)' ); grepFiles(files, '.only(', 'found only statement in test: '); }); diff --git a/yarn.lock b/yarn.lock index 1710b7e87102..57a84a7d860d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2284,10 +2284,10 @@ resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-1.10.35.tgz#4e5c2b1e5b3bf0b863efb8c5e70081f52e6c9518" integrity sha512-SVtqEcudm7yjkTwoRA1gC6CNMhGDdMx4Pg8BPdiqI7bXXdCn1BPmtxgeWYQOgDxrq53/5YTlhq5ULxBEAlWIBg== -"@types/lodash@4.14.119", "@types/lodash@4.14.123": - version "4.14.119" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.119.tgz#be847e5f4bc3e35e46d041c394ead8b603ad8b39" - integrity sha512-Z3TNyBL8Vd/M9D9Ms2S3LmFq2sSMzahodD6rCS9V2N44HUMINb75jNkSuwAx7eo2ufqTdfOdtGQpNbieUjPQmw== +"@types/lodash@4.14.123": + version "4.14.123" + resolved 
"https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.123.tgz#39be5d211478c8dd3bdae98ee75bb7efe4abfe4d" + integrity sha512-pQvPkc4Nltyx7G1Ww45OjVqUsJP4UsZm+GWJpigXgkikZqJgRm4c48g027o6tdgubWHwFRF15iFd+Y4Pmqv6+Q== "@types/minimatch@*": version "3.0.3" @@ -3510,13 +3510,13 @@ aws4@^1.8.0: resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== -axios@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.18.0.tgz#32d53e4851efdc0a11993b6cd000789d70c05102" - integrity sha1-MtU+SFHv3AoRmTts0AB4nXDAUQI= +axios@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.0.tgz#8e09bff3d9122e133f7b8101c8fbdd00ed3d2ab8" + integrity sha512-1uvKqKQta3KBxIz14F2v06AEHZ/dIoeKfbTRkK1E5oqjDnuEerLmYTgJB5AiQZHJcljpg1TuRzdjDR06qNk0DQ== dependencies: - follow-redirects "^1.3.0" - is-buffer "^1.1.5" + follow-redirects "1.5.10" + is-buffer "^2.0.2" babel-code-frame@^6.22.0: version "6.26.0" @@ -4054,6 +4054,11 @@ boolbase@^1.0.0, boolbase@~1.0.0: resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= +bowser@^1.7.3: + version "1.9.4" + resolved "https://registry.yarnpkg.com/bowser/-/bowser-1.9.4.tgz#890c58a2813a9d3243704334fa81b96a5c150c9a" + integrity sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ== + boxen@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/boxen/-/boxen-1.3.0.tgz#55c6c39a8ba58d9c61ad22cd877532deb665a20b" @@ -4465,6 +4470,11 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" +caniuse-db@1.0.30000772: + version "1.0.30000772" + resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000772.tgz#51aae891768286eade4a3d8319ea76d6a01b512b" + integrity 
sha1-UarokXaChureSj2DGep21qAbUSs= + caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000929, caniuse-lite@^1.0.30000947, caniuse-lite@^1.0.30000957, caniuse-lite@^1.0.30000963: version "1.0.30000966" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000966.tgz#f3c6fefacfbfbfb981df6dfa68f2aae7bff41b64" @@ -5143,7 +5153,7 @@ copy-descriptor@^0.1.0: resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= -copy-to-clipboard@^3.0.8: +copy-to-clipboard@^3.0.8, copy-to-clipboard@^3.1.0: version "3.2.0" resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.2.0.tgz#d2724a3ccbfed89706fac8a894872c979ac74467" integrity sha512-eOZERzvCmxS8HWzugj4Uxl8OJxa7T2k1Gi0X5qavwydHIfuSHq2dTD09LOg/XyGq4Zpb5IsR/2OJ5lbOegz78w== @@ -5366,6 +5376,14 @@ css-declaration-sorter@^4.0.1: postcss "^7.0.1" timsort "^0.3.0" +css-in-js-utils@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/css-in-js-utils/-/css-in-js-utils-2.0.1.tgz#3b472b398787291b47cfe3e44fecfdd9e914ba99" + integrity sha512-PJF0SpJT+WdbVVt0AOYp9C8GnuruRlL/UFW7932nLWmFLQTaWEzTBQEx7/hn4BuV+WON75iAViSUJLiU3PKbpA== + dependencies: + hyphenate-style-name "^1.0.2" + isobject "^3.0.1" + css-loader@2.1.1, css-loader@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-2.1.1.tgz#d8254f72e412bb2238bb44dd674ffbef497333ea" @@ -5416,7 +5434,7 @@ css-tree@1.0.0-alpha.28: mdn-data "~1.1.0" source-map "^0.5.3" -css-tree@1.0.0-alpha.29: +css-tree@1.0.0-alpha.29, css-tree@^1.0.0-alpha.28: version "1.0.0-alpha.29" resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.29.tgz#3fa9d4ef3142cbd1c301e7664c1f352bd82f5a39" integrity sha512-sRNb1XydwkW9IOci6iB2xmy8IGCj6r/fr+JWitvJ2JxQRPzN3T4AGGVWCMlVmVwM1gtgALJRmGIlWv5ppnGGkg== @@ -5541,7 +5559,7 @@ cssstyle@^1.0.0: dependencies: cssom "0.3.x" -csstype@^2.2.0, csstype@^2.5.2, csstype@^2.5.7: 
+csstype@^2.2.0, csstype@^2.5.2, csstype@^2.5.5, csstype@^2.5.7: version "2.6.4" resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.4.tgz#d585a6062096e324e7187f80e04f92bd0f00e37f" integrity sha512-lAJUJP3M6HxFXbqtGRc0iZrdyeN+WzOWeY0q/VnFzI+kqVrYIzC7bWlKqCW7oCIdzoPkvfp82EVvrTlQ8zsWQg== @@ -6095,7 +6113,7 @@ debug@2.6.9, debug@^2.1.1, debug@^2.1.3, debug@^2.2.0, debug@^2.3.3, debug@^2.6. dependencies: ms "2.0.0" -debug@3.1.0: +debug@3.1.0, debug@=3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== @@ -6801,6 +6819,13 @@ error-ex@^1.2.0, error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" +error-stack-parser@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.0.2.tgz#4ae8dbaa2bf90a8b450707b9149dcabca135520d" + integrity sha512-E1fPutRDdIj/hohG0UpT5mayXNCxXP9d+snxFsPU9X0XgccOumKraa3juDMwTUyi7+Bu5+mCGagjg4IYeNbOdw== + dependencies: + stackframe "^1.0.4" + es-abstract@^1.10.0, es-abstract@^1.11.0, es-abstract@^1.12.0, es-abstract@^1.13.0, es-abstract@^1.4.3, es-abstract@^1.5.0, es-abstract@^1.5.1, es-abstract@^1.7.0, es-abstract@^1.9.0: version "1.13.0" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.13.0.tgz#ac86145fdd5099d8dd49558ccba2eaf9b88e24e9" @@ -7355,6 +7380,11 @@ fast-text-encoding@^1.0.0: resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz#3e5ce8293409cfaa7177a71b9ca84e1b1e6f25ef" integrity sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ== +fastest-stable-stringify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/fastest-stable-stringify/-/fastest-stable-stringify-1.0.1.tgz#9122d406d4c9d98bea644a6b6853d5874b87b028" + integrity sha1-kSLUBtTJ2YvqZEpraFPVh0uHsCg= + fastparse@^1.1.1: version "1.1.2" 
resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.2.tgz#91728c5a5942eced8531283c79441ee4122c35a9" @@ -7606,7 +7636,14 @@ focus-lock@^0.6.3: resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.6.3.tgz#ef0e82ebac0023f841039d60bf329725d6438028" integrity sha512-EU6ePgEauhWrzJEN5RtG1d1ayrWXhEnfzTjnieHj+jG9tNHDEhKTAnCn1TN3gs9h6XWCDH6cpeX1VXY/lzLwZg== -follow-redirects@^1.0.0, follow-redirects@^1.3.0: +follow-redirects@1.5.10: + version "1.5.10" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.10.tgz#7b7a9f9aea2fdff36786a94ff643ed07f4ff5e2a" + integrity sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ== + dependencies: + debug "=3.1.0" + +follow-redirects@^1.0.0: version "1.7.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.7.0.tgz#489ebc198dc0e7f64167bd23b03c4c19b5784c76" integrity sha512-m/pZQy4Gj287eNy94nivy5wchN3Kp+Q5WgUPNy5lJSZ3sgkVKSYV/ZChMAQVIgx1SqfZ2zBZtPA2YlXIWxxJOQ== @@ -8837,6 +8874,11 @@ husky@1.3.1: run-node "^1.0.0" slash "^2.0.0" +hyphenate-style-name@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.0.3.tgz#097bb7fa0b8f1a9cf0bd5c734cf95899981a9b48" + integrity sha512-EcuixamT82oplpoJ2XU4pDtKGWQ7b00CD9f1ug9IaQ3p1bkHMiKCZ9ut9QDI6qsa6cpUuB+A/I+zLtdNK4n2DQ== + iconv-lite@0.4, iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4, iconv-lite@~0.4.13: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" @@ -9024,6 +9066,14 @@ init-package-json@^1.10.3: validate-npm-package-license "^3.0.1" validate-npm-package-name "^3.0.0" +inline-style-prefixer@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/inline-style-prefixer/-/inline-style-prefixer-4.0.2.tgz#d390957d26f281255fe101da863158ac6eb60911" + integrity 
sha512-N8nVhwfYga9MiV9jWlwfdj1UDIaZlBFu4cJSJkIr7tZX7sHpHhGR5su1qdpW+7KPL8ISTvCIkcaFi/JdBknvPg== + dependencies: + bowser "^1.7.3" + css-in-js-utils "^2.0.0" + inquirer@6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-6.2.1.tgz#9943fc4882161bdb0b0c9276769c75b32dbfcd52" @@ -9209,7 +9259,7 @@ is-buffer@^1.0.2, is-buffer@^1.1.5: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== -is-buffer@^2.0.0: +is-buffer@^2.0.0, is-buffer@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.3.tgz#4ecf3fcf749cbd1e472689e109ac66261a25e725" integrity sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw== @@ -10100,10 +10150,10 @@ jest@24.8.0: import-local "^2.0.0" jest-cli "^24.8.0" -jquery@3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.0.tgz#8de513fa0fa4b2c7d2e48a530e26f0596936efdf" - integrity sha512-ggRCXln9zEqv6OqAGXFEcshF5dSBvCkzj6Gm2gzuR5fWawaX8t7cxKVkkygKODrDAzKdoYw3l/e3pm3vlT4IbQ== +jquery@3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.1.tgz#714f1f8d9dde4bdfa55764ba37ef214630d80ef2" + integrity sha512-36+AdBzCL+y6qjw5Tx7HgzeGCzC81MDDgaUP8ld2zhx58HdqXGoBd+tHdrBMiyjGQs0Hxs/MLZTu/eHNJJuWPw== js-base64@^2.1.8, js-base64@^2.1.9: version "2.5.1" @@ -11565,6 +11615,20 @@ nan@^2.10.0, nan@^2.12.1, nan@^2.6.2: resolved "https://registry.yarnpkg.com/nan/-/nan-2.13.2.tgz#f51dc7ae66ba7d5d55e1e6d4d8092e802c9aefe7" integrity sha512-TghvYc72wlMGMVMluVo9WRJc0mB8KxxF/gZ4YYFy7V2ZQX9l7rgbPg7vjS9mt6U5HXODVFVI2bOduCzwOMv/lw== +nano-css@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/nano-css/-/nano-css-5.1.0.tgz#03c2b4ea2edefd445ac0c0e0f2565ea62e2aa81a" + integrity 
sha512-08F1rBmp0JuAteOR/uk/c40q/+UxWr224m/ZCHjjgy8dhkFQptvNwj/408KYQc13PIV9aGvqmtUD49PqBB5Ppg== + dependencies: + css-tree "^1.0.0-alpha.28" + csstype "^2.5.5" + fastest-stable-stringify "^1.0.1" + inline-style-prefixer "^4.0.0" + rtl-css-js "^1.9.0" + sourcemap-codec "^1.4.1" + stacktrace-js "^2.0.0" + stylis "3.5.0" + nanomatch@^1.2.9: version "1.2.13" resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" @@ -14100,7 +14164,7 @@ react-error-overlay@^5.1.4: resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-5.1.5.tgz#884530fd055476c764eaa8ab13b8ecf1f57bbf2c" integrity sha512-O9JRum1Zq/qCPFH5qVEvDDrVun8Jv9vbHtZXCR1EuRj9sKg1xJTlHxBzU6AkCzpvxRLuiY4OKImy3cDLQ+UTdg== -react-fast-compare@^2.0.2: +react-fast-compare@^2.0.2, react-fast-compare@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-2.0.4.tgz#e84b4d455b0fec113e0402c329352715196f81f9" integrity sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw== @@ -14379,6 +14443,19 @@ react-transition-group@^2.2.1: prop-types "^15.6.2" react-lifecycles-compat "^3.0.4" +react-use@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/react-use/-/react-use-9.0.0.tgz#142bec53fa465db2a6e43c68a8c9ef2acc000592" + integrity sha512-jlXJneB96yl4VvAXDKyE6cmdIeWk0cO7Gomh870Qu0vXZ9YM2JjjR09E9vIPPPI2M27RWo2dZKXspv44Wxtoog== + dependencies: + copy-to-clipboard "^3.1.0" + nano-css "^5.1.0" + react-fast-compare "^2.0.4" + react-wait "^0.3.0" + screenfull "^4.1.0" + throttle-debounce "^2.0.1" + ts-easing "^0.2.0" + react-virtualized@9.21.0: version "9.21.0" resolved "https://registry.yarnpkg.com/react-virtualized/-/react-virtualized-9.21.0.tgz#8267c40ffb48db35b242a36dea85edcf280a6506" @@ -14391,6 +14468,11 @@ react-virtualized@9.21.0: prop-types "^15.6.0" react-lifecycles-compat "^3.0.4" +react-wait@^0.3.0: + version "0.3.0" + resolved 
"https://registry.yarnpkg.com/react-wait/-/react-wait-0.3.0.tgz#0cdd4d919012451a5bc3ab0a16d00c6fd9a8c10b" + integrity sha512-kB5x/kMKWcn0uVr9gBdNz21/oGbQwEQnF3P9p6E9yLfJ9DRcKS0fagbgYMFI0YFOoyKDj+2q6Rwax0kTYJF37g== + react-window@1.7.1: version "1.7.1" resolved "https://registry.yarnpkg.com/react-window/-/react-window-1.7.1.tgz#c1db640415b97b85bc0a1c66eb82dadabca39b86" @@ -14665,6 +14747,11 @@ redux-mock-store@1.5.3: dependencies: lodash.isplainobject "^4.0.6" +redux-observable@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/redux-observable/-/redux-observable-1.1.0.tgz#323a8fe53e89fdb519be2807b55f08e21c13e6f1" + integrity sha512-G0nxgmTZwTK3Z3KoQIL8VQu9n0YCUwEP3wc3zxKQ8zAZm+iYkoZvBqAnBJfLi4EsD1E64KR4s4jFH/dFXpV9Og== + redux-thunk@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/redux-thunk/-/redux-thunk-2.3.0.tgz#51c2c19a185ed5187aaa9a2d08b666d0d6467622" @@ -15206,6 +15293,13 @@ rsvp@^4.8.4: resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.4.tgz#b50e6b34583f3dd89329a2f23a8a2be072845911" integrity sha512-6FomvYPfs+Jy9TfXmBpBuMWNH94SgCsZmJKcanySzgNNP6LjWxBvyLTa9KaMfDDM5oxRfrKDB0r/qeRsLwnBfA== +rtl-css-js@^1.9.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/rtl-css-js/-/rtl-css-js-1.11.0.tgz#a7151930ef9d54656607d754ebb172ddfc9ef836" + integrity sha512-YnZ6jWxZxlWlcQAGF9vOmiF9bEmoQmSHE+wsrsiILkdK9HqiRPAIll4SY/QDzbvEu2lB2h62+hfg3TYzjnldbA== + dependencies: + "@babel/runtime" "^7.1.2" + run-async@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389" @@ -15400,6 +15494,11 @@ schema-utils@^1.0.0: ajv-errors "^1.0.0" ajv-keywords "^3.1.0" +screenfull@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/screenfull/-/screenfull-4.2.0.tgz#d5252a5a0f56504719abbed9ebbcd9208115da03" + integrity sha512-qpyI9XbwuMJElWRP5vTgxkFAl4k7HpyhIqBFOZEwX9QBXn0MAuRSpn7LOc6/4CeSwoz61oBu1VPV+2fbIWC+5Q== + scss-tokenizer@^0.2.3: version "0.2.3" 
resolved "https://registry.yarnpkg.com/scss-tokenizer/-/scss-tokenizer-0.2.3.tgz#8eb06db9a9723333824d3f5530641149847ce5d1" @@ -15978,7 +16077,7 @@ source-map@^0.7.2, source-map@^0.7.3: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== -sourcemap-codec@^1.4.4: +sourcemap-codec@^1.4.1, sourcemap-codec@^1.4.4: version "1.4.4" resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.4.tgz#c63ea927c029dd6bd9a2b7fa03b3fec02ad56e9f" integrity sha512-CYAPYdBu34781kLHkaW3m6b/uUSyMOC2R61gcYMWooeuaGtjof86ZA/8T+qVPPt7np1085CR9hmMGrySwEc8Xg== @@ -16093,6 +16192,13 @@ stable@^0.1.8, stable@~0.1.3, stable@~0.1.5: resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== +stack-generator@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/stack-generator/-/stack-generator-2.0.3.tgz#bb74385c67ffc4ccf3c4dee5831832d4e509c8a0" + integrity sha512-kdzGoqrnqsMxOEuXsXyQTmvWXZmG0f3Ql2GDx5NtmZs59sT2Bt9Vdyq0XdtxUi58q/+nxtbF9KOQ9HkV1QznGg== + dependencies: + stackframe "^1.0.4" + stack-parser@^0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/stack-parser/-/stack-parser-0.0.1.tgz#7d3b63a17887e9e2c2bf55dbd3318fe34a39d1e7" @@ -16103,6 +16209,28 @@ stack-utils@^1.0.1: resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.2.tgz#33eba3897788558bebfc2db059dc158ec36cebb8" integrity sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA== +stackframe@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.0.4.tgz#357b24a992f9427cba6b545d96a14ed2cbca187b" + integrity 
sha512-to7oADIniaYwS3MhtCa/sQhrxidCCQiF/qp4/m5iN3ipf0Y7Xlri0f6eG29r08aL7JYl8n32AF3Q5GYBZ7K8vw== + +stacktrace-gps@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/stacktrace-gps/-/stacktrace-gps-3.0.2.tgz#33f8baa4467323ab2bd1816efa279942ba431ccc" + integrity sha512-9o+nWhiz5wFnrB3hBHs2PTyYrS60M1vvpSzHxwxnIbtY2q9Nt51hZvhrG1+2AxD374ecwyS+IUwfkHRE/2zuGg== + dependencies: + source-map "0.5.6" + stackframe "^1.0.4" + +stacktrace-js@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/stacktrace-js/-/stacktrace-js-2.0.0.tgz#776ca646a95bc6c6b2b90776536a7fc72c6ddb58" + integrity sha1-d2ymRqlbxsayuQd2U2p/xyxt21g= + dependencies: + error-stack-parser "^2.0.1" + stack-generator "^2.0.1" + stacktrace-gps "^3.0.1" + staged-git-files@1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/staged-git-files/-/staged-git-files-1.1.2.tgz#4326d33886dc9ecfa29a6193bf511ba90a46454b" @@ -16410,6 +16538,11 @@ stylis-rule-sheet@^0.0.10: resolved "https://registry.yarnpkg.com/stylis-rule-sheet/-/stylis-rule-sheet-0.0.10.tgz#44e64a2b076643f4b52e5ff71efc04d8c3c4a430" integrity sha512-nTbZoaqoBnmK+ptANthb10ZRZOGC+EmTLLUxeYIuHNkEKcmKgXX1XWKkUBT2Ac4es3NybooPe0SmvKdhKJZAuw== +stylis@3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-3.5.0.tgz#016fa239663d77f868fef5b67cf201c4b7c701e1" + integrity sha512-pP7yXN6dwMzAR29Q0mBrabPCe0/mNO1MSr93bhay+hcZondvMMTpeGyd8nbhYJdyperNT2DRxONQuUGcJr5iPw== + stylis@^3.5.0: version "3.5.4" resolved "https://registry.yarnpkg.com/stylis/-/stylis-3.5.4.tgz#f665f25f5e299cf3d64654ab949a57c768b73fbe" @@ -16665,6 +16798,11 @@ throat@^4.0.0: resolved "https://registry.yarnpkg.com/throat/-/throat-4.1.0.tgz#89037cbc92c56ab18926e6ba4cbb200e15672a6a" integrity sha1-iQN8vJLFarGJJua6TLsgDhVnKmo= +throttle-debounce@^2.0.1: + version "2.1.0" + resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-2.1.0.tgz#257e648f0a56bd9e54fe0f132c4ab8611df4e1d5" + integrity 
sha512-AOvyNahXQuU7NN+VVvOOX+uW6FPaWdAOdRP5HfwYxAfCzXTFKRMoIMk+n+po318+ktcChx+F1Dd91G3YHeMKyg== + throttleit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-1.0.0.tgz#9e785836daf46743145a5984b6268d828528ac6c" @@ -16903,6 +17041,11 @@ tryor@~0.1.2: resolved "https://registry.yarnpkg.com/tryor/-/tryor-0.1.2.tgz#8145e4ca7caff40acde3ccf946e8b8bb75b4172b" integrity sha1-gUXkynyv9ArN48z5Rui4u3W0Fys= +ts-easing@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/ts-easing/-/ts-easing-0.2.0.tgz#c8a8a35025105566588d87dbda05dd7fbfa5a4ec" + integrity sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ== + ts-jest@24.0.2: version "24.0.2" resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-24.0.2.tgz#8dde6cece97c31c03e80e474c749753ffd27194d" @@ -17570,10 +17713,10 @@ webidl-conversions@^4.0.2: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== -webpack-bundle-analyzer@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.1.0.tgz#2f19cbb87bb6d4f3cb4e59cb67c837bd9436e89d" - integrity sha512-nyDyWEs7C6DZlgvu1pR1zzJfIWSiGPbtaByZr8q+Fd2xp70FuM/8ngCJzj3Er1TYRLSFmp1F1OInbEm4DZH8NA== +webpack-bundle-analyzer@3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.3.2.tgz#3da733a900f515914e729fcebcd4c40dde71fc6f" + integrity sha512-7qvJLPKB4rRWZGjVp5U1KEjwutbDHSKboAl0IfafnrdXMrgC0tOtZbQD6Rw0u4cmpgRN4O02Fc0t8eAT+FgGzA== dependencies: acorn "^6.0.7" acorn-walk "^6.1.1"