diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ebb038546e9c..58570c89c1866 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ * **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano) * **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon) +* **LDAP**: Define Grafana Admin permission in ldap group mappings [#2496](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/pull/12622) +* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda) ### Minor @@ -11,11 +13,13 @@ * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484) +* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) 
[#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) +* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) # 5.2.2 (unreleased) diff --git a/Gopkg.lock b/Gopkg.lock index 5acaf2a542ce0..6f08e208ecd0e 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -32,6 +32,7 @@ "aws/credentials/ec2rolecreds", "aws/credentials/endpointcreds", "aws/credentials/stscreds", + "aws/csm", "aws/defaults", "aws/ec2metadata", "aws/endpoints", @@ -43,6 +44,8 @@ "internal/shareddefaults", "private/protocol", "private/protocol/ec2query", + "private/protocol/eventstream", + "private/protocol/eventstream/eventstreamapi", "private/protocol/query", "private/protocol/query/queryutil", "private/protocol/rest", @@ -54,8 +57,8 @@ "service/s3", "service/sts" ] - revision = "c7cd1ebe87257cde9b65112fc876b0339ea0ac30" - version = "v1.13.49" + revision = "fde4ded7becdeae4d26bf1212916aabba79349b4" + version = "v1.14.12" [[projects]] branch = "master" @@ -424,6 +427,12 @@ revision = "1744e2970ca51c86172c8190fadad617561ed6e7" version = "v1.0.0" +[[projects]] + branch = "master" + name = "github.com/shurcooL/sanitized_anchor_name" + packages = ["."] + revision = "86672fcb3f950f35f2e675df2240550f2a50762f" + [[projects]] name = "github.com/smartystreets/assertions" packages = [ @@ -670,6 +679,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "85cc057e0cc074ab5b43bd620772d63d51e07b04e8782fcfe55e6929d2fc40f7" + inputs-digest = 
"cb8e7fd81f23ec987fc4d5dd9d31ae0f1164bc2f30cbea2fe86e0d97dd945beb" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 1768059f0b860..6c91ec3722111 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -36,7 +36,7 @@ ignored = [ [[constraint]] name = "github.com/aws/aws-sdk-go" - version = "1.12.65" + version = "1.13.56" [[constraint]] branch = "master" diff --git a/build.go b/build.go index 77cbde50c412b..bcb9b2ddf7d7b 100644 --- a/build.go +++ b/build.go @@ -330,6 +330,7 @@ func createPackage(options linuxPackageOptions) { name := "grafana" if enterprise { name += "-enterprise" + args = append(args, "--replaces", "grafana") } args = append(args, "--name", name) diff --git a/conf/ldap.toml b/conf/ldap.toml index 166d85eabb11c..a74b2b6cc2c7f 100644 --- a/conf/ldap.toml +++ b/conf/ldap.toml @@ -72,6 +72,8 @@ email = "email" [[servers.group_mappings]] group_dn = "cn=admins,dc=grafana,dc=org" org_role = "Admin" +# To make user an instance admin (Grafana Admin) uncomment line below +# grafana_admin = true # The Grafana organization database id, optional, if left out the default org (id 1) will be used # org_id = 1 diff --git a/devenv/README.md b/devenv/README.md index 4ec6f672f258d..9abf3596776c3 100644 --- a/devenv/README.md +++ b/devenv/README.md @@ -1,11 +1,16 @@ This folder contains useful scripts and configuration for... -* Configuring datasources in Grafana -* Provision example dashboards in Grafana -* Run preconfiured datasources as docker containers - -want to know more? run setup! +* Configuring dev datasources in Grafana +* Configuring dev & test scenarios dashboards. ```bash ./setup.sh ``` + +After restarting grafana server there should now be a number of datasources named `gdev-` provisioned as well as a dashboard folder named `gdev dashboards`. This folder contains dashboard & panel features tests dashboards. 
+ +# Dev dashboards + +Please update these dashboards or make new ones as new panels & dashboards features are developed or new bugs are found. The dashboards are located in the `devenv/dev-dashboards` folder. + + diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index e93c0217f2706..241381097b126 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -14,6 +14,9 @@ datasources: isDefault: true url: http://localhost:9090 + - name: gdev-testdata + type: testdata + - name: gdev-influxdb type: influxdb access: proxy @@ -60,7 +63,8 @@ datasources: url: localhost:5432 database: grafana user: grafana - password: password + secureJsonData: + password: password jsonData: sslmode: "disable" @@ -71,3 +75,4 @@ datasources: authType: credentials defaultRegion: eu-west-2 + diff --git a/devenv/dev-dashboards/dashboard_with_rows.json b/devenv/dev-dashboards/dashboard_with_rows.json deleted file mode 100644 index 335c27bc80a11..0000000000000 --- a/devenv/dev-dashboards/dashboard_with_rows.json +++ /dev/null @@ -1,592 +0,0 @@ -{ - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": "-- Grafana --", - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "gnetId": null, - "graphTooltip": 0, - "id": 59, - "links": [], - "panels": [ - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 9, - "panels": [], - "title": "Row title", - "type": "row" - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 1 - }, - "id": 12, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, 
- "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 1 - }, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - 
"show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 5 - }, - "id": 7, - "panels": [], - "title": "Row", - "type": "row" - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 6 - }, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 6 - }, - "id": 13, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - 
"seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 10 - }, - "id": 11, - "panels": [], - "title": "Row title", - "type": "row" - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 11 - }, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - 
"max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 11 - }, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - } - ], - "schemaVersion": 16, - "style": "dark", - "tags": [], - "templating": { - "list": [] - }, - "time": { - "from": "now-30m", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "timezone": "", - "title": "Dashboard with rows", - "uid": "1DdOzBNmk", - "version": 5 -} diff --git 
a/devenv/dev-dashboards/panel_tests_graph.json b/devenv/dev-dashboards/panel_tests_graph.json new file mode 100644 index 0000000000000..8a1770f0fa608 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_graph.json @@ -0,0 +1,1558 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 0 + }, + "id": 1, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "no_data_points", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "No Data Points Warning", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, 
+ "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "datapoints_outside_range", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Datapoints Outside Range Warning", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 0 + }, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", 
+ "scenarioId": "random_walk", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Random walk series", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 7 + }, + "id": 4, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "random_walk", + "target": "" + } + ], + "thresholds": [], + "timeFrom": "2s", + "timeShift": null, + "title": "Millisecond res x-axis and tooltip", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { 
+ "align": false, + "alignLevel": null + } + }, + { + "content": "Just verify that the tooltip time has millisecond resolution ", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 6, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 9, + "w": 16, + "x": 0, + "y": 14 + }, + "id": 5, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "yaxis": 2 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "2000,3000,4000,1000,3000,10000", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "2 yaxis and axis labels", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": "Perecent", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": "Pressure", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Verify that axis labels look ok", + "editable": true, + "error": false, + "gridPos": { + "h": 9, + 
"w": 8, + "x": 16, + "y": 14 + }, + "id": 7, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 23 + }, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "null value connected", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 23 + }, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null as zero", + 
"percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "null value null as zero", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Should be a long line connecting the null region in the `connected` mode, and in zero it should just be a line with zero value at the null points. 
", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 23 + }, + "id": 13, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 30 + }, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "zindex": -3 + } + ], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "hide": false, + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Stacking value ontop of nulls", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + 
"alignLevel": null + } + }, + { + "content": "Stacking values on top of nulls, should treat the null values as zero. ", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 30 + }, + "id": 14, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 37 + }, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "zindex": -3 + } + ], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "alias": "", + "hide": false, + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Stacking all series null segment", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, 
+ "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Stacking when all values are null should leave a gap in the graph", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 37 + }, + "id": 15, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 44 + }, + "id": 20, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table Single Series Should Take Minimum Height", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": 
{ + "h": 7, + "w": 12, + "x": 0, + "y": 51 + }, + "id": 16, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 51 + }, + "id": 17, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + 
"percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "E", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "F", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "G", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "I", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "J", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table Should Scroll", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + 
"datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 58 + }, + "id": 18, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 58 + }, + "id": 19, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "total": 
true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "E", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "F", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "G", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "I", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "J", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "K", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "L", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": 
null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Graph", + "uid": "5SdHCadmz", + "version": 3 +} diff --git a/devenv/dev-dashboards/panel_tests_singlestat.json b/devenv/dev-dashboards/panel_tests_singlestat.json new file mode 100644 index 0000000000000..2d69f27bcb6bb --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_singlestat.json @@ -0,0 +1,574 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 0 + }, + "id": 2, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + 
"maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "postfix", + "postfixFontSize": "50%", + "prefix": "prefix", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "prefix 3 ms (green) postfixt + sparkline", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#d44a3a", + "rgba(237, 129, 40, 0.89)", + "#299c46" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 0 + }, + "id": 3, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + 
"stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "3 ms (red) + full height sparkline", + "type": "singlestat", + "valueFontSize": "200%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": true, + "colorPrefix": false, + "colorValue": false, + "colors": [ + "#d44a3a", + "rgba(237, 129, 40, 0.89)", + "#299c46" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 0 + }, + "id": 4, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "3 ms + red background", + "type": "singlestat", + "valueFontSize": "200%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + 
"minValue": 0, + "show": true, + "thresholdLabels": true, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 7 + }, + "id": 5, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 7 + }, + "id": 6, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": 
"N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90, no labels", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": false + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 7, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90, no markers or labels", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + 
"text": "N/A", + "value": "null" + } + ], + "valueName": "current" + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Singlestat", + "uid": "singlestat", + "version": 14 +} diff --git a/devenv/dev-dashboards/panel_tests_table.json b/devenv/dev-dashboards/panel_tests_table.json new file mode 100644 index 0000000000000..8337e9cd746cd --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_table.json @@ -0,0 +1,453 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 11, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 3, + "links": [], + "pageSize": 10, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + 
"rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "server1", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "server2", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + } + ], + "title": "Time series to rows (2 pages)", + "transform": "timeseries_to_rows", + "type": "table" + }, + { + "columns": [ + { + "text": "Avg", + "value": "avg" + }, + { + "text": "Max", + "value": "max" + }, + { + "text": "Current", + "value": "current" + } + ], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 11, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 4, + "links": [], + "pageSize": 10, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + 
"pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "server1", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "server2", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + } + ], + "title": "Time series aggregations", + "transform": "timeseries_aggregations", + "type": "table" + }, + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 11 + }, + "id": 5, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "row", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "/Color/", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "ColorValue", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + } + ], + "title": "color row by threshold", + "transform": 
"timeseries_to_columns", + "type": "table" + }, + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 18 + }, + "id": 2, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "ColorValue", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "ColorCell", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "5,1,2,3,4,5,10,20" + } + ], + "title": "Column style thresholds & units", + "transform": "timeseries_to_columns", + "type": "table" + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + 
"timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Table", + "uid": "pttable", + "version": 1 +} diff --git a/public/app/plugins/app/testdata/dashboards/alerts.json b/devenv/dev-dashboards/testdata_alerts.json similarity index 98% rename from public/app/plugins/app/testdata/dashboards/alerts.json rename to devenv/dev-dashboards/testdata_alerts.json index 159df0f458bf6..8c2edebf155e9 100644 --- a/public/app/plugins/app/testdata/dashboards/alerts.json +++ b/devenv/dev-dashboards/testdata_alerts.json @@ -1,6 +1,6 @@ { "revision": 2, - "title": "TestData - Alerts", + "title": "Alerting with TestData", "tags": [ "grafana-test" ], @@ -48,7 +48,7 @@ }, "aliasColors": {}, "bars": false, - "datasource": "Grafana TestData", + "datasource": "gdev-testdata", "editable": true, "error": false, "fill": 1, @@ -161,7 +161,7 @@ }, "aliasColors": {}, "bars": false, - "datasource": "Grafana TestData", + "datasource": "gdev-testdata", "editable": true, "error": false, "fill": 1, diff --git a/devenv/setup.sh b/devenv/setup.sh index 78dbfc1a366ed..6412bbc98eabf 100755 --- a/devenv/setup.sh +++ b/devenv/setup.sh @@ -1,4 +1,4 @@ -#/bin/bash +#!/bin/bash bulkDashboard() { @@ -22,31 +22,37 @@ requiresJsonnet() { fi } -defaultDashboards() { +devDashboards() { + echo -e "\xE2\x9C\x94 Setting up all dev dashboards using provisioning" ln -s -f ../../../devenv/dashboards.yaml ../conf/provisioning/dashboards/dev.yaml } -defaultDatasources() { - echo "setting up all default datasources using provisioning" +devDatasources() { + echo -e "\xE2\x9C\x94 Setting up all dev datasources using provisioning" ln -s -f ../../../devenv/datasources.yaml ../conf/provisioning/datasources/dev.yaml } usage() { - echo -e "install.sh\n\tThis script setups dev provision for datasources and 
dashboards" + echo -e "\n" echo "Usage:" echo " bulk-dashboards - create and provisioning 400 dashboards" echo " no args - provisiong core datasources and dev dashboards" } main() { + echo -e "------------------------------------------------------------------" + echo -e "This script setups provisioning for dev datasources and dashboards" + echo -e "------------------------------------------------------------------" + echo -e "\n" + local cmd=$1 if [[ $cmd == "bulk-dashboards" ]]; then bulkDashboard else - defaultDashboards - defaultDatasources + devDashboards + devDatasources fi if [[ -z "$cmd" ]]; then diff --git a/docker/blocks/nginx_proxy/Dockerfile b/docker/blocks/nginx_proxy/Dockerfile index 9ded20dfdda26..04de507499d1b 100644 --- a/docker/blocks/nginx_proxy/Dockerfile +++ b/docker/blocks/nginx_proxy/Dockerfile @@ -1,3 +1,4 @@ FROM nginx:alpine -COPY nginx.conf /etc/nginx/nginx.conf \ No newline at end of file +COPY nginx.conf /etc/nginx/nginx.conf +COPY htpasswd /etc/nginx/htpasswd diff --git a/docker/blocks/nginx_proxy/htpasswd b/docker/blocks/nginx_proxy/htpasswd new file mode 100755 index 0000000000000..e2c5eeeff7b9b --- /dev/null +++ b/docker/blocks/nginx_proxy/htpasswd @@ -0,0 +1,3 @@ +user1:$apr1$1odeeQb.$kwV8D/VAAGUDU7pnHuKoV0 +user2:$apr1$A2kf25r.$6S0kp3C7vIuixS5CL0XA9. 
+admin:$apr1$IWn4DoRR$E2ol7fS/dkI18eU4bXnBO1 diff --git a/docker/blocks/nginx_proxy/nginx.conf b/docker/blocks/nginx_proxy/nginx.conf index 18e27b3fb01e9..860d3d0b89fc6 100644 --- a/docker/blocks/nginx_proxy/nginx.conf +++ b/docker/blocks/nginx_proxy/nginx.conf @@ -13,7 +13,26 @@ http { listen 10080; location /grafana/ { + ################################################################ + # Enable these settings to test with basic auth and an auth proxy header + # the htpasswd file contains an admin user with password admin and + # user1: grafana and user2: grafana + ################################################################ + + # auth_basic "Restricted Content"; + # auth_basic_user_file /etc/nginx/htpasswd; + + ################################################################ + # To use the auth proxy header, set the following in custom.ini: + # [auth.proxy] + # enabled = true + # header_name = X-WEBAUTH-USER + # header_property = username + ################################################################ + + # proxy_set_header X-WEBAUTH-USER $remote_user; + proxy_pass http://localhost:3000/; } } -} \ No newline at end of file +} diff --git a/docker/blocks/openldap/ldap_dev.toml b/docker/blocks/openldap/ldap_dev.toml new file mode 100644 index 0000000000000..e79771b57dea4 --- /dev/null +++ b/docker/blocks/openldap/ldap_dev.toml @@ -0,0 +1,85 @@ +# To troubleshoot and get more log info enable ldap debug logging in grafana.ini +# [log] +# filters = ldap:debug + +[[servers]] +# Ldap server host (specify multiple hosts space separated) +host = "127.0.0.1" +# Default port is 389 or 636 if use_ssl = true +port = 389 +# Set to true if ldap server supports TLS +use_ssl = false +# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS) +start_tls = false +# set to true if you want to skip ssl cert validation +ssl_skip_verify = false +# set to the path to your root CA certificate or leave 
unset to use system defaults +# root_ca_cert = "/path/to/certificate.crt" + +# Search user bind dn +bind_dn = "cn=admin,dc=grafana,dc=org" +# Search user bind password +# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;""" +bind_password = 'grafana' + +# User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)" +search_filter = "(cn=%s)" + +# An array of base dns to search through +search_base_dns = ["dc=grafana,dc=org"] + +# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups. +# This is done by enabling group_search_filter below. You must also set member_of= "cn" +# in [servers.attributes] below. + +# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN +# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of +# below in such a way that the user's recursive group membership is considered. +# +# Nested Groups + Active Directory (AD) Example: +# +# AD groups store the Distinguished Names (DNs) of members, so your filter must +# recursively search your groups for the authenticating user's DN. For example: +# +# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)" +# group_search_filter_user_attribute = "distinguishedName" +# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] +# +# [servers.attributes] +# ... +# member_of = "distinguishedName" + +## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available) +# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" +## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter. 
+## Defaults to the value of username in [server.attributes] +## Valid options are any of your values in [servers.attributes] +## If you are using nested groups you probably want to set this and member_of in +## [servers.attributes] to "distinguishedName" +# group_search_filter_user_attribute = "distinguishedName" +## An array of the base DNs to search through for groups. Typically uses ou=groups +# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] + +# Specify names of the ldap attributes your ldap uses +[servers.attributes] +name = "givenName" +surname = "sn" +username = "cn" +member_of = "memberOf" +email = "email" + +# Map ldap groups to grafana org roles +[[servers.group_mappings]] +group_dn = "cn=admins,ou=groups,dc=grafana,dc=org" +org_role = "Admin" +# The Grafana organization database id, optional, if left out the default org (id 1) will be used +# org_id = 1 + +[[servers.group_mappings]] +group_dn = "cn=editors,ou=groups,dc=grafana,dc=org" +org_role = "Editor" + +[[servers.group_mappings]] +# If you want to match all (or no ldap groups) then you can use wildcard +group_dn = "*" +org_role = "Viewer" diff --git a/docker/blocks/openldap/notes.md b/docker/blocks/openldap/notes.md index 8de23d5ccf27f..65155423616d5 100644 --- a/docker/blocks/openldap/notes.md +++ b/docker/blocks/openldap/notes.md @@ -14,12 +14,12 @@ After adding ldif files to `prepopulate`: ## Enabling LDAP in Grafana -The default `ldap.toml` file in `conf` has host set to `127.0.0.1` and port to set to 389 so all you need to do is enable it in the .ini file to get Grafana to use this block: +Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). 
To enable it in the .ini file to get Grafana to use this block: ```ini [auth.ldap] enabled = true -config_file = conf/ldap.toml +config_file = conf/ldap_dev.toml ; allow_sign_up = true ``` @@ -43,6 +43,3 @@ editors no groups ldap-viewer - - - diff --git a/docs/sources/http_api/playlist.md b/docs/sources/http_api/playlist.md new file mode 100644 index 0000000000000..7c33900969b58 --- /dev/null +++ b/docs/sources/http_api/playlist.md @@ -0,0 +1,286 @@ ++++ +title = "Playlist HTTP API " +description = "Playlist Admin HTTP API" +keywords = ["grafana", "http", "documentation", "api", "playlist"] +aliases = ["/http_api/playlist/"] +type = "docs" +[menu.docs] +name = "Playlist" +parent = "http_api" ++++ + +# Playlist API + +## Search Playlist + +`GET /api/playlists` + +Get all existing playlist for the current organization using pagination + +**Example Request**: + +```bash +GET /api/playlists HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + + Querystring Parameters: + + These parameters are used as querystring parameters. + + - **query** - Limit response to playlist having a name like this value. + - **limit** - Limit response to *X* number of playlist. 
+ +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +[ + { + "id": 1, + "name": "my playlist", + "interval": "5m" + } +] +``` + +## Get one playlist + +`GET /api/playlists/:id` + +**Example Request**: + +```bash +GET /api/playlists/1 HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +{ + "id" : 1, + "name": "my playlist", + "interval": "5m", + "orgId": "my org", + "items": [ + { + "id": 1, + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "id": 2, + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] +} +``` + +## Get Playlist items + +`GET /api/playlists/:id/items` + +**Example Request**: + +```bash +GET /api/playlists/1/items HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +[ + { + "id": 1, + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "id": 2, + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } +] +``` + +## Get Playlist dashboards + +`GET /api/playlists/:id/dashboards` + +**Example Request**: + +```bash +GET /api/playlists/1/dashboards HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +[ + { + "id": 3, + "title": "my third dasboard", + "order": 1, + }, + { + "id": 5, + "title":"my other dasboard" + "order": 2, + + } +] +``` + +## Create a playlist + +`POST /api/playlists/` + +**Example Request**: + +```bash +PUT /api/playlists/1 
HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + { + "name": "my playlist", + "interval": "5m", + "items": [ + { + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] + } +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json + { + "id": 1, + "name": "my playlist", + "interval": "5m" + } +``` + +## Update a playlist + +`PUT /api/playlists/:id` + +**Example Request**: + +```bash +PUT /api/playlists/1 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + { + "name": "my playlist", + "interval": "5m", + "items": [ + { + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] + } +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +{ + "id" : 1, + "name": "my playlist", + "interval": "5m", + "orgId": "my org", + "items": [ + { + "id": 1, + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "id": 2, + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] +} +``` + +## Delete a playlist + +`DELETE /api/playlists/:id` + +**Example Request**: + +```bash +DELETE /api/playlists/1 HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +{} +``` diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md 
index 668a44fcb2b3d..e3db7a1d60b31 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -296,6 +296,12 @@ Set to `true` to automatically add new users to the main organization (id 1). When set to `false`, new users will automatically cause a new organization to be created for that new user. +### auto_assign_org_id + +Set this value to automatically add new users to the provided org. +This requires `auto_assign_org` to be set to `true`. Please make sure +that this organization does already exists. + ### auto_assign_org_role The role new users will be assigned for the main organization (if the @@ -857,7 +863,7 @@ Secret key. e.g. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA Url to where Grafana will send PUT request with images ### public_url -Optional parameter. Url to send to users in notifications, directly appended with the resulting uploaded file name. +Optional parameter. Url to send to users in notifications. If the string contains the sequence ${file}, it will be replaced with the uploaded filename. Otherwise, the file name will be appended to the path part of the url, leaving any query string unchanged. ### username basic auth username diff --git a/docs/sources/installation/ldap.md b/docs/sources/installation/ldap.md index 85501e51d85b5..9a381b9e467c0 100644 --- a/docs/sources/installation/ldap.md +++ b/docs/sources/installation/ldap.md @@ -23,8 +23,9 @@ specific configuration file (default: `/etc/grafana/ldap.toml`). 
### Example config ```toml -# Set to true to log user information returned from LDAP -verbose_logging = false +# To troubleshoot and get more log info enable ldap debug logging in grafana.ini +# [log] +# filters = ldap:debug [[servers]] # Ldap server host (specify multiple hosts space separated) @@ -73,6 +74,8 @@ email = "email" [[servers.group_mappings]] group_dn = "cn=admins,dc=grafana,dc=org" org_role = "Admin" +# To make user an instance admin (Grafana Admin) uncomment line below +# grafana_admin = true # The Grafana organization database id, optional, if left out the default org (id 1) will be used. Setting this allows for multiple group_dn's to be assigned to the same org_role provided the org_id differs # org_id = 1 @@ -132,6 +135,10 @@ Users page, this change will be reset the next time the user logs in. If you change the LDAP groups of a user, the change will take effect the next time the user logs in. +### Grafana Admin +with a servers.group_mappings section you can set grafana_admin = true or false to sync Grafana Admin permission. A Grafana server admin has admin access over all orgs & +users. + ### Priority The first group mapping that an LDAP user is matched to will be used for the sync. If you have LDAP users that fit multiple mappings, the topmost mapping in the TOML config will be used. diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 8341b9770bdad..efe9db61e3deb 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -11,7 +11,7 @@ weight = 1 # Variables Variables allows for more interactive and dynamic dashboards. Instead of hard-coding things like server, application -and sensor name in you metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of +and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. 
These dropdowns make it easy to change the data being displayed in your dashboard. {{< docs-imagebox img="/img/docs/v50/variables_dashboard.png" >}} diff --git a/pkg/api/api.go b/pkg/api/api.go index 8870b9b095e76..84425fdae3d44 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -73,8 +73,7 @@ func (hs *HTTPServer) registerRoutes() { r.Get("/dashboards/", reqSignedIn, Index) r.Get("/dashboards/*", reqSignedIn, Index) - r.Get("/explore/", reqEditorRole, Index) - r.Get("/explore/*", reqEditorRole, Index) + r.Get("/explore", reqEditorRole, Index) r.Get("/playlists/", reqSignedIn, Index) r.Get("/playlists/*", reqSignedIn, Index) diff --git a/pkg/api/playlist.go b/pkg/api/playlist.go index a90b6425cb62d..0963df7d4c479 100644 --- a/pkg/api/playlist.go +++ b/pkg/api/playlist.go @@ -160,6 +160,7 @@ func CreatePlaylist(c *m.ReqContext, cmd m.CreatePlaylistCommand) Response { func UpdatePlaylist(c *m.ReqContext, cmd m.UpdatePlaylistCommand) Response { cmd.OrgId = c.OrgId + cmd.Id = c.ParamsInt64(":id") if err := bus.Dispatch(&cmd); err != nil { return Error(500, "Failed to save playlist", err) diff --git a/pkg/components/imguploader/webdavuploader.go b/pkg/components/imguploader/webdavuploader.go index f5478ea8a2f03..ed6b14725c066 100644 --- a/pkg/components/imguploader/webdavuploader.go +++ b/pkg/components/imguploader/webdavuploader.go @@ -9,6 +9,7 @@ import ( "net/http" "net/url" "path" + "strings" "time" "github.com/grafana/grafana/pkg/util" @@ -35,6 +36,16 @@ var netClient = &http.Client{ Transport: netTransport, } +func (u *WebdavUploader) PublicURL(filename string) string { + if strings.Contains(u.public_url, "${file}") { + return strings.Replace(u.public_url, "${file}", filename, -1) + } else { + publicURL, _ := url.Parse(u.public_url) + publicURL.Path = path.Join(publicURL.Path, filename) + return publicURL.String() + } +} + func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) { url, _ := url.Parse(u.url) filename := 
util.GetRandomString(20) + ".png" @@ -65,9 +76,7 @@ func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) } if u.public_url != "" { - publicURL, _ := url.Parse(u.public_url) - publicURL.Path = path.Join(publicURL.Path, filename) - return publicURL.String(), nil + return u.PublicURL(filename), nil } return url.String(), nil diff --git a/pkg/components/imguploader/webdavuploader_test.go b/pkg/components/imguploader/webdavuploader_test.go index 5a8abd0542de1..0178c9cda6cb4 100644 --- a/pkg/components/imguploader/webdavuploader_test.go +++ b/pkg/components/imguploader/webdavuploader_test.go @@ -2,6 +2,7 @@ package imguploader import ( "context" + "net/url" "testing" . "github.com/smartystreets/goconvey/convey" @@ -26,3 +27,15 @@ func TestUploadToWebdav(t *testing.T) { So(path, ShouldStartWith, "http://publicurl:8888/webdav/") }) } + +func TestPublicURL(t *testing.T) { + Convey("Given a public URL with parameters, and no template", t, func() { + webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "http://cloudycloud.me/s/DOIFDOMV/download?files=") + parsed, _ := url.Parse(webdavUploader.PublicURL("fileyfile.png")) + So(parsed.Path, ShouldEndWith, "fileyfile.png") + }) + Convey("Given a public URL with parameters, and a template", t, func() { + webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "http://cloudycloud.me/s/DOIFDOMV/download?files=${file}") + So(webdavUploader.PublicURL("fileyfile.png"), ShouldEndWith, "fileyfile.png") + }) +} diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go index d6eaf9a975e04..a421e3ebe0a72 100644 --- a/pkg/login/ext_user.go +++ b/pkg/login/ext_user.go @@ -72,6 +72,13 @@ func UpsertUser(cmd *m.UpsertUserCommand) error { return err } + // Sync isGrafanaAdmin permission + if extUser.IsGrafanaAdmin != nil && *extUser.IsGrafanaAdmin != cmd.Result.IsAdmin { + if err := bus.Dispatch(&m.UpdateUserPermissionsCommand{UserId: 
cmd.Result.Id, IsGrafanaAdmin: *extUser.IsGrafanaAdmin}); err != nil { + return err + } + } + err = bus.Dispatch(&m.SyncTeamsCommand{ User: cmd.Result, ExternalUser: extUser, diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go index bdf87b2db540e..9e4918f0290c5 100644 --- a/pkg/login/ldap.go +++ b/pkg/login/ldap.go @@ -175,6 +175,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo if ldapUser.isMemberOf(group.GroupDN) { extUser.OrgRoles[group.OrgId] = group.OrgRole + extUser.IsGrafanaAdmin = group.IsGrafanaAdmin } } @@ -190,18 +191,18 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo } // add/update user in grafana - userQuery := &m.UpsertUserCommand{ + upsertUserCmd := &m.UpsertUserCommand{ ReqContext: ctx, ExternalUser: extUser, SignupAllowed: setting.LdapAllowSignup, } - err := bus.Dispatch(userQuery) + err := bus.Dispatch(upsertUserCmd) if err != nil { return nil, err } - return userQuery.Result, nil + return upsertUserCmd.Result, nil } func (a *ldapAuther) serverBind() error { diff --git a/pkg/login/ldap_settings.go b/pkg/login/ldap_settings.go index 497d8725e293b..c4f5982b23721 100644 --- a/pkg/login/ldap_settings.go +++ b/pkg/login/ldap_settings.go @@ -44,9 +44,10 @@ type LdapAttributeMap struct { } type LdapGroupToOrgRole struct { - GroupDN string `toml:"group_dn"` - OrgId int64 `toml:"org_id"` - OrgRole m.RoleType `toml:"org_role"` + GroupDN string `toml:"group_dn"` + OrgId int64 `toml:"org_id"` + IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatability) + OrgRole m.RoleType `toml:"org_role"` } var LdapCfg LdapConfig diff --git a/pkg/login/ldap_test.go b/pkg/login/ldap_test.go index 5080840704e35..1cf98bd1e144b 100644 --- a/pkg/login/ldap_test.go +++ b/pkg/login/ldap_test.go @@ -98,6 +98,10 @@ func TestLdapAuther(t *testing.T) { So(result.Login, ShouldEqual, "torkelo") }) + Convey("Should set isGrafanaAdmin to false by 
default", func() { + So(result.IsAdmin, ShouldBeFalse) + }) + }) }) @@ -223,8 +227,32 @@ func TestLdapAuther(t *testing.T) { So(sc.addOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN) So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) }) + + Convey("Should not update permissions unless specified", func() { + So(err, ShouldBeNil) + So(sc.updateUserPermissionsCmd, ShouldBeNil) + }) }) + ldapAutherScenario("given ldap groups with grafana_admin=true", func(sc *scenarioContext) { + trueVal := true + + ldapAuther := NewLdapAuthenticator(&LdapServerConf{ + LdapGroups: []*LdapGroupToOrgRole{ + {GroupDN: "cn=admins", OrgId: 1, OrgRole: "Admin", IsGrafanaAdmin: &trueVal}, + }, + }) + + sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ + MemberOf: []string{"cn=admins"}, + }) + + Convey("Should create user with admin set to true", func() { + So(err, ShouldBeNil) + So(sc.updateUserPermissionsCmd.IsGrafanaAdmin, ShouldBeTrue) + }) + }) }) Convey("When calling SyncUser", t, func() { @@ -332,6 +360,11 @@ func ldapAutherScenario(desc string, fn scenarioFunc) { return nil }) + bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.UpdateUserPermissionsCommand) error { + sc.updateUserPermissionsCmd = cmd + return nil + }) + bus.AddHandler("test", func(cmd *m.GetUserByAuthInfoQuery) error { sc.getUserByAuthInfoQuery = cmd sc.getUserByAuthInfoQuery.Result = &m.User{Login: cmd.Login} @@ -379,14 +412,15 @@ func ldapAutherScenario(desc string, fn scenarioFunc) { } type scenarioContext struct { - getUserByAuthInfoQuery *m.GetUserByAuthInfoQuery - getUserOrgListQuery *m.GetUserOrgListQuery - createUserCmd *m.CreateUserCommand - addOrgUserCmd *m.AddOrgUserCommand - updateOrgUserCmd *m.UpdateOrgUserCommand - removeOrgUserCmd *m.RemoveOrgUserCommand - updateUserCmd *m.UpdateUserCommand - setUsingOrgCmd *m.SetUsingOrgCommand + getUserByAuthInfoQuery *m.GetUserByAuthInfoQuery + getUserOrgListQuery *m.GetUserOrgListQuery + createUserCmd 
*m.CreateUserCommand + addOrgUserCmd *m.AddOrgUserCommand + updateOrgUserCmd *m.UpdateOrgUserCommand + removeOrgUserCmd *m.RemoveOrgUserCommand + updateUserCmd *m.UpdateUserCommand + setUsingOrgCmd *m.SetUsingOrgCommand + updateUserPermissionsCmd *m.UpdateUserPermissionsCommand } func (sc *scenarioContext) userQueryReturns(user *m.User) { diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 4dd84c1215178..a8d9f7308fa3c 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -44,6 +44,7 @@ var ( M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter + M_Aws_CloudWatch_GetMetricData prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers @@ -218,6 +219,12 @@ func init() { Namespace: exporterName, }) + M_Aws_CloudWatch_GetMetricData = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "aws_cloudwatch_get_metric_data_total", + Help: "counter for getting metric data time series from aws", + Namespace: exporterName, + }) + M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", @@ -307,6 +314,7 @@ func initMetricVars() { M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, + M_Aws_CloudWatch_GetMetricData, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, diff --git a/pkg/models/playlist.go b/pkg/models/playlist.go index 5c49bb9256c55..c52da202293d6 100644 --- a/pkg/models/playlist.go +++ b/pkg/models/playlist.go @@ -63,7 +63,7 @@ type PlaylistDashboards []*PlaylistDashboard type UpdatePlaylistCommand struct { OrgId int64 `json:"-"` - Id int64 `json:"id" binding:"Required"` + Id int64 `json:"id"` Name string `json:"name" binding:"Required"` Interval string `json:"interval"` Items []PlaylistItemDTO `json:"items"` diff --git a/pkg/models/user_auth.go 
b/pkg/models/user_auth.go index 162a4d867a9a0..28189005737cb 100644 --- a/pkg/models/user_auth.go +++ b/pkg/models/user_auth.go @@ -13,14 +13,15 @@ type UserAuth struct { } type ExternalUserInfo struct { - AuthModule string - AuthId string - UserId int64 - Email string - Login string - Name string - Groups []string - OrgRoles map[int64]RoleType + AuthModule string + AuthId string + UserId int64 + Email string + Login string + Name string + Groups []string + OrgRoles map[int64]RoleType + IsGrafanaAdmin *bool // This is a pointer to know if we should sync this or not (nil = ignore sync) } // --------------------- diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 2fec6acbf5443..ff44805e35f16 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -17,11 +17,14 @@ import ( plugin "github.com/hashicorp/go-plugin" ) +// DataSourcePlugin contains all metadata about a datasource plugin type DataSourcePlugin struct { FrontendPluginBase Annotations bool `json:"annotations"` Metrics bool `json:"metrics"` Alerting bool `json:"alerting"` + Explore bool `json:"explore"` + Logs bool `json:"logs"` QueryOptions map[string]bool `json:"queryOptions,omitempty"` BuiltIn bool `json:"builtIn,omitempty"` Mixed bool `json:"mixed,omitempty"` diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go index 531a70b2101ab..af911dc22e6e8 100644 --- a/pkg/services/sqlstore/alert.go +++ b/pkg/services/sqlstore/alert.go @@ -73,6 +73,7 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error { alert.name, alert.state, alert.new_state_date, + alert.eval_data, alert.eval_date, alert.execution_error, dashboard.uid as dashboard_uid, diff --git a/pkg/services/sqlstore/alert_test.go b/pkg/services/sqlstore/alert_test.go index 79fa99864e771..d97deb45f0ecc 100644 --- a/pkg/services/sqlstore/alert_test.go +++ b/pkg/services/sqlstore/alert_test.go @@ -13,7 +13,7 @@ func mockTimeNow() { var timeSeed int64 timeNow = func() 
time.Time { fakeNow := time.Unix(timeSeed, 0) - timeSeed += 1 + timeSeed++ return fakeNow } } @@ -30,7 +30,7 @@ func TestAlertingDataAccess(t *testing.T) { InitTestDB(t) testDash := insertTestDashboard("dashboard with alerts", 1, 0, false, "alert") - + evalData, _ := simplejson.NewJson([]byte(`{"test": "test"}`)) items := []*m.Alert{ { PanelId: 1, @@ -40,6 +40,7 @@ func TestAlertingDataAccess(t *testing.T) { Message: "Alerting message", Settings: simplejson.New(), Frequency: 1, + EvalData: evalData, }, } @@ -104,8 +105,18 @@ func TestAlertingDataAccess(t *testing.T) { alert := alertQuery.Result[0] So(err2, ShouldBeNil) + So(alert.Id, ShouldBeGreaterThan, 0) + So(alert.DashboardId, ShouldEqual, testDash.Id) + So(alert.PanelId, ShouldEqual, 1) So(alert.Name, ShouldEqual, "Alerting title") So(alert.State, ShouldEqual, "pending") + So(alert.NewStateDate, ShouldNotBeNil) + So(alert.EvalData, ShouldNotBeNil) + So(alert.EvalData.Get("test").MustString(), ShouldEqual, "test") + So(alert.EvalDate, ShouldNotBeNil) + So(alert.ExecutionError, ShouldEqual, "") + So(alert.DashboardUid, ShouldNotBeNil) + So(alert.DashboardSlug, ShouldEqual, "dashboard-with-alerts") }) Convey("Viewer cannot read alerts", func() { diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index e4aecf0391da2..0ca1c5d67e49f 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -387,6 +387,7 @@ func insertTestDashboardForPlugin(title string, orgId int64, folderId int64, isF func createUser(name string, role string, isAdmin bool) m.User { setting.AutoAssignOrg = true + setting.AutoAssignOrgId = 1 setting.AutoAssignOrgRole = role currentUserCmd := m.CreateUserCommand{Login: name, Email: name + "@test.com", Name: "a " + name, IsAdmin: isAdmin} diff --git a/pkg/services/sqlstore/org_test.go b/pkg/services/sqlstore/org_test.go index 521a2a11c0501..af8500707d508 100644 --- a/pkg/services/sqlstore/org_test.go +++ 
b/pkg/services/sqlstore/org_test.go @@ -17,6 +17,7 @@ func TestAccountDataAccess(t *testing.T) { Convey("Given single org mode", func() { setting.AutoAssignOrg = true + setting.AutoAssignOrgId = 1 setting.AutoAssignOrgRole = "Viewer" Convey("Users should be added to default organization", func() { diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 5e9a085b26d00..0ec1a94787009 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -42,16 +42,23 @@ func getOrgIdForNewUser(cmd *m.CreateUserCommand, sess *DBSession) (int64, error var org m.Org if setting.AutoAssignOrg { - // right now auto assign to org with id 1 - has, err := sess.Where("id=?", 1).Get(&org) + has, err := sess.Where("id=?", setting.AutoAssignOrgId).Get(&org) if err != nil { return 0, err } if has { return org.Id, nil + } else { + if setting.AutoAssignOrgId == 1 { + org.Name = "Main Org." + org.Id = int64(setting.AutoAssignOrgId) + } else { + sqlog.Info("Could not create user: organization id %v does not exist", + setting.AutoAssignOrgId) + return 0, fmt.Errorf("Could not create user: organization id %v does not exist", + setting.AutoAssignOrgId) + } } - org.Name = "Main Org." 
- org.Id = 1 } else { org.Name = cmd.OrgName if len(org.Name) == 0 { diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index d8c8e6431c0bf..eb61568261dcd 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -100,6 +100,7 @@ var ( AllowUserSignUp bool AllowUserOrgCreate bool AutoAssignOrg bool + AutoAssignOrgId int AutoAssignOrgRole string VerifyEmailEnabled bool LoginHint string @@ -592,6 +593,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { AllowUserSignUp = users.Key("allow_sign_up").MustBool(true) AllowUserOrgCreate = users.Key("allow_org_create").MustBool(true) AutoAssignOrg = users.Key("auto_assign_org").MustBool(true) + AutoAssignOrgId = users.Key("auto_assign_org_id").MustInt(1) AutoAssignOrgRole = users.Key("auto_assign_org_role").In("Editor", []string{"Editor", "Admin", "Viewer"}) VerifyEmailEnabled = users.Key("verify_email_enabled").MustBool(false) LoginHint = users.Key("login_hint").String() diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 8af97575ae9ee..38fbac3aa292d 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -14,6 +14,7 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb" + "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/request" @@ -88,48 +89,67 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo Results: make(map[string]*tsdb.QueryResult), } - errCh := make(chan error, 1) - resCh := make(chan *tsdb.QueryResult, 1) + eg, ectx := errgroup.WithContext(ctx) - currentlyExecuting := 0 + getMetricDataQueries := make(map[string]map[string]*CloudWatchQuery) for i, model := range queryContext.Queries { queryType := model.Model.Get("type").MustString() if queryType != "timeSeriesQuery" && queryType != "" { continue } - currentlyExecuting++ - go func(refId string, index int) { - queryRes, 
err := e.executeQuery(ctx, queryContext.Queries[index].Model, queryContext) - currentlyExecuting-- + + query, err := parseQuery(queryContext.Queries[i].Model) + if err != nil { + return nil, err + } + query.RefId = queryContext.Queries[i].RefId + + if query.Id != "" { + if _, ok := getMetricDataQueries[query.Region]; !ok { + getMetricDataQueries[query.Region] = make(map[string]*CloudWatchQuery) + } + getMetricDataQueries[query.Region][query.Id] = query + continue + } + + if query.Id == "" && query.Expression != "" { + return nil, fmt.Errorf("Invalid query: id should be set if using expression") + } + + eg.Go(func() error { + queryRes, err := e.executeQuery(ectx, query, queryContext) if err != nil { - errCh <- err - } else { - queryRes.RefId = refId - resCh <- queryRes + return err } - }(model.RefId, i) + result.Results[queryRes.RefId] = queryRes + return nil + }) } - for currentlyExecuting != 0 { - select { - case res := <-resCh: - result.Results[res.RefId] = res - case err := <-errCh: - return result, err - case <-ctx.Done(): - return result, ctx.Err() + if len(getMetricDataQueries) > 0 { + for region, getMetricDataQuery := range getMetricDataQueries { + q := getMetricDataQuery + eg.Go(func() error { + queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext) + if err != nil { + return err + } + for _, queryRes := range queryResponses { + result.Results[queryRes.RefId] = queryRes + } + return nil + }) } } - return result, nil -} - -func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { - query, err := parseQuery(parameters) - if err != nil { + if err := eg.Wait(); err != nil { return nil, err } + return result, nil +} + +func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatchQuery, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { client, err := e.getClient(query.Region) if err != nil { return nil, err @@ -201,6 
+221,139 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl return queryRes, nil } +func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, region string, queries map[string]*CloudWatchQuery, queryContext *tsdb.TsdbQuery) ([]*tsdb.QueryResult, error) { + queryResponses := make([]*tsdb.QueryResult, 0) + + // validate query + for _, query := range queries { + if !(len(query.Statistics) == 1 && len(query.ExtendedStatistics) == 0) && + !(len(query.Statistics) == 0 && len(query.ExtendedStatistics) == 1) { + return queryResponses, errors.New("Statistics count should be 1") + } + } + + client, err := e.getClient(region) + if err != nil { + return queryResponses, err + } + + startTime, err := queryContext.TimeRange.ParseFrom() + if err != nil { + return queryResponses, err + } + + endTime, err := queryContext.TimeRange.ParseTo() + if err != nil { + return queryResponses, err + } + + params := &cloudwatch.GetMetricDataInput{ + StartTime: aws.Time(startTime), + EndTime: aws.Time(endTime), + ScanBy: aws.String("TimestampAscending"), + } + for _, query := range queries { + // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { + return nil, errors.New("too long query period") + } + + mdq := &cloudwatch.MetricDataQuery{ + Id: aws.String(query.Id), + ReturnData: aws.Bool(query.ReturnData), + } + if query.Expression != "" { + mdq.Expression = aws.String(query.Expression) + } else { + mdq.MetricStat = &cloudwatch.MetricStat{ + Metric: &cloudwatch.Metric{ + Namespace: aws.String(query.Namespace), + MetricName: aws.String(query.MetricName), + }, + Period: aws.Int64(int64(query.Period)), + } + for _, d := range query.Dimensions { + mdq.MetricStat.Metric.Dimensions = append(mdq.MetricStat.Metric.Dimensions, + &cloudwatch.Dimension{ + Name: d.Name, + Value: d.Value, + }) + } + if len(query.Statistics) == 1 { + 
mdq.MetricStat.Stat = query.Statistics[0] + } else { + mdq.MetricStat.Stat = query.ExtendedStatistics[0] + } + } + params.MetricDataQueries = append(params.MetricDataQueries, mdq) + } + + nextToken := "" + mdr := make(map[string]*cloudwatch.MetricDataResult) + for { + if nextToken != "" { + params.NextToken = aws.String(nextToken) + } + resp, err := client.GetMetricDataWithContext(ctx, params) + if err != nil { + return queryResponses, err + } + metrics.M_Aws_CloudWatch_GetMetricData.Add(float64(len(params.MetricDataQueries))) + + for _, r := range resp.MetricDataResults { + if _, ok := mdr[*r.Id]; !ok { + mdr[*r.Id] = r + } else { + mdr[*r.Id].Timestamps = append(mdr[*r.Id].Timestamps, r.Timestamps...) + mdr[*r.Id].Values = append(mdr[*r.Id].Values, r.Values...) + } + } + + if resp.NextToken == nil || *resp.NextToken == "" { + break + } + nextToken = *resp.NextToken + } + + for i, r := range mdr { + if *r.StatusCode != "Complete" { + return queryResponses, fmt.Errorf("Part of query is failed: %s", *r.StatusCode) + } + + queryRes := tsdb.NewQueryResult() + queryRes.RefId = queries[i].RefId + query := queries[*r.Id] + + series := tsdb.TimeSeries{ + Tags: map[string]string{}, + Points: make([]tsdb.TimePoint, 0), + } + for _, d := range query.Dimensions { + series.Tags[*d.Name] = *d.Value + } + s := "" + if len(query.Statistics) == 1 { + s = *query.Statistics[0] + } else { + s = *query.ExtendedStatistics[0] + } + series.Name = formatAlias(query, s, series.Tags) + + for j, t := range r.Timestamps { + expectedTimestamp := r.Timestamps[j].Add(time.Duration(query.Period) * time.Second) + if j > 0 && expectedTimestamp.Before(*t) { + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(expectedTimestamp.Unix()*1000))) + } + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(*r.Values[j]), float64((*t).Unix())*1000)) + } + + queryRes.Series = append(queryRes.Series, &series) + queryResponses = append(queryResponses, 
queryRes) + } + + return queryResponses, nil +} + func parseDimensions(model *simplejson.Json) ([]*cloudwatch.Dimension, error) { var result []*cloudwatch.Dimension @@ -257,6 +410,9 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { return nil, err } + id := model.Get("id").MustString("") + expression := model.Get("expression").MustString("") + dimensions, err := parseDimensions(model) if err != nil { return nil, err @@ -295,6 +451,7 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { alias = "{{metric}}_{{stat}}" } + returnData := model.Get("returnData").MustBool(false) highResolution := model.Get("highResolution").MustBool(false) return &CloudWatchQuery{ @@ -306,11 +463,18 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { ExtendedStatistics: aws.StringSlice(extendedStatistics), Period: period, Alias: alias, + Id: id, + Expression: expression, + ReturnData: returnData, HighResolution: highResolution, }, nil } func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]string) string { + if len(query.Id) > 0 && len(query.Expression) > 0 { + return query.Id + } + data := map[string]string{} data["region"] = query.Region data["namespace"] = query.Namespace @@ -338,6 +502,7 @@ func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]stri func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatchQuery) (*tsdb.QueryResult, error) { queryRes := tsdb.NewQueryResult() + queryRes.RefId = query.RefId var value float64 for _, s := range append(query.Statistics, query.ExtendedStatistics...) 
{ series := tsdb.TimeSeries{ diff --git a/pkg/tsdb/cloudwatch/types.go b/pkg/tsdb/cloudwatch/types.go index 0737b64686da1..1225fb9b31ba8 100644 --- a/pkg/tsdb/cloudwatch/types.go +++ b/pkg/tsdb/cloudwatch/types.go @@ -5,6 +5,7 @@ import ( ) type CloudWatchQuery struct { + RefId string Region string Namespace string MetricName string @@ -13,5 +14,8 @@ type CloudWatchQuery struct { ExtendedStatistics []*string Period int Alias string + Id string + Expression string + ReturnData bool HighResolution bool } diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 82a9b8f0d88af..ec908aeb9de8b 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -68,6 +68,7 @@ func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSour engine.SetMaxOpenConns(10) engine.SetMaxIdleConns(10) + engineCache.versions[dsInfo.Id] = dsInfo.Version engineCache.cache[dsInfo.Id] = engine e.XormEngine = engine diff --git a/pkg/tsdb/testdata/testdata.go b/pkg/tsdb/testdata/testdata.go index a1ab250ad373f..c2c2ea3f696c4 100644 --- a/pkg/tsdb/testdata/testdata.go +++ b/pkg/tsdb/testdata/testdata.go @@ -21,7 +21,7 @@ func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, err } func init() { - tsdb.RegisterTsdbQueryEndpoint("grafana-testdata-datasource", NewTestDataExecutor) + tsdb.RegisterTsdbQueryEndpoint("testdata", NewTestDataExecutor) } func (e *TestDataExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/containers/Explore/Explore.tsx index deebe84f2c8b3..178e53198d454 100644 --- a/public/app/containers/Explore/Explore.tsx +++ b/public/app/containers/Explore/Explore.tsx @@ -1,16 +1,20 @@ import React from 'react'; import { hot } from 'react-hot-loader'; +import Select from 'react-select'; + +import kbn from 'app/core/utils/kbn'; import colors from 'app/core/utils/colors'; import TimeSeries from 
'app/core/time_series2'; +import { decodePathComponent } from 'app/core/utils/location_util'; +import { parse as parseDate } from 'app/core/utils/datemath'; import ElapsedTime from './ElapsedTime'; import QueryRows from './QueryRows'; import Graph from './Graph'; +import Logs from './Logs'; import Table from './Table'; import TimePicker, { DEFAULT_RANGE } from './TimePicker'; -import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; -import { buildQueryOptions, ensureQueries, generateQueryKey, hasQuery } from './utils/query'; -import { decodePathComponent } from 'app/core/utils/location_util'; +import { ensureQueries, generateQueryKey, hasQuery } from './utils/query'; function makeTimeSeriesList(dataList, options) { return dataList.map((seriesData, index) => { @@ -30,74 +34,136 @@ function makeTimeSeriesList(dataList, options) { }); } -function parseInitialState(initial) { - try { - const parsed = JSON.parse(decodePathComponent(initial)); - return { - queries: parsed.queries.map(q => q.query), - range: parsed.range, - }; - } catch (e) { - console.error(e); - return { queries: [], range: DEFAULT_RANGE }; +function parseInitialState(initial: string | undefined) { + if (initial) { + try { + const parsed = JSON.parse(decodePathComponent(initial)); + return { + datasource: parsed.datasource, + queries: parsed.queries.map(q => q.query), + range: parsed.range, + }; + } catch (e) { + console.error(e); + } } + return { datasource: null, queries: [], range: DEFAULT_RANGE }; } interface IExploreState { datasource: any; datasourceError: any; - datasourceLoading: any; + datasourceLoading: boolean | null; + datasourceMissing: boolean; graphResult: any; + initialDatasource?: string; latency: number; loading: any; + logsResult: any; queries: any; queryError: any; range: any; requestOptions: any; showingGraph: boolean; + showingLogs: boolean; showingTable: boolean; + supportsGraph: boolean | null; + supportsLogs: boolean | null; + supportsTable: boolean | null; 
tableResult: any; } -// @observer export class Explore extends React.Component { - datasourceSrv: DatasourceSrv; + el: any; constructor(props) { super(props); - const { range, queries } = parseInitialState(props.routeParams.initial); + const { datasource, queries, range } = parseInitialState(props.routeParams.state); this.state = { datasource: null, datasourceError: null, - datasourceLoading: true, + datasourceLoading: null, + datasourceMissing: false, graphResult: null, + initialDatasource: datasource, latency: 0, loading: false, + logsResult: null, queries: ensureQueries(queries), queryError: null, range: range || { ...DEFAULT_RANGE }, requestOptions: null, showingGraph: true, + showingLogs: true, showingTable: true, + supportsGraph: null, + supportsLogs: null, + supportsTable: null, tableResult: null, ...props.initialState, }; } async componentDidMount() { - const datasource = await this.props.datasourceSrv.get(); - const testResult = await datasource.testDatasource(); - if (testResult.status === 'success') { - this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + const { datasourceSrv } = this.props; + const { initialDatasource } = this.state; + if (!datasourceSrv) { + throw new Error('No datasource service passed as props.'); + } + const datasources = datasourceSrv.getExploreSources(); + if (datasources.length > 0) { + this.setState({ datasourceLoading: true }); + // Priority: datasource in url, default datasource, first explore datasource + let datasource; + if (initialDatasource) { + datasource = await datasourceSrv.get(initialDatasource); + } else { + datasource = await datasourceSrv.get(); + } + if (!datasource.meta.explore) { + datasource = await datasourceSrv.get(datasources[0].name); + } + this.setDatasource(datasource); } else { - this.setState({ datasource: null, datasourceError: testResult.message, datasourceLoading: false }); + this.setState({ datasourceMissing: true }); } } 
componentDidCatch(error) { + this.setState({ datasourceError: error }); console.error(error); } + async setDatasource(datasource) { + const supportsGraph = datasource.meta.metrics; + const supportsLogs = datasource.meta.logs; + const supportsTable = datasource.meta.metrics; + let datasourceError = null; + + try { + const testResult = await datasource.testDatasource(); + datasourceError = testResult.status === 'success' ? null : testResult.message; + } catch (error) { + datasourceError = (error && error.statusText) || error; + } + + this.setState( + { + datasource, + datasourceError, + supportsGraph, + supportsLogs, + supportsTable, + datasourceLoading: false, + }, + () => datasourceError === null && this.handleSubmit() + ); + } + + getRef = el => { + this.el = el; + }; + handleAddQueryRow = index => { const { queries } = this.state; const nextQueries = [ @@ -108,6 +174,19 @@ export class Explore extends React.Component { this.setState({ queries: nextQueries }); }; + handleChangeDatasource = async option => { + this.setState({ + datasource: null, + datasourceError: null, + datasourceLoading: true, + graphResult: null, + logsResult: null, + tableResult: null, + }); + const datasource = await this.props.datasourceSrv.get(option.value); + this.setDatasource(datasource); + }; + handleChangeQuery = (query, index) => { const { queries } = this.state; const nextQuery = { @@ -138,6 +217,10 @@ export class Explore extends React.Component { this.setState(state => ({ showingGraph: !state.showingGraph })); }; + handleClickLogsButton = () => { + this.setState(state => ({ showingLogs: !state.showingLogs })); + }; + handleClickSplit = () => { const { onChangeSplit } = this.props; if (onChangeSplit) { @@ -159,29 +242,45 @@ export class Explore extends React.Component { }; handleSubmit = () => { - const { showingGraph, showingTable } = this.state; - if (showingTable) { + const { showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable } = this.state; + if 
(showingTable && supportsTable) { this.runTableQuery(); } - if (showingGraph) { + if (showingGraph && supportsGraph) { this.runGraphQuery(); } + if (showingLogs && supportsLogs) { + this.runLogsQuery(); + } }; - async runGraphQuery() { + buildQueryOptions(targetOptions: { format: string; instant?: boolean }) { const { datasource, queries, range } = this.state; + const resolution = this.el.offsetWidth; + const absoluteRange = { + from: parseDate(range.from, false), + to: parseDate(range.to, true), + }; + const { interval } = kbn.calculateInterval(absoluteRange, resolution, datasource.interval); + const targets = queries.map(q => ({ + ...targetOptions, + expr: q.query, + })); + return { + interval, + range, + targets, + }; + } + + async runGraphQuery() { + const { datasource, queries } = this.state; if (!hasQuery(queries)) { return; } this.setState({ latency: 0, loading: true, graphResult: null, queryError: null }); const now = Date.now(); - const options = buildQueryOptions({ - format: 'time_series', - interval: datasource.interval, - instant: false, - range, - queries: queries.map(q => q.query), - }); + const options = this.buildQueryOptions({ format: 'time_series', instant: false }); try { const res = await datasource.query(options); const result = makeTimeSeriesList(res.data, options); @@ -195,18 +294,15 @@ export class Explore extends React.Component { } async runTableQuery() { - const { datasource, queries, range } = this.state; + const { datasource, queries } = this.state; if (!hasQuery(queries)) { return; } this.setState({ latency: 0, loading: true, queryError: null, tableResult: null }); const now = Date.now(); - const options = buildQueryOptions({ + const options = this.buildQueryOptions({ format: 'table', - interval: datasource.interval, instant: true, - range, - queries: queries.map(q => q.query), }); try { const res = await datasource.query(options); @@ -220,35 +316,71 @@ export class Explore extends React.Component { } } + async runLogsQuery() { + const 
{ datasource, queries } = this.state; + if (!hasQuery(queries)) { + return; + } + this.setState({ latency: 0, loading: true, queryError: null, logsResult: null }); + const now = Date.now(); + const options = this.buildQueryOptions({ + format: 'logs', + }); + + try { + const res = await datasource.query(options); + const logsData = res.data; + const latency = Date.now() - now; + this.setState({ latency, loading: false, logsResult: logsData, requestOptions: options }); + } catch (response) { + console.error(response); + const queryError = response.data ? response.data.error : response; + this.setState({ loading: false, queryError }); + } + } + request = url => { const { datasource } = this.state; return datasource.metadataRequest(url); }; render() { - const { position, split } = this.props; + const { datasourceSrv, position, split } = this.props; const { datasource, datasourceError, datasourceLoading, + datasourceMissing, graphResult, latency, loading, + logsResult, queries, queryError, range, requestOptions, showingGraph, + showingLogs, showingTable, + supportsGraph, + supportsLogs, + supportsTable, tableResult, } = this.state; const showingBoth = showingGraph && showingTable; const graphHeight = showingBoth ? '200px' : '400px'; const graphButtonActive = showingBoth || showingGraph ? 'active' : ''; + const logsButtonActive = showingLogs ? 'active' : ''; const tableButtonActive = showingBoth || showingTable ? 'active' : ''; const exploreClass = split ? 'explore explore-split' : 'explore'; + const datasources = datasourceSrv.getExploreSources().map(ds => ({ + value: ds.name, + label: ds.name, + })); + const selectedDatasource = datasource ? datasource.name : undefined; + return ( -
+
{position === 'left' ? (
@@ -264,6 +396,18 @@ export class Explore extends React.Component {
)} + {!datasourceMissing ? ( +
+ +
+
+ + +
+
+ +
+
+ - +
-
- - - +
+ + + Alias replacement variables:
  • {{metric}}
  • @@ -54,12 +71,12 @@
-
- +
+
-
-
+
+
diff --git a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts index 0b47ebd70690a..689cf270febcd 100644 --- a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts +++ b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts @@ -27,6 +27,9 @@ export class CloudWatchQueryParameterCtrl { target.dimensions = target.dimensions || {}; target.period = target.period || ''; target.region = target.region || 'default'; + target.id = target.id || ''; + target.expression = target.expression || ''; + target.returnData = target.returnData || false; target.highResolution = target.highResolution || false; $scope.regionSegment = uiSegmentSrv.getSegmentForValue($scope.target.region, 'select region'); diff --git a/public/app/plugins/datasource/logging/README.md b/public/app/plugins/datasource/logging/README.md new file mode 100644 index 0000000000000..333726059735a --- /dev/null +++ b/public/app/plugins/datasource/logging/README.md @@ -0,0 +1,3 @@ +# Grafana Logging Datasource - Native Plugin + +This is a **built in** datasource that allows you to connect to Grafana's logging service. 
\ No newline at end of file diff --git a/public/app/plugins/datasource/logging/datasource.jest.ts b/public/app/plugins/datasource/logging/datasource.jest.ts new file mode 100644 index 0000000000000..212d352dfca4a --- /dev/null +++ b/public/app/plugins/datasource/logging/datasource.jest.ts @@ -0,0 +1,38 @@ +import { parseQuery } from './datasource'; + +describe('parseQuery', () => { + it('returns empty for empty string', () => { + expect(parseQuery('')).toEqual({ + query: '', + regexp: '', + }); + }); + + it('returns regexp for strings without query', () => { + expect(parseQuery('test')).toEqual({ + query: '', + regexp: 'test', + }); + }); + + it('returns query for strings without regexp', () => { + expect(parseQuery('{foo="bar"}')).toEqual({ + query: '{foo="bar"}', + regexp: '', + }); + }); + + it('returns query for strings with query and search string', () => { + expect(parseQuery('x {foo="bar"}')).toEqual({ + query: '{foo="bar"}', + regexp: 'x', + }); + }); + + it('returns query for strings with query and regexp', () => { + expect(parseQuery('{foo="bar"} x|y')).toEqual({ + query: '{foo="bar"}', + regexp: 'x|y', + }); + }); +}); diff --git a/public/app/plugins/datasource/logging/datasource.ts b/public/app/plugins/datasource/logging/datasource.ts new file mode 100644 index 0000000000000..22edba5807aa9 --- /dev/null +++ b/public/app/plugins/datasource/logging/datasource.ts @@ -0,0 +1,134 @@ +import _ from 'lodash'; + +import * as dateMath from 'app/core/utils/datemath'; + +import { processStreams } from './result_transformer'; + +const DEFAULT_LIMIT = 100; + +const DEFAULT_QUERY_PARAMS = { + direction: 'BACKWARD', + limit: DEFAULT_LIMIT, + regexp: '', + query: '', +}; + +const QUERY_REGEXP = /({\w+="[^"]+"})?\s*(\w[^{]+)?\s*({\w+="[^"]+"})?/; +export function parseQuery(input: string) { + const match = input.match(QUERY_REGEXP); + let query = ''; + let regexp = ''; + + if (match) { + if (match[1]) { + query = match[1]; + } + if (match[2]) { + regexp = 
match[2].trim(); + } + if (match[3]) { + if (match[1]) { + query = `${match[1].slice(0, -1)},${match[3].slice(1)}`; + } else { + query = match[3]; + } + } + } + + return { query, regexp }; +} + +function serializeParams(data: any) { + return Object.keys(data) + .map(k => { + const v = data[k]; + return encodeURIComponent(k) + '=' + encodeURIComponent(v); + }) + .join('&'); +} + +export default class LoggingDatasource { + /** @ngInject */ + constructor(private instanceSettings, private backendSrv, private templateSrv) {} + + _request(apiUrl: string, data?, options?: any) { + const baseUrl = this.instanceSettings.url; + const params = data ? serializeParams(data) : ''; + const url = `${baseUrl}${apiUrl}?${params}`; + const req = { + ...options, + url, + }; + return this.backendSrv.datasourceRequest(req); + } + + prepareQueryTarget(target, options) { + const interpolated = this.templateSrv.replace(target.expr); + const start = this.getTime(options.range.from, false); + const end = this.getTime(options.range.to, true); + return { + ...DEFAULT_QUERY_PARAMS, + ...parseQuery(interpolated), + start, + end, + }; + } + + query(options) { + const queryTargets = options.targets + .filter(target => target.expr) + .map(target => this.prepareQueryTarget(target, options)); + if (queryTargets.length === 0) { + return Promise.resolve({ data: [] }); + } + + const queries = queryTargets.map(target => this._request('/api/prom/query', target)); + + return Promise.all(queries).then((results: any[]) => { + // Flatten streams from multiple queries + const allStreams = results.reduce((acc, response, i) => { + const streams = response.data.streams || []; + // Inject search for match highlighting + const search = queryTargets[i].regexp; + streams.forEach(s => { + s.search = search; + }); + return [...acc, ...streams]; + }, []); + const model = processStreams(allStreams, DEFAULT_LIMIT); + return { data: model }; + }); + } + + metadataRequest(url) { + // HACK to get label values for {job=|}, 
will be replaced when implementing LoggingQueryField + const apiUrl = url.replace('v1', 'prom'); + return this._request(apiUrl, { silent: true }).then(res => { + const data = { data: { data: res.data.values || [] } }; + return data; + }); + } + + getTime(date, roundUp) { + if (_.isString(date)) { + date = dateMath.parse(date, roundUp); + } + return Math.ceil(date.valueOf() * 1e6); + } + + testDatasource() { + return this._request('/api/prom/label') + .then(res => { + if (res && res.data && res.data.values && res.data.values.length > 0) { + return { status: 'success', message: 'Data source connected and labels found.' }; + } + return { + status: 'error', + message: 'Data source connected, but no labels received. Verify that logging is configured properly.', + }; + }) + .catch(err => { + return { status: 'error', message: err.message }; + }); + } +} diff --git a/public/app/plugins/datasource/logging/img/grafana_icon.svg b/public/app/plugins/datasource/logging/img/grafana_icon.svg new file mode 100644 index 0000000000000..72702223dc77b --- /dev/null +++ b/public/app/plugins/datasource/logging/img/grafana_icon.svg @@ -0,0 +1,57 @@ + + + + + + + + + + + + diff --git a/public/app/plugins/datasource/logging/module.ts b/public/app/plugins/datasource/logging/module.ts new file mode 100644 index 0000000000000..5e3ffb3282a89 --- /dev/null +++ b/public/app/plugins/datasource/logging/module.ts @@ -0,0 +1,7 @@ +import Datasource from './datasource'; + +export class LoggingConfigCtrl { + static templateUrl = 'partials/config.html'; +} + +export { Datasource, LoggingConfigCtrl as ConfigCtrl }; diff --git a/public/app/plugins/datasource/logging/partials/config.html b/public/app/plugins/datasource/logging/partials/config.html new file mode 100644 index 0000000000000..8e79cc0adccb1 --- /dev/null +++ b/public/app/plugins/datasource/logging/partials/config.html @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/public/app/plugins/datasource/logging/plugin.json 
b/public/app/plugins/datasource/logging/plugin.json new file mode 100644 index 0000000000000..9aa844f21cbbf --- /dev/null +++ b/public/app/plugins/datasource/logging/plugin.json @@ -0,0 +1,28 @@ +{ + "type": "datasource", + "name": "Grafana Logging", + "id": "logging", + "metrics": false, + "alerting": false, + "annotations": false, + "logs": true, + "explore": true, + "info": { + "description": "Grafana Logging Data Source for Grafana", + "author": { + "name": "Grafana Project", + "url": "https://grafana.com" + }, + "logos": { + "small": "img/grafana_icon.svg", + "large": "img/grafana_icon.svg" + }, + "links": [ + { + "name": "Grafana Logging", + "url": "https://grafana.com/" + } + ], + "version": "5.3.0" + } +} \ No newline at end of file diff --git a/public/app/plugins/datasource/logging/result_transformer.jest.ts b/public/app/plugins/datasource/logging/result_transformer.jest.ts new file mode 100644 index 0000000000000..0d203f748ba49 --- /dev/null +++ b/public/app/plugins/datasource/logging/result_transformer.jest.ts @@ -0,0 +1,45 @@ +import { LogLevel } from 'app/core/logs_model'; + +import { getLogLevel, getSearchMatches } from './result_transformer'; + +describe('getSearchMatches()', () => { + it('gets no matches for when search and or line are empty', () => { + expect(getSearchMatches('', '')).toEqual([]); + expect(getSearchMatches('foo', '')).toEqual([]); + expect(getSearchMatches('', 'foo')).toEqual([]); + }); + + it('gets no matches for unmatched search string', () => { + expect(getSearchMatches('foo', 'bar')).toEqual([]); + }); + + it('gets matches for matched search string', () => { + expect(getSearchMatches('foo', 'foo')).toEqual([{ length: 3, start: 0, text: 'foo' }]); + expect(getSearchMatches(' foo ', 'foo')).toEqual([{ length: 3, start: 1, text: 'foo' }]); + }); + + expect(getSearchMatches(' foo foo bar ', 'foo|bar')).toEqual([ + { length: 3, start: 1, text: 'foo' }, + { length: 3, start: 5, text: 'foo' }, + { length: 3, start: 9, text: 'bar' }, + 
]); +}); + +describe('getLoglevel()', () => { + it('returns no log level on empty line', () => { + expect(getLogLevel('')).toBe(undefined); + }); + + it('returns no log level on when level is part of a word', () => { + expect(getLogLevel('this is a warning')).toBe(undefined); + }); + + it('returns log level on line contains a log level', () => { + expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn); + expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn); + }); + + it('returns first log level found', () => { + expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn); + }); +}); diff --git a/public/app/plugins/datasource/logging/result_transformer.ts b/public/app/plugins/datasource/logging/result_transformer.ts new file mode 100644 index 0000000000000..e238778614cc0 --- /dev/null +++ b/public/app/plugins/datasource/logging/result_transformer.ts @@ -0,0 +1,71 @@ +import _ from 'lodash'; +import moment from 'moment'; + +import { LogLevel, LogsModel, LogRow } from 'app/core/logs_model'; + +export function getLogLevel(line: string): LogLevel { + if (!line) { + return undefined; + } + let level: LogLevel; + Object.keys(LogLevel).forEach(key => { + if (!level) { + const regexp = new RegExp(`\\b${key}\\b`, 'i'); + if (regexp.test(line)) { + level = LogLevel[key]; + } + } + }); + return level; +} + +export function getSearchMatches(line: string, search: string) { + // Empty search can send re.exec() into infinite loop, exit early + if (!line || !search) { + return []; + } + const regexp = new RegExp(`(?:${search})`, 'g'); + const matches = []; + let match; + while ((match = regexp.exec(line))) { + matches.push({ + text: match[0], + start: match.index, + length: match[0].length, + }); + } + return matches; +} + +export function processEntry(entry: { line: string; timestamp: string }, stream): LogRow { + const { line, timestamp } = entry; + const { labels } = stream; + const key = 
`EK${timestamp}${labels}`; + const time = moment(timestamp); + const timeFromNow = time.fromNow(); + const timeLocal = time.format('YYYY-MM-DD HH:mm:ss'); + const searchMatches = getSearchMatches(line, stream.search); + const logLevel = getLogLevel(line); + + return { + key, + logLevel, + searchMatches, + timeFromNow, + timeLocal, + entry: line, + timestamp: timestamp, + }; +} + +export function processStreams(streams, limit?: number): LogsModel { + const combinedEntries = streams.reduce((acc, stream) => { + return [...acc, ...stream.entries.map(entry => processEntry(entry, stream))]; + }, []); + const sortedEntries = _.chain(combinedEntries) + .sortBy('timestamp') + .reverse() + .slice(0, limit || combinedEntries.length) + .value(); + return { rows: sortedEntries }; +} diff --git a/public/app/plugins/datasource/opentsdb/datasource.ts b/public/app/plugins/datasource/opentsdb/datasource.ts index 39ad6c64e1143..07ec4a794eca3 100644 --- a/public/app/plugins/datasource/opentsdb/datasource.ts +++ b/public/app/plugins/datasource/opentsdb/datasource.ts @@ -480,17 +480,17 @@ export default class OpenTsDatasource { mapMetricsToTargets(metrics, options, tsdbVersion) { var interpolatedTagValue, arrTagV; - return _.map(metrics, function(metricData) { + return _.map(metrics, metricData => { if (tsdbVersion === 3) { return metricData.query.index; } else { - return _.findIndex(options.targets, function(target) { + return _.findIndex(options.targets, target => { if (target.filters && target.filters.length > 0) { return target.metric === metricData.metric; } else { return ( target.metric === metricData.metric && - _.every(target.tags, function(tagV, tagK) { + _.every(target.tags, (tagV, tagK) => { interpolatedTagValue = this.templateSrv.replace(tagV, options.scopedVars, 'pipe'); arrTagV = interpolatedTagValue.split('|'); return _.includes(arrTagV, metricData.tags[tagK]) || interpolatedTagValue === '*'; diff --git a/public/app/plugins/datasource/prometheus/datasource.ts 
b/public/app/plugins/datasource/prometheus/datasource.ts index 88d6141696d9f..75a946d6f3687 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -17,11 +17,17 @@ export function alignRange(start, end, step) { } export function prometheusRegularEscape(value) { - return value.replace(/'/g, "\\\\'"); + if (typeof value === 'string') { + return value.replace(/'/g, "\\\\'"); + } + return value; } export function prometheusSpecialRegexEscape(value) { - return prometheusRegularEscape(value.replace(/\\/g, '\\\\\\\\').replace(/[$^*{}\[\]+?.()]/g, '\\\\$&')); + if (typeof value === 'string') { + return prometheusRegularEscape(value.replace(/\\/g, '\\\\\\\\').replace(/[$^*{}\[\]+?.()]/g, '\\\\$&')); + } + return value; } export class PrometheusDatasource { @@ -190,13 +196,14 @@ export class PrometheusDatasource { var intervalFactor = target.intervalFactor || 1; // Adjust the interval to take into account any specified minimum and interval factor plus Prometheus limits var adjustedInterval = this.adjustInterval(interval, minInterval, range, intervalFactor); - var scopedVars = options.scopedVars; + var scopedVars = { ...options.scopedVars, ...this.getRangeScopedVars() }; // If the interval was adjusted, make a shallow copy of scopedVars with updated interval vars if (interval !== adjustedInterval) { interval = adjustedInterval; scopedVars = Object.assign({}, options.scopedVars, { __interval: { text: interval + 's', value: interval + 's' }, - __interval_ms: { text: String(interval * 1000), value: String(interval * 1000) }, + __interval_ms: { text: interval * 1000, value: interval * 1000 }, + ...this.getRangeScopedVars(), }); } query.step = interval; @@ -279,11 +286,26 @@ export class PrometheusDatasource { return this.$q.when([]); } - let interpolated = this.templateSrv.replace(query, {}, this.interpolateQueryExpr); + let scopedVars = { + __interval: { text: this.interval, value: this.interval }, + 
__interval_ms: { text: kbn.interval_to_ms(this.interval), value: kbn.interval_to_ms(this.interval) }, + ...this.getRangeScopedVars(), + }; + let interpolated = this.templateSrv.replace(query, scopedVars, this.interpolateQueryExpr); var metricFindQuery = new PrometheusMetricFindQuery(this, interpolated, this.timeSrv); return metricFindQuery.process(); } + getRangeScopedVars() { + let range = this.timeSrv.timeRange(); + let msRange = range.to.diff(range.from); + let regularRange = kbn.secondsToHms(msRange / 1000); + return { + __range_ms: { text: msRange, value: msRange }, + __range: { text: regularRange, value: regularRange }, + }; + } + annotationQuery(options) { var annotation = options.annotation; var expr = annotation.expr || ''; @@ -357,6 +379,7 @@ export class PrometheusDatasource { state = { ...state, queries, + datasource: this.name, }; } return state; diff --git a/public/app/plugins/datasource/prometheus/plugin.json b/public/app/plugins/datasource/prometheus/plugin.json index 88847765159dc..2b723fd0b9d95 100644 --- a/public/app/plugins/datasource/prometheus/plugin.json +++ b/public/app/plugins/datasource/prometheus/plugin.json @@ -2,21 +2,30 @@ "type": "datasource", "name": "Prometheus", "id": "prometheus", - "includes": [ - {"type": "dashboard", "name": "Prometheus Stats", "path": "dashboards/prometheus_stats.json"}, - {"type": "dashboard", "name": "Prometheus 2.0 Stats", "path": "dashboards/prometheus_2_stats.json"}, - {"type": "dashboard", "name": "Grafana Stats", "path": "dashboards/grafana_stats.json"} + { + "type": "dashboard", + "name": "Prometheus Stats", + "path": "dashboards/prometheus_stats.json" + }, + { + "type": "dashboard", + "name": "Prometheus 2.0 Stats", + "path": "dashboards/prometheus_2_stats.json" + }, + { + "type": "dashboard", + "name": "Grafana Stats", + "path": "dashboards/grafana_stats.json" + } ], - "metrics": true, "alerting": true, "annotations": true, - + "explore": true, "queryOptions": { "minInterval": true }, - "info": { 
"description": "Prometheus Data Source for Grafana", "author": { @@ -28,8 +37,11 @@ "large": "img/prometheus_logo.svg" }, "links": [ - {"name": "Prometheus", "url": "https://prometheus.io/"} + { + "name": "Prometheus", + "url": "https://prometheus.io/" + } ], "version": "5.0.0" } -} +} \ No newline at end of file diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts index 219b990e5dd37..b8b2b50f59095 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts @@ -2,6 +2,7 @@ import _ from 'lodash'; import moment from 'moment'; import q from 'q'; import { alignRange, PrometheusDatasource, prometheusSpecialRegexEscape, prometheusRegularEscape } from '../datasource'; +jest.mock('../metric_find_query'); describe('PrometheusDatasource', () => { let ctx: any = {}; @@ -18,7 +19,14 @@ describe('PrometheusDatasource', () => { ctx.templateSrvMock = { replace: a => a, }; - ctx.timeSrvMock = {}; + ctx.timeSrvMock = { + timeRange: () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }, + }; beforeEach(() => { ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); @@ -166,6 +174,9 @@ describe('PrometheusDatasource', () => { }); describe('Prometheus regular escaping', function() { + it('should not escape non-string', function() { + expect(prometheusRegularEscape(12)).toEqual(12); + }); it('should not escape simple string', function() { expect(prometheusRegularEscape('cryptodepression')).toEqual('cryptodepression'); }); @@ -201,4 +212,37 @@ describe('PrometheusDatasource', () => { expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?'); }); }); + + describe('metricFindQuery', () => { + beforeEach(() => { + let query = 
'query_result(topk(5,rate(http_request_duration_microseconds_count[$__interval])))'; + ctx.templateSrvMock.replace = jest.fn(); + ctx.timeSrvMock.timeRange = () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }; + ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); + ctx.ds.metricFindQuery(query); + }); + + it('should call templateSrv.replace with scopedVars', () => { + expect(ctx.templateSrvMock.replace.mock.calls[0][1]).toBeDefined(); + }); + + it('should have the correct range and range_ms', () => { + let range = ctx.templateSrvMock.replace.mock.calls[0][1].__range; + let rangeMs = ctx.templateSrvMock.replace.mock.calls[0][1].__range_ms; + expect(range).toEqual({ text: '21s', value: '21s' }); + expect(rangeMs).toEqual({ text: 21031, value: 21031 }); + }); + + it('should pass the default interval value', () => { + let interval = ctx.templateSrvMock.replace.mock.calls[0][1].__interval; + let intervalMs = ctx.templateSrvMock.replace.mock.calls[0][1].__interval_ms; + expect(interval).toEqual({ text: '15s', value: '15s' }); + expect(intervalMs).toEqual({ text: 15000, value: 15000 }); + }); + }); }); diff --git a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts index 09aa934dd63e5..c5da671b75768 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts @@ -452,7 +452,7 @@ describe('PrometheusDatasource', function() { interval: '10s', scopedVars: { __interval: { text: '10s', value: '10s' }, - __interval_ms: { text: String(10 * 1000), value: String(10 * 1000) }, + __interval_ms: { text: 10 * 1000, value: 10 * 1000 }, }, }; var urlExpected = @@ -463,8 +463,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('10s'); 
expect(query.scopedVars.__interval.value).to.be('10s'); - expect(query.scopedVars.__interval_ms.text).to.be(String(10 * 1000)); - expect(query.scopedVars.__interval_ms.value).to.be(String(10 * 1000)); + expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000); + expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000); }); it('should be min interval when it is greater than auto interval', function() { var query = { @@ -479,7 +479,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, + __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, }, }; var urlExpected = @@ -490,8 +490,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); - expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); }); it('should account for intervalFactor', function() { var query = { @@ -507,7 +507,7 @@ describe('PrometheusDatasource', function() { interval: '10s', scopedVars: { __interval: { text: '10s', value: '10s' }, - __interval_ms: { text: String(10 * 1000), value: String(10 * 1000) }, + __interval_ms: { text: 10 * 1000, value: 10 * 1000 }, }, }; var urlExpected = @@ -518,8 +518,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('10s'); expect(query.scopedVars.__interval.value).to.be('10s'); - expect(query.scopedVars.__interval_ms.text).to.be(String(10 * 1000)); - expect(query.scopedVars.__interval_ms.value).to.be(String(10 * 1000)); + expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000); + expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000); }); it('should be interval 
* intervalFactor when greater than min interval', function() { var query = { @@ -535,7 +535,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, + __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, }, }; var urlExpected = @@ -546,8 +546,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); - expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); }); it('should be min interval when greater than interval * intervalFactor', function() { var query = { @@ -563,7 +563,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, + __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, }, }; var urlExpected = @@ -574,8 +574,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); - expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); }); it('should be determined by the 11000 data points limit, accounting for intervalFactor', function() { var query = { @@ -590,7 +590,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, + __interval_ms: { text: 5 * 
1000, value: 5 * 1000 }, }, }; var end = 7 * 24 * 60 * 60; @@ -609,8 +609,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); - expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); }); }); }); diff --git a/public/app/plugins/app/testdata/datasource/datasource.ts b/public/app/plugins/datasource/testdata/datasource.ts similarity index 100% rename from public/app/plugins/app/testdata/datasource/datasource.ts rename to public/app/plugins/datasource/testdata/datasource.ts diff --git a/public/app/plugins/app/testdata/datasource/module.ts b/public/app/plugins/datasource/testdata/module.ts similarity index 100% rename from public/app/plugins/app/testdata/datasource/module.ts rename to public/app/plugins/datasource/testdata/module.ts diff --git a/public/app/plugins/app/testdata/partials/query.editor.html b/public/app/plugins/datasource/testdata/partials/query.editor.html similarity index 99% rename from public/app/plugins/app/testdata/partials/query.editor.html rename to public/app/plugins/datasource/testdata/partials/query.editor.html index 247918bce1ffd..fc16f2a8b440b 100644 --- a/public/app/plugins/app/testdata/partials/query.editor.html +++ b/public/app/plugins/datasource/testdata/partials/query.editor.html @@ -37,4 +37,3 @@
- diff --git a/public/app/plugins/app/testdata/datasource/plugin.json b/public/app/plugins/datasource/testdata/plugin.json similarity index 60% rename from public/app/plugins/app/testdata/datasource/plugin.json rename to public/app/plugins/datasource/testdata/plugin.json index 80445dfb3bc73..774603982e027 100644 --- a/public/app/plugins/app/testdata/datasource/plugin.json +++ b/public/app/plugins/datasource/testdata/plugin.json @@ -1,7 +1,7 @@ { "type": "datasource", - "name": "Grafana TestDataDB", - "id": "grafana-testdata-datasource", + "name": "TestData DB", + "id": "testdata", "metrics": true, "alerting": true, @@ -13,8 +13,8 @@ "url": "https://grafana.com" }, "logos": { - "small": "", - "large": "" + "small": "../../../../img/grafana_icon.svg", + "large": "../../../../img/grafana_icon.svg" } } } diff --git a/public/app/plugins/app/testdata/datasource/query_ctrl.ts b/public/app/plugins/datasource/testdata/query_ctrl.ts similarity index 100% rename from public/app/plugins/app/testdata/datasource/query_ctrl.ts rename to public/app/plugins/datasource/testdata/query_ctrl.ts diff --git a/public/app/plugins/panel/graph/jquery.flot.events.js b/public/app/plugins/panel/graph/jquery.flot.events.js deleted file mode 100644 index 3ea3ca8f33044..0000000000000 --- a/public/app/plugins/panel/graph/jquery.flot.events.js +++ /dev/null @@ -1,604 +0,0 @@ -define([ - 'jquery', - 'lodash', - 'angular', - 'tether-drop', -], -function ($, _, angular, Drop) { - 'use strict'; - - function createAnnotationToolip(element, event, plot) { - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var eventManager = plot.getOptions().events.manager; - var tmpScope = $rootScope.$new(true); - tmpScope.event = event; - tmpScope.onEdit = function() { - eventManager.editEvent(event); - }; - - $compile(content)(tmpScope); - tmpScope.$digest(); - 
tmpScope.$destroy(); - - var drop = new Drop({ - target: element[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--annotation', - openOn: 'hover', - hoverCloseDelay: 200, - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - - drop.on('close', function() { - setTimeout(function() { - drop.destroy(); - }); - }); - }]); - } - - var markerElementToAttachTo = null; - - function createEditPopover(element, event, plot) { - var eventManager = plot.getOptions().events.manager; - if (eventManager.editorOpen) { - // update marker element to attach to (needed in case of legend on the right - // when there is a double render pass and the initial marker element is removed) - markerElementToAttachTo = element; - return; - } - - // mark as openend - eventManager.editorOpened(); - // set marker element to attache to - markerElementToAttachTo = element; - - // wait for element to be attached and positioned - setTimeout(function() { - - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var scope = $rootScope.$new(true); - var drop; - - scope.event = event; - scope.panelCtrl = eventManager.panelCtrl; - scope.close = function() { - drop.close(); - }; - - $compile(content)(scope); - scope.$digest(); - - drop = new Drop({ - target: markerElementToAttachTo[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--form', - openOn: 'click', - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - eventManager.editorOpened(); - - drop.on('close', function() { - // need timeout here in order call drop.destroy - setTimeout(function() { - eventManager.editorClosed(); - scope.$destroy(); - drop.destroy(); - }); - }); - }]); - - }, 100); - } - - /* - * 
jquery.flot.events - * - * description: Flot plugin for adding events/markers to the plot - * version: 0.2.5 - * authors: - * Alexander Wunschik - * Joel Oughton - * Nicolas Joseph - * - * website: https://github.com/mojoaxel/flot-events - * - * released under MIT License and GPLv2+ - */ - - /** - * A class that allows for the drawing an remove of some object - */ - var DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { - var _object = object; - var _drawFunc = drawFunc; - var _clearFunc = clearFunc; - var _moveFunc = moveFunc; - var _position = { left: left, top: top }; - var _width = width; - var _height = height; - - this.width = function() { return _width; }; - this.height = function() { return _height; }; - this.position = function() { return _position; }; - this.draw = function() { _drawFunc(_object); }; - this.clear = function() { _clearFunc(_object); }; - this.getObject = function() { return _object; }; - this.moveTo = function(position) { - _position = position; - _moveFunc(_object, _position); - }; - }; - - /** - * Event class that stores options (eventType, min, max, title, description) and the object to draw. 
- */ - var VisualEvent = function(options, drawableEvent) { - var _parent; - var _options = options; - var _drawableEvent = drawableEvent; - var _hidden = false; - - this.visual = function() { return _drawableEvent; }; - this.getOptions = function() { return _options; }; - this.getParent = function() { return _parent; }; - this.isHidden = function() { return _hidden; }; - this.hide = function() { _hidden = true; }; - this.unhide = function() { _hidden = false; }; - }; - - /** - * A Class that handles the event-markers inside the given plot - */ - var EventMarkers = function(plot) { - var _events = []; - - this._types = []; - this._plot = plot; - this.eventsEnabled = false; - - this.getEvents = function() { - return _events; - }; - - this.setTypes = function(types) { - return this._types = types; - }; - - /** - * create internal objects for the given events - */ - this.setupEvents = function(events) { - var that = this; - var parts = _.partition(events, 'isRegion'); - var regions = parts[0]; - events = parts[1]; - - $.each(events, function(index, event) { - var ve = new VisualEvent(event, that._buildDiv(event)); - _events.push(ve); - }); - - $.each(regions, function (index, event) { - var vre = new VisualEvent(event, that._buildRegDiv(event)); - _events.push(vre); - }); - - _events.sort(function(a, b) { - var ao = a.getOptions(), bo = b.getOptions(); - if (ao.min > bo.min) { return 1; } - if (ao.min < bo.min) { return -1; } - return 0; - }); - }; - - /** - * draw the events to the plot - */ - this.drawEvents = function() { - var that = this; - // var o = this._plot.getPlotOffset(); - - $.each(_events, function(index, event) { - // check event is inside the graph range - if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { - event.visual().draw(); - } else { - event.visual().getObject().hide(); - } - }); - }; - - /** - * update the position of the event-markers (e.g. 
after scrolling or zooming) - */ - this.updateEvents = function() { - var that = this; - var o = this._plot.getPlotOffset(), left, top; - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - - $.each(_events, function(index, event) { - top = o.top + that._plot.height() - event.visual().height(); - left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; - event.visual().moveTo({ top: top, left: left }); - }); - }; - - /** - * remove all events from the plot - */ - this._clearEvents = function() { - $.each(_events, function(index, val) { - val.visual().clear(); - }); - _events = []; - }; - - /** - * create a DOM element for the given event - */ - this._buildDiv = function(event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; - var markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { - markerSize = 8; //default marker size - } else { - markerSize = this._types[eventTypeId].markerSize; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { - markerShow = true; - } else { - markerShow = this._types[eventTypeId].markerShow; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - 
markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - var topOffset = xaxis.options.eventSectionHeight || 0; - topOffset = topOffset / 3; - - top = o.top + this._plot.height() + topOffset; - left = xaxis.p2c(event.min) + o.left; - - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": left + 'px', - "top": 8, - "width": lineWidth + "px", - "height": this._plot.height() + topOffset * 0.8, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }) - .appendTo(container); - - if (markerShow) { - var marker = $('
').css({ - "position": "absolute", - "left": (-markerSize - Math.round(lineWidth / 2)) + "px", - "font-size": 0, - "line-height": 0, - "width": 0, - "height": 0, - "border-left": markerSize+"px solid transparent", - "border-right": markerSize+"px solid transparent" - }); - - marker.appendTo(line); - - if (this._types[eventTypeId] && this._types[eventTypeId].position && this._types[eventTypeId].position.toUpperCase() === 'BOTTOM') { - marker.css({ - "top": top-markerSize-8 +"px", - "border-top": "none", - "border-bottom": markerSize+"px solid " + color - }); - } else { - marker.css({ - "top": "0px", - "border-top": markerSize+"px solid " + color, - "border-bottom": "none" - }); - } - - marker.data({ - "event": event - }); - - var mouseenter = function() { - createAnnotationToolip(marker, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(marker, event.editModel, that._plot); - } - - var mouseleave = function() { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - marker.css({ "cursor": "help" }); - marker.hover(mouseenter, mouseleave); - } - } - - var drawableEvent = new DrawableEvent( - line, - function drawFunc(obj) { obj.show(); }, - function(obj) { obj.remove(); }, - function(obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - line.width(), - line.height() - ); - - return drawableEvent; - }; - - /** - * create a DOM element for the given region - */ - this._buildRegDiv = function (event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to 
a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - var topOffset = 2; - top = o.top + this._plot.height() + topOffset; - - var timeFrom = Math.min(event.min, event.timeEnd); - var timeTo = Math.max(event.min, event.timeEnd); - left = xaxis.p2c(timeFrom) + o.left; - var right = xaxis.p2c(timeTo) + o.left; - regionWidth = right - left; - - _.each([left, right], function(position) { - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": position + 'px', - "top": 8, - "width": lineWidth + "px", - "height": that._plot.height() + topOffset, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }); - line.appendTo(container); - }); - - var region = $('
').css({ - "position": "absolute", - "opacity": 0.5, - "left": left + 'px', - "top": top, - "width": Math.round(regionWidth + lineWidth) + "px", - "height": "0.5rem", - "border-left-color": color, - "color": color, - "background-color": color - }); - region.appendTo(container); - - region.data({ - "event": event - }); - - var mouseenter = function () { - createAnnotationToolip(region, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(region, event.editModel, that._plot); - } - - var mouseleave = function () { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - region.css({ "cursor": "help" }); - region.hover(mouseenter, mouseleave); - } - - var drawableEvent = new DrawableEvent( - region, - function drawFunc(obj) { obj.show(); }, - function (obj) { obj.remove(); }, - function (obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - region.width(), - region.height() - ); - - return drawableEvent; - }; - - /** - * check if the event is inside visible range - */ - this._insidePlot = function(x) { - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var xc = xaxis.p2c(x); - return xc > 0 && xc < xaxis.p2c(xaxis.max); - }; - }; - - /** - * initialize the plugin for the given plot - */ - function init(plot) { - /*jshint validthis:true */ - var that = this; - var eventMarkers = new EventMarkers(plot); - - plot.getEvents = function() { - return eventMarkers._events; - }; - - plot.hideEvents = function() { - $.each(eventMarkers._events, function(index, event) { - event.visual().getObject().hide(); - }); - }; - - plot.showEvents = function() { - plot.hideEvents(); - $.each(eventMarkers._events, function(index, event) { - event.hide(); - }); - - that.eventMarkers.drawEvents(); - }; - - // change events on an existing plot - plot.setEvents = function(events) { - if (eventMarkers.eventsEnabled) { - eventMarkers.setupEvents(events); - } - }; - - 
plot.hooks.processOptions.push(function(plot, options) { - // enable the plugin - if (options.events.data != null) { - eventMarkers.eventsEnabled = true; - } - }); - - plot.hooks.draw.push(function(plot) { - var options = plot.getOptions(); - - if (eventMarkers.eventsEnabled) { - // check for first run - if (eventMarkers.getEvents().length < 1) { - eventMarkers.setTypes(options.events.types); - eventMarkers.setupEvents(options.events.data); - } else { - eventMarkers.updateEvents(); - } - } - - eventMarkers.drawEvents(); - }); - } - - var defaultOptions = { - events: { - data: null, - types: null, - xaxis: 1, - position: 'BOTTOM' - } - }; - - $.plot.plugins.push({ - init: init, - options: defaultOptions, - name: "events", - version: "0.2.5" - }); -}); diff --git a/public/app/plugins/panel/graph/jquery.flot.events.ts b/public/app/plugins/panel/graph/jquery.flot.events.ts new file mode 100644 index 0000000000000..9dfe0a8573f35 --- /dev/null +++ b/public/app/plugins/panel/graph/jquery.flot.events.ts @@ -0,0 +1,671 @@ +import angular from 'angular'; +import $ from 'jquery'; +import _ from 'lodash'; +import Drop from 'tether-drop'; + +/** @ngInject */ +export function createAnnotationToolip(element, event, plot) { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let eventManager = plot.getOptions().events.manager; + let tmpScope = $rootScope.$new(true); + tmpScope.event = event; + tmpScope.onEdit = function() { + eventManager.editEvent(event); + }; + + $compile(content)(tmpScope); + tmpScope.$digest(); + tmpScope.$destroy(); + + let drop = new Drop({ + target: element[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--annotation', + openOn: 'hover', + hoverCloseDelay: 200, + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + 
}); + + drop.open(); + + drop.on('close', function() { + setTimeout(function() { + drop.destroy(); + }); + }); + }, + ]); +} + +let markerElementToAttachTo = null; + +/** @ngInject */ +export function createEditPopover(element, event, plot) { + let eventManager = plot.getOptions().events.manager; + if (eventManager.editorOpen) { + // update marker element to attach to (needed in case of legend on the right + // when there is a double render pass and the inital marker element is removed) + markerElementToAttachTo = element; + return; + } + + // mark as openend + eventManager.editorOpened(); + // set marker elment to attache to + markerElementToAttachTo = element; + + // wait for element to be attached and positioned + setTimeout(function() { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let scope = $rootScope.$new(true); + let drop; + + scope.event = event; + scope.panelCtrl = eventManager.panelCtrl; + scope.close = function() { + drop.close(); + }; + + $compile(content)(scope); + scope.$digest(); + + drop = new Drop({ + target: markerElementToAttachTo[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--form', + openOn: 'click', + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + }); + + drop.open(); + eventManager.editorOpened(); + + drop.on('close', function() { + // need timeout here in order call drop.destroy + setTimeout(function() { + eventManager.editorClosed(); + scope.$destroy(); + drop.destroy(); + }); + }); + }, + ]); + }, 100); +} + +/* + * jquery.flot.events + * + * description: Flot plugin for adding events/markers to the plot + * version: 0.2.5 + * authors: + * Alexander Wunschik + * Joel Oughton + * Nicolas Joseph + * + * website: https://github.com/mojoaxel/flot-events + * + * released under MIT License 
and GPLv2+ + */ + +/** + * A class that allows for the drawing an remove of some object + */ +export class DrawableEvent { + _object: any; + _drawFunc: any; + _clearFunc: any; + _moveFunc: any; + _position: any; + _width: any; + _height: any; + + /** @ngInject */ + constructor(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { + this._object = object; + this._drawFunc = drawFunc; + this._clearFunc = clearFunc; + this._moveFunc = moveFunc; + this._position = { left: left, top: top }; + this._width = width; + this._height = height; + } + + width() { + return this._width; + } + height() { + return this._height; + } + position() { + return this._position; + } + draw() { + this._drawFunc(this._object); + } + clear() { + this._clearFunc(this._object); + } + getObject() { + return this._object; + } + moveTo(position) { + this._position = position; + this._moveFunc(this._object, this._position); + } +} + +/** + * Event class that stores options (eventType, min, max, title, description) and the object to draw. 
+ */ +export class VisualEvent { + _parent: any; + _options: any; + _drawableEvent: any; + _hidden: any; + + /** @ngInject */ + constructor(options, drawableEvent) { + this._options = options; + this._drawableEvent = drawableEvent; + this._hidden = false; + } + + visual() { + return this._drawableEvent; + } + getOptions() { + return this._options; + } + getParent() { + return this._parent; + } + isHidden() { + return this._hidden; + } + hide() { + this._hidden = true; + } + unhide() { + this._hidden = false; + } +} + +/** + * A Class that handles the event-markers inside the given plot + */ +export class EventMarkers { + _events: any; + _types: any; + _plot: any; + eventsEnabled: any; + + /** @ngInject */ + constructor(plot) { + this._events = []; + this._types = []; + this._plot = plot; + this.eventsEnabled = false; + } + + getEvents() { + return this._events; + } + + setTypes(types) { + return (this._types = types); + } + + /** + * create internal objects for the given events + */ + setupEvents(events) { + let parts = _.partition(events, 'isRegion'); + let regions = parts[0]; + events = parts[1]; + + $.each(events, (index, event) => { + let ve = new VisualEvent(event, this._buildDiv(event)); + this._events.push(ve); + }); + + $.each(regions, (index, event) => { + let vre = new VisualEvent(event, this._buildRegDiv(event)); + this._events.push(vre); + }); + + this._events.sort((a, b) => { + let ao = a.getOptions(), + bo = b.getOptions(); + if (ao.min > bo.min) { + return 1; + } + if (ao.min < bo.min) { + return -1; + } + return 0; + }); + } + + /** + * draw the events to the plot + */ + drawEvents() { + // var o = this._plot.getPlotOffset(); + + $.each(this._events, (index, event) => { + // check event is inside the graph range + if (this._insidePlot(event.getOptions().min) && !event.isHidden()) { + event.visual().draw(); + } else { + event + .visual() + .getObject() + .hide(); + } + }); + } + + /** + * update the position of the event-markers (e.g. 
after scrolling or zooming) + */ + updateEvents() { + let o = this._plot.getPlotOffset(), + left, + top; + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + + $.each(this._events, (index, event) => { + top = o.top + this._plot.height() - event.visual().height(); + left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; + event.visual().moveTo({ top: top, left: left }); + }); + } + + /** + * remove all events from the plot + */ + _clearEvents() { + $.each(this._events, (index, val) => { + val.visual().clear(); + }); + this._events = []; + } + + /** + * create a DOM element for the given event + */ + _buildDiv(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let top, left, color, markerSize, markerShow, lineStyle, lineWidth; + let markerTooltip; + + // map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { + markerSize = 8; //default marker size + } else { + markerSize = this._types[eventTypeId].markerSize; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { + markerShow = true; + } else { + markerShow = this._types[eventTypeId].markerShow; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = 
this._types[eventTypeId].lineStyle.toLowerCase(); + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + let topOffset = xaxis.options.eventSectionHeight || 0; + topOffset = topOffset / 3; + + top = o.top + this._plot.height() + topOffset; + left = xaxis.p2c(event.min) + o.left; + + let line = $('
') + .css({ + position: 'absolute', + opacity: 0.8, + left: left + 'px', + top: 8, + width: lineWidth + 'px', + height: this._plot.height() + topOffset * 0.8, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }) + .appendTo(container); + + if (markerShow) { + let marker = $('
').css({ + position: 'absolute', + left: -markerSize - Math.round(lineWidth / 2) + 'px', + 'font-size': 0, + 'line-height': 0, + width: 0, + height: 0, + 'border-left': markerSize + 'px solid transparent', + 'border-right': markerSize + 'px solid transparent', + }); + + marker.appendTo(line); + + if ( + this._types[eventTypeId] && + this._types[eventTypeId].position && + this._types[eventTypeId].position.toUpperCase() === 'BOTTOM' + ) { + marker.css({ + top: top - markerSize - 8 + 'px', + 'border-top': 'none', + 'border-bottom': markerSize + 'px solid ' + color, + }); + } else { + marker.css({ + top: '0px', + 'border-top': markerSize + 'px solid ' + color, + 'border-bottom': 'none', + }); + } + + marker.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(marker, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(marker, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + marker.css({ cursor: 'help' }); + marker.hover(mouseenter, mouseleave); + } + } + + let drawableEvent = new DrawableEvent( + line, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + line.width(), + line.height() + ); + + return drawableEvent; + } + + /** + * create a DOM element for the given region + */ + _buildRegDiv(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; + + // map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = 
this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); + } + + let topOffset = 2; + top = o.top + this._plot.height() + topOffset; + + let timeFrom = Math.min(event.min, event.timeEnd); + let timeTo = Math.max(event.min, event.timeEnd); + left = xaxis.p2c(timeFrom) + o.left; + let right = xaxis.p2c(timeTo) + o.left; + regionWidth = right - left; + + _.each([left, right], position => { + let line = $('
').css({ + position: 'absolute', + opacity: 0.8, + left: position + 'px', + top: 8, + width: lineWidth + 'px', + height: this._plot.height() + topOffset, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }); + line.appendTo(container); + }); + + let region = $('
').css({ + position: 'absolute', + opacity: 0.5, + left: left + 'px', + top: top, + width: Math.round(regionWidth + lineWidth) + 'px', + height: '0.5rem', + 'border-left-color': color, + color: color, + 'background-color': color, + }); + region.appendTo(container); + + region.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(region, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(region, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + region.css({ cursor: 'help' }); + region.hover(mouseenter, mouseleave); + } + + let drawableEvent = new DrawableEvent( + region, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + region.width(), + region.height() + ); + + return drawableEvent; + } + + /** + * check if the event is inside visible range + */ + _insidePlot(x) { + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let xc = xaxis.p2c(x); + return xc > 0 && xc < xaxis.p2c(xaxis.max); + } +} + +/** + * initialize the plugin for the given plot + */ + +/** @ngInject */ +export function init(plot) { + /*jshint validthis:true */ + let that = this; + let eventMarkers = new EventMarkers(plot); + + plot.getEvents = function() { + return eventMarkers._events; + }; + + plot.hideEvents = function() { + $.each(eventMarkers._events, (index, event) => { + event + .visual() + .getObject() + .hide(); + }); + }; + + plot.showEvents = function() { + plot.hideEvents(); + $.each(eventMarkers._events, (index, event) => { + event.hide(); + }); + + that.eventMarkers.drawEvents(); + }; + + // change events on an existing plot + plot.setEvents = function(events) { + if (eventMarkers.eventsEnabled) { + eventMarkers.setupEvents(events); + } + }; + + 
plot.hooks.processOptions.push(function(plot, options) { + // enable the plugin + if (options.events.data != null) { + eventMarkers.eventsEnabled = true; + } + }); + + plot.hooks.draw.push(function(plot) { + let options = plot.getOptions(); + + if (eventMarkers.eventsEnabled) { + // check for first run + if (eventMarkers.getEvents().length < 1) { + eventMarkers.setTypes(options.events.types); + eventMarkers.setupEvents(options.events.data); + } else { + eventMarkers.updateEvents(); + } + } + + eventMarkers.drawEvents(); + }); +} + +let defaultOptions = { + events: { + data: null, + types: null, + xaxis: 1, + position: 'BOTTOM', + }, +}; + +$.plot.plugins.push({ + init: init, + options: defaultOptions, + name: 'events', + version: '0.2.5', +}); diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts new file mode 100644 index 0000000000000..3ebcf6cdf313c --- /dev/null +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -0,0 +1,94 @@ +import moment from 'moment'; +import { GraphCtrl } from '../module'; + +jest.mock('../graph', () => ({})); + +describe('GraphCtrl', () => { + let injector = { + get: () => { + return { + timeRange: () => { + return { + from: '', + to: '', + }; + }, + }; + }, + }; + + let scope = { + $on: () => {}, + }; + + GraphCtrl.prototype.panel = { + events: { + on: () => {}, + }, + gridPos: { + w: 100, + }, + }; + + let ctx = {}; + + beforeEach(() => { + ctx.ctrl = new GraphCtrl(scope, injector, {}); + ctx.ctrl.annotationsPromise = Promise.resolve({}); + ctx.ctrl.updateTimeRange(); + }); + + describe('when time series are outside range', () => { + beforeEach(() => { + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, 1234567890], [60, 1234567899]], + }, + ]; + + ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', () => { + 
expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range'); + }); + }); + + describe('when time series are inside range', () => { + beforeEach(() => { + var range = { + from: moment() + .subtract(1, 'days') + .valueOf(), + to: moment().valueOf(), + }; + + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, range.from + 1000], [60, range.from + 10000]], + }, + ]; + + ctx.ctrl.range = range; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', () => { + expect(ctx.ctrl.dataWarning).toBe(null); + }); + }); + + describe('datapointsCount given 2 series', () => { + beforeEach(() => { + var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsCount warning', () => { + expect(ctx.ctrl.dataWarning.title).toBe('No data points'); + }); + }); +}); diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts b/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts deleted file mode 100644 index d5cefb345cf29..0000000000000 --- a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; - -import moment from 'moment'; -import { GraphCtrl } from '../module'; -import helpers from '../../../../../test/specs/helpers'; - -describe('GraphCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach(ctx.createPanelController(GraphCtrl)); - beforeEach(() => { - ctx.ctrl.annotationsPromise = Promise.resolve({}); - ctx.ctrl.updateTimeRange(); - }); - - describe('when time series are outside range', function() 
{ - beforeEach(function() { - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, 1234567890], [60, 1234567899]], - }, - ]; - - ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning.title).to.be('Data points outside time range'); - }); - }); - - describe('when time series are inside range', function() { - beforeEach(function() { - var range = { - from: moment() - .subtract(1, 'days') - .valueOf(), - to: moment().valueOf(), - }; - - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, range.from + 1000], [60, range.from + 10000]], - }, - ]; - - ctx.ctrl.range = range; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning).to.be(null); - }); - }); - - describe('datapointsCount given 2 series', function() { - beforeEach(function() { - var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsCount warning', function() { - expect(ctx.ctrl.dataWarning.title).to.be('No data points'); - }); - }); -}); diff --git a/public/app/plugins/panel/singlestat/editor.html b/public/app/plugins/panel/singlestat/editor.html index 96576fd3c41b4..dd9ca55a760f0 100644 --- a/public/app/plugins/panel/singlestat/editor.html +++ b/public/app/plugins/panel/singlestat/editor.html @@ -56,10 +56,10 @@
Value
Coloring
- +
- +
diff --git a/public/app/routes/routes.ts b/public/app/routes/routes.ts index cd1aed549e0b7..d12711aca5b91 100644 --- a/public/app/routes/routes.ts +++ b/public/app/routes/routes.ts @@ -112,7 +112,7 @@ export function setupAngularRoutes($routeProvider, $locationProvider) { controller: 'FolderDashboardsCtrl', controllerAs: 'ctrl', }) - .when('/explore/:initial?', { + .when('/explore', { template: '', resolve: { roles: () => ['Editor', 'Admin'], diff --git a/public/sass/_variables.dark.scss b/public/sass/_variables.dark.scss index 4907540815d55..eb73b014a9376 100644 --- a/public/sass/_variables.dark.scss +++ b/public/sass/_variables.dark.scss @@ -93,24 +93,14 @@ $headings-color: darken($white, 11%); $abbr-border-color: $gray-3 !default; $text-muted: $text-color-weak; -$blockquote-small-color: $gray-3 !default; -$blockquote-border-color: $gray-4 !default; - $hr-border-color: rgba(0, 0, 0, 0.1) !default; -// Components -$component-active-color: #fff !default; -$component-active-bg: $brand-primary !default; - // Panel // ------------------------- $panel-bg: #212124; $panel-border-color: $dark-1; $panel-border: solid 1px $panel-border-color; -$panel-drop-zone-bg: repeating-linear-gradient(-128deg, #111, #111 10px, #191919 10px, #222 20px); $panel-header-hover-bg: $dark-4; -$panel-header-menu-hover-bg: $dark-5; -$panel-edit-shadow: 0 -30px 30px -30px $black; // page header $page-header-bg: linear-gradient(90deg, #292a2d, black); @@ -205,7 +195,6 @@ $input-box-shadow-focus: rgba(102, 175, 233, 0.6); $input-color-placeholder: $gray-1 !default; $input-label-bg: $gray-blue; $input-label-border-color: $dark-3; -$input-invalid-border-color: lighten($red, 5%); // Search $search-shadow: 0 0 30px 0 $black; @@ -223,7 +212,6 @@ $dropdownBorder: rgba(0, 0, 0, 0.2); $dropdownDividerTop: transparent; $dropdownDividerBottom: #444; $dropdownDivider: $dropdownDividerBottom; -$dropdownTitle: $link-color-disabled; $dropdownLinkColor: $text-color; $dropdownLinkColorHover: $white; @@ -232,8 
+220,6 @@ $dropdownLinkColorActive: $white; $dropdownLinkBackgroundActive: $dark-4; $dropdownLinkBackgroundHover: $dark-4; -$dropdown-link-color: $gray-3; - // COMPONENT VARIABLES // -------------------------------------------------- @@ -246,22 +232,13 @@ $horizontalComponentOffset: 180px; // Wells // ------------------------- -$wellBackground: #131517; $navbarHeight: 55px; -$navbarBackgroundHighlight: $dark-3; $navbarBackground: $panel-bg; $navbarBorder: 1px solid $dark-3; $navbarShadow: 0 0 20px black; -$navbarText: $gray-4; $navbarLinkColor: $gray-4; -$navbarLinkColorHover: $white; -$navbarLinkColorActive: $navbarLinkColorHover; -$navbarLinkBackgroundHover: transparent; -$navbarLinkBackgroundActive: $navbarBackground; -$navbarBrandColor: $link-color; -$navbarDropdownShadow: inset 0px 4px 10px -4px $body-bg; $navbarButtonBackground: $navbarBackground; $navbarButtonBackgroundHighlight: $body-bg; @@ -275,20 +252,15 @@ $side-menu-bg-mobile: $side-menu-bg; $side-menu-item-hover-bg: $dark-2; $side-menu-shadow: 0 0 20px black; $side-menu-link-color: $link-color; -$breadcrumb-hover-hl: #111; // Menu dropdowns // ------------------------- $menu-dropdown-bg: $body-bg; $menu-dropdown-hover-bg: $dark-2; -$menu-dropdown-border-color: $dark-3; $menu-dropdown-shadow: 5px 5px 20px -5px $black; // Breadcrumb // ------------------------- -$page-nav-bg: $black; -$page-nav-shadow: 5px 5px 20px -5px $black; -$page-nav-breadcrumb-color: $gray-3; // Tabs // ------------------------- @@ -296,9 +268,6 @@ $tab-border-color: $dark-4; // Pagination // ------------------------- -$paginationBackground: $body-bg; -$paginationBorder: transparent; -$paginationActiveBackground: $blue; // Form states and alerts // ------------------------- @@ -343,10 +312,6 @@ $info-box-color: $gray-4; $footer-link-color: $gray-2; $footer-link-hover: $gray-4; -// collapse box -$collapse-box-body-border: $dark-5; -$collapse-box-body-error-border: $red; - // json-explorer $json-explorer-default-color: $text-color; 
$json-explorer-string-color: #23d662; @@ -357,7 +322,6 @@ $json-explorer-undefined-color: rgb(239, 143, 190); $json-explorer-function-color: #fd48cb; $json-explorer-rotate-time: 100ms; $json-explorer-toggler-opacity: 0.6; -$json-explorer-toggler-color: #45376f; $json-explorer-bracket-color: #9494ff; $json-explorer-key-color: #23a0db; $json-explorer-url-color: #027bff; diff --git a/public/sass/_variables.light.scss b/public/sass/_variables.light.scss index 14716f6dfefd9..7e5e1b6a7f8b4 100644 --- a/public/sass/_variables.light.scss +++ b/public/sass/_variables.light.scss @@ -90,25 +90,15 @@ $headings-color: $text-color; $abbr-border-color: $gray-2 !default; $text-muted: $text-color-weak; -$blockquote-small-color: $gray-2 !default; -$blockquote-border-color: $gray-3 !default; - $hr-border-color: $dark-3 !default; -// Components -$component-active-color: $white !default; -$component-active-bg: $brand-primary !default; - // Panel // ------------------------- $panel-bg: $white; $panel-border-color: $gray-5; $panel-border: solid 1px $panel-border-color; -$panel-drop-zone-bg: repeating-linear-gradient(-128deg, $body-bg, $body-bg 10px, $gray-6 10px, $gray-6 20px); $panel-header-hover-bg: $gray-6; -$panel-header-menu-hover-bg: $gray-4; -$panel-edit-shadow: 0 0 30px 20px $black; // Page header $page-header-bg: linear-gradient(90deg, $white, $gray-7); @@ -201,7 +191,6 @@ $input-box-shadow-focus: $blue !default; $input-color-placeholder: $gray-4 !default; $input-label-bg: $gray-5; $input-label-border-color: $gray-5; -$input-invalid-border-color: lighten($red, 5%); // Sidemenu // ------------------------- @@ -215,15 +204,10 @@ $side-menu-link-color: $gray-6; // ------------------------- $menu-dropdown-bg: $gray-7; $menu-dropdown-hover-bg: $gray-6; -$menu-dropdown-border-color: $gray-4; $menu-dropdown-shadow: 5px 5px 10px -5px $gray-1; // Breadcrumb // ------------------------- -$page-nav-bg: $gray-5; -$page-nav-shadow: 5px 5px 20px -5px $gray-4; -$page-nav-breadcrumb-color: 
$black; -$breadcrumb-hover-hl: #d9dadd; // Tabs // ------------------------- @@ -245,7 +229,6 @@ $dropdownBorder: $gray-4; $dropdownDividerTop: $gray-6; $dropdownDividerBottom: $white; $dropdownDivider: $dropdownDividerTop; -$dropdownTitle: $gray-3; $dropdownLinkColor: $dark-3; $dropdownLinkColorHover: $link-color; @@ -271,24 +254,16 @@ $horizontalComponentOffset: 180px; // Wells // ------------------------- -$wellBackground: $gray-3; // Navbar // ------------------------- $navbarHeight: 52px; -$navbarBackgroundHighlight: $white; $navbarBackground: $white; $navbarBorder: 1px solid $gray-4; $navbarShadow: 0 0 3px #c1c1c1; -$navbarText: #444; $navbarLinkColor: #444; -$navbarLinkColorHover: #000; -$navbarLinkColorActive: #333; -$navbarLinkBackgroundHover: transparent; -$navbarLinkBackgroundActive: darken($navbarBackground, 6.5%); -$navbarDropdownShadow: inset 0px 4px 7px -4px darken($body-bg, 20%); $navbarBrandColor: $navbarLinkColor; @@ -299,9 +274,6 @@ $navbar-button-border: $gray-4; // Pagination // ------------------------- -$paginationBackground: $gray-2; -$paginationBorder: transparent; -$paginationActiveBackground: $blue; // Form states and alerts // ------------------------- @@ -346,8 +318,6 @@ $footer-link-color: $gray-3; $footer-link-hover: $dark-5; // collapse box -$collapse-box-body-border: $gray-4; -$collapse-box-body-error-border: $red; // json explorer $json-explorer-default-color: black; @@ -359,7 +329,6 @@ $json-explorer-undefined-color: rgb(202, 11, 105); $json-explorer-function-color: #ff20ed; $json-explorer-rotate-time: 100ms; $json-explorer-toggler-opacity: 0.6; -$json-explorer-toggler-color: #45376f; $json-explorer-bracket-color: blue; $json-explorer-key-color: #00008b; $json-explorer-url-color: blue; diff --git a/public/sass/_variables.scss b/public/sass/_variables.scss index f46cacb0dd1c4..636b60c65a7d1 100644 --- a/public/sass/_variables.scss +++ b/public/sass/_variables.scss @@ -3,13 +3,7 @@ // Quickly modify global styling by enabling or 
disabling optional features. $enable-flex: true !default; -$enable-rounded: true !default; -$enable-shadows: false !default; -$enable-gradients: false !default; -$enable-transitions: false !default; $enable-hover-media-query: false !default; -$enable-grid-classes: true !default; -$enable-print-styles: true !default; // Spacing // @@ -53,9 +47,9 @@ $enable-flex: true; // Typography // ------------------------- -$font-family-sans-serif: "Roboto", Helvetica, Arial, sans-serif; -$font-family-serif: Georgia, "Times New Roman", Times, serif; -$font-family-monospace: Menlo, Monaco, Consolas, "Courier New", monospace; +$font-family-sans-serif: 'Roboto', Helvetica, Arial, sans-serif; +$font-family-serif: Georgia, 'Times New Roman', Times, serif; +$font-family-monospace: Menlo, Monaco, Consolas, 'Courier New', monospace; $font-family-base: $font-family-sans-serif !default; $font-size-root: 14px !default; @@ -90,16 +84,12 @@ $lead-font-size: 1.25rem !default; $lead-font-weight: 300 !default; $headings-margin-bottom: ($spacer / 2) !default; -$headings-font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif; +$headings-font-family: 'Roboto', 'Helvetica Neue', Helvetica, Arial, sans-serif; $headings-font-weight: 400 !default; $headings-line-height: 1.1 !default; -$blockquote-font-size: ($font-size-base * 1.25) !default; -$blockquote-border-width: 0.25rem !default; - $hr-border-width: $border-width !default; $dt-font-weight: bold !default; -$list-inline-padding: 5px !default; // Components // @@ -112,9 +102,6 @@ $border-radius: 3px !default; $border-radius-lg: 5px !default; $border-radius-sm: 2px!default; -$caret-width: 0.3em !default; -$caret-width-lg: $caret-width !default; - // Page $page-sidebar-width: 11rem; @@ -130,7 +117,6 @@ $link-hover-decoration: none !default; // Customizes the `.table` component with basic values, each used across all table variations. 
$table-cell-padding: 4px 10px !default; -$table-sm-cell-padding: 0.3rem !default; // Forms $input-padding-x: 10px !default; @@ -139,31 +125,18 @@ $input-line-height: 18px !default; $input-btn-border-width: 1px; $input-border-radius: 0 $border-radius $border-radius 0 !default; -$input-border-radius-lg: 0 $border-radius-lg $border-radius-lg 0 !default; $input-border-radius-sm: 0 $border-radius-sm $border-radius-sm 0 !default; $label-border-radius: $border-radius 0 0 $border-radius !default; -$label-border-radius-lg: $border-radius-lg 0 0 $border-radius-lg !default; $label-border-radius-sm: $border-radius-sm 0 0 $border-radius-sm !default; -$input-padding-x-sm: 7px !default; $input-padding-y-sm: 4px !default; $input-padding-x-lg: 20px !default; $input-padding-y-lg: 10px !default; -$input-height: (($font-size-base * $line-height-base) + ($input-padding-y * 2)) - !default; -$input-height-lg: ( - ($font-size-lg * $line-height-lg) + ($input-padding-y-lg * 2) - ) - !default; -$input-height-sm: ( - ($font-size-sm * $line-height-sm) + ($input-padding-y-sm * 2) - ) - !default; +$input-height: (($font-size-base * $line-height-base) + ($input-padding-y * 2)) !default; -$form-group-margin-bottom: $spacer-y !default; $gf-form-margin: 0.2rem; $cursor-disabled: not-allowed !default; @@ -221,9 +194,9 @@ $panel-padding: 0px 10px 5px 10px; $tabs-padding: 10px 15px 9px; $external-services: ( - github: (bgColor: #464646, borderColor: #393939, icon: ""), - google: (bgColor: #e84d3c, borderColor: #b83e31, icon: ""), - grafanacom: (bgColor: inherit, borderColor: #393939, icon: ""), - oauth: (bgColor: inherit, borderColor: #393939, icon: "") + github: (bgColor: #464646, borderColor: #393939, icon: ''), + google: (bgColor: #e84d3c, borderColor: #b83e31, icon: ''), + grafanacom: (bgColor: inherit, borderColor: #393939, icon: ''), + oauth: (bgColor: inherit, borderColor: #393939, icon: '') ) !default; diff --git a/public/sass/base/_type.scss b/public/sass/base/_type.scss index 
1c3516c28288f..2de8665f06a89 100644 --- a/public/sass/base/_type.scss +++ b/public/sass/base/_type.scss @@ -24,7 +24,7 @@ small { font-size: 85%; } strong { - font-weight: bold; + font-weight: $font-weight-semi-bold; } em { font-style: italic; @@ -249,7 +249,7 @@ dd { line-height: $line-height-base; } dt { - font-weight: bold; + font-weight: $font-weight-semi-bold; } dd { margin-left: $line-height-base / 2; @@ -376,7 +376,7 @@ a.external-link { padding: $spacer*0.5 $spacer; } th { - font-weight: normal; + font-weight: $font-weight-semi-bold; background: $table-bg-accent; } } @@ -415,3 +415,7 @@ a.external-link { color: $yellow; padding: 0; } + +th { + font-weight: $font-weight-semi-bold; +} diff --git a/public/sass/pages/_dashboard.scss b/public/sass/pages/_dashboard.scss index 9b79279b99baf..970b625c4f824 100644 --- a/public/sass/pages/_dashboard.scss +++ b/public/sass/pages/_dashboard.scss @@ -16,6 +16,7 @@ div.flot-text { height: 100%; &--solo { + margin: 0; .panel-container { border: none; z-index: $zindex-sidemenu + 1; diff --git a/public/sass/pages/_explore.scss b/public/sass/pages/_explore.scss index 876260c4f76a0..158f0eb68ad1f 100644 --- a/public/sass/pages/_explore.scss +++ b/public/sass/pages/_explore.scss @@ -60,6 +60,10 @@ flex-wrap: wrap; } + .datasource-picker { + min-width: 10rem; + } + .timepicker { display: flex; @@ -93,3 +97,40 @@ .query-row-tools { width: 4rem; } + +.explore { + .logs { + .logs-entries { + display: grid; + grid-column-gap: 1rem; + grid-row-gap: 0.1rem; + grid-template-columns: 4px minmax(100px, max-content) 1fr; + font-family: $font-family-monospace; + } + + .logs-row-match-highlight { + background-color: lighten($blue, 20%); + } + + .logs-row-level { + background-color: transparent; + margin: 6px 0; + border-radius: 2px; + opacity: 0.8; + } + + .logs-row-level-crit, + .logs-row-level-error, + .logs-row-level-err { + background-color: $red; + } + + .logs-row-level-warn { + background-color: $orange; + } + + .logs-row-level-info 
{ + background-color: $green; + } + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go index 3271a18e80e16..212fe25e71e1b 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go @@ -91,6 +91,6 @@ func (c *Client) AddDebugHandlers() { return } - c.Handlers.Send.PushFrontNamed(request.NamedHandler{Name: "awssdk.client.LogRequest", Fn: logRequest}) - c.Handlers.Send.PushBackNamed(request.NamedHandler{Name: "awssdk.client.LogResponse", Fn: logResponse}) + c.Handlers.Send.PushFrontNamed(LogHTTPRequestHandler) + c.Handlers.Send.PushBackNamed(LogHTTPResponseHandler) } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go index e223c54cc6c1a..ce9fb896d943b 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go @@ -44,12 +44,22 @@ func (reader *teeReaderCloser) Close() error { return reader.Source.Close() } +// LogHTTPRequestHandler is a SDK request handler to log the HTTP request sent +// to a service. Will include the HTTP request body if the LogLevel of the +// request matches LogDebugWithHTTPBody. 
+var LogHTTPRequestHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequest", + Fn: logRequest, +} + func logRequest(r *request.Request) { logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) bodySeekable := aws.IsReaderSeekable(r.Body) - dumpedBody, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) + + b, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) if err != nil { - r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, r.ClientInfo.ServiceName, r.Operation.Name, err)) + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) return } @@ -63,7 +73,28 @@ func logRequest(r *request.Request) { r.ResetBody() } - r.Config.Logger.Log(fmt.Sprintf(logReqMsg, r.ClientInfo.ServiceName, r.Operation.Name, string(dumpedBody))) + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} + +// LogHTTPRequestHeaderHandler is a SDK request handler to log the HTTP request sent +// to a service. Will only log the HTTP request's headers. The request payload +// will not be read. +var LogHTTPRequestHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequestHeader", + Fn: logRequestHeader, +} + +func logRequestHeader(r *request.Request) { + b, err := httputil.DumpRequestOut(r.HTTPRequest, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) } const logRespMsg = `DEBUG: Response %s/%s Details: @@ -76,27 +107,44 @@ const logRespErrMsg = `DEBUG ERROR: Response %s/%s: %s -----------------------------------------------------` +// LogHTTPResponseHandler is a SDK request handler to log the HTTP response +// received from a service. Will include the HTTP response body if the LogLevel +// of the request matches LogDebugWithHTTPBody. 
+var LogHTTPResponseHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponse", + Fn: logResponse, +} + func logResponse(r *request.Request) { lw := &logWriter{r.Config.Logger, bytes.NewBuffer(nil)} - r.HTTPResponse.Body = &teeReaderCloser{ - Reader: io.TeeReader(r.HTTPResponse.Body, lw), - Source: r.HTTPResponse.Body, + + logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) + if logBody { + r.HTTPResponse.Body = &teeReaderCloser{ + Reader: io.TeeReader(r.HTTPResponse.Body, lw), + Source: r.HTTPResponse.Body, + } } handlerFn := func(req *request.Request) { - body, err := httputil.DumpResponse(req.HTTPResponse, false) + b, err := httputil.DumpResponse(req.HTTPResponse, false) if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) return } - b, err := ioutil.ReadAll(lw.buf) - if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) - return - } - lw.Logger.Log(fmt.Sprintf(logRespMsg, req.ClientInfo.ServiceName, req.Operation.Name, string(body))) - if req.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) { + lw.Logger.Log(fmt.Sprintf(logRespMsg, + req.ClientInfo.ServiceName, req.Operation.Name, string(b))) + + if logBody { + b, err := ioutil.ReadAll(lw.buf) + if err != nil { + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) + return + } + lw.Logger.Log(string(b)) } } @@ -110,3 +158,27 @@ func logResponse(r *request.Request) { Name: handlerName, Fn: handlerFn, }) } + +// LogHTTPResponseHeaderHandler is a SDK request handler to log the HTTP +// response received from a service. Will only log the HTTP response's headers. +// The response payload will not be read. 
+var LogHTTPResponseHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponseHeader", + Fn: logResponseHeader, +} + +func logResponseHeader(r *request.Request) { + if r.Config.Logger == nil { + return + } + + b, err := httputil.DumpResponse(r.HTTPResponse, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logRespErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logRespMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go index 4778056ddfdae..920e9fddf8706 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go @@ -3,6 +3,7 @@ package metadata // ClientInfo wraps immutable data from the client.Client structure. type ClientInfo struct { ServiceName string + ServiceID string APIVersion string Endpoint string SigningName string diff --git a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go index 42416fc2f0fcc..ed086992f62f2 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go @@ -178,7 +178,8 @@ func (e *Expiry) IsExpired() bool { type Credentials struct { creds Value forceRefresh bool - m sync.Mutex + + m sync.RWMutex provider Provider } @@ -201,6 +202,17 @@ func NewCredentials(provider Provider) *Credentials { // If Credentials.Expire() was called the credentials Value will be force // expired, and the next call to Get() will cause them to be refreshed. func (c *Credentials) Get() (Value, error) { + // Check the cached credentials first with just the read lock. 
+ c.m.RLock() + if !c.isExpired() { + creds := c.creds + c.m.RUnlock() + return creds, nil + } + c.m.RUnlock() + + // Credentials are expired need to retrieve the credentials taking the full + // lock. c.m.Lock() defer c.m.Unlock() @@ -234,8 +246,8 @@ func (c *Credentials) Expire() { // If the Credentials were forced to be expired with Expire() this will // reflect that override. func (c *Credentials) IsExpired() bool { - c.m.Lock() - defer c.m.Unlock() + c.m.RLock() + defer c.m.RUnlock() return c.isExpired() } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go new file mode 100644 index 0000000000000..152d785b362bd --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go @@ -0,0 +1,46 @@ +// Package csm provides Client Side Monitoring (CSM) which enables sending metrics +// via UDP connection. Using the Start function will enable the reporting of +// metrics on a given port. If Start is called, with different parameters, again, +// a panic will occur. +// +// Pause can be called to pause any metrics publishing on a given port. Sessions +// that have had their handlers modified via InjectHandlers may still be used. +// However, the handlers will act as a no-op meaning no metrics will be published. 
+// +// Example: +// r, err := csm.Start("clientID", ":31000") +// if err != nil { +// panic(fmt.Errorf("failed starting CSM: %v", err)) +// } +// +// sess, err := session.NewSession(&aws.Config{}) +// if err != nil { +// panic(fmt.Errorf("failed loading session: %v", err)) +// } +// +// r.InjectHandlers(&sess.Handlers) +// +// client := s3.New(sess) +// resp, err := client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Will pause monitoring +// r.Pause() +// resp, err = client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Resume monitoring +// r.Continue() +// +// Start returns a Reporter that is used to enable or disable monitoring. If +// access to the Reporter is required later, calling Get will return the Reporter +// singleton. +// +// Example: +// r := csm.Get() +// r.Continue() +package csm diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go new file mode 100644 index 0000000000000..2f0c6eac9a80d --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go @@ -0,0 +1,67 @@ +package csm + +import ( + "fmt" + "sync" +) + +var ( + lock sync.Mutex +) + +// Client side metric handler names +const ( + APICallMetricHandlerName = "awscsm.SendAPICallMetric" + APICallAttemptMetricHandlerName = "awscsm.SendAPICallAttemptMetric" +) + +// Start will start the a long running go routine to capture +// client side metrics. Calling start multiple time will only +// start the metric listener once and will panic if a different +// client ID or port is passed in. 
+// +// Example: +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// sess := session.NewSession() +// r.InjectHandlers(sess.Handlers) +// +// svc := s3.New(sess) +// out, err := svc.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +func Start(clientID string, url string) (*Reporter, error) { + lock.Lock() + defer lock.Unlock() + + if sender == nil { + sender = newReporter(clientID, url) + } else { + if sender.clientID != clientID { + panic(fmt.Errorf("inconsistent client IDs. %q was expected, but received %q", sender.clientID, clientID)) + } + + if sender.url != url { + panic(fmt.Errorf("inconsistent URLs. %q was expected, but received %q", sender.url, url)) + } + } + + if err := connect(url); err != nil { + sender = nil + return nil, err + } + + return sender, nil +} + +// Get will return a reporter if one exists, if one does not exist, nil will +// be returned. 
+func Get() *Reporter { + lock.Lock() + defer lock.Unlock() + + return sender +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go new file mode 100644 index 0000000000000..4b0d630e4c1e2 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go @@ -0,0 +1,51 @@ +package csm + +import ( + "strconv" + "time" +) + +type metricTime time.Time + +func (t metricTime) MarshalJSON() ([]byte, error) { + ns := time.Duration(time.Time(t).UnixNano()) + return []byte(strconv.FormatInt(int64(ns/time.Millisecond), 10)), nil +} + +type metric struct { + ClientID *string `json:"ClientId,omitempty"` + API *string `json:"Api,omitempty"` + Service *string `json:"Service,omitempty"` + Timestamp *metricTime `json:"Timestamp,omitempty"` + Type *string `json:"Type,omitempty"` + Version *int `json:"Version,omitempty"` + + AttemptCount *int `json:"AttemptCount,omitempty"` + Latency *int `json:"Latency,omitempty"` + + Fqdn *string `json:"Fqdn,omitempty"` + UserAgent *string `json:"UserAgent,omitempty"` + AttemptLatency *int `json:"AttemptLatency,omitempty"` + + SessionToken *string `json:"SessionToken,omitempty"` + Region *string `json:"Region,omitempty"` + AccessKey *string `json:"AccessKey,omitempty"` + HTTPStatusCode *int `json:"HttpStatusCode,omitempty"` + XAmzID2 *string `json:"XAmzId2,omitempty"` + XAmzRequestID *string `json:"XAmznRequestId,omitempty"` + + AWSException *string `json:"AwsException,omitempty"` + AWSExceptionMessage *string `json:"AwsExceptionMessage,omitempty"` + SDKException *string `json:"SdkException,omitempty"` + SDKExceptionMessage *string `json:"SdkExceptionMessage,omitempty"` + + DestinationIP *string `json:"DestinationIp,omitempty"` + ConnectionReused *int `json:"ConnectionReused,omitempty"` + + AcquireConnectionLatency *int `json:"AcquireConnectionLatency,omitempty"` + ConnectLatency *int `json:"ConnectLatency,omitempty"` + RequestLatency *int `json:"RequestLatency,omitempty"` + 
DNSLatency *int `json:"DnsLatency,omitempty"` + TCPLatency *int `json:"TcpLatency,omitempty"` + SSLLatency *int `json:"SslLatency,omitempty"` +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go new file mode 100644 index 0000000000000..514fc3739a5f4 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go @@ -0,0 +1,54 @@ +package csm + +import ( + "sync/atomic" +) + +const ( + runningEnum = iota + pausedEnum +) + +var ( + // MetricsChannelSize of metrics to hold in the channel + MetricsChannelSize = 100 +) + +type metricChan struct { + ch chan metric + paused int64 +} + +func newMetricChan(size int) metricChan { + return metricChan{ + ch: make(chan metric, size), + } +} + +func (ch *metricChan) Pause() { + atomic.StoreInt64(&ch.paused, pausedEnum) +} + +func (ch *metricChan) Continue() { + atomic.StoreInt64(&ch.paused, runningEnum) +} + +func (ch *metricChan) IsPaused() bool { + v := atomic.LoadInt64(&ch.paused) + return v == pausedEnum +} + +// Push will push metrics to the metric channel if the channel +// is not paused +func (ch *metricChan) Push(m metric) bool { + if ch.IsPaused() { + return false + } + + select { + case ch.ch <- m: + return true + default: + return false + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go new file mode 100644 index 0000000000000..1484c8fc5b197 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go @@ -0,0 +1,230 @@ +package csm + +import ( + "encoding/json" + "net" + "time" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/request" +) + +const ( + // DefaultPort is used when no port is specified + DefaultPort = "31000" +) + +// Reporter will gather metrics of API requests made and +// send those metrics to the CSM endpoint. 
+type Reporter struct { + clientID string + url string + conn net.Conn + metricsCh metricChan + done chan struct{} +} + +var ( + sender *Reporter +) + +func connect(url string) error { + const network = "udp" + if err := sender.connect(network, url); err != nil { + return err + } + + if sender.done == nil { + sender.done = make(chan struct{}) + go sender.start() + } + + return nil +} + +func newReporter(clientID, url string) *Reporter { + return &Reporter{ + clientID: clientID, + url: url, + metricsCh: newMetricChan(MetricsChannelSize), + } +} + +func (rep *Reporter) sendAPICallAttemptMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + creds, _ := r.Config.Credentials.Get() + + m := metric{ + ClientID: aws.String(rep.clientID), + API: aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + UserAgent: aws.String(r.HTTPRequest.Header.Get("User-Agent")), + Region: r.Config.Region, + Type: aws.String("ApiCallAttempt"), + Version: aws.Int(1), + + XAmzRequestID: aws.String(r.RequestID), + + AttemptCount: aws.Int(r.RetryCount + 1), + AttemptLatency: aws.Int(int(now.Sub(r.AttemptTime).Nanoseconds() / int64(time.Millisecond))), + AccessKey: aws.String(creds.AccessKeyID), + } + + if r.HTTPResponse != nil { + m.HTTPStatusCode = aws.Int(r.HTTPResponse.StatusCode) + } + + if r.Error != nil { + if awserr, ok := r.Error.(awserr.Error); ok { + setError(&m, awserr) + } + } + + rep.metricsCh.Push(m) +} + +func setError(m *metric, err awserr.Error) { + msg := err.Message() + code := err.Code() + + switch code { + case "RequestError", + "SerializationError", + request.CanceledErrorCode: + + m.SDKException = &code + m.SDKExceptionMessage = &msg + default: + m.AWSException = &code + m.AWSExceptionMessage = &msg + } +} + +func (rep *Reporter) sendAPICallMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + m := metric{ + ClientID: aws.String(rep.clientID), + API: 
aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + Type: aws.String("ApiCall"), + AttemptCount: aws.Int(r.RetryCount + 1), + Latency: aws.Int(int(time.Now().Sub(r.Time) / time.Millisecond)), + XAmzRequestID: aws.String(r.RequestID), + } + + // TODO: Probably want to figure something out for logging dropped + // metrics + rep.metricsCh.Push(m) +} + +func (rep *Reporter) connect(network, url string) error { + if rep.conn != nil { + rep.conn.Close() + } + + conn, err := net.Dial(network, url) + if err != nil { + return awserr.New("UDPError", "Could not connect", err) + } + + rep.conn = conn + + return nil +} + +func (rep *Reporter) close() { + if rep.done != nil { + close(rep.done) + } + + rep.metricsCh.Pause() +} + +func (rep *Reporter) start() { + defer func() { + rep.metricsCh.Pause() + }() + + for { + select { + case <-rep.done: + rep.done = nil + return + case m := <-rep.metricsCh.ch: + // TODO: What to do with this error? Probably should just log + b, err := json.Marshal(m) + if err != nil { + continue + } + + rep.conn.Write(b) + } + } +} + +// Pause will pause the metric channel preventing any new metrics from +// being added. +func (rep *Reporter) Pause() { + lock.Lock() + defer lock.Unlock() + + if rep == nil { + return + } + + rep.close() +} + +// Continue will reopen the metric channel and allow for monitoring +// to be resumed. +func (rep *Reporter) Continue() { + lock.Lock() + defer lock.Unlock() + if rep == nil { + return + } + + if !rep.metricsCh.IsPaused() { + return + } + + rep.metricsCh.Continue() +} + +// InjectHandlers will will enable client side metrics and inject the proper +// handlers to handle how metrics are sent. 
+// +// Example: +// // Start must be called in order to inject the correct handlers +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// +// sess := session.NewSession() +// r.InjectHandlers(&sess.Handlers) +// +// // create a new service client with our client side metric session +// svc := s3.New(sess) +func (rep *Reporter) InjectHandlers(handlers *request.Handlers) { + if rep == nil { + return + } + + apiCallHandler := request.NamedHandler{Name: APICallMetricHandlerName, Fn: rep.sendAPICallMetric} + handlers.Complete.PushFrontNamed(apiCallHandler) + + apiCallAttemptHandler := request.NamedHandler{Name: APICallAttemptMetricHandlerName, Fn: rep.sendAPICallAttemptMetric} + handlers.AfterRetry.PushFrontNamed(apiCallAttemptHandler) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go index 857f677dd1061..c472a57fad2fd 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go @@ -48,6 +48,7 @@ const ( A4bServiceID = "a4b" // A4b. AcmServiceID = "acm" // Acm. AcmPcaServiceID = "acm-pca" // AcmPca. + ApiMediatailorServiceID = "api.mediatailor" // ApiMediatailor. ApiPricingServiceID = "api.pricing" // ApiPricing. ApigatewayServiceID = "apigateway" // Apigateway. ApplicationAutoscalingServiceID = "application-autoscaling" // ApplicationAutoscaling. @@ -130,6 +131,7 @@ const ( ModelsLexServiceID = "models.lex" // ModelsLex. MonitoringServiceID = "monitoring" // Monitoring. MturkRequesterServiceID = "mturk-requester" // MturkRequester. + NeptuneServiceID = "neptune" // Neptune. OpsworksServiceID = "opsworks" // Opsworks. OpsworksCmServiceID = "opsworks-cm" // OpsworksCm. OrganizationsServiceID = "organizations" // Organizations. 
@@ -307,6 +309,16 @@ var awsPartition = partition{ "us-west-2": endpoint{}, }, }, + "api.mediatailor": service{ + + Endpoints: endpoints{ + "ap-northeast-1": endpoint{}, + "ap-southeast-1": endpoint{}, + "ap-southeast-2": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + }, + }, "api.pricing": service{ Defaults: endpoint{ CredentialScope: credentialScope{ @@ -434,6 +446,7 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, "ap-northeast-2": endpoint{}, + "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "ca-central-1": endpoint{}, @@ -1046,6 +1059,7 @@ var awsPartition = partition{ "elasticfilesystem": service{ Endpoints: endpoints{ + "ap-northeast-2": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, @@ -1242,11 +1256,13 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, + "eu-west-2": endpoint{}, "us-east-1": endpoint{}, "us-east-2": endpoint{}, "us-west-2": endpoint{}, @@ -1509,8 +1525,10 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, + "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, "us-east-1": endpoint{}, "us-west-2": endpoint{}, @@ -1622,6 +1640,35 @@ var awsPartition = partition{ "us-east-1": endpoint{}, }, }, + "neptune": service{ + + Endpoints: endpoints{ + "eu-west-1": endpoint{ + Hostname: "rds.eu-west-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "eu-west-1", + }, + }, + "us-east-1": endpoint{ + Hostname: "rds.us-east-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-east-1", + }, + }, + "us-east-2": endpoint{ + Hostname: "rds.us-east-2.amazonaws.com", + 
CredentialScope: credentialScope{ + Region: "us-east-2", + }, + }, + "us-west-2": endpoint{ + Hostname: "rds.us-west-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-west-2", + }, + }, + }, + }, "opsworks": service{ Endpoints: endpoints{ @@ -1805,10 +1852,11 @@ var awsPartition = partition{ "runtime.sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "s3": service{ @@ -1873,10 +1921,11 @@ var awsPartition = partition{ "sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "sdb": service{ @@ -2081,6 +2130,10 @@ var awsPartition = partition{ "eu-west-1": endpoint{}, "eu-west-2": endpoint{}, "eu-west-3": endpoint{}, + "fips-us-east-1": endpoint{}, + "fips-us-east-2": endpoint{}, + "fips-us-west-1": endpoint{}, + "fips-us-west-2": endpoint{}, "sa-east-1": endpoint{}, "us-east-1": endpoint{ SSLCommonName: "queue.{dnsSuffix}", @@ -2507,13 +2560,15 @@ var awscnPartition = partition{ "ecr": service{ Endpoints: endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "ecs": service{ Endpoints: endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "elasticache": service{ diff --git a/vendor/github.com/aws/aws-sdk-go/aws/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/logger.go index 3babb5abdb69e..6ed15b2ecc26d 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/logger.go @@ -71,6 +71,12 @@ const ( // LogDebugWithRequestErrors 
states the SDK should log when service requests fail // to build, send, validate, or unmarshal. LogDebugWithRequestErrors + + // LogDebugWithEventStreamBody states the SDK should log EventStream + // request and response bodys. This should be used to log the EventStream + // wire unmarshaled message content of requests and responses made while + // using the SDK Will also enable LogDebug. + LogDebugWithEventStreamBody ) // A Logger is a minimalistic interface for the SDK to log messages to. Should diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go index 802ac88ad5cd6..605a72d3c9400 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go @@ -14,6 +14,7 @@ type Handlers struct { Send HandlerList ValidateResponse HandlerList Unmarshal HandlerList + UnmarshalStream HandlerList UnmarshalMeta HandlerList UnmarshalError HandlerList Retry HandlerList @@ -30,6 +31,7 @@ func (h *Handlers) Copy() Handlers { Send: h.Send.copy(), ValidateResponse: h.ValidateResponse.copy(), Unmarshal: h.Unmarshal.copy(), + UnmarshalStream: h.UnmarshalStream.copy(), UnmarshalError: h.UnmarshalError.copy(), UnmarshalMeta: h.UnmarshalMeta.copy(), Retry: h.Retry.copy(), @@ -45,6 +47,7 @@ func (h *Handlers) Clear() { h.Send.Clear() h.Sign.Clear() h.Unmarshal.Clear() + h.UnmarshalStream.Clear() h.UnmarshalMeta.Clear() h.UnmarshalError.Clear() h.ValidateResponse.Clear() @@ -172,6 +175,21 @@ func (l *HandlerList) SwapNamed(n NamedHandler) (swapped bool) { return swapped } +// Swap will swap out all handlers matching the name passed in. The matched +// handlers will be swapped in. True is returned if the handlers were swapped. 
+func (l *HandlerList) Swap(name string, replace NamedHandler) bool { + var swapped bool + + for i := 0; i < len(l.list); i++ { + if l.list[i].Name == name { + l.list[i] = replace + swapped = true + } + } + + return swapped +} + // SetBackNamed will replace the named handler if it exists in the handler list. // If the handler does not exist the handler will be added to the end of the list. func (l *HandlerList) SetBackNamed(n NamedHandler) { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go index 69b7a01ad74a7..75f0fe07780a8 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go @@ -46,6 +46,7 @@ type Request struct { Handlers Handlers Retryer + AttemptTime time.Time Time time.Time Operation *Operation HTTPRequest *http.Request @@ -121,6 +122,7 @@ func New(cfg aws.Config, clientInfo metadata.ClientInfo, handlers Handlers, Handlers: handlers.Copy(), Retryer: retryer, + AttemptTime: time.Now(), Time: time.Now(), ExpireTime: 0, Operation: operation, @@ -368,9 +370,9 @@ func (r *Request) Build() error { return r.Error } -// Sign will sign the request returning error if errors are encountered. +// Sign will sign the request, returning error if errors are encountered. // -// Send will build the request prior to signing. All Sign Handlers will +// Sign will build the request prior to signing. All Sign Handlers will // be executed in the order they were set. func (r *Request) Sign() error { r.Build() @@ -440,7 +442,7 @@ func (r *Request) GetBody() io.ReadSeeker { return r.safeBody } -// Send will send the request returning error if errors are encountered. +// Send will send the request, returning error if errors are encountered. // // Send will sign the request prior to sending. All Send Handlers will // be executed in the order they were set. 
@@ -461,6 +463,7 @@ func (r *Request) Send() error { }() for { + r.AttemptTime = time.Now() if aws.BoolValue(r.Retryable) { if r.Config.LogLevel.Matches(aws.LogDebugWithRequestRetries) { r.Config.Logger.Log(fmt.Sprintf("DEBUG: Retrying Request %s/%s, attempt %d", diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go index 869b97a1a0fa7..e36e468b7c61d 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go @@ -21,7 +21,7 @@ func (noBody) WriteTo(io.Writer) (int64, error) { return 0, nil } var NoBody = noBody{} // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. // // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go index c32fc69bc56fe..7c6a8000f6751 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go @@ -11,7 +11,7 @@ import ( var NoBody = http.NoBody // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. 
// // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go index 159518a75cda2..a633ed5acfa3e 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go @@ -35,8 +35,12 @@ type Pagination struct { // NewRequest should always be built from the same API operations. It is // undefined if different API operations are returned on subsequent calls. NewRequest func() (*Request, error) + // EndPageOnSameToken, when enabled, will allow the paginator to stop on + // token that are the same as its previous tokens. + EndPageOnSameToken bool started bool + prevTokens []interface{} nextTokens []interface{} err error @@ -49,7 +53,15 @@ type Pagination struct { // // Will always return true if Next has not been called yet. func (p *Pagination) HasNextPage() bool { - return !(p.started && len(p.nextTokens) == 0) + if !p.started { + return true + } + + hasNextPage := len(p.nextTokens) != 0 + if p.EndPageOnSameToken { + return hasNextPage && !awsutil.DeepEqual(p.nextTokens, p.prevTokens) + } + return hasNextPage } // Err returns the error Pagination encountered when retrieving the next page. 
@@ -96,6 +108,7 @@ func (p *Pagination) Next() bool { return false } + p.prevTokens = p.nextTokens p.nextTokens = req.nextPageTokens() p.curPage = req.Data diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go index 12b452177a8b2..82e04d76cdeb1 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go @@ -96,9 +96,23 @@ type envConfig struct { // // AWS_CA_BUNDLE=$HOME/my_custom_ca_bundle CustomCABundle string + + csmEnabled string + CSMEnabled bool + CSMPort string + CSMClientID string } var ( + csmEnabledEnvKey = []string{ + "AWS_CSM_ENABLED", + } + csmPortEnvKey = []string{ + "AWS_CSM_PORT", + } + csmClientIDEnvKey = []string{ + "AWS_CSM_CLIENT_ID", + } credAccessEnvKey = []string{ "AWS_ACCESS_KEY_ID", "AWS_ACCESS_KEY", @@ -157,6 +171,12 @@ func envConfigLoad(enableSharedConfig bool) envConfig { setFromEnvVal(&cfg.Creds.SecretAccessKey, credSecretEnvKey) setFromEnvVal(&cfg.Creds.SessionToken, credSessionEnvKey) + // CSM environment variables + setFromEnvVal(&cfg.csmEnabled, csmEnabledEnvKey) + setFromEnvVal(&cfg.CSMPort, csmPortEnvKey) + setFromEnvVal(&cfg.CSMClientID, csmClientIDEnvKey) + cfg.CSMEnabled = len(cfg.csmEnabled) > 0 + // Require logical grouping of credentials if len(cfg.Creds.AccessKeyID) == 0 || len(cfg.Creds.SecretAccessKey) == 0 { cfg.Creds = credentials.Value{} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go index 259b5c0fecc00..51f30556301f4 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go @@ -15,6 +15,7 @@ import ( "github.com/aws/aws-sdk-go/aws/corehandlers" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/credentials/stscreds" + "github.com/aws/aws-sdk-go/aws/csm" 
"github.com/aws/aws-sdk-go/aws/defaults" "github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/request" @@ -81,10 +82,16 @@ func New(cfgs ...*aws.Config) *Session { r.Error = err }) } + return s } - return deprecatedNewSession(cfgs...) + s := deprecatedNewSession(cfgs...) + if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } + + return s } // NewSession returns a new Session created from SDK defaults, config files, @@ -300,10 +307,22 @@ func deprecatedNewSession(cfgs ...*aws.Config) *Session { } initHandlers(s) - return s } +func enableCSM(handlers *request.Handlers, clientID string, port string, logger aws.Logger) { + logger.Log("Enabling CSM") + if len(port) == 0 { + port = csm.DefaultPort + } + + r, err := csm.Start(clientID, "127.0.0.1:"+port) + if err != nil { + return + } + r.InjectHandlers(handlers) +} + func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, error) { cfg := defaults.Config() handlers := defaults.Handlers() @@ -343,6 +362,9 @@ func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, } initHandlers(s) + if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } // Setup HTTP client with custom cert bundle if enabled if opts.CustomCABundle != nil { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go index 6e46376125bcf..f3586131538ca 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go @@ -135,6 +135,7 @@ var requiredSignedHeaders = rules{ "X-Amz-Server-Side-Encryption-Customer-Key-Md5": struct{}{}, "X-Amz-Storage-Class": struct{}{}, "X-Amz-Website-Redirect-Location": struct{}{}, + "X-Amz-Content-Sha256": struct{}{}, }, }, patterns{"X-Amz-Meta-"}, @@ -671,8 +672,15 @@ func (ctx *signingCtx) buildSignature() { func (ctx *signingCtx) 
buildBodyDigest() error { hash := ctx.Request.Header.Get("X-Amz-Content-Sha256") if hash == "" { - if ctx.unsignedPayload || (ctx.isPresign && ctx.ServiceName == "s3") { + includeSHA256Header := ctx.unsignedPayload || + ctx.ServiceName == "s3" || + ctx.ServiceName == "glacier" + + s3Presign := ctx.isPresign && ctx.ServiceName == "s3" + + if ctx.unsignedPayload || s3Presign { hash = "UNSIGNED-PAYLOAD" + includeSHA256Header = !s3Presign } else if ctx.Body == nil { hash = emptyStringSHA256 } else { @@ -681,7 +689,8 @@ func (ctx *signingCtx) buildBodyDigest() error { } hash = hex.EncodeToString(makeSha256Reader(ctx.Body)) } - if ctx.unsignedPayload || ctx.ServiceName == "s3" || ctx.ServiceName == "glacier" { + + if includeSHA256Header { ctx.Request.Header.Set("X-Amz-Content-Sha256", hash) } } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/version.go b/vendor/github.com/aws/aws-sdk-go/aws/version.go index befbff7df07dc..c108466609e9f 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/version.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.13.49" +const SDKVersion = "1.14.12" diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go new file mode 100644 index 0000000000000..ecc7bf82fa20c --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go @@ -0,0 +1,144 @@ +package eventstream + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + "strconv" +) + +type decodedMessage struct { + rawMessage + Headers decodedHeaders `json:"headers"` +} +type jsonMessage struct { + Length json.Number `json:"total_length"` + HeadersLen json.Number `json:"headers_length"` + PreludeCRC json.Number `json:"prelude_crc"` + Headers decodedHeaders `json:"headers"` + Payload []byte `json:"payload"` + CRC json.Number 
`json:"message_crc"` +} + +func (d *decodedMessage) UnmarshalJSON(b []byte) (err error) { + var jsonMsg jsonMessage + if err = json.Unmarshal(b, &jsonMsg); err != nil { + return err + } + + d.Length, err = numAsUint32(jsonMsg.Length) + if err != nil { + return err + } + d.HeadersLen, err = numAsUint32(jsonMsg.HeadersLen) + if err != nil { + return err + } + d.PreludeCRC, err = numAsUint32(jsonMsg.PreludeCRC) + if err != nil { + return err + } + d.Headers = jsonMsg.Headers + d.Payload = jsonMsg.Payload + d.CRC, err = numAsUint32(jsonMsg.CRC) + if err != nil { + return err + } + + return nil +} + +func (d *decodedMessage) MarshalJSON() ([]byte, error) { + jsonMsg := jsonMessage{ + Length: json.Number(strconv.Itoa(int(d.Length))), + HeadersLen: json.Number(strconv.Itoa(int(d.HeadersLen))), + PreludeCRC: json.Number(strconv.Itoa(int(d.PreludeCRC))), + Headers: d.Headers, + Payload: d.Payload, + CRC: json.Number(strconv.Itoa(int(d.CRC))), + } + + return json.Marshal(jsonMsg) +} + +func numAsUint32(n json.Number) (uint32, error) { + v, err := n.Int64() + if err != nil { + return 0, fmt.Errorf("failed to get int64 json number, %v", err) + } + + return uint32(v), nil +} + +func (d decodedMessage) Message() Message { + return Message{ + Headers: Headers(d.Headers), + Payload: d.Payload, + } +} + +type decodedHeaders Headers + +func (hs *decodedHeaders) UnmarshalJSON(b []byte) error { + var jsonHeaders []struct { + Name string `json:"name"` + Type valueType `json:"type"` + Value interface{} `json:"value"` + } + + decoder := json.NewDecoder(bytes.NewReader(b)) + decoder.UseNumber() + if err := decoder.Decode(&jsonHeaders); err != nil { + return err + } + + var headers Headers + for _, h := range jsonHeaders { + value, err := valueFromType(h.Type, h.Value) + if err != nil { + return err + } + headers.Set(h.Name, value) + } + (*hs) = decodedHeaders(headers) + + return nil +} + +func valueFromType(typ valueType, val interface{}) (Value, error) { + switch typ { + case 
trueValueType: + return BoolValue(true), nil + case falseValueType: + return BoolValue(false), nil + case int8ValueType: + v, err := val.(json.Number).Int64() + return Int8Value(int8(v)), err + case int16ValueType: + v, err := val.(json.Number).Int64() + return Int16Value(int16(v)), err + case int32ValueType: + v, err := val.(json.Number).Int64() + return Int32Value(int32(v)), err + case int64ValueType: + v, err := val.(json.Number).Int64() + return Int64Value(v), err + case bytesValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return BytesValue(v), err + case stringValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return StringValue(string(v)), err + case timestampValueType: + v, err := val.(json.Number).Int64() + return TimestampValue(timeFromEpochMilli(v)), err + case uuidValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + var tv UUIDValue + copy(tv[:], v) + return tv, err + default: + panic(fmt.Sprintf("unknown type, %s, %T", typ.String(), val)) + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go new file mode 100644 index 0000000000000..4b972b2d6664c --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go @@ -0,0 +1,199 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "encoding/hex" + "encoding/json" + "fmt" + "hash" + "hash/crc32" + "io" + + "github.com/aws/aws-sdk-go/aws" +) + +// Decoder provides decoding of an Event Stream messages. +type Decoder struct { + r io.Reader + logger aws.Logger +} + +// NewDecoder initializes and returns a Decoder for decoding event +// stream messages from the reader provided. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{ + r: r, + } +} + +// Decode attempts to decode a single message from the event stream reader. 
+// Will return the event stream message, or error if Decode fails to read +// the message from the stream. +func (d *Decoder) Decode(payloadBuf []byte) (m Message, err error) { + reader := d.r + if d.logger != nil { + debugMsgBuf := bytes.NewBuffer(nil) + reader = io.TeeReader(reader, debugMsgBuf) + defer func() { + logMessageDecode(d.logger, debugMsgBuf, m, err) + }() + } + + crc := crc32.New(crc32IEEETable) + hashReader := io.TeeReader(reader, crc) + + prelude, err := decodePrelude(hashReader, crc) + if err != nil { + return Message{}, err + } + + if prelude.HeadersLen > 0 { + lr := io.LimitReader(hashReader, int64(prelude.HeadersLen)) + m.Headers, err = decodeHeaders(lr) + if err != nil { + return Message{}, err + } + } + + if payloadLen := prelude.PayloadLen(); payloadLen > 0 { + buf, err := decodePayload(payloadBuf, io.LimitReader(hashReader, int64(payloadLen))) + if err != nil { + return Message{}, err + } + m.Payload = buf + } + + msgCRC := crc.Sum32() + if err := validateCRC(reader, msgCRC); err != nil { + return Message{}, err + } + + return m, nil +} + +// UseLogger specifies the Logger that that the decoder should use to log the +// message decode to. 
+func (d *Decoder) UseLogger(logger aws.Logger) { + d.logger = logger +} + +func logMessageDecode(logger aws.Logger, msgBuf *bytes.Buffer, msg Message, decodeErr error) { + w := bytes.NewBuffer(nil) + defer func() { logger.Log(w.String()) }() + + fmt.Fprintf(w, "Raw message:\n%s\n", + hex.Dump(msgBuf.Bytes())) + + if decodeErr != nil { + fmt.Fprintf(w, "Decode error: %v\n", decodeErr) + return + } + + rawMsg, err := msg.rawMessage() + if err != nil { + fmt.Fprintf(w, "failed to create raw message, %v\n", err) + return + } + + decodedMsg := decodedMessage{ + rawMessage: rawMsg, + Headers: decodedHeaders(msg.Headers), + } + + fmt.Fprintf(w, "Decoded message:\n") + encoder := json.NewEncoder(w) + if err := encoder.Encode(decodedMsg); err != nil { + fmt.Fprintf(w, "failed to generate decoded message, %v\n", err) + } +} + +func decodePrelude(r io.Reader, crc hash.Hash32) (messagePrelude, error) { + var p messagePrelude + + var err error + p.Length, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + p.HeadersLen, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + if err := p.ValidateLens(); err != nil { + return messagePrelude{}, err + } + + preludeCRC := crc.Sum32() + if err := validateCRC(r, preludeCRC); err != nil { + return messagePrelude{}, err + } + + p.PreludeCRC = preludeCRC + + return p, nil +} + +func decodePayload(buf []byte, r io.Reader) ([]byte, error) { + w := bytes.NewBuffer(buf[0:0]) + + _, err := io.Copy(w, r) + return w.Bytes(), err +} + +func decodeUint8(r io.Reader) (uint8, error) { + type byteReader interface { + ReadByte() (byte, error) + } + + if br, ok := r.(byteReader); ok { + v, err := br.ReadByte() + return uint8(v), err + } + + var b [1]byte + _, err := io.ReadFull(r, b[:]) + return uint8(b[0]), err +} +func decodeUint16(r io.Reader) (uint16, error) { + var b [2]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint16(bs), nil 
+} +func decodeUint32(r io.Reader) (uint32, error) { + var b [4]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint32(bs), nil +} +func decodeUint64(r io.Reader) (uint64, error) { + var b [8]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint64(bs), nil +} + +func validateCRC(r io.Reader, expect uint32) error { + msgCRC, err := decodeUint32(r) + if err != nil { + return err + } + + if msgCRC != expect { + return ChecksumError{} + } + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go new file mode 100644 index 0000000000000..150a60981d83b --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go @@ -0,0 +1,114 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash" + "hash/crc32" + "io" +) + +// Encoder provides EventStream message encoding. +type Encoder struct { + w io.Writer + + headersBuf *bytes.Buffer +} + +// NewEncoder initializes and returns an Encoder to encode Event Stream +// messages to an io.Writer. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + headersBuf: bytes.NewBuffer(nil), + } +} + +// Encode encodes a single EventStream message to the io.Writer the Encoder +// was created with. An error is returned if writing the message fails. 
+func (e *Encoder) Encode(msg Message) error { + e.headersBuf.Reset() + + err := encodeHeaders(e.headersBuf, msg.Headers) + if err != nil { + return err + } + + crc := crc32.New(crc32IEEETable) + hashWriter := io.MultiWriter(e.w, crc) + + headersLen := uint32(e.headersBuf.Len()) + payloadLen := uint32(len(msg.Payload)) + + if err := encodePrelude(hashWriter, crc, headersLen, payloadLen); err != nil { + return err + } + + if headersLen > 0 { + if _, err := io.Copy(hashWriter, e.headersBuf); err != nil { + return err + } + } + + if payloadLen > 0 { + if _, err := hashWriter.Write(msg.Payload); err != nil { + return err + } + } + + msgCRC := crc.Sum32() + return binary.Write(e.w, binary.BigEndian, msgCRC) +} + +func encodePrelude(w io.Writer, crc hash.Hash32, headersLen, payloadLen uint32) error { + p := messagePrelude{ + Length: minMsgLen + headersLen + payloadLen, + HeadersLen: headersLen, + } + if err := p.ValidateLens(); err != nil { + return err + } + + err := binaryWriteFields(w, binary.BigEndian, + p.Length, + p.HeadersLen, + ) + if err != nil { + return err + } + + p.PreludeCRC = crc.Sum32() + err = binary.Write(w, binary.BigEndian, p.PreludeCRC) + if err != nil { + return err + } + + return nil +} + +func encodeHeaders(w io.Writer, headers Headers) error { + for _, h := range headers { + hn := headerName{ + Len: uint8(len(h.Name)), + } + copy(hn.Name[:hn.Len], h.Name) + if err := hn.encode(w); err != nil { + return err + } + + if err := h.Value.encode(w); err != nil { + return err + } + } + + return nil +} + +func binaryWriteFields(w io.Writer, order binary.ByteOrder, vs ...interface{}) error { + for _, v := range vs { + if err := binary.Write(w, order, v); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go new file mode 100644 index 0000000000000..5481ef30796d7 --- /dev/null +++ 
b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go @@ -0,0 +1,23 @@ +package eventstream + +import "fmt" + +// LengthError provides the error for items being larger than a maximum length. +type LengthError struct { + Part string + Want int + Have int + Value interface{} +} + +func (e LengthError) Error() string { + return fmt.Sprintf("%s length invalid, %d/%d, %v", + e.Part, e.Want, e.Have, e.Value) +} + +// ChecksumError provides the error for message checksum invalidation errors. +type ChecksumError struct{} + +func (e ChecksumError) Error() string { + return "message checksum mismatch" +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go new file mode 100644 index 0000000000000..4a4e64c713ed3 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go @@ -0,0 +1,160 @@ +package eventstreamapi + +import ( + "fmt" + "io" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" +) + +// Unmarshaler provides the interface for unmarshaling a EventStream +// message into a SDK type. +type Unmarshaler interface { + UnmarshalEvent(protocol.PayloadUnmarshaler, eventstream.Message) error +} + +// EventStream headers with specific meaning to async API functionality. +const ( + MessageTypeHeader = `:message-type` // Identifies type of message. + EventMessageType = `event` + ErrorMessageType = `error` + ExceptionMessageType = `exception` + + // Message Events + EventTypeHeader = `:event-type` // Identifies message event type e.g. "Stats". + + // Message Error + ErrorCodeHeader = `:error-code` + ErrorMessageHeader = `:error-message` + + // Message Exception + ExceptionTypeHeader = `:exception-type` +) + +// EventReader provides reading from the EventStream of an reader. 
+type EventReader struct { + reader io.ReadCloser + decoder *eventstream.Decoder + + unmarshalerForEventType func(string) (Unmarshaler, error) + payloadUnmarshaler protocol.PayloadUnmarshaler + + payloadBuf []byte +} + +// NewEventReader returns a EventReader built from the reader and unmarshaler +// provided. Use ReadStream method to start reading from the EventStream. +func NewEventReader( + reader io.ReadCloser, + payloadUnmarshaler protocol.PayloadUnmarshaler, + unmarshalerForEventType func(string) (Unmarshaler, error), +) *EventReader { + return &EventReader{ + reader: reader, + decoder: eventstream.NewDecoder(reader), + payloadUnmarshaler: payloadUnmarshaler, + unmarshalerForEventType: unmarshalerForEventType, + payloadBuf: make([]byte, 10*1024), + } +} + +// UseLogger instructs the EventReader to use the logger and log level +// specified. +func (r *EventReader) UseLogger(logger aws.Logger, logLevel aws.LogLevelType) { + if logger != nil && logLevel.Matches(aws.LogDebugWithEventStreamBody) { + r.decoder.UseLogger(logger) + } +} + +// ReadEvent attempts to read a message from the EventStream and return the +// unmarshaled event value that the message is for. +// +// For EventStream API errors check if the returned error satisfies the +// awserr.Error interface to get the error's Code and Message components. +// +// EventUnmarshalers called with EventStream messages must take copies of the +// message's Payload. The payload will is reused between events read. +func (r *EventReader) ReadEvent() (event interface{}, err error) { + msg, err := r.decoder.Decode(r.payloadBuf) + if err != nil { + return nil, err + } + defer func() { + // Reclaim payload buffer for next message read. 
+ r.payloadBuf = msg.Payload[0:0] + }() + + typ, err := GetHeaderString(msg, MessageTypeHeader) + if err != nil { + return nil, err + } + + switch typ { + case EventMessageType: + return r.unmarshalEventMessage(msg) + case ErrorMessageType: + return nil, r.unmarshalErrorMessage(msg) + default: + return nil, fmt.Errorf("unknown eventstream message type, %v", typ) + } +} + +func (r *EventReader) unmarshalEventMessage( + msg eventstream.Message, +) (event interface{}, err error) { + eventType, err := GetHeaderString(msg, EventTypeHeader) + if err != nil { + return nil, err + } + + ev, err := r.unmarshalerForEventType(eventType) + if err != nil { + return nil, err + } + + err = ev.UnmarshalEvent(r.payloadUnmarshaler, msg) + if err != nil { + return nil, err + } + + return ev, nil +} + +func (r *EventReader) unmarshalErrorMessage(msg eventstream.Message) (err error) { + var msgErr messageError + + msgErr.code, err = GetHeaderString(msg, ErrorCodeHeader) + if err != nil { + return err + } + + msgErr.msg, err = GetHeaderString(msg, ErrorMessageHeader) + if err != nil { + return err + } + + return msgErr +} + +// Close closes the EventReader's EventStream reader. +func (r *EventReader) Close() error { + return r.reader.Close() +} + +// GetHeaderString returns the value of the header as a string. If the header +// is not set or the value is not a string an error will be returned. 
+func GetHeaderString(msg eventstream.Message, headerName string) (string, error) { + headerVal := msg.Headers.Get(headerName) + if headerVal == nil { + return "", fmt.Errorf("error header %s not present", headerName) + } + + v, ok := headerVal.Get().(string) + if !ok { + return "", fmt.Errorf("error header value is not a string, %T", headerVal) + } + + return v, nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go new file mode 100644 index 0000000000000..5ea5a988b63e4 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go @@ -0,0 +1,24 @@ +package eventstreamapi + +import "fmt" + +type messageError struct { + code string + msg string +} + +func (e messageError) Code() string { + return e.code +} + +func (e messageError) Message() string { + return e.msg +} + +func (e messageError) Error() string { + return fmt.Sprintf("%s: %s", e.code, e.msg) +} + +func (e messageError) OrigErr() error { + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go new file mode 100644 index 0000000000000..3b44dde2f3230 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go @@ -0,0 +1,166 @@ +package eventstream + +import ( + "encoding/binary" + "fmt" + "io" +) + +// Headers are a collection of EventStream header values. +type Headers []Header + +// Header is a single EventStream Key Value header pair. +type Header struct { + Name string + Value Value +} + +// Set associates the name with a value. If the header name already exists in +// the Headers the value will be replaced with the new one. 
+func (hs *Headers) Set(name string, value Value) { + var i int + for ; i < len(*hs); i++ { + if (*hs)[i].Name == name { + (*hs)[i].Value = value + return + } + } + + *hs = append(*hs, Header{ + Name: name, Value: value, + }) +} + +// Get returns the Value associated with the header. Nil is returned if the +// value does not exist. +func (hs Headers) Get(name string) Value { + for i := 0; i < len(hs); i++ { + if h := hs[i]; h.Name == name { + return h.Value + } + } + return nil +} + +// Del deletes the value in the Headers if it exists. +func (hs *Headers) Del(name string) { + for i := 0; i < len(*hs); i++ { + if (*hs)[i].Name == name { + copy((*hs)[i:], (*hs)[i+1:]) + (*hs) = (*hs)[:len(*hs)-1] + } + } +} + +func decodeHeaders(r io.Reader) (Headers, error) { + hs := Headers{} + + for { + name, err := decodeHeaderName(r) + if err != nil { + if err == io.EOF { + // EOF while getting header name means no more headers + break + } + return nil, err + } + + value, err := decodeHeaderValue(r) + if err != nil { + return nil, err + } + + hs.Set(name, value) + } + + return hs, nil +} + +func decodeHeaderName(r io.Reader) (string, error) { + var n headerName + + var err error + n.Len, err = decodeUint8(r) + if err != nil { + return "", err + } + + name := n.Name[:n.Len] + if _, err := io.ReadFull(r, name); err != nil { + return "", err + } + + return string(name), nil +} + +func decodeHeaderValue(r io.Reader) (Value, error) { + var raw rawValue + + typ, err := decodeUint8(r) + if err != nil { + return nil, err + } + raw.Type = valueType(typ) + + var v Value + + switch raw.Type { + case trueValueType: + v = BoolValue(true) + case falseValueType: + v = BoolValue(false) + case int8ValueType: + var tv Int8Value + err = tv.decode(r) + v = tv + case int16ValueType: + var tv Int16Value + err = tv.decode(r) + v = tv + case int32ValueType: + var tv Int32Value + err = tv.decode(r) + v = tv + case int64ValueType: + var tv Int64Value + err = tv.decode(r) + v = tv + case bytesValueType: 
+ var tv BytesValue + err = tv.decode(r) + v = tv + case stringValueType: + var tv StringValue + err = tv.decode(r) + v = tv + case timestampValueType: + var tv TimestampValue + err = tv.decode(r) + v = tv + case uuidValueType: + var tv UUIDValue + err = tv.decode(r) + v = tv + default: + panic(fmt.Sprintf("unknown value type %d", raw.Type)) + } + + // Error could be EOF, let caller deal with it + return v, err +} + +const maxHeaderNameLen = 255 + +type headerName struct { + Len uint8 + Name [maxHeaderNameLen]byte +} + +func (v headerName) encode(w io.Writer) error { + if err := binary.Write(w, binary.BigEndian, v.Len); err != nil { + return err + } + + _, err := w.Write(v.Name[:v.Len]) + return err +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go new file mode 100644 index 0000000000000..d7786f92ce5ca --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go @@ -0,0 +1,501 @@ +package eventstream + +import ( + "encoding/base64" + "encoding/binary" + "fmt" + "io" + "strconv" + "time" +) + +const maxHeaderValueLen = 1<<15 - 1 // 2^15-1 or 32KB - 1 + +// valueType is the EventStream header value type. 
+type valueType uint8 + +// Header value types +const ( + trueValueType valueType = iota + falseValueType + int8ValueType // Byte + int16ValueType // Short + int32ValueType // Integer + int64ValueType // Long + bytesValueType + stringValueType + timestampValueType + uuidValueType +) + +func (t valueType) String() string { + switch t { + case trueValueType: + return "bool" + case falseValueType: + return "bool" + case int8ValueType: + return "int8" + case int16ValueType: + return "int16" + case int32ValueType: + return "int32" + case int64ValueType: + return "int64" + case bytesValueType: + return "byte_array" + case stringValueType: + return "string" + case timestampValueType: + return "timestamp" + case uuidValueType: + return "uuid" + default: + return fmt.Sprintf("unknown value type %d", uint8(t)) + } +} + +type rawValue struct { + Type valueType + Len uint16 // Only set for variable length slices + Value []byte // byte representation of value, BigEndian encoding. +} + +func (r rawValue) encodeScalar(w io.Writer, v interface{}) error { + return binaryWriteFields(w, binary.BigEndian, + r.Type, + v, + ) +} + +func (r rawValue) encodeFixedSlice(w io.Writer, v []byte) error { + binary.Write(w, binary.BigEndian, r.Type) + + _, err := w.Write(v) + return err +} + +func (r rawValue) encodeBytes(w io.Writer, v []byte) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + err := binaryWriteFields(w, binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + _, err = w.Write(v) + return err +} + +func (r rawValue) encodeString(w io.Writer, v string) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + type stringWriter interface { + WriteString(string) (int, error) + } + + err := binaryWriteFields(w, 
binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + if sw, ok := w.(stringWriter); ok { + _, err = sw.WriteString(v) + } else { + _, err = w.Write([]byte(v)) + } + + return err +} + +func decodeFixedBytesValue(r io.Reader, buf []byte) error { + _, err := io.ReadFull(r, buf) + return err +} + +func decodeBytesValue(r io.Reader) ([]byte, error) { + var raw rawValue + var err error + raw.Len, err = decodeUint16(r) + if err != nil { + return nil, err + } + + buf := make([]byte, raw.Len) + _, err = io.ReadFull(r, buf) + if err != nil { + return nil, err + } + + return buf, nil +} + +func decodeStringValue(r io.Reader) (string, error) { + v, err := decodeBytesValue(r) + return string(v), err +} + +// Value represents the abstract header value. +type Value interface { + Get() interface{} + String() string + valueType() valueType + encode(io.Writer) error +} + +// An BoolValue provides eventstream encoding, and representation +// of a Go bool value. +type BoolValue bool + +// Get returns the underlying type +func (v BoolValue) Get() interface{} { + return bool(v) +} + +// valueType returns the EventStream header value type value. +func (v BoolValue) valueType() valueType { + if v { + return trueValueType + } + return falseValueType +} + +func (v BoolValue) String() string { + return strconv.FormatBool(bool(v)) +} + +// encode encodes the BoolValue into an eventstream binary value +// representation. +func (v BoolValue) encode(w io.Writer) error { + return binary.Write(w, binary.BigEndian, v.valueType()) +} + +// An Int8Value provides eventstream encoding, and representation of a Go +// int8 value. +type Int8Value int8 + +// Get returns the underlying value. +func (v Int8Value) Get() interface{} { + return int8(v) +} + +// valueType returns the EventStream header value type value. 
+func (Int8Value) valueType() valueType { + return int8ValueType +} + +func (v Int8Value) String() string { + return fmt.Sprintf("0x%02x", int8(v)) +} + +// encode encodes the Int8Value into an eventstream binary value +// representation. +func (v Int8Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeScalar(w, v) +} + +func (v *Int8Value) decode(r io.Reader) error { + n, err := decodeUint8(r) + if err != nil { + return err + } + + *v = Int8Value(n) + return nil +} + +// An Int16Value provides eventstream encoding, and representation of a Go +// int16 value. +type Int16Value int16 + +// Get returns the underlying value. +func (v Int16Value) Get() interface{} { + return int16(v) +} + +// valueType returns the EventStream header value type value. +func (Int16Value) valueType() valueType { + return int16ValueType +} + +func (v Int16Value) String() string { + return fmt.Sprintf("0x%04x", int16(v)) +} + +// encode encodes the Int16Value into an eventstream binary value +// representation. +func (v Int16Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int16Value) decode(r io.Reader) error { + n, err := decodeUint16(r) + if err != nil { + return err + } + + *v = Int16Value(n) + return nil +} + +// An Int32Value provides eventstream encoding, and representation of a Go +// int32 value. +type Int32Value int32 + +// Get returns the underlying value. +func (v Int32Value) Get() interface{} { + return int32(v) +} + +// valueType returns the EventStream header value type value. +func (Int32Value) valueType() valueType { + return int32ValueType +} + +func (v Int32Value) String() string { + return fmt.Sprintf("0x%08x", int32(v)) +} + +// encode encodes the Int32Value into an eventstream binary value +// representation. 
+func (v Int32Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int32Value) decode(r io.Reader) error { + n, err := decodeUint32(r) + if err != nil { + return err + } + + *v = Int32Value(n) + return nil +} + +// An Int64Value provides eventstream encoding, and representation of a Go +// int64 value. +type Int64Value int64 + +// Get returns the underlying value. +func (v Int64Value) Get() interface{} { + return int64(v) +} + +// valueType returns the EventStream header value type value. +func (Int64Value) valueType() valueType { + return int64ValueType +} + +func (v Int64Value) String() string { + return fmt.Sprintf("0x%016x", int64(v)) +} + +// encode encodes the Int64Value into an eventstream binary value +// representation. +func (v Int64Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int64Value) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = Int64Value(n) + return nil +} + +// An BytesValue provides eventstream encoding, and representation of a Go +// byte slice. +type BytesValue []byte + +// Get returns the underlying value. +func (v BytesValue) Get() interface{} { + return []byte(v) +} + +// valueType returns the EventStream header value type value. +func (BytesValue) valueType() valueType { + return bytesValueType +} + +func (v BytesValue) String() string { + return base64.StdEncoding.EncodeToString([]byte(v)) +} + +// encode encodes the BytesValue into an eventstream binary value +// representation. 
+func (v BytesValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeBytes(w, []byte(v)) +} + +func (v *BytesValue) decode(r io.Reader) error { + buf, err := decodeBytesValue(r) + if err != nil { + return err + } + + *v = BytesValue(buf) + return nil +} + +// An StringValue provides eventstream encoding, and representation of a Go +// string. +type StringValue string + +// Get returns the underlying value. +func (v StringValue) Get() interface{} { + return string(v) +} + +// valueType returns the EventStream header value type value. +func (StringValue) valueType() valueType { + return stringValueType +} + +func (v StringValue) String() string { + return string(v) +} + +// encode encodes the StringValue into an eventstream binary value +// representation. +func (v StringValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeString(w, string(v)) +} + +func (v *StringValue) decode(r io.Reader) error { + s, err := decodeStringValue(r) + if err != nil { + return err + } + + *v = StringValue(s) + return nil +} + +// An TimestampValue provides eventstream encoding, and representation of a Go +// timestamp. +type TimestampValue time.Time + +// Get returns the underlying value. +func (v TimestampValue) Get() interface{} { + return time.Time(v) +} + +// valueType returns the EventStream header value type value. +func (TimestampValue) valueType() valueType { + return timestampValueType +} + +func (v TimestampValue) epochMilli() int64 { + nano := time.Time(v).UnixNano() + msec := nano / int64(time.Millisecond) + return msec +} + +func (v TimestampValue) String() string { + msec := v.epochMilli() + return strconv.FormatInt(msec, 10) +} + +// encode encodes the TimestampValue into an eventstream binary value +// representation. 
+func (v TimestampValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + msec := v.epochMilli() + return raw.encodeScalar(w, msec) +} + +func (v *TimestampValue) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = TimestampValue(timeFromEpochMilli(int64(n))) + return nil +} + +func timeFromEpochMilli(t int64) time.Time { + secs := t / 1e3 + msec := t % 1e3 + return time.Unix(secs, msec*int64(time.Millisecond)) +} + +// An UUIDValue provides eventstream encoding, and representation of a UUID +// value. +type UUIDValue [16]byte + +// Get returns the underlying value. +func (v UUIDValue) Get() interface{} { + return v[:] +} + +// valueType returns the EventStream header value type value. +func (UUIDValue) valueType() valueType { + return uuidValueType +} + +func (v UUIDValue) String() string { + return fmt.Sprintf(`%X-%X-%X-%X-%X`, v[0:4], v[4:6], v[6:8], v[8:10], v[10:]) +} + +// encode encodes the UUIDValue into an eventstream binary value +// representation. 
+func (v UUIDValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeFixedSlice(w, v[:]) +} + +func (v *UUIDValue) decode(r io.Reader) error { + tv := (*v)[:] + return decodeFixedBytesValue(r, tv) +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go new file mode 100644 index 0000000000000..2dc012a66e29f --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go @@ -0,0 +1,103 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash/crc32" +) + +const preludeLen = 8 +const preludeCRCLen = 4 +const msgCRCLen = 4 +const minMsgLen = preludeLen + preludeCRCLen + msgCRCLen +const maxPayloadLen = 1024 * 1024 * 16 // 16MB +const maxHeadersLen = 1024 * 128 // 128KB +const maxMsgLen = minMsgLen + maxHeadersLen + maxPayloadLen + +var crc32IEEETable = crc32.MakeTable(crc32.IEEE) + +// A Message provides the eventstream message representation. +type Message struct { + Headers Headers + Payload []byte +} + +func (m *Message) rawMessage() (rawMessage, error) { + var raw rawMessage + + if len(m.Headers) > 0 { + var headers bytes.Buffer + if err := encodeHeaders(&headers, m.Headers); err != nil { + return rawMessage{}, err + } + raw.Headers = headers.Bytes() + raw.HeadersLen = uint32(len(raw.Headers)) + } + + raw.Length = raw.HeadersLen + uint32(len(m.Payload)) + minMsgLen + + hash := crc32.New(crc32IEEETable) + binaryWriteFields(hash, binary.BigEndian, raw.Length, raw.HeadersLen) + raw.PreludeCRC = hash.Sum32() + + binaryWriteFields(hash, binary.BigEndian, raw.PreludeCRC) + + if raw.HeadersLen > 0 { + hash.Write(raw.Headers) + } + + // Read payload bytes and update hash for it as well. 
+ if len(m.Payload) > 0 { + raw.Payload = m.Payload + hash.Write(raw.Payload) + } + + raw.CRC = hash.Sum32() + + return raw, nil +} + +type messagePrelude struct { + Length uint32 + HeadersLen uint32 + PreludeCRC uint32 +} + +func (p messagePrelude) PayloadLen() uint32 { + return p.Length - p.HeadersLen - minMsgLen +} + +func (p messagePrelude) ValidateLens() error { + if p.Length == 0 || p.Length > maxMsgLen { + return LengthError{ + Part: "message prelude", + Want: maxMsgLen, + Have: int(p.Length), + } + } + if p.HeadersLen > maxHeadersLen { + return LengthError{ + Part: "message headers", + Want: maxHeadersLen, + Have: int(p.HeadersLen), + } + } + if payloadLen := p.PayloadLen(); payloadLen > maxPayloadLen { + return LengthError{ + Part: "message payload", + Want: maxPayloadLen, + Have: int(payloadLen), + } + } + + return nil +} + +type rawMessage struct { + messagePrelude + + Headers []byte + Payload []byte + + CRC uint32 +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go new file mode 100644 index 0000000000000..e21614a125011 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go @@ -0,0 +1,81 @@ +package protocol + +import ( + "io" + "io/ioutil" + "net/http" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/client/metadata" + "github.com/aws/aws-sdk-go/aws/request" +) + +// PayloadUnmarshaler provides the interface for unmarshaling a payload's +// reader into a SDK shape. +type PayloadUnmarshaler interface { + UnmarshalPayload(io.Reader, interface{}) error +} + +// HandlerPayloadUnmarshal implements the PayloadUnmarshaler from a +// HandlerList. This provides the support for unmarshaling a payload reader to +// a shape without needing a SDK request first. 
+type HandlerPayloadUnmarshal struct { + Unmarshalers request.HandlerList +} + +// UnmarshalPayload unmarshals the io.Reader payload into the SDK shape using +// the Unmarshalers HandlerList provided. Returns an error if unable +// unmarshaling fails. +func (h HandlerPayloadUnmarshal) UnmarshalPayload(r io.Reader, v interface{}) error { + req := &request.Request{ + HTTPRequest: &http.Request{}, + HTTPResponse: &http.Response{ + StatusCode: 200, + Header: http.Header{}, + Body: ioutil.NopCloser(r), + }, + Data: v, + } + + h.Unmarshalers.Run(req) + + return req.Error +} + +// PayloadMarshaler provides the interface for marshaling a SDK shape into and +// io.Writer. +type PayloadMarshaler interface { + MarshalPayload(io.Writer, interface{}) error +} + +// HandlerPayloadMarshal implements the PayloadMarshaler from a HandlerList. +// This provides support for marshaling a SDK shape into an io.Writer without +// needing a SDK request first. +type HandlerPayloadMarshal struct { + Marshalers request.HandlerList +} + +// MarshalPayload marshals the SDK shape into the io.Writer using the +// Marshalers HandlerList provided. Returns an error if unable if marshal +// fails. 
+func (h HandlerPayloadMarshal) MarshalPayload(w io.Writer, v interface{}) error { + req := request.New( + aws.Config{}, + metadata.ClientInfo{}, + request.Handlers{}, + nil, + &request.Operation{HTTPMethod: "GET"}, + v, + nil, + ) + + h.Marshalers.Run(req) + + if req.Error != nil { + return req.Error + } + + io.Copy(w, req.GetBody()) + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go index c405288d74233..f761e0b3a5b4c 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go @@ -20,8 +20,10 @@ import ( "github.com/aws/aws-sdk-go/private/protocol" ) -// RFC822 returns an RFC822 formatted timestamp for AWS protocols -const RFC822 = "Mon, 2 Jan 2006 15:04:05 GMT" +// RFC1123GMT is a RFC1123 (RFC822) formated timestame. This format is not +// using the standard library's time.RFC1123 due to the desire to always use +// GMT as the timezone. 
+const RFC1123GMT = "Mon, 2 Jan 2006 15:04:05 GMT" // Whether the byte value can be sent without escaping in AWS URLs var noEscape [256]bool @@ -270,7 +272,7 @@ func convertType(v reflect.Value, tag reflect.StructTag) (str string, err error) case float64: str = strconv.FormatFloat(value, 'f', -1, 64) case time.Time: - str = value.UTC().Format(RFC822) + str = value.UTC().Format(RFC1123GMT) case aws.JSONValue: if len(value) == 0 { return "", errValueNotSet diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go index 823f045eed798..9d4e7626775f8 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go @@ -198,7 +198,7 @@ func unmarshalHeader(v reflect.Value, header string, tag reflect.StructTag) erro } v.Set(reflect.ValueOf(&f)) case *time.Time: - t, err := time.Parse(RFC822, header) + t, err := time.Parse(time.RFC1123, header) if err != nil { return err } diff --git a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go index 4b0aa76edcd75..0d478662240aa 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "monitoring" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "monitoring" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "CloudWatch" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the CloudWatch client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go index 99d12a66e42f9..b48e40e205c12 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go @@ -2268,11 +2268,7 @@ func (c *EC2) CancelSpotInstanceRequestsRequest(input *CancelSpotInstanceRequest // CancelSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. // -// Cancels one or more Spot Instance requests. Spot Instances are instances -// that Amazon EC2 starts on your behalf when the maximum price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Cancels one or more Spot Instance requests. // // Canceling a Spot Instance request does not terminate running Spot Instances // associated with the request. @@ -4179,8 +4175,8 @@ func (c *EC2) CreateNetworkInterfacePermissionRequest(input *CreateNetworkInterf // CreateNetworkInterfacePermission API operation for Amazon Elastic Compute Cloud. // -// Grants an AWS authorized partner account permission to attach the specified -// network interface to an instance in their account. +// Grants an AWS-authorized account permission to attach the specified network +// interface to an instance in their account. // // You can grant permission to a single AWS account only, and only one account // at a time. @@ -13675,11 +13671,7 @@ func (c *EC2) DescribeSpotInstanceRequestsRequest(input *DescribeSpotInstanceReq // DescribeSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. 
// -// Describes the Spot Instance requests that belong to your account. Spot Instances -// are instances that Amazon EC2 launches when the Spot price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Describes the specified Spot Instance requests. // // You can use DescribeSpotInstanceRequests to find a running Spot Instance // by examining the response. If the status of the Spot Instance is fulfilled, @@ -21367,9 +21359,9 @@ func (c *EC2) RequestSpotInstancesRequest(input *RequestSpotInstancesInput) (req // RequestSpotInstances API operation for Amazon Elastic Compute Cloud. // -// Creates a Spot Instance request. Spot Instances are instances that Amazon -// EC2 launches when the maximum price that you specify exceeds the current -// Spot price. For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) +// Creates a Spot Instance request. +// +// For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) // in the Amazon EC2 User Guide for Linux Instances. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions @@ -37615,7 +37607,7 @@ type DescribeInstancesInput struct { // The maximum number of results to return in a single call. To retrieve the // remaining results, make another call with the returned NextToken value. This // value can be between 5 and 1000. You cannot specify this parameter and the - // instance IDs parameter or tag filters in the same call. + // instance IDs parameter in the same call. MaxResults *int64 `locationName:"maxResults" type:"integer"` // The token to request the next page of results. @@ -66458,19 +66450,23 @@ type StateReason struct { // The message for the state change. 
// - // * Server.InsufficientInstanceCapacity: There was insufficient instance - // capacity to satisfy the launch request. + // * Server.InsufficientInstanceCapacity: There was insufficient capacity + // available to satisfy the launch request. // - // * Server.InternalError: An internal error occurred during instance launch, - // resulting in termination. + // * Server.InternalError: An internal error caused the instance to terminate + // during launch. // // * Server.ScheduledStop: The instance was stopped due to a scheduled retirement. // - // * Server.SpotInstanceTermination: A Spot Instance was terminated due to - // an increase in the Spot price. + // * Server.SpotInstanceShutdown: The instance was stopped because the number + // of Spot requests with a maximum price equal to or higher than the Spot + // price exceeded available capacity or because of an increase in the Spot + // price. // - // * Client.InternalError: A client error caused the instance to terminate - // on launch. + // * Server.SpotInstanceTermination: The instance was terminated because + // the number of Spot requests with a maximum price equal to or higher than + // the Spot price exceeded available capacity or because of an increase in + // the Spot price. // // * Client.InstanceInitiatedShutdown: The instance was shut down using the // shutdown -h command from the instance. @@ -66478,14 +66474,17 @@ type StateReason struct { // * Client.InstanceTerminated: The instance was terminated or rebooted during // AMI creation. // + // * Client.InternalError: A client error caused the instance to terminate + // during launch. + // + // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // // * Client.UserInitiatedShutdown: The instance was shut down using the Amazon // EC2 API. // // * Client.VolumeLimitExceeded: The limit on the number of EBS volumes or // total storage was exceeded. Decrease usage or request an increase in your - // limits. 
- // - // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // account limits. Message *string `locationName:"message" type:"string"` } @@ -66969,7 +66968,7 @@ type TagSpecification struct { _ struct{} `type:"structure"` // The type of resource to tag. Currently, the resource types that support tagging - // on creation are instance and volume. + // on creation are instance, snapshot, and volume. ResourceType *string `locationName:"resourceType" type:"string" enum:"ResourceType"` // The tags to apply to the resource. @@ -70694,6 +70693,9 @@ const ( // InstanceTypeI316xlarge is a InstanceType enum value InstanceTypeI316xlarge = "i3.16xlarge" + // InstanceTypeI3Metal is a InstanceType enum value + InstanceTypeI3Metal = "i3.metal" + // InstanceTypeHi14xlarge is a InstanceType enum value InstanceTypeHi14xlarge = "hi1.4xlarge" @@ -70754,6 +70756,24 @@ const ( // InstanceTypeC518xlarge is a InstanceType enum value InstanceTypeC518xlarge = "c5.18xlarge" + // InstanceTypeC5dLarge is a InstanceType enum value + InstanceTypeC5dLarge = "c5d.large" + + // InstanceTypeC5dXlarge is a InstanceType enum value + InstanceTypeC5dXlarge = "c5d.xlarge" + + // InstanceTypeC5d2xlarge is a InstanceType enum value + InstanceTypeC5d2xlarge = "c5d.2xlarge" + + // InstanceTypeC5d4xlarge is a InstanceType enum value + InstanceTypeC5d4xlarge = "c5d.4xlarge" + + // InstanceTypeC5d9xlarge is a InstanceType enum value + InstanceTypeC5d9xlarge = "c5d.9xlarge" + + // InstanceTypeC5d18xlarge is a InstanceType enum value + InstanceTypeC5d18xlarge = "c5d.18xlarge" + // InstanceTypeCc14xlarge is a InstanceType enum value InstanceTypeCc14xlarge = "cc1.4xlarge" @@ -70832,6 +70852,24 @@ const ( // InstanceTypeM524xlarge is a InstanceType enum value InstanceTypeM524xlarge = "m5.24xlarge" + // InstanceTypeM5dLarge is a InstanceType enum value + InstanceTypeM5dLarge = "m5d.large" + + // InstanceTypeM5dXlarge is a InstanceType enum value + InstanceTypeM5dXlarge = "m5d.xlarge" + + // 
InstanceTypeM5d2xlarge is a InstanceType enum value + InstanceTypeM5d2xlarge = "m5d.2xlarge" + + // InstanceTypeM5d4xlarge is a InstanceType enum value + InstanceTypeM5d4xlarge = "m5d.4xlarge" + + // InstanceTypeM5d12xlarge is a InstanceType enum value + InstanceTypeM5d12xlarge = "m5d.12xlarge" + + // InstanceTypeM5d24xlarge is a InstanceType enum value + InstanceTypeM5d24xlarge = "m5d.24xlarge" + // InstanceTypeH12xlarge is a InstanceType enum value InstanceTypeH12xlarge = "h1.2xlarge" diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go index ba4433d388ebb..6acbc43fe3ded 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "ec2" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "ec2" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "EC2" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the EC2 client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go index a27823fdfb51d..07fc06af1f97c 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go @@ -3,14 +3,21 @@ package s3 import ( + "bytes" "fmt" "io" + "sync" + "sync/atomic" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awsutil" + "github.com/aws/aws-sdk-go/aws/client" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" + "github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi" + "github.com/aws/aws-sdk-go/private/protocol/rest" "github.com/aws/aws-sdk-go/private/protocol/restxml" ) @@ -6017,6 +6024,88 @@ func (c *S3) RestoreObjectWithContext(ctx aws.Context, input *RestoreObjectInput return out, req.Send() } +const opSelectObjectContent = "SelectObjectContent" + +// SelectObjectContentRequest generates a "aws/request.Request" representing the +// client's request for the SelectObjectContent operation. The "output" return +// value will be populated with the request's response once the request completes +// successfuly. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See SelectObjectContent for more information on using the SelectObjectContent +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. 
+// +// +// // Example sending a request using the SelectObjectContentRequest method. +// req, resp := client.SelectObjectContentRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContentRequest(input *SelectObjectContentInput) (req *request.Request, output *SelectObjectContentOutput) { + op := &request.Operation{ + Name: opSelectObjectContent, + HTTPMethod: "POST", + HTTPPath: "/{Bucket}/{Key+}?select&select-type=2", + } + + if input == nil { + input = &SelectObjectContentInput{} + } + + output = &SelectObjectContentOutput{} + req = c.newRequest(op, input, output) + req.Handlers.Send.Swap(client.LogHTTPResponseHandler.Name, client.LogHTTPResponseHeaderHandler) + req.Handlers.Unmarshal.Swap(restxml.UnmarshalHandler.Name, rest.UnmarshalHandler) + req.Handlers.Unmarshal.PushBack(output.runEventStreamLoop) + return +} + +// SelectObjectContent API operation for Amazon Simple Storage Service. +// +// This operation filters the contents of an Amazon S3 object based on a simple +// Structured Query Language (SQL) statement. In the request, along with the +// SQL expression, you must also specify a data serialization format (JSON or +// CSV) of the object. Amazon S3 uses this to parse object data into records, +// and returns only records that match the specified SQL expression. You must +// also specify the data serialization format for the response. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon Simple Storage Service's +// API operation SelectObjectContent for usage and error information. 
+// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContent(input *SelectObjectContentInput) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + return out, req.Send() +} + +// SelectObjectContentWithContext is the same as SelectObjectContent with the addition of +// the ability to pass a context and additional request options. +// +// See SelectObjectContent for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *S3) SelectObjectContentWithContext(ctx aws.Context, input *SelectObjectContentInput, opts ...request.Option) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + const opUploadPart = "UploadPart" // UploadPartRequest generates a "aws/request.Request" representing the @@ -7474,6 +7563,32 @@ func (s *Condition) SetKeyPrefixEquals(v string) *Condition { return s } +type ContinuationEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s ContinuationEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ContinuationEvent) GoString() string { + return s.String() +} + +// The ContinuationEvent is and event in the SelectObjectContentEventStream group of events. +func (s *ContinuationEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ContinuationEvent value. +// This method is only used internally within the SDK's EventStream handling. 
+func (s *ContinuationEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type CopyObjectInput struct { _ struct{} `type:"structure"` @@ -9919,6 +10034,32 @@ func (s *EncryptionConfiguration) SetReplicaKmsKeyID(v string) *EncryptionConfig return s } +type EndEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s EndEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s EndEvent) GoString() string { + return s.String() +} + +// The EndEvent is and event in the SelectObjectContentEventStream group of events. +func (s *EndEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the EndEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *EndEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type Error struct { _ struct{} `type:"structure"` @@ -16380,6 +16521,87 @@ func (s *Part) SetSize(v int64) *Part { return s } +type Progress struct { + _ struct{} `type:"structure"` + + // Current number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Current number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Current number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Progress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Progress) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Progress) SetBytesProcessed(v int64) *Progress { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. 
+func (s *Progress) SetBytesReturned(v int64) *Progress { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Progress) SetBytesScanned(v int64) *Progress { + s.BytesScanned = &v + return s +} + +type ProgressEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Progress event details. + Details *Progress `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s ProgressEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ProgressEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *ProgressEvent) SetDetails(v *Progress) *ProgressEvent { + s.Details = v + return s +} + +// The ProgressEvent is and event in the SelectObjectContentEventStream group of events. +func (s *ProgressEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ProgressEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *ProgressEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type PutBucketAccelerateConfigurationInput struct { _ struct{} `type:"structure" payload:"AccelerateConfiguration"` @@ -18622,6 +18844,45 @@ func (s *QueueConfigurationDeprecated) SetQueue(v string) *QueueConfigurationDep return s } +type RecordsEvent struct { + _ struct{} `type:"structure" payload:"Payload"` + + // The byte array of partial, one or more result records. + // + // Payload is automatically base64 encoded/decoded by the SDK. 
+ Payload []byte `type:"blob"` +} + +// String returns the string representation +func (s RecordsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RecordsEvent) GoString() string { + return s.String() +} + +// SetPayload sets the Payload field's value. +func (s *RecordsEvent) SetPayload(v []byte) *RecordsEvent { + s.Payload = v + return s +} + +// The RecordsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *RecordsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the RecordsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *RecordsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + s.Payload = make([]byte, len(msg.Payload)) + copy(s.Payload, msg.Payload) + return nil +} + type Redirect struct { _ struct{} `type:"structure"` @@ -18939,6 +19200,30 @@ func (s *RequestPaymentConfiguration) SetPayer(v string) *RequestPaymentConfigur return s } +type RequestProgress struct { + _ struct{} `type:"structure"` + + // Specifies whether periodic QueryProgress frames should be sent. Valid values: + // TRUE, FALSE. Default value: FALSE. + Enabled *bool `type:"boolean"` +} + +// String returns the string representation +func (s RequestProgress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RequestProgress) GoString() string { + return s.String() +} + +// SetEnabled sets the Enabled field's value. 
+func (s *RequestProgress) SetEnabled(v bool) *RequestProgress { + s.Enabled = &v + return s +} + type RestoreObjectInput struct { _ struct{} `type:"structure" payload:"RestoreRequest"` @@ -19392,6 +19677,436 @@ func (s SSES3) GoString() string { return s.String() } +// SelectObjectContentEventStream provides handling of EventStreams for +// the SelectObjectContent API. +// +// Use this type to receive SelectObjectContentEventStream events. The events +// can be read from the Events channel member. +// +// The events that can be received are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStream struct { + // Reader is the EventStream reader for the SelectObjectContentEventStream + // events. This value is automatically set by the SDK when the API call is made + // Use this member when unit testing your code with the SDK to mock out the + // EventStream Reader. + // + // Must not be nil. + Reader SelectObjectContentEventStreamReader + + // StreamCloser is the io.Closer for the EventStream connection. For HTTP + // EventStream this is the response Body. The stream will be closed when + // the Close method of the EventStream is called. + StreamCloser io.Closer +} + +// Close closes the EventStream. This will also cause the Events channel to be +// closed. You can use the closing of the Events channel to terminate your +// application's read from the API's EventStream. +// +// Will close the underlying EventStream reader. For EventStream over HTTP +// connection this will also close the HTTP connection. +// +// Close must be called when done using the EventStream API. Not calling Close +// may result in resource leaks. +func (es *SelectObjectContentEventStream) Close() (err error) { + es.Reader.Close() + return es.Err() +} + +// Err returns any error that occurred while reading EventStream Events from +// the service API's response. Returns nil if there were no errors. 
+func (es *SelectObjectContentEventStream) Err() error { + if err := es.Reader.Err(); err != nil { + return err + } + es.StreamCloser.Close() + + return nil +} + +// Events returns a channel to read EventStream Events from the +// SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +func (es *SelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return es.Reader.Events() +} + +// SelectObjectContentEventStreamEvent groups together all EventStream +// events read from the SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamEvent interface { + eventSelectObjectContentEventStream() +} + +// SelectObjectContentEventStreamReader provides the interface for reading EventStream +// Events from the SelectObjectContent API. The +// default implementation for this interface will be SelectObjectContentEventStream. +// +// The reader's Close method must allow multiple concurrent calls. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamReader interface { + // Returns a channel of events as they are read from the event stream. + Events() <-chan SelectObjectContentEventStreamEvent + + // Close will close the underlying event stream reader. For event stream over + // HTTP this will also close the HTTP connection. + Close() error + + // Returns any error that has occured while reading from the event stream. 
+ Err() error +} + +type readSelectObjectContentEventStream struct { + eventReader *eventstreamapi.EventReader + stream chan SelectObjectContentEventStreamEvent + errVal atomic.Value + + done chan struct{} + closeOnce sync.Once +} + +func newReadSelectObjectContentEventStream( + reader io.ReadCloser, + unmarshalers request.HandlerList, + logger aws.Logger, + logLevel aws.LogLevelType, +) *readSelectObjectContentEventStream { + r := &readSelectObjectContentEventStream{ + stream: make(chan SelectObjectContentEventStreamEvent), + done: make(chan struct{}), + } + + r.eventReader = eventstreamapi.NewEventReader( + reader, + protocol.HandlerPayloadUnmarshal{ + Unmarshalers: unmarshalers, + }, + r.unmarshalerForEventType, + ) + r.eventReader.UseLogger(logger, logLevel) + + return r +} + +// Close will close the underlying event stream reader. For EventStream over +// HTTP this will also close the HTTP connection. +func (r *readSelectObjectContentEventStream) Close() error { + r.closeOnce.Do(r.safeClose) + + return r.Err() +} + +func (r *readSelectObjectContentEventStream) safeClose() { + close(r.done) + err := r.eventReader.Close() + if err != nil { + r.errVal.Store(err) + } +} + +func (r *readSelectObjectContentEventStream) Err() error { + if v := r.errVal.Load(); v != nil { + return v.(error) + } + + return nil +} + +func (r *readSelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return r.stream +} + +func (r *readSelectObjectContentEventStream) readEventStream() { + defer close(r.stream) + + for { + event, err := r.eventReader.ReadEvent() + if err != nil { + if err == io.EOF { + return + } + select { + case <-r.done: + // If closed already ignore the error + return + default: + } + r.errVal.Store(err) + return + } + + select { + case r.stream <- event.(SelectObjectContentEventStreamEvent): + case <-r.done: + return + } + } +} + +func (r *readSelectObjectContentEventStream) unmarshalerForEventType( + eventType string, +) 
(eventstreamapi.Unmarshaler, error) { + switch eventType { + case "Cont": + return &ContinuationEvent{}, nil + + case "End": + return &EndEvent{}, nil + + case "Progress": + return &ProgressEvent{}, nil + + case "Records": + return &RecordsEvent{}, nil + + case "Stats": + return &StatsEvent{}, nil + default: + return nil, fmt.Errorf( + "unknown event type name, %s, for SelectObjectContentEventStream", eventType) + } +} + +// Request to filter the contents of an Amazon S3 object based on a simple Structured +// Query Language (SQL) statement. In the request, along with the SQL expression, +// you must also specify a data serialization format (JSON or CSV) of the object. +// Amazon S3 uses this to parse object data into records, and returns only records +// that match the specified SQL expression. You must also specify the data serialization +// format for the response. For more information, go to S3Select API Documentation +// (https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectSELECTContent.html) +type SelectObjectContentInput struct { + _ struct{} `locationName:"SelectObjectContentRequest" type:"structure" xmlURI:"http://s3.amazonaws.com/doc/2006-03-01/"` + + // The S3 Bucket. + // + // Bucket is a required field + Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"` + + // The expression that is used to query the object. + // + // Expression is a required field + Expression *string `type:"string" required:"true"` + + // The type of the provided expression (e.g., SQL). + // + // ExpressionType is a required field + ExpressionType *string `type:"string" required:"true" enum:"ExpressionType"` + + // Describes the format of the data in the object that is being queried. + // + // InputSerialization is a required field + InputSerialization *InputSerialization `type:"structure" required:"true"` + + // The Object Key. 
+ // + // Key is a required field + Key *string `location:"uri" locationName:"Key" min:"1" type:"string" required:"true"` + + // Describes the format of the data that you want Amazon S3 to return in response. + // + // OutputSerialization is a required field + OutputSerialization *OutputSerialization `type:"structure" required:"true"` + + // Specifies if periodic request progress information should be enabled. + RequestProgress *RequestProgress `type:"structure"` + + // The SSE Algorithm used to encrypt the object. For more information, go to + // Server-Side Encryption (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"` + + // The SSE Customer Key. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"` + + // The SSE Customer Key MD5. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"` +} + +// String returns the string representation +func (s SelectObjectContentInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. 
+func (s *SelectObjectContentInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "SelectObjectContentInput"} + if s.Bucket == nil { + invalidParams.Add(request.NewErrParamRequired("Bucket")) + } + if s.Expression == nil { + invalidParams.Add(request.NewErrParamRequired("Expression")) + } + if s.ExpressionType == nil { + invalidParams.Add(request.NewErrParamRequired("ExpressionType")) + } + if s.InputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("InputSerialization")) + } + if s.Key == nil { + invalidParams.Add(request.NewErrParamRequired("Key")) + } + if s.Key != nil && len(*s.Key) < 1 { + invalidParams.Add(request.NewErrParamMinLen("Key", 1)) + } + if s.OutputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("OutputSerialization")) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetBucket sets the Bucket field's value. +func (s *SelectObjectContentInput) SetBucket(v string) *SelectObjectContentInput { + s.Bucket = &v + return s +} + +func (s *SelectObjectContentInput) getBucket() (v string) { + if s.Bucket == nil { + return v + } + return *s.Bucket +} + +// SetExpression sets the Expression field's value. +func (s *SelectObjectContentInput) SetExpression(v string) *SelectObjectContentInput { + s.Expression = &v + return s +} + +// SetExpressionType sets the ExpressionType field's value. +func (s *SelectObjectContentInput) SetExpressionType(v string) *SelectObjectContentInput { + s.ExpressionType = &v + return s +} + +// SetInputSerialization sets the InputSerialization field's value. +func (s *SelectObjectContentInput) SetInputSerialization(v *InputSerialization) *SelectObjectContentInput { + s.InputSerialization = v + return s +} + +// SetKey sets the Key field's value. +func (s *SelectObjectContentInput) SetKey(v string) *SelectObjectContentInput { + s.Key = &v + return s +} + +// SetOutputSerialization sets the OutputSerialization field's value. 
+func (s *SelectObjectContentInput) SetOutputSerialization(v *OutputSerialization) *SelectObjectContentInput { + s.OutputSerialization = v + return s +} + +// SetRequestProgress sets the RequestProgress field's value. +func (s *SelectObjectContentInput) SetRequestProgress(v *RequestProgress) *SelectObjectContentInput { + s.RequestProgress = v + return s +} + +// SetSSECustomerAlgorithm sets the SSECustomerAlgorithm field's value. +func (s *SelectObjectContentInput) SetSSECustomerAlgorithm(v string) *SelectObjectContentInput { + s.SSECustomerAlgorithm = &v + return s +} + +// SetSSECustomerKey sets the SSECustomerKey field's value. +func (s *SelectObjectContentInput) SetSSECustomerKey(v string) *SelectObjectContentInput { + s.SSECustomerKey = &v + return s +} + +func (s *SelectObjectContentInput) getSSECustomerKey() (v string) { + if s.SSECustomerKey == nil { + return v + } + return *s.SSECustomerKey +} + +// SetSSECustomerKeyMD5 sets the SSECustomerKeyMD5 field's value. +func (s *SelectObjectContentInput) SetSSECustomerKeyMD5(v string) *SelectObjectContentInput { + s.SSECustomerKeyMD5 = &v + return s +} + +type SelectObjectContentOutput struct { + _ struct{} `type:"structure" payload:"Payload"` + + // Use EventStream to use the API's stream. + EventStream *SelectObjectContentEventStream `type:"structure"` +} + +// String returns the string representation +func (s SelectObjectContentOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentOutput) GoString() string { + return s.String() +} + +// SetEventStream sets the EventStream field's value. 
+func (s *SelectObjectContentOutput) SetEventStream(v *SelectObjectContentEventStream) *SelectObjectContentOutput { + s.EventStream = v + return s +} + +func (s *SelectObjectContentOutput) runEventStreamLoop(r *request.Request) { + if r.Error != nil { + return + } + reader := newReadSelectObjectContentEventStream( + r.HTTPResponse.Body, + r.Handlers.UnmarshalStream, + r.Config.Logger, + r.Config.LogLevel.Value(), + ) + go reader.readEventStream() + + eventStream := &SelectObjectContentEventStream{ + StreamCloser: r.HTTPResponse.Body, + Reader: reader, + } + s.EventStream = eventStream +} + // Describes the parameters for Select job types. type SelectParameters struct { _ struct{} `type:"structure"` @@ -19696,6 +20411,87 @@ func (s *SseKmsEncryptedObjects) SetStatus(v string) *SseKmsEncryptedObjects { return s } +type Stats struct { + _ struct{} `type:"structure"` + + // Total number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Total number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Total number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Stats) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Stats) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Stats) SetBytesProcessed(v int64) *Stats { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. +func (s *Stats) SetBytesReturned(v int64) *Stats { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Stats) SetBytesScanned(v int64) *Stats { + s.BytesScanned = &v + return s +} + +type StatsEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Stats event details. 
+ Details *Stats `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s StatsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s StatsEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *StatsEvent) SetDetails(v *Stats) *StatsEvent { + s.Details = v + return s +} + +// The StatsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *StatsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the StatsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *StatsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type StorageClassAnalysis struct { _ struct{} `type:"structure"` diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go index 614e477d3bb73..20de53f29d79d 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "s3" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "s3" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "S3" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the S3 client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, @@ -71,6 +73,8 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio svc.Handlers.UnmarshalMeta.PushBackNamed(restxml.UnmarshalMetaHandler) svc.Handlers.UnmarshalError.PushBackNamed(restxml.UnmarshalErrorHandler) + svc.Handlers.UnmarshalStream.PushBackNamed(restxml.UnmarshalHandler) + // Run custom client initialization if present if initClient != nil { initClient(svc.Client) diff --git a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go index 1ee5839e0462b..185c914d1b30a 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "sts" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "sts" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "STS" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the STS client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE new file mode 100644 index 0000000000000..c35c17af98083 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2015 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/main.go b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go new file mode 100644 index 0000000000000..6a77d1243173f --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go @@ -0,0 +1,29 @@ +// Package sanitized_anchor_name provides a func to create sanitized anchor names. 
+// +// Its logic can be reused by multiple packages to create interoperable anchor names +// and links to those anchors. +// +// At this time, it does not try to ensure that generated anchor names +// are unique, that responsibility falls on the caller. +package sanitized_anchor_name // import "github.com/shurcooL/sanitized_anchor_name" + +import "unicode" + +// Create returns a sanitized anchor name for the given text. +func Create(text string) string { + var anchorName []rune + var futureDash = false + for _, r := range text { + switch { + case unicode.IsLetter(r) || unicode.IsNumber(r): + if futureDash && len(anchorName) > 0 { + anchorName = append(anchorName, '-') + } + futureDash = false + anchorName = append(anchorName, unicode.ToLower(r)) + default: + futureDash = true + } + } + return string(anchorName) +}