Commit 67daa9b0 by Daniel Lee, committed by GitHub

Merge pull request #11588 from mjtrangoni/fix-codespell-issues

Fix codespell issues
Parents: 52bd51f2, 638f7d23
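The corrections below are the kind of output produced by the codespell tool named in the source branch (`fix-codespell-issues`). The exact command used for this PR is not recorded in the commit; a minimal sketch of a typical invocation, with an assumed skip list that is illustrative rather than taken from the PR, would be:

```bash
# Hypothetical invocation; --skip/--quiet-level/--write-changes are standard
# codespell flags, but the paths and globs here are assumptions.
pip install codespell

# Report misspellings, skipping vendored and minified files.
codespell --quiet-level 3 --skip="./vendor,./node_modules,*.min.js"

# Apply the suggested fixes in place.
codespell --write-changes --quiet-level 3 --skip="./vendor,./node_modules,*.min.js"
```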
@@ -64,7 +64,7 @@
 #################################### Database ####################################
 [database]
 # You can configure the database connection by specifying type, host, name, user and password
-# as seperate properties or as on string using the url propertie.
+# as separate properties or as on string using the url properties.
 # Either "mysql", "postgres" or "sqlite3", it's your choice
 ;type = sqlite3
@@ -38,7 +38,7 @@ CACHE_QUERY_PORT = 7002
 LOG_UPDATES = False
-# Enable AMQP if you want to receve metrics using an amqp broker
+# Enable AMQP if you want to receive metrics using an amqp broker
 # ENABLE_AMQP = False
 # Verbose means a line will be logged for every metric received
@@ -41,7 +41,7 @@ PICKLE_RECEIVER_PORT = 2004
 CACHE_QUERY_INTERFACE = 0.0.0.0
 CACHE_QUERY_PORT = 7002
-# Enable AMQP if you want to receve metrics using you amqp broker
+# Enable AMQP if you want to receive metrics using you amqp broker
 ENABLE_AMQP = True
 # Verbose means a line will be logged for every metric received
@@ -265,7 +265,7 @@ WHISPER_FALLOCATE_CREATE = True
 # CARBON_METRIC_PREFIX = carbon
 # CARBON_METRIC_INTERVAL = 60
-# Enable AMQP if you want to receve metrics using an amqp broker
+# Enable AMQP if you want to receive metrics using an amqp broker
 # ENABLE_AMQP = False
 # Verbose means a line will be logged for every metric received
@@ -30,7 +30,7 @@ give_completer_focus = shift-space
 # pertain only to specific metric types.
 #
 # The dashboard presents only metrics that fall into specified naming schemes
-# defined in this file. This creates a simpler, more targetted view of the
+# defined in this file. This creates a simpler, more targeted view of the
 # data. The general form for defining a naming scheme is as follows:
 #
 #[Metric Type]
@@ -22,6 +22,6 @@ log() {
 log $RUN_CMD
 $RUN_CMD
-# Exit immidiately in case of any errors or when we have interactive terminal
+# Exit immediately in case of any errors or when we have interactive terminal
 if [[ $? != 0 ]] || test -t 0; then exit $?; fi
 log
@@ -206,7 +206,7 @@ When Grafana starts, it will update/insert all dashboards available in the confi
 ### Reuseable Dashboard Urls
-If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards betweens Grafana instances and provisioning Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifer.
+If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards betweens Grafana instances and provisioning Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifier.
 When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated.
 By default Grafana will delete dashboards in the database if the file is removed. You can disable this behavior using the `disableDeletion` setting.
@@ -153,7 +153,7 @@ Prometheus Alertmanager | `prometheus-alertmanager` | no
 # Enable images in notifications {#external-image-store}
-Grafana can render the panel associated with the alert rule and include that in the notification. Most Notification Channels require that this image be publicly accessable (Slack and PagerDuty for example). In order to include images in alert notifications, Grafana can upload the image to an image store. It currently supports
+Grafana can render the panel associated with the alert rule and include that in the notification. Most Notification Channels require that this image be publicly accessible (Slack and PagerDuty for example). In order to include images in alert notifications, Grafana can upload the image to an image store. It currently supports
 Amazon S3, Webdav, Google Cloud Storage and Azure Blob Storage. So to set that up you need to configure the [external image uploader](/installation/configuration/#external-image-storage) in your grafana-server ini config file.
 Be aware that some notifiers requires public access to the image to be able to include it in the notification. So make sure to enable public access to the images. If you're using local image uploader, your Grafana instance need to be accessible by the internet.
@@ -110,7 +110,7 @@ to `Keep Last State` in order to basically ignore them.
 ## Notifications
-In alert tab you can also specify alert rule notifications along with a detailed messsage about the alert rule.
+In alert tab you can also specify alert rule notifications along with a detailed message about the alert rule.
 The message can contain anything, information about how you might solve the issue, link to runbook, etc.
 The actual notifications are configured and shared between multiple alerts. Read the
 +++
 title = "Contributor Licence Agreement (CLA)"
-description = "Contributer Licence Agreement (CLA)"
+description = "Contributor Licence Agreement (CLA)"
 type = "docs"
 aliases = ["/project/cla", "docs/contributing/cla.html"]
 [menu.docs]
@@ -101,4 +101,4 @@ TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU [OR US]
 <br>
 <br>
 <br>
-This CLA aggreement is based on the [Harmony Contributor Aggrement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/)
+This CLA agreement is based on the [Harmony Contributor Aggrement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/)
@@ -78,7 +78,7 @@ the existing time series data in OpenTSDB, you need to run `tsdb uid metasync` o
 ### Nested Templating
-One template variable can be used to filter tag values for another template varible. First parameter is the metric name,
+One template variable can be used to filter tag values for another template variable. First parameter is the metric name,
 second parameter is the tag key for which you need to find tag values, and after that all other dependent template variables.
 Some examples are mentioned below to make nested template queries work successfully.
@@ -106,4 +106,4 @@ datasources:
 jsonData:
 tsdbResolution: 1
 tsdbVersion: 1
-```
\ No newline at end of file
+```
@@ -14,7 +14,7 @@ weight = 4
 {{< docs-imagebox img="/img/docs/v45/alert-list-panel.png" max-width="850px" >}}
-The alert list panel allows you to display your dashbords alerts. The list can be configured to show current state or recent state changes. You can read more about alerts [here](http://docs.grafana.org/alerting/rules).
+The alert list panel allows you to display your dashboards alerts. The list can be configured to show current state or recent state changes. You can read more about alerts [here](http://docs.grafana.org/alerting/rules).
 ## Alert List Options
@@ -25,7 +25,7 @@ The dashboard list panel allows you to display dynamic links to other dashboards
 1. **Starred**: The starred dashboard selection displays starred dashboards in alphabetical order.
 2. **Recently Viewed**: The recently viewed dashboard selection displays recently viewed dashboards in alphabetical order.
 3. **Search**: The search dashboard selection displays dashboards by search query or tag(s).
-4. **Show Headings**: When show headings is ticked the choosen list selection(Starred, Recently Viewed, Search) is shown as a heading.
+4. **Show Headings**: When show headings is ticked the chosen list selection(Starred, Recently Viewed, Search) is shown as a heading.
 5. **Max Items**: Max items set the maximum of items in a list.
 6. **Query**: Here is where you enter your query you want to search by. Queries are case-insensitive, and partial values are accepted.
 7. **Tags**: Here is where you enter your tag(s) you want to search by. Note that existing tags will not appear as you type, and *are* case sensitive. To see a list of existing tags, you can always return to the dashboard, open the Dashboard Picker at the top and click `tags` link in the search bar.
@@ -30,7 +30,7 @@ The singlestat panel has a normal query editor to allow you define your exact me
 * **total** - The sum of all the non-null values in the series
 * **first** - The first value in the series
 * **delta** - The total incremental increase (of a counter) in the series. An attempt is made to account for counter resets, but this will only be accurate for single instance metrics. Used to show total counter increase in time series.
-* **diff** - The difference betwen 'current' (last value) and 'first'.
+* **diff** - The difference between 'current' (last value) and 'first'.
 * **range** - The difference between 'min' and 'max'. Useful the show the range of change for a gauge.
 2. **Prefix/Postfix**: The Prefix/Postfix fields let you define a custom label to appear *before/after* the value. The `$__name` variable can be used here to use the series name or alias from the metric query.
 3. **Units**: Units are appended to the the Singlestat within the panel, and will respect the color and threshold settings for the value.
@@ -70,7 +70,7 @@ Gauges gives a clear picture of how high a value is in it's context. It's a grea
 {{< docs-imagebox img="/img/docs/v45/singlestat-gauge-options.png" max-width="500px" class="docs-image--right docs-image--no-shadow">}}
-1. **Show**: The show checkbox will toggle wether the gauge is shown in the panel. When unselected, only the Singlestat value will appear.
+1. **Show**: The show checkbox will toggle whether the gauge is shown in the panel. When unselected, only the Singlestat value will appear.
 2. **Min/Max**: This sets the start and end point for the gauge.
 3. **Threshold Labels**: Check if you want to show the threshold labels. Thresholds are set in the color options.
 4. **Threshold Markers**: Check if you want to have a second meter showing the thresholds.
@@ -15,7 +15,7 @@ support for multiple Cloudwatch credentials.
 <img src="/assets/img/features/table-panel.png">
 The new table panel is very flexible, supporting both multiple modes for time series as well as for
-table, annotation and raw JSON data. It also provides date formating and value formating and coloring options.
+table, annotation and raw JSON data. It also provides date formatting and value formatting and coloring options.
 ### Time series to rows
@@ -33,7 +33,7 @@ You can enable/disable the shared tooltip from the dashboard settings menu or cy
 {{< imgbox max-width="60%" img="/img/docs/v41/helptext_for_panel_settings.png" caption="Hovering help text" >}}
-You can set a help text in the general tab on any panel. The help text is using Markdown to enable better formating and linking to other sites that can provide more information.
+You can set a help text in the general tab on any panel. The help text is using Markdown to enable better formatting and linking to other sites that can provide more information.
 <div class="clearfix"></div>
@@ -12,7 +12,7 @@ weight = -4
 # What's New in Grafana v4.5
-## Hightlights
+## Highlights
 ### New prometheus query editor
@@ -62,7 +62,7 @@ Datas source selection & options & help are now above your metric queries.
 ### Minor Changes
 * **InfluxDB**: Change time range filter for absolute time ranges to be inclusive instead of exclusive [#8319](https://github.com/grafana/grafana/issues/8319), thx [@Oxydros](https://github.com/Oxydros)
-* **InfluxDB**: Added paranthesis around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131)
+* **InfluxDB**: Added parenthesis around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131)
 ## Bug Fixes
@@ -45,7 +45,7 @@ This makes exploring and filtering Prometheus data much easier.
 * **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin)
 * **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187)
-* **Graph**: Add support for local formating in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk)
+* **Graph**: Add support for local formatting in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk)
 * **Jaeger**: Add support for open tracing using jaeger in Grafana. [#9213](https://github.com/grafana/grafana/pull/9213)
 * **Unit types**: New date & time unit types added, useful in singlestat to show dates & times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/2764)
 * **CLI**: Make it possible to install plugins from any url [#5873](https://github.com/grafana/grafana/issues/5873)
@@ -307,7 +307,7 @@ Content-Type: application/json
 `PUT /api/orgs/:orgId`
-Update Organisation, fields *Adress 1*, *Adress 2*, *City* are not implemented yet.
+Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented yet.
 **Example Request**:
@@ -436,4 +436,4 @@ HTTP/1.1 200
 Content-Type: application/json
 {"message":"User removed from organization"}
-```
\ No newline at end of file
+```
@@ -482,7 +482,7 @@ Set api_url to the resource that returns [OpenID UserInfo](https://connect2id.co
 First set up Grafana as an OpenId client "webapplication" in Okta. Then set the Base URIs to `https://<grafana domain>/` and set the Login redirect URIs to `https://<grafana domain>/login/generic_oauth`.
-Finaly set up the generic oauth module like this:
+Finally set up the generic oauth module like this:
 ```bash
 [auth.generic_oauth]
 name = Okta
@@ -12,7 +12,7 @@ weight = 4
 # Installing using Docker
-Grafana is very easy to install and run using the offical docker container.
+Grafana is very easy to install and run using the official docker container.
 ```bash
 $ docker run -d -p 3000:3000 grafana/grafana
@@ -25,7 +25,7 @@ Before upgrading it can be a good idea to backup your Grafana database. This wil
 If you use sqlite you only need to make a backup of your `grafana.db` file. This is usually located at `/var/lib/grafana/grafana.db` on unix system.
 If you are unsure what database you use and where it is stored check you grafana configuration file. If you
-installed grafana to custom location using a binary tar/zip it is usally in `<grafana_install_dir>/data`.
+installed grafana to custom location using a binary tar/zip it is usually in `<grafana_install_dir>/data`.
 #### mysql
@@ -168,7 +168,7 @@ Option | Description
 *Include All option* | Add a special `All` option whose value includes all options.
 *Custom all value* | By default the `All` value will include all options in combined expression. This can become very long and can have performance problems. Many times it can be better to specify a custom all value, like a wildcard regex. To make it possible to have custom regex, globs or lucene syntax in the **Custom all value** option it is never escaped so you will have to think avbout what is a valid value for your data source.
-### Formating multiple values
+### Formatting multiple values
 Interpolating a variable with multiple values selected is tricky as it is not straight forward how to format the multiple values to into a string that
 is valid in the given context where the variable is used. Grafana tries to solve this by allowing each data source plugin to
@@ -186,7 +186,7 @@ break the regex expression.
 **Elasticsearch** uses lucene query syntax, so the same variable would, in this case, be formatted as `("host1" OR "host2" OR "host3")`. In this case every value
 needs to be escaped so that the value can contain lucene control words and quotation marks.
-#### Formating troubles
+#### Formatting troubles
 Automatic escaping & formatting can cause problems and it can be tricky to grasp the logic is behind it.
 Especially for InfluxDB and Prometheus where the use of regex syntax requires that the variable is used in regex operator context.
@@ -108,7 +108,7 @@ In this example we use Apache as a reverseProxy in front of Grafana. Apache hand
 * The next part of the configuration is the tricky part. We use Apache’s rewrite engine to create our **X-WEBAUTH-USER header**, populated with the authenticated user.
-* **RewriteRule .* - [E=PROXY_USER:%{LA-U:REMOTE_USER}, NS]**: This line is a little bit of magic. What it does, is for every request use the rewriteEngines look-ahead (LA-U) feature to determine what the REMOTE_USER variable would be set to after processing the request. Then assign the result to the variable PROXY_USER. This is neccessary as the REMOTE_USER variable is not available to the RequestHeader function.
+* **RewriteRule .* - [E=PROXY_USER:%{LA-U:REMOTE_USER}, NS]**: This line is a little bit of magic. What it does, is for every request use the rewriteEngines look-ahead (LA-U) feature to determine what the REMOTE_USER variable would be set to after processing the request. Then assign the result to the variable PROXY_USER. This is necessary as the REMOTE_USER variable is not available to the RequestHeader function.
 * **RequestHeader set X-WEBAUTH-USER “%{PROXY_USER}e”**: With the authenticated username now stored in the PROXY_USER variable, we create a new HTTP request header that will be sent to our backend Grafana containing the username.
@@ -149,7 +149,7 @@ auto_sign_up = true
 ##### Grafana Container
-For this example, we use the offical Grafana docker image available at [Docker Hub](https://hub.docker.com/r/grafana/grafana/)
+For this example, we use the official Grafana docker image available at [Docker Hub](https://hub.docker.com/r/grafana/grafana/)
 * Create a file `grafana.ini` with the following contents
@@ -166,7 +166,7 @@ header_property = username
 auto_sign_up = true
 ```
-* Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We dont expose any ports for this container as it will only be connected to by our Apache container.
+* Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We don't expose any ports for this container as it will only be connected to by our Apache container.
 ```bash
 docker run -i -v $(pwd)/grafana.ini:/etc/grafana/grafana.ini --name grafana grafana/grafana
@@ -174,7 +174,7 @@ docker run -i -v $(pwd)/grafana.ini:/etc/grafana/grafana.ini --name grafana graf
 ### Apache Container
-For this example we use the offical Apache docker image available at [Docker Hub](https://hub.docker.com/_/httpd/)
+For this example we use the official Apache docker image available at [Docker Hub](https://hub.docker.com/_/httpd/)
 * Create a file `httpd.conf` with the following contents
@@ -244,4 +244,4 @@ ProxyPassReverse / http://grafana:3000/
 ### Use grafana.
-With our Grafana and Apache containers running, you can now connect to http://localhost/ and log in using the username/password we created in the htpasswd file.
\ No newline at end of file
+With our Grafana and Apache containers running, you can now connect to http://localhost/ and log in using the username/password we created in the htpasswd file.
@@ -75,7 +75,7 @@ func GetTestDataScenarios(c *m.ReqContext) Response {
 return JSON(200, &result)
 }
-// Genereates a index out of range error
+// Generates a index out of range error
 func GenerateError(c *m.ReqContext) Response {
 var array []string
 return JSON(200, array[20])
@@ -48,7 +48,7 @@ type StaticOptions struct {
 // Expires defines which user-defined function to use for producing a HTTP Expires Header
 // https://developers.google.com/speed/docs/insights/LeverageBrowserCaching
 AddHeaders func(ctx *macaron.Context)
-// FileSystem is the interface for supporting any implmentation of file system.
+// FileSystem is the interface for supporting any implementation of file system.
 FileSystem http.FileSystem
 }
@@ -22,7 +22,7 @@ const (
 )
 var (
-// changeTypeToSymbol is used for populating the terminating characer in
+// changeTypeToSymbol is used for populating the terminating character in
 // the diff
 changeTypeToSymbol = map[ChangeType]string{
 ChangeNil: "",
@@ -76,10 +76,10 @@ func TestFirst(t *testing.T) {
 assert.True(s == "fallback", "must get string return fallback")
 s, err = j.GetString("name")
-assert.True(s == "anton" && err == nil, "name shoud match")
+assert.True(s == "anton" && err == nil, "name should match")
 s, err = j.GetString("address", "street")
-assert.True(s == "Street 42" && err == nil, "street shoud match")
+assert.True(s == "Street 42" && err == nil, "street should match")
 //log.Println("s: ", s.String())
 _, err = j.GetNumber("age")
@@ -35,7 +35,7 @@ var (
 slash = []byte("/")
 )
-// stack returns a nicely formated stack frame, skipping skip frames
+// stack returns a nicely formatted stack frame, skipping skip frames
 func stack(skip int) []byte {
 buf := new(bytes.Buffer) // the returned data
 // As we loop, we open files and read them. These variables record the currently
@@ -157,7 +157,7 @@ func NewDashboardFromJson(data *simplejson.Json) *Dashboard {
 return dash
 }
-// GetDashboardModel turns the command into the savable model
+// GetDashboardModel turns the command into the saveable model
 func (cmd *SaveDashboardCommand) GetDashboardModel() *Dashboard {
 dash := NewDashboardFromJson(cmd.Dashboard)
 userId := cmd.UserId
@@ -32,7 +32,7 @@ type Folder struct {
 HasAcl bool
 }
-// GetDashboardModel turns the command into the savable model
+// GetDashboardModel turns the command into the saveable model
 func (cmd *CreateFolderCommand) GetDashboardModel(orgId int64, userId int64) *Dashboard {
 dashFolder := NewDashboardFolder(strings.TrimSpace(cmd.Title))
 dashFolder.OrgId = orgId
@@ -12,7 +12,7 @@ import (
 func TestPluginScans(t *testing.T) {
-Convey("When scaning for plugins", t, func() {
+Convey("When scanning for plugins", t, func() {
 setting.StaticRootPath, _ = filepath.Abs("../../public/")
 setting.Cfg = ini.Empty()
 err := initPlugins(context.Background())
@@ -37,7 +37,7 @@ func GetPluginSettings(orgId int64) (map[string]*m.PluginSettingInfoDTO, error)
 // if it's included in app check app settings
 if pluginDef.IncludedInAppId != "" {
-// app componets are by default disabled
+// app components are by default disabled
 opt.Enabled = false
 if appSettings, ok := pluginMap[pluginDef.IncludedInAppId]; ok {
@@ -10,7 +10,7 @@ import (
 )
 type FakeEvalHandler struct {
-SuccessCallID int // 0 means never sucess
+SuccessCallID int // 0 means never success
 CallNb int
 }
@@ -87,7 +87,7 @@ func TestEngineProcessJob(t *testing.T) {
 Convey("Should trigger as many retries as needed", func() {
-Convey("never sucess -> max retries number", func() {
+Convey("never success -> max retries number", func() {
 expectedAttempts := alertMaxAttempts
 evalHandler := NewFakeEvalHandler(0)
 engine.evalHandler = evalHandler
@@ -96,7 +96,7 @@ func TestEngineProcessJob(t *testing.T) {
 So(evalHandler.CallNb, ShouldEqual, expectedAttempts)
 })
-Convey("always sucess -> never retry", func() {
+Convey("always success -> never retry", func() {
 expectedAttempts := 1
 evalHandler := NewFakeEvalHandler(1)
 engine.evalHandler = evalHandler
@@ -105,7 +105,7 @@ func TestEngineProcessJob(t *testing.T) {
 So(evalHandler.CallNb, ShouldEqual, expectedAttempts)
 })
-Convey("some errors before sucess -> some retries", func() {
+Convey("some errors before success -> some retries", func() {
 expectedAttempts := int(math.Ceil(float64(alertMaxAttempts) / 2))
 evalHandler := NewFakeEvalHandler(expectedAttempts)
 engine.evalHandler = evalHandler
@@ -111,7 +111,7 @@ func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error {
 }
 message := ""
-if evalContext.Rule.State != models.AlertStateOK { //dont add message when going back to alert state ok.
+if evalContext.Rule.State != models.AlertStateOK { //don't add message when going back to alert state ok.
 message += " " + evalContext.Rule.Message
 }
@@ -129,7 +129,7 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error {
 }
 message := this.Mention
-if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok.
+if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok.
 message += " " + evalContext.Rule.Message
 }
 image_url := ""
@@ -13,7 +13,7 @@ func init() {
 alerting.RegisterNotifier(&alerting.NotifierPlugin{
 Type: "teams",
 Name: "Microsoft Teams",
-Description: "Sends notifications using Incomming Webhook connector to Microsoft Teams",
+Description: "Sends notifications using Incoming Webhook connector to Microsoft Teams",
 Factory: NewTeamsNotifier,
 OptionsTemplate: `
 <h3 class="page-heading">Teams settings</h3>
@@ -76,7 +76,7 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error {
 }
 message := this.Mention
-if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok.
+if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok.
 message += " " + evalContext.Rule.Message
 } else {
 message += " " // summary must not be empty
@@ -100,7 +100,7 @@ func TestTelegramNotifier(t *testing.T) {
 So(caption, ShouldContainSubstring, "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise ")
 })
-Convey("Metrics should be skipped if they dont fit", func() {
+Convey("Metrics should be skipped if they don't fit", func() {
 evalContext := alerting.NewEvalContext(nil, &alerting.Rule{
 Name: "This is an alarm",
 Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ",
@@ -56,7 +56,7 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error {
 if err := bus.Dispatch(cmd); err != nil {
 if err == m.ErrCannotChangeStateOnPausedAlert {
-handler.log.Error("Cannot change state on alert thats pause", "error", err)
+handler.log.Error("Cannot change state on alert that's paused", "error", err)
 return err
 }
@@ -58,7 +58,7 @@ func (s *SchedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) {
 if job.OffsetWait && now%job.Offset == 0 {
 job.OffsetWait = false
-s.enque(job, execQueue)
+s.enqueue(job, execQueue)
 continue
 }
@@ -66,13 +66,13 @@ func (s *SchedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) {
 if job.Offset > 0 {
 job.OffsetWait = true
 } else {
-s.enque(job, execQueue)
+s.enqueue(job, execQueue)
 }
 }
 }
 }
-func (s *SchedulerImpl) enque(job *Job, execQueue chan *Job) {
+func (s *SchedulerImpl) enqueue(job *Job, execQueue chan *Job) {
 s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.Id)
 execQueue <- job
 }
@@ -113,7 +113,7 @@ func (g *dashboardGuardianImpl) checkAcl(permission m.PermissionType, acl []*m.D
 return false, err
 }
-// evalute team rules
+// evaluate team rules
 for _, p := range acl {
 for _, ug := range teams {
 if ug.Id == p.TeamId && p.Permission >= permission {
@@ -58,7 +58,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) {
 files, err := ioutil.ReadDir(cr.path)
 if err != nil {
-cr.log.Error("cant read dashboard provisioning files from directory", "path", cr.path)
+cr.log.Error("can't read dashboard provisioning files from directory", "path", cr.path)
 return dashboards, nil
 }
@@ -19,7 +19,7 @@ func (cr *configReader) readConfig(path string) ([]*DatasourcesAsConfig, error)
 files, err := ioutil.ReadDir(path)
 if err != nil {
-cr.log.Error("cant read datasource provisioning files from directory", "path", path)
+cr.log.Error("can't read datasource provisioning files from directory", "path", path)
 return datasources, nil
 }
@@ -21,7 +21,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
 }
 err := GetAlertNotifications(cmd)
-fmt.Printf("errror %v", err)
+fmt.Printf("error %v", err)
 So(err, ShouldBeNil)
 So(cmd.Result, ShouldBeNil)
 })
@@ -35,7 +35,7 @@ func TestOpenTsdbExecutor(t *testing.T) {
 })
-Convey("Build metric with downsampling diabled", func() {
+Convey("Build metric with downsampling disabled", func() {
 query := &tsdb.Query{
 Model: simplejson.New(),
@@ -79,7 +79,7 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
 }
 return fmt.Sprintf("extract(epoch from %s) as \"time\"", args[0]), nil
 case "__timeFilter":
-// dont use to_timestamp in this macro for redshift compatibility #9566
+// don't use to_timestamp in this macro for redshift compatibility #9566
 if len(args) == 0 {
 return "", fmt.Errorf("missing time column argument for macro %v", name)
 }
@@ -2,7 +2,7 @@
 // Licence MIT, Copyright (c) 2015 Mohsen Azimi
 /*
-* Escapes `"` charachters from string
+* Escapes `"` characters from string
 */
 function escapeString(str: string): string {
 return str.replace('"', '"');
@@ -100,7 +100,7 @@ export function cssClass(className: string): string {
 }
 /*
-* Creates a new DOM element wiht given type and class
+* Creates a new DOM element with given type and class
 * TODO: move me to helpers
 */
 export function createElement(type: string, className?: string, content?: Element | string): Element {
@@ -146,7 +146,7 @@ export class JsonExplorer {
 }
 /*
-* did we recieve a key argument?
+* did we receive a key argument?
 * This means that the formatter was called as a sub formatter of a parent formatter
 */
 private get hasKey(): boolean {
@@ -108,7 +108,7 @@ function (_, $, coreModule) {
 $input.val('');
 $button.show();
 $button.focus();
-// clicking the function dropdown menu wont
+// clicking the function dropdown menu won't
 // work if you remove class at once
 setTimeout(function() {
 elem.removeClass('open');
@@ -222,7 +222,7 @@ function (_, $, coreModule) {
 $input.val('');
 $button.show();
 $button.focus();
-// clicking the function dropdown menu wont
+// clicking the function dropdown menu won't
 // work if you remove class at once
 setTimeout(function() {
 elem.removeClass('open');
@@ -67,7 +67,7 @@ export function fetch(load): any {
 return '';
 }
-// dont reload styles loaded in the head
+// don't reload styles loaded in the head
 for (var i = 0; i < linkHrefs.length; i++) {
 if (load.address === linkHrefs[i]) {
 return '';
@@ -620,13 +620,13 @@ kbn.valueFormats.ms = function(size, decimals, scaledDecimals) {
 // Less than 1 min
 return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, ' s');
 } else if (Math.abs(size) < 3600000) {
-// Less than 1 hour, devide in minutes
+// Less than 1 hour, divide in minutes
 return kbn.toFixedScaled(size / 60000, decimals, scaledDecimals, 5, ' min');
 } else if (Math.abs(size) < 86400000) {
-// Less than one day, devide in hours
+// Less than one day, divide in hours
 return kbn.toFixedScaled(size / 3600000, decimals, scaledDecimals, 7, ' hour');
 } else if (Math.abs(size) < 31536000000) {
-// Less than one year, devide in days
+// Less than one year, divide in days
 return kbn.toFixedScaled(size / 86400000, decimals, scaledDecimals, 8, ' day');
 }
@@ -638,15 +638,15 @@ kbn.valueFormats.s = function(size, decimals, scaledDecimals) {
 return '';
 }
-// Less than 1 µs, devide in ns
+// Less than 1 µs, divide in ns
 if (Math.abs(size) < 0.000001) {
 return kbn.toFixedScaled(size * 1e9, decimals, scaledDecimals - decimals, -9, ' ns');
 }
-// Less than 1 ms, devide in µs
+// Less than 1 ms, divide in µs
 if (Math.abs(size) < 0.001) {
 return kbn.toFixedScaled(size * 1e6, decimals, scaledDecimals - decimals, -6, ' µs');
 }
-// Less than 1 second, devide in ms
+// Less than 1 second, divide in ms
 if (Math.abs(size) < 1) {
 return kbn.toFixedScaled(size * 1e3, decimals, scaledDecimals - decimals, -3, ' ms');
 }
@@ -654,16 +654,16 @@ kbn.valueFormats.s = function(size, decimals, scaledDecimals) {
 if (Math.abs(size) < 60) {
 return kbn.toFixed(size, decimals) + ' s';
 } else if (Math.abs(size) < 3600) {
-// Less than 1 hour, devide in minutes
+// Less than 1 hour, divide in minutes
 return kbn.toFixedScaled(size / 60, decimals, scaledDecimals, 1, ' min');
 } else if (Math.abs(size) < 86400) {
-// Less than one day, devide in hours
+// Less than one day, divide in hours
 return kbn.toFixedScaled(size / 3600, decimals, scaledDecimals, 4, ' hour');
 } else if (Math.abs(size) < 604800) {
-// Less than one week, devide in days
+// Less than one week, divide in days
 return kbn.toFixedScaled(size / 86400, decimals, scaledDecimals, 5, ' day');
 } else if (Math.abs(size) < 31536000) {
-// Less than one year, devide in week
+// Less than one year, divide in week
 return kbn.toFixedScaled(size / 604800, decimals, scaledDecimals, 6, ' week');
 }
@@ -124,7 +124,7 @@ function joinEvalMatches(matches, separator: string) {
 }
 function getAlertAnnotationInfo(ah) {
-// backward compatability, can be removed in grafana 5.x
+// backward compatibility, can be removed in grafana 5.x
 // old way stored evalMatches in data property directly,
 // new way stores it in evalMatches property on new data object
@@ -4,7 +4,7 @@ import { ThresholdMapper } from '../threshold_mapper';
 describe('ThresholdMapper', () => {
 describe('with greater than evaluator', () => {
-it('can mapp query conditions to thresholds', () => {
+it('can map query conditions to thresholds', () => {
 var panel: any = {
 type: 'graph',
 alert: {
@@ -25,7 +25,7 @@ describe('ThresholdMapper', () => {
 });
 describe('with outside range evaluator', () => {
-it('can mapp query conditions to thresholds', () => {
+it('can map query conditions to thresholds', () => {
 var panel: any = {
 type: 'graph',
 alert: {
@@ -49,7 +49,7 @@ });
 });
 describe('with inside range evaluator', () => {
-it('can mapp query conditions to thresholds', () => {
+it('can map query conditions to thresholds', () => {
 var panel: any = {
 type: 'graph',
 alert: {
@@ -56,7 +56,7 @@ function isStartOfRegion(event): boolean {
 export function dedupAnnotations(annotations) {
 let dedup = [];
-// Split events by annotationId property existance
+// Split events by annotationId property existence
 let events = _.partition(annotations, 'id');
 let eventsById = _.groupBy(events[0], 'id');
@@ -129,7 +129,7 @@ export class DashboardModel {
 this.meta = meta;
 }
-// cleans meta data and other non peristent state
+// cleans meta data and other non persistent state
 getSaveModelClone() {
 // make clone
 var copy: any = {};
@@ -606,7 +606,7 @@ export class DashboardModel {
 if (panel.gridPos.x + panel.gridPos.w * 2 <= GRID_COLUMN_COUNT) {
 newPanel.gridPos.x += panel.gridPos.w;
 } else {
-// add bellow
+// add below
 newPanel.gridPos.y += panel.gridPos.h;
 }
@@ -133,7 +133,7 @@ export class HistoryListCtrl {
 return this.historySrv
 .getHistoryList(this.dashboard, options)
 .then(revisions => {
-// set formated dates & default values
+// set formatted dates & default values
 for (let rev of revisions) {
 rev.createdDateString = this.formatDate(rev.created);
 rev.ageString = this.formatBasicDate(rev.created);
...@@ -56,7 +56,7 @@ describe('DashboardImportCtrl', function() { ...@@ -56,7 +56,7 @@ describe('DashboardImportCtrl', function() {
}); });
}); });
describe('when specifing grafana.com url', function() { describe('when specifying grafana.com url', function() {
beforeEach(function() { beforeEach(function() {
ctx.ctrl.gnetUrl = 'http://grafana.com/dashboards/123'; ctx.ctrl.gnetUrl = 'http://grafana.com/dashboards/123';
// setup api mock // setup api mock
...@@ -73,7 +73,7 @@ describe('DashboardImportCtrl', function() { ...@@ -73,7 +73,7 @@ describe('DashboardImportCtrl', function() {
}); });
}); });
describe('when specifing dashbord id', function() { describe('when specifying dashboard id', function() {
beforeEach(function() { beforeEach(function() {
ctx.ctrl.gnetUrl = '2342'; ctx.ctrl.gnetUrl = '2342';
// setup api mock // setup api mock
......
...@@ -44,7 +44,7 @@ describe('timeSrv', function() { ...@@ -44,7 +44,7 @@ describe('timeSrv', function() {
expect(time.raw.to).to.be('now'); expect(time.raw.to).to.be('now');
}); });
it('should handle formated dates', function() { it('should handle formatted dates', function() {
ctx.$location.search({ from: '20140410T052010', to: '20140520T031022' }); ctx.$location.search({ from: '20140410T052010', to: '20140520T031022' });
ctx.service.init(_dashboard); ctx.service.init(_dashboard);
var time = ctx.service.timeRange(true); var time = ctx.service.timeRange(true);
...@@ -52,7 +52,7 @@ describe('timeSrv', function() { ...@@ -52,7 +52,7 @@ describe('timeSrv', function() {
expect(time.to.valueOf()).to.equal(new Date('2014-05-20T03:10:22Z').getTime()); expect(time.to.valueOf()).to.equal(new Date('2014-05-20T03:10:22Z').getTime());
}); });
it('should handle formated dates without time', function() { it('should handle formatted dates without time', function() {
ctx.$location.search({ from: '20140410', to: '20140520' }); ctx.$location.search({ from: '20140410', to: '20140520' });
ctx.service.init(_dashboard); ctx.service.init(_dashboard);
var time = ctx.service.timeRange(true); var time = ctx.service.timeRange(true);
......
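The from/to values in these tests ('20140410T052010', '20140410') are compact UTC timestamps passed through the URL. Purely as an illustration of how such strings can be parsed — not necessarily how timeSrv itself does it — using moment with an explicit format:

```ts
import moment from 'moment';

// Illustrative parser for the compact timestamps used in the tests above.
function parseCompactDate(value: string) {
  return moment.utc(value, value.length > 8 ? 'YYYYMMDD[T]HHmmss' : 'YYYYMMDD');
}

// parseCompactDate('20140520T031022').toISOString() === '2014-05-20T03:10:22.000Z'
```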
...@@ -38,7 +38,7 @@ export class DashboardViewState { ...@@ -38,7 +38,7 @@ export class DashboardViewState {
}); });
// this marks changes to location during this digest cycle as not to add history item // this marks changes to location during this digest cycle as not to add history item
// dont want url changes like adding orgId to add browser history // don't want url changes like adding orgId to add browser history
$location.replace(); $location.replace();
this.update(this.getQueryStringState()); this.update(this.getQueryStringState());
} }
...@@ -196,7 +196,7 @@ export class DashboardViewState { ...@@ -196,7 +196,7 @@ export class DashboardViewState {
this.oldTimeRange = ctrl.range; this.oldTimeRange = ctrl.range;
this.fullscreenPanel = panelScope; this.fullscreenPanel = panelScope;
// Firefox doesn't return scrollTop postion properly if 'dash-scroll' is emitted after setViewMode() // Firefox doesn't return scrollTop position properly if 'dash-scroll' is emitted after setViewMode()
this.$scope.appEvent('dash-scroll', { animate: false, pos: 0 }); this.$scope.appEvent('dash-scroll', { animate: false, pos: 0 });
this.dashboard.setViewMode(ctrl.panel, true, ctrl.editMode); this.dashboard.setViewMode(ctrl.panel, true, ctrl.editMode);
this.$scope.appEvent('panel-fullscreen-enter', { panelId: ctrl.panel.id }); this.$scope.appEvent('panel-fullscreen-enter', { panelId: ctrl.panel.id });
......
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
New Organization New Organization
</h2> </h2>
<p class="playlist-description">Each organization contains their own dashboards, data sources and configuration, and cannot be shared between orgs. While users may belong to more than one, mutiple organization are most frequently used in multi-tenant deployments. </p> <p class="playlist-description">Each organization contains their own dashboards, data sources and configuration, and cannot be shared between orgs. While users may belong to more than one, multiple organization are most frequently used in multi-tenant deployments. </p>
<form> <form>
<div class="gf-form-group"> <div class="gf-form-group">
......
...@@ -73,7 +73,7 @@ class MetricsPanelCtrl extends PanelCtrl { ...@@ -73,7 +73,7 @@ class MetricsPanelCtrl extends PanelCtrl {
if (this.panel.snapshotData) { if (this.panel.snapshotData) {
this.updateTimeRange(); this.updateTimeRange();
var data = this.panel.snapshotData; var data = this.panel.snapshotData;
// backward compatability // backward compatibility
if (!_.isArray(data)) { if (!_.isArray(data)) {
data = data.data; data = data.data;
} }
......
...@@ -29,7 +29,7 @@ export class DatasourceVariable implements Variable { ...@@ -29,7 +29,7 @@ export class DatasourceVariable implements Variable {
getSaveModel() { getSaveModel() {
assignModelProperties(this.model, this, this.defaults); assignModelProperties(this.model, this, this.defaults);
// dont persist options // don't persist options
this.model.options = []; this.model.options = [];
return this.model; return this.model;
} }
......
...@@ -2,7 +2,7 @@ import { AdhocVariable } from '../adhoc_variable'; ...@@ -2,7 +2,7 @@ import { AdhocVariable } from '../adhoc_variable';
describe('AdhocVariable', function() { describe('AdhocVariable', function() {
describe('when serializing to url', function() { describe('when serializing to url', function() {
it('should set return key value and op seperated by pipe', function() { it('should set return key value and op separated by pipe', function() {
var variable = new AdhocVariable({ var variable = new AdhocVariable({
filters: [ filters: [
{ key: 'key1', operator: '=', value: 'value1' }, { key: 'key1', operator: '=', value: 'value1' },
......
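The serialization this test describes is simply key, operator, and value joined with pipes, one string per filter. A hypothetical version of that logic (names are illustrative, not the real AdhocVariable API):

```ts
interface AdhocFilter { key: string; operator: string; value: string; }

// Each filter becomes "key|operator|value", matching the behaviour the test asserts.
function filtersToUrlValues(filters: AdhocFilter[]): string[] {
  return filters.map(f => [f.key, f.operator, f.value].join('|'));
}

// e.g. [{ key: 'key1', operator: '=', value: 'value1' }] -> ['key1|=|value1']
```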
...@@ -282,7 +282,7 @@ describe('templateSrv', function() { ...@@ -282,7 +282,7 @@ describe('templateSrv', function() {
}); });
}); });
describe('can hightlight variables in string', function() { describe('can highlight variables in string', function() {
beforeEach(function() { beforeEach(function() {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'oogle' } }]); initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'oogle' } }]);
}); });
......
...@@ -204,7 +204,7 @@ export class TemplateSrv { ...@@ -204,7 +204,7 @@ export class TemplateSrv {
value = variable.current.value; value = variable.current.value;
if (this.isAllValue(value)) { if (this.isAllValue(value)) {
value = this.getAllValue(variable); value = this.getAllValue(variable);
// skip formating of custom all values // skip formatting of custom all values
if (variable.allValue) { if (variable.allValue) {
return value; return value;
} }
......
...@@ -392,7 +392,7 @@ ...@@ -392,7 +392,7 @@
"thresholds": [], "thresholds": [],
"timeFrom": null, "timeFrom": null,
"timeShift": null, "timeShift": null,
"title": "2 yaxis and axis lables", "title": "2 yaxis and axis labels",
"tooltip": { "tooltip": {
"msResolution": false, "msResolution": false,
"shared": true, "shared": true,
...@@ -894,7 +894,7 @@ ...@@ -894,7 +894,7 @@
"thresholds": [], "thresholds": [],
"timeFrom": null, "timeFrom": null,
"timeShift": null, "timeShift": null,
"title": "Legend Table Single Series Should Take Minium Height", "title": "Legend Table Single Series Should Take Minimum Height",
"tooltip": { "tooltip": {
"shared": true, "shared": true,
"sort": 0, "sort": 0,
......
...@@ -175,7 +175,7 @@ export class ElasticResponse { ...@@ -175,7 +175,7 @@ export class ElasticResponse {
} }
// This is quite complex // This is quite complex
// neeed to recurise down the nested buckets to build series // need to recurse down the nested buckets to build series
processBuckets(aggs, target, seriesList, table, props, depth) { processBuckets(aggs, target, seriesList, table, props, depth) {
var bucket, aggDef, esAgg, aggId; var bucket, aggDef, esAgg, aggId;
var maxDepth = target.bucketAggs.length - 1; var maxDepth = target.bucketAggs.length - 1;
......
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
<input type="text" class="gf-form-input max-width-10" ng-model='ctrl.annotation.tagsField' placeholder="tags"></input> <input type="text" class="gf-form-input max-width-10" ng-model='ctrl.annotation.tagsField' placeholder="tags"></input>
</div> </div>
<div class="gf-form" ng-show="ctrl.annotation.titleField"> <div class="gf-form" ng-show="ctrl.annotation.titleField">
<span class="gf-form-label">Title <em class="muted">(depricated)</em></span> <span class="gf-form-label">Title <em class="muted">(deprecated)</em></span>
<input type="text" class="gf-form-input max-width-16" ng-model='ctrl.annotation.titleField' placeholder="desc"></input> <input type="text" class="gf-form-input max-width-16" ng-model='ctrl.annotation.titleField' placeholder="desc"></input>
</div> </div>
</div> </div>
......
...@@ -53,7 +53,7 @@ describe('ElasticDatasource', function() { ...@@ -53,7 +53,7 @@ describe('ElasticDatasource', function() {
}); });
}); });
describe('When issueing metric query with interval pattern', function() { describe('When issuing metric query with interval pattern', function() {
var requestOptions, parts, header; var requestOptions, parts, header;
beforeEach(function() { beforeEach(function() {
...@@ -98,7 +98,7 @@ describe('ElasticDatasource', function() { ...@@ -98,7 +98,7 @@ describe('ElasticDatasource', function() {
}); });
}); });
describe('When issueing document query', function() { describe('When issuing document query', function() {
var requestOptions, parts, header; var requestOptions, parts, header;
beforeEach(function() { beforeEach(function() {
......
...@@ -68,7 +68,7 @@ export function graphiteAddFunc($compile) { ...@@ -68,7 +68,7 @@ export function graphiteAddFunc($compile) {
}); });
$input.blur(function() { $input.blur(function() {
// clicking the function dropdown menu wont // clicking the function dropdown menu won't
// work if you remove class at once // work if you remove class at once
setTimeout(function() { setTimeout(function() {
$input.val(''); $input.val('');
......
...@@ -97,7 +97,7 @@ describe('GraphiteQueryCtrl', function() { ...@@ -97,7 +97,7 @@ describe('GraphiteQueryCtrl', function() {
}); });
}); });
describe('when initalizing target without metric expression and only function', function() { describe('when initializing target without metric expression and only function', function() {
beforeEach(function() { beforeEach(function() {
ctx.ctrl.target.target = 'asPercent(#A, #B)'; ctx.ctrl.target.target = 'asPercent(#A, #B)';
ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([]));
...@@ -130,7 +130,7 @@ describe('GraphiteQueryCtrl', function() { ...@@ -130,7 +130,7 @@ describe('GraphiteQueryCtrl', function() {
}); });
}); });
describe('when initalizing target without metric expression and function with series-ref', function() { describe('when initializing target without metric expression and function with series-ref', function() {
beforeEach(function() { beforeEach(function() {
ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)'; ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)';
ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([]));
...@@ -146,7 +146,7 @@ describe('GraphiteQueryCtrl', function() { ...@@ -146,7 +146,7 @@ describe('GraphiteQueryCtrl', function() {
}); });
}); });
describe('when getting altSegments and metricFindQuery retuns empty array', function() { describe('when getting altSegments and metricFindQuery returns empty array', function() {
beforeEach(function() { beforeEach(function() {
ctx.ctrl.target.target = 'test.count'; ctx.ctrl.target.target = 'test.count';
ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([]));
......
...@@ -54,7 +54,7 @@ export default class InfluxDatasource { ...@@ -54,7 +54,7 @@ export default class InfluxDatasource {
queryTargets.push(target); queryTargets.push(target);
// backward compatability // backward compatibility
scopedVars.interval = scopedVars.__interval; scopedVars.interval = scopedVars.__interval;
queryModel = new InfluxQuery(target, this.templateSrv, scopedVars); queryModel = new InfluxQuery(target, this.templateSrv, scopedVars);
......
...@@ -230,7 +230,7 @@ export default class InfluxQuery { ...@@ -230,7 +230,7 @@ export default class InfluxQuery {
for (i = 0; i < this.groupByParts.length; i++) { for (i = 0; i < this.groupByParts.length; i++) {
var part = this.groupByParts[i]; var part = this.groupByParts[i];
if (i > 0) { if (i > 0) {
// for some reason fill has no seperator // for some reason fill has no separator
groupBySection += part.def.type === 'fill' ? ' ' : ', '; groupBySection += part.def.type === 'fill' ? ' ' : ', ';
} }
groupBySection += part.render(''); groupBySection += part.render('');
......
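The loop above joins GROUP BY parts with ', ' except when the next part is fill(), which is appended with only a space. A standalone sketch of that separator rule, using illustrative part objects:

```ts
interface GroupByPart { type: string; rendered: string; }

// Comma before every part except fill(), which only gets a space.
function renderGroupBy(parts: GroupByPart[]): string {
  let section = '';
  for (let i = 0; i < parts.length; i++) {
    if (i > 0) {
      section += parts[i].type === 'fill' ? ' ' : ', ';
    }
    section += parts[i].rendered;
  }
  return section ? ' GROUP BY ' + section : '';
}

// renderGroupBy([
//   { type: 'time', rendered: 'time($interval)' },
//   { type: 'tag', rendered: '"host"' },
//   { type: 'fill', rendered: 'fill(null)' },
// ]) === ' GROUP BY time($interval), "host" fill(null)'
```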
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
<input type="text" class="gf-form-input max-width-10" ng-model='ctrl.annotation.tagsColumn' placeholder=""></input> <input type="text" class="gf-form-input max-width-10" ng-model='ctrl.annotation.tagsColumn' placeholder=""></input>
</div> </div>
<div class="gf-form" ng-show="ctrl.annotation.titleColumn"> <div class="gf-form" ng-show="ctrl.annotation.titleColumn">
<span class="gf-form-label width-4">Title <em class="muted">(depricated)</em></span> <span class="gf-form-label width-4">Title <em class="muted">(deprecated)</em></span>
<input type="text" class="gf-form-input max-width-10" ng-model='ctrl.annotation.titleColumn' placeholder=""></input> <input type="text" class="gf-form-input max-width-10" ng-model='ctrl.annotation.titleColumn' placeholder=""></input>
</div> </div>
</div> </div>
......
...@@ -97,7 +97,7 @@ describe('InfluxQueryBuilder', function() { ...@@ -97,7 +97,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG VALUES FROM "one_week"."cpu" WITH KEY = "app" WHERE "host" = \'server1\''); expect(query).toBe('SHOW TAG VALUES FROM "one_week"."cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
}); });
it('should not includ policy when policy is default', function() { it('should not include policy when policy is default', function() {
var builder = new InfluxQueryBuilder({ var builder = new InfluxQueryBuilder({
measurement: 'cpu', measurement: 'cpu',
policy: 'default', policy: 'default',
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
<div class="gf-form" ng-show="ctrl.showHelp"> <div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6> <pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the <b>time</b> column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row, the <b>time</b> column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time</b> for the annotation event time. Use epoch time or any native date data type. - column with alias: <b>time</b> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text. - column with alias: <b>text</b> for the annotation text.
......
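The help text above describes the expected result shape: a mandatory time column plus a text column, identified by their aliases. A hypothetical query following those aliases — the table and column names are invented, and the $__timeFilter macro is assumed to be available in this datasource:

```ts
// Invented example table/columns; only the aliases (time, text) follow the help text above.
const exampleAnnotationQuery = `
  SELECT
    created_at AS time,
    message    AS text
  FROM deployments
  WHERE $__timeFilter(created_at)
  ORDER BY created_at
`;
```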
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
<div class="gf-form" ng-show="ctrl.showHelp"> <div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6> <pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the <i>time</i> or <i>time_sec</i> column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row, the <i>time</i> or <i>time_sec</i> column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time</b> or <i>time_sec</i> for the annotation event time. Use epoch time or any native date data type. - column with alias: <b>time</b> or <i>time_sec</i> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text - column with alias: <b>text</b> for the annotation text
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg width="432.071pt" height="445.383pt" viewBox="0 0 432.071 445.383" xml:space="preserve" xmlns="http://www.w3.org/2000/svg"> <svg width="432.071pt" height="445.383pt" viewBox="0 0 432.071 445.383" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
<g id="orginal" style="fill-rule:nonzero;clip-rule:nonzero;stroke:#000000;stroke-miterlimit:4;"> <g id="original" style="fill-rule:nonzero;clip-rule:nonzero;stroke:#000000;stroke-miterlimit:4;">
</g> </g>
<g id="Layer_x0020_3" style="fill-rule:nonzero;clip-rule:nonzero;fill:none;stroke:#FFFFFF;stroke-width:12.4651;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;"> <g id="Layer_x0020_3" style="fill-rule:nonzero;clip-rule:nonzero;fill:none;stroke:#FFFFFF;stroke-width:12.4651;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;">
<path style="fill:#000000;stroke:#000000;stroke-width:37.3953;stroke-linecap:butt;stroke-linejoin:miter;" d="M323.205,324.227c2.833-23.601,1.984-27.062,19.563-23.239l4.463,0.392c13.517,0.615,31.199-2.174,41.587-7c22.362-10.376,35.622-27.7,13.572-23.148c-50.297,10.376-53.755-6.655-53.755-6.655c53.111-78.803,75.313-178.836,56.149-203.322 C352.514-5.534,262.036,26.049,260.522,26.869l-0.482,0.089c-9.938-2.062-21.06-3.294-33.554-3.496c-22.761-0.374-40.032,5.967-53.133,15.904c0,0-161.408-66.498-153.899,83.628c1.597,31.936,45.777,241.655,98.47,178.31 c19.259-23.163,37.871-42.748,37.871-42.748c9.242,6.14,20.307,9.272,31.912,8.147l0.897-0.765c-0.281,2.876-0.157,5.689,0.359,9.019c-13.572,15.167-9.584,17.83-36.723,23.416c-27.457,5.659-11.326,15.734-0.797,18.367c12.768,3.193,42.305,7.716,62.268-20.224 l-0.795,3.188c5.325,4.26,4.965,30.619,5.72,49.452c0.756,18.834,2.017,36.409,5.856,46.771c3.839,10.36,8.369,37.05,44.036,29.406c29.809-6.388,52.6-15.582,54.677-101.107"/> <path style="fill:#000000;stroke:#000000;stroke-width:37.3953;stroke-linecap:butt;stroke-linejoin:miter;" d="M323.205,324.227c2.833-23.601,1.984-27.062,19.563-23.239l4.463,0.392c13.517,0.615,31.199-2.174,41.587-7c22.362-10.376,35.622-27.7,13.572-23.148c-50.297,10.376-53.755-6.655-53.755-6.655c53.111-78.803,75.313-178.836,56.149-203.322 C352.514-5.534,262.036,26.049,260.522,26.869l-0.482,0.089c-9.938-2.062-21.06-3.294-33.554-3.496c-22.761-0.374-40.032,5.967-53.133,15.904c0,0-161.408-66.498-153.899,83.628c1.597,31.936,45.777,241.655,98.47,178.31 c19.259-23.163,37.871-42.748,37.871-42.748c9.242,6.14,20.307,9.272,31.912,8.147l0.897-0.765c-0.281,2.876-0.157,5.689,0.359,9.019c-13.572,15.167-9.584,17.83-36.723,23.416c-27.457,5.659-11.326,15.734-0.797,18.367c12.768,3.193,42.305,7.716,62.268-20.224 l-0.795,3.188c5.325,4.26,4.965,30.619,5.72,49.452c0.756,18.834,2.017,36.409,5.856,46.771c3.839,10.36,8.369,37.05,44.036,29.406c29.809-6.388,52.6-15.582,54.677-101.107"/>
...@@ -19,4 +19,4 @@ ...@@ -19,4 +19,4 @@
<path d="M350.676,123.432c0.863,15.994-3.445,26.888-3.988,43.914c-0.804,24.748,11.799,53.074-7.191,81.435"/> <path d="M350.676,123.432c0.863,15.994-3.445,26.888-3.988,43.914c-0.804,24.748,11.799,53.074-7.191,81.435"/>
<path style="stroke-width:3;" d="M0,60.232"/> <path style="stroke-width:3;" d="M0,60.232"/>
</g> </g>
</svg> </svg>
\ No newline at end of file
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
<div class="gf-form" ng-show="ctrl.showHelp"> <div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6> <pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time</b> for the annotation event time. Use epoch time or any native date data type. - column with alias: <b>time</b> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text - column with alias: <b>text</b> for the annotation text
......
...@@ -52,14 +52,14 @@ function ($, _, angular, Drop) { ...@@ -52,14 +52,14 @@ function ($, _, angular, Drop) {
var eventManager = plot.getOptions().events.manager; var eventManager = plot.getOptions().events.manager;
if (eventManager.editorOpen) { if (eventManager.editorOpen) {
// update marker element to attach to (needed in case of legend on the right // update marker element to attach to (needed in case of legend on the right
// when there is a double render pass and the inital marker element is removed) // when there is a double render pass and the initial marker element is removed)
markerElementToAttachTo = element; markerElementToAttachTo = element;
return; return;
} }
// mark as opened // mark as opened
eventManager.editorOpened(); eventManager.editorOpened();
// set marker elment to attache to // set marker element to attach to
markerElementToAttachTo = element; markerElementToAttachTo = element;
// wait for element to be attached and positioned // wait for element to be attached and positioned
......
...@@ -129,7 +129,7 @@ module.directive('graphLegend', function(popoverSrv, $timeout) { ...@@ -129,7 +129,7 @@ module.directive('graphLegend', function(popoverSrv, $timeout) {
elem.empty(); elem.empty();
// Set min-width if side style and there is a value, otherwise remove the CSS propery // Set min-width if side style and there is a value, otherwise remove the CSS property
// Set width so it works with IE11 // Set width so it works with IE11
var width: any = panel.legend.rightSide && panel.legend.sideWidth ? panel.legend.sideWidth + 'px' : ''; var width: any = panel.legend.rightSide && panel.legend.sideWidth ? panel.legend.sideWidth + 'px' : '';
var ieWidth: any = panel.legend.rightSide && panel.legend.sideWidth ? panel.legend.sideWidth - 1 + 'px' : ''; var ieWidth: any = panel.legend.rightSide && panel.legend.sideWidth ? panel.legend.sideWidth - 1 + 'px' : '';
......
...@@ -31,7 +31,7 @@ export class SeriesOverridesCtrl { ...@@ -31,7 +31,7 @@ export class SeriesOverridesCtrl {
$scope.override[item.propertyName] = subItem.value; $scope.override[item.propertyName] = subItem.value;
// automatically disable lines for this series and the fill bellow to series // automatically disable lines for this series and the fill below to series
// can be removed by the user if they still want lines // can be removed by the user if they still want lines
if (item.propertyName === 'fillBelowTo') { if (item.propertyName === 'fillBelowTo') {
$scope.override['lines'] = false; $scope.override['lines'] = false;
......
...@@ -221,7 +221,7 @@ describe('when transforming time series table', () => { ...@@ -221,7 +221,7 @@ describe('when transforming time series table', () => {
expect(table.rows[0][2]).toBe(42); expect(table.rows[0][2]).toBe(42);
}); });
it('should return 2 rows for a mulitple queries with same label values plus one extra row', () => { it('should return 2 rows for a multiple queries with same label values plus one extra row', () => {
table = transformDataToTable(multipleQueriesDataSameLabels, panel); table = transformDataToTable(multipleQueriesDataSameLabels, panel);
expect(table.rows.length).toBe(2); expect(table.rows.length).toBe(2);
expect(table.rows[0][0]).toBe(time); expect(table.rows[0][0]).toBe(time);
...@@ -238,7 +238,7 @@ describe('when transforming time series table', () => { ...@@ -238,7 +238,7 @@ describe('when transforming time series table', () => {
expect(table.rows[1][5]).toBe(7); expect(table.rows[1][5]).toBe(7);
}); });
it('should return 2 rows for mulitple queries with different label values', () => { it('should return 2 rows for multiple queries with different label values', () => {
table = transformDataToTable(multipleQueriesDataDifferentLabels, panel); table = transformDataToTable(multipleQueriesDataDifferentLabels, panel);
expect(table.rows.length).toBe(2); expect(table.rows.length).toBe(2);
expect(table.columns.length).toBe(6); expect(table.columns.length).toBe(6);
......
...@@ -243,7 +243,7 @@ transformers['table'] = { ...@@ -243,7 +243,7 @@ transformers['table'] = {
row[columnIndex] = matchedRow[columnIndex]; row[columnIndex] = matchedRow[columnIndex];
} }
} }
// Dont visit this row again // Don't visit this row again
mergedRows[match] = matchedRow; mergedRows[match] = matchedRow;
// Keep looking for more rows to merge // Keep looking for more rows to merge
offset = match + 1; offset = match + 1;
......
...@@ -22,7 +22,7 @@ var dashboard; ...@@ -22,7 +22,7 @@ var dashboard;
// All url parameters are available via the ARGS object // All url parameters are available via the ARGS object
var ARGS; var ARGS;
// Intialize a skeleton with nothing but a rows array and service object // Initialize a skeleton with nothing but a rows array and service object
dashboard = { dashboard = {
rows : [], rows : [],
schemaVersion: 13, schemaVersion: 13,
......
...@@ -12,6 +12,6 @@ if [[ -e ~/docker/centos.tar ]]; then ...@@ -12,6 +12,6 @@ if [[ -e ~/docker/centos.tar ]]; then
else else
docker build --rm=false --tag "grafana/buildcontainer" ./scripts/build/ docker build --rm=false --tag "grafana/buildcontainer" ./scripts/build/
# save docker container so we dont have to recreate it next run # save docker container so we don't have to recreate it next run
docker save grafana/buildcontainer > ~/docker/centos.tar; docker save grafana/buildcontainer > ~/docker/centos.tar;
fi fi
...@@ -125,7 +125,7 @@ func postRequest(url string, obj interface{}, desc string) { ...@@ -125,7 +125,7 @@ func postRequest(url string, obj interface{}, desc string) {
} else { } else {
log.Printf("Action: %s \t Failed - Status: %v", desc, res.Status) log.Printf("Action: %s \t Failed - Status: %v", desc, res.Status)
log.Printf("Resp: %s", body) log.Printf("Resp: %s", body)
log.Fatalf("Quiting") log.Fatalf("Quitting")
} }
} }
} }
......