Commit 26577ab6 by Torkel Ödegaard

Merge branch 'master' into graphite-series-by-tags

parents 3ae0cfa1 9ea5af57
......@@ -19,6 +19,7 @@
* **Alert panel**: Adds placeholder text when no alerts are within the time range [#9624](https://github.com/grafana/grafana/issues/9624), thx [@straend](https://github.com/straend)
* **Mysql**: Enable MaxOpenConns and MaxIdleConns depending on how the connection string is configured. [#9784](https://github.com/grafana/grafana/issues/9784), thx [@dfredell](https://github.com/dfredell)
* **Cloudwatch**: Fixes broken query inspector for cloudwatch [#9661](https://github.com/grafana/grafana/issues/9661), thx [@mtanda](https://github.com/mtanda)
* **Dashboard**: Make it possible to star dashboards from search and dashboard list panel [#1871](https://github.com/grafana/grafana/issues/1871)
## Tech
* **RabbitMq**: Remove support for publishing events to RabbitMQ [#9645](https://github.com/grafana/grafana/issues/9645)
......@@ -28,8 +29,12 @@
* **Singlestat**: suppress error when result contains no datapoints [#9636](https://github.com/grafana/grafana/issues/9636), thx [@utkarshcmu](https://github.com/utkarshcmu)
* **Postgres/MySQL**: Control quoting in SQL-queries when using template variables [#9030](https://github.com/grafana/grafana/issues/9030), thanks [@svenklemm](https://github.com/svenklemm)
# 4.6.2 (unreleased)
# 4.6.2 (2017-11-16)
## Important
* **Prometheus**: Fixes bug with new Prometheus alerts in Grafana. Make sure to download this version if you're using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777)
## Fixes
* **Color picker**: Bug after using textbox input field to change/paste color string [#9769](https://github.com/grafana/grafana/issues/9769)
* **Cloudwatch**: Fix for cloudwatch templating query `ec2_instance_attribute` [#9667](https://github.com/grafana/grafana/issues/9667), thanks [@mtanda](https://github.com/mtanda)
* **Heatmap**: Fixed tooltip for "time series buckets" mode [#9332](https://github.com/grafana/grafana/issues/9332)
......
# list of datasources that should be deleted from the database
delete_datasources:
# - name: Graphite
# org_id: 1
# list of datasources to insert/update depending on
# what's available in the database
datasources:
# # <string, required> name of the datasource. Required
# - name: Graphite
# # <string, required> datasource type. Required
# type: graphite
# # <string, required> access mode. direct or proxy. Required
# access: proxy
# # <int> org id. will default to org_id 1 if not specified
# org_id: 1
# # <string> url
# url: http://localhost:8080
# # <string> database password, if used
# password:
# # <string> database user, if used
# user:
# # <string> database name, if used
# database:
# # <bool> enable/disable basic auth
# basic_auth:
# # <string> basic auth username
# basic_auth_user:
# # <string> basic auth password
# basic_auth_password:
# # <bool> enable/disable with credentials headers
# with_credentials:
# # <bool> mark as default datasource. Max one per org
# is_default:
# # <map> fields that will be converted to json and stored in json_data
# json_data:
# graphiteVersion: "1.1"
# tlsAuth: true
# tlsAuthWithCACert: true
# # <map> object of data that will be encrypted.
# secure_json_data:
# tlsCACert: "..."
# tlsClientCert: "..."
# tlsClientKey: "..."
# version: 1
# # <bool> allow users to edit datasources from the UI.
# editable: false
......@@ -12,17 +12,17 @@ instance_name = ${HOSTNAME}
#################################### Paths ###############################
[paths]
# Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used)
#
data = data
#
# Directory where grafana can store logs
#
logs = data/log
#
# Directory where grafana will automatically scan and look for plugins
#
plugins = data/plugins
# Config files containing datasources that will be configured at startup
datasources = conf/datasources
#################################### Server ##############################
[server]
# Protocol (http, https, socket)
......@@ -432,7 +432,7 @@ enabled = true
execute_alerts = true
#################################### Internal Grafana Metrics ############
# Metrics available at HTTP API Url /api/metrics
# Metrics available at HTTP API Url /metrics
[metrics]
enabled = true
interval_seconds = 10
......
......@@ -12,18 +12,17 @@
#################################### Paths ####################################
[paths]
# Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used)
#
;data = /var/lib/grafana
#
# Directory where grafana can store logs
#
;logs = /var/log/grafana
#
# Directory where grafana will automatically scan and look for plugins
#
;plugins = /var/lib/grafana/plugins
#
# Config files containing datasources that will be configured at startup
;datasources = conf/datasources
#################################### Server ####################################
[server]
# Protocol (http, https, socket)
......@@ -375,7 +374,7 @@ log_queries =
;execute_alerts = true
#################################### Internal Grafana Metrics ##########################
# Metrics available at HTTP API Url /api/metrics
# Metrics available at HTTP API Url /metrics
[metrics]
# Disable / Enable internal metrics
;enabled = true
......
+++
title = "Provisioning"
description = ""
keywords = ["grafana", "provisioning"]
type = "docs"
[menu.docs]
parent = "admin"
weight = 8
+++
# Provisioning Grafana
## Config file
Check out the [configuration](/installation/configuration) page for more information about what you can configure in `grafana.ini`.
### Config file locations
- Default configuration from `$WORKING_DIR/conf/defaults.ini`
- Custom configuration from `$WORKING_DIR/conf/custom.ini`
- The custom configuration file path can be overridden using the `--config` parameter
> **Note.** If you have installed Grafana using the `deb` or `rpm`
> packages, then your configuration file is located at
> `/etc/grafana/grafana.ini`. This path is specified in the Grafana
> init.d script using the `--config` file parameter.
### Using environment variables
All options in the configuration file (listed below) can be overridden
with environment variables using the syntax:
```bash
GF_<SectionName>_<KeyName>
```
Where the section name is the text within the brackets. Everything
should be uppercase, and `.` should be replaced by `_`. For example, given these configuration settings:
```bash
# default section
instance_name = ${HOSTNAME}
[security]
admin_user = admin
[auth.google]
client_secret = 0ldS3cretKey
```
Then you can override them using:
```bash
export GF_DEFAULT_INSTANCE_NAME=my-instance
export GF_SECURITY_ADMIN_USER=owner
export GF_AUTH_GOOGLE_CLIENT_SECRET=newS3cretKey
```
<hr />
## Configuration management tools
Currently we do not provide any scripts/manifests for configuring Grafana. Rather than spending time learning and creating scripts/manifests for each tool, we think our time is better spent making Grafana easier to provision. Therefore, we rely heavily on the expertise of the community.
Tool | Project
-----|------------
Puppet | [https://forge.puppet.com/puppet/grafana](https://forge.puppet.com/puppet/grafana)
Ansible | [https://github.com/picotrading/ansible-grafana](https://github.com/picotrading/ansible-grafana)
Chef | [https://github.com/JonathanTron/chef-grafana](https://github.com/JonathanTron/chef-grafana)
Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://github.com/salt-formulas/salt-formula-grafana)
## Datasources
> This feature is available from v4.7
It's possible to manage datasources in Grafana by adding one or more YAML config files in the [`conf/datasources`](/installation/configuration/#datasources) directory. Each config file can contain a list of `datasources` that will be added or updated during startup. If a datasource already exists, Grafana will update it to match the configuration file. A config file can also contain a list of datasources that should be deleted, called `delete_datasources`. Grafana will delete the datasources listed in `delete_datasources` before inserting or updating those in the `datasources` list.
### Running multiple Grafana instances
If you are running multiple instances of Grafana, you might run into problems if they have different versions of the datasource.yaml configuration file. The best way to solve this is to add a version number to each datasource in the configuration and increase it when you update the config. Grafana will only update datasources with the same or lower version number than the one specified in the config. That way, old configs cannot overwrite newer ones if the instances restart at the same time.
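As a minimal sketch (the datasource name and URL are hypothetical), after editing an entry you would bump its `version`:

```yaml
datasources:
  # version was 1; increased to 2 after changing the url, so instances
  # still running the old config cannot overwrite this datasource
  - name: Graphite
    type: graphite
    access: proxy
    url: http://graphite.example.com:8080
    version: 2
```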
### Example datasource config file
```yaml
# list of datasources that should be deleted from the database
delete_datasources:
- name: Graphite
org_id: 1
# list of datasources to insert/update depending on
# what's available in the database
datasources:
# <string, required> name of the datasource. Required
- name: Graphite
# <string, required> datasource type. Required
type: graphite
# <string, required> access mode. direct or proxy. Required
access: proxy
# <int> org id. will default to org_id 1 if not specified
org_id: 1
# <string> url
url: http://localhost:8080
# <string> database password, if used
password:
# <string> database user, if used
user:
# <string> database name, if used
database:
# <bool> enable/disable basic auth
basic_auth:
# <string> basic auth username
basic_auth_user:
# <string> basic auth password
basic_auth_password:
# <bool> enable/disable with credentials headers
with_credentials:
# <bool> mark as default datasource. Max one per org
is_default:
# <map> fields that will be converted to json and stored in json_data
json_data:
graphiteVersion: "1.1"
tlsAuth: true
tlsAuthWithCACert: true
# <map> object of data that will be encrypted.
secure_json_data:
tlsCACert: "..."
tlsClientCert: "..."
tlsClientKey: "..."
version: 1
# <bool> allow users to edit datasources from the UI.
editable: false
```
#### Json data
Since not all datasources have the same configuration settings, only the most common ones are exposed as fields. The rest should be stored as a JSON blob in the `json_data` field. Here are the most common settings that the core datasources use.
| Name | Type | Datasource | Description |
| ----| ---- | ---- | --- |
| tlsAuth | boolean | *All* | Enable TLS authentication using client cert configured in secure json data |
| tlsAuthWithCACert | boolean | *All* | Enable TLS authentication using CA cert |
| graphiteVersion | string | Graphite | Graphite version |
| timeInterval | string | Elastic, Influxdb & Prometheus | Lowest interval/step value that should be used for this data source |
| esVersion | string | Elastic | Elasticsearch version |
| timeField | string | Elastic | Which field should be used as the timestamp |
| interval | string | Elastic | Index date time format |
| authType | string | Cloudwatch | Auth provider. keys/credentials/arn |
| assumeRoleArn | string | Cloudwatch | ARN of Assume Role |
| defaultRegion | string | Cloudwatch | AWS region |
| customMetricsNamespaces | string | Cloudwatch | Namespaces of Custom Metrics |
| tsdbVersion | string | OpenTsdb | Version |
| tsdbResolution | string | OpenTsdb | Resolution |
| sslmode | string | Postgres | SSL mode. 'disable', 'require', 'verify-ca' or 'verify-full' |
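The settings in the table above end up as keys in the `json_data` map. For example (a hypothetical mix of Cloudwatch and Elasticsearch settings, shown only to illustrate the format):

```yaml
json_data:
  authType: keys
  defaultRegion: us-west-2
  timeField: "@timestamp"
```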
#### Secure Json data
{"authType":"keys","defaultRegion":"us-west-2","timeField":"@timestamp"}
Secure JSON data is a map of settings that will be encrypted with the [secret key](/installation/configuration/#secret-key) from the Grafana config. The purpose of this is only to hide content from the users of the application. It should be used for storing TLS certificates and passwords that Grafana will append to requests on the server side. All of these settings are optional.
| Name | Type | Datasource | Description |
| ----| ---- | ---- | --- |
| tlsCACert | string | *All* | CA cert for outgoing requests |
| tlsClientCert | string | *All* | TLS client cert for outgoing requests |
| tlsClientKey | string | *All* | TLS client key for outgoing requests |
| password | string | Postgres | password |
| user | string | Postgres | user |
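A minimal sketch of storing these for a Postgres datasource in a provisioning config (values are placeholders):

```yaml
secure_json_data:
  # encrypted with the Grafana secret key before being stored
  user: grafana
  password: s3cret
```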
......@@ -34,6 +34,7 @@ Name | Description
*Basic Auth* | Enable basic authentication to the Prometheus data source.
*User* | Name of your Prometheus user
*Password* | Database user's password
*Scrape interval* | This will be used as a lower limit for the Prometheus step query parameter. Default value is 15s.
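When provisioning a Prometheus datasource, this setting corresponds to the `timeInterval` key in `json_data` (listed in the provisioning docs); a sketch under that assumption:

```yaml
datasources:
  - name: Prometheus
    type: prometheus
    access: proxy
    url: http://localhost:9090
    json_data:
      # assumed json_data key for the scrape interval setting
      timeInterval: "15s"
```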
## Query editor
......
......@@ -87,6 +87,14 @@ command line in the init.d script or the systemd service file. It can
be overridden in the configuration file or in the default environment variable
file.
### plugins
Directory where grafana will automatically scan and look for plugins
### datasources
Config files containing datasources that will be configured at startup
## [server]
### http_addr
......@@ -676,7 +684,7 @@ Ex `filters = sqlstore:debug`
## [metrics]
### enabled
Enable metrics reporting. defaults true. Available via HTTP API `/api/metrics`.
Enable metrics reporting. defaults true. Available via HTTP API `/metrics`.
### interval_seconds
......
......@@ -15,7 +15,7 @@ weight = 1
Description | Download
------------ | -------------
Stable for Debian-based Linux | [grafana_4.6.1_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.6.1_amd64.deb)
Stable for Debian-based Linux | [grafana_4.6.2_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.6.2_amd64.deb)
<!-- Beta for Debian-based Linux | [grafana_4.5.0-beta1_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.5.0-beta1_amd64.deb) -->
......@@ -26,9 +26,9 @@ installation.
```bash
wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.6.1_amd64.deb
wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.6.2_amd64.deb
sudo apt-get install -y adduser libfontconfig
sudo dpkg -i grafana_4.6.1_amd64.deb
sudo dpkg -i grafana_4.6.2_amd64.deb
```
<!--
......
......@@ -15,7 +15,7 @@ weight = 2
Description | Download
------------ | -------------
Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [4.6.1 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.1-1.x86_64.rpm)
Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [4.6.2 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.2-1.x86_64.rpm)
<!-- Latest Beta for CentOS / Fedora / OpenSuse / Redhat Linux | [4.5.0-beta1 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.5.0-beta1.x86_64.rpm) -->
......@@ -27,7 +27,7 @@ installation.
You can install Grafana using Yum directly.
```bash
$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.1-1.x86_64.rpm
$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.2-1.x86_64.rpm
```
Or install manually using `rpm`.
......@@ -35,15 +35,15 @@ Or install manually using `rpm`.
#### On CentOS / Fedora / Redhat:
```bash
$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.1-1.x86_64.rpm
$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.2-1.x86_64.rpm
$ sudo yum install initscripts fontconfig
$ sudo rpm -Uvh grafana-4.6.1-1.x86_64.rpm
$ sudo rpm -Uvh grafana-4.6.2-1.x86_64.rpm
```
#### On OpenSuse:
```bash
$ sudo rpm -i --nodeps grafana-4.6.1-1.x86_64.rpm
$ sudo rpm -i --nodeps grafana-4.6.2-1.x86_64.rpm
```
## Install via YUM Repository
......
......@@ -13,7 +13,7 @@ weight = 3
Description | Download
------------ | -------------
Latest stable package for Windows | [grafana.4.6.1.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.1.windows-x64.zip)
Latest stable package for Windows | [grafana.4.6.2.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.2.windows-x64.zip)
Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
installation.
......
#! /usr/bin/env bash
version=4.6.1
version=4.6.2
wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_${version}_amd64.deb
......
......@@ -33,6 +33,7 @@ func GetDataSources(c *middleware.Context) Response {
BasicAuth: ds.BasicAuth,
IsDefault: ds.IsDefault,
JsonData: ds.JsonData,
ReadOnly: ds.ReadOnly,
}
if plugin, exists := plugins.DataSources[ds.Type]; exists {
......@@ -76,9 +77,20 @@ func DeleteDataSourceById(c *middleware.Context) {
return
}
ds, err := getRawDataSourceById(id, c.OrgId)
if err != nil {
c.JsonApiErr(400, "Failed to delete datasource", nil)
return
}
if ds.ReadOnly {
c.JsonApiErr(403, "Cannot delete read-only data source", nil)
return
}
cmd := &m.DeleteDataSourceByIdCommand{Id: id, OrgId: c.OrgId}
err := bus.Dispatch(cmd)
err = bus.Dispatch(cmd)
if err != nil {
c.JsonApiErr(500, "Failed to delete datasource", err)
return
......@@ -95,8 +107,18 @@ func DeleteDataSourceByName(c *middleware.Context) {
return
}
cmd := &m.DeleteDataSourceByNameCommand{Name: name, OrgId: c.OrgId}
getCmd := &m.GetDataSourceByNameQuery{Name: name, OrgId: c.OrgId}
if err := bus.Dispatch(getCmd); err != nil {
c.JsonApiErr(500, "Failed to delete datasource", err)
return
}
if getCmd.Result.ReadOnly {
c.JsonApiErr(403, "Cannot delete read-only data source", nil)
return
}
cmd := &m.DeleteDataSourceByNameCommand{Name: name, OrgId: c.OrgId}
err := bus.Dispatch(cmd)
if err != nil {
c.JsonApiErr(500, "Failed to delete datasource", err)
......@@ -160,11 +182,14 @@ func fillWithSecureJsonData(cmd *m.UpdateDataSourceCommand) error {
}
ds, err := getRawDataSourceById(cmd.Id, cmd.OrgId)
if err != nil {
return err
}
if ds.ReadOnly {
return m.ErrDatasourceIsReadOnly
}
secureJsonData := ds.SecureJsonData.Decrypt()
for k, v := range secureJsonData {
......@@ -201,6 +226,7 @@ func GetDataSourceByName(c *middleware.Context) Response {
}
dtos := convertModelToDtos(query.Result)
dtos.ReadOnly = true
return Json(200, &dtos)
}
......@@ -242,6 +268,7 @@ func convertModelToDtos(ds *m.DataSource) dtos.DataSource {
JsonData: ds.JsonData,
SecureJsonFields: map[string]bool{},
Version: ds.Version,
ReadOnly: ds.ReadOnly,
}
for k, v := range ds.SecureJsonData {
......
......@@ -26,6 +26,7 @@ type DataSource struct {
JsonData *simplejson.Json `json:"jsonData,omitempty"`
SecureJsonFields map[string]bool `json:"secureJsonFields"`
Version int `json:"version"`
ReadOnly bool `json:"readOnly"`
}
type DataSourceListItemDTO struct {
......@@ -42,6 +43,7 @@ type DataSourceListItemDTO struct {
BasicAuth bool `json:"basicAuth"`
IsDefault bool `json:"isDefault"`
JsonData *simplejson.Json `json:"jsonData,omitempty"`
ReadOnly bool `json:"readOnly"`
}
type DataSourceList []DataSourceListItemDTO
......
......@@ -146,12 +146,13 @@ func (hs *HttpServer) newMacaron() *macaron.Macaron {
m := macaron.New()
m.Use(middleware.Logger())
m.Use(middleware.Recovery())
if setting.EnableGzip {
m.Use(middleware.Gziper())
}
m.Use(middleware.Recovery())
for _, route := range plugins.StaticRoutes {
pluginRoute := path.Join("/public/plugins/", route.PluginId)
hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory)
......
......@@ -16,7 +16,6 @@ import (
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
......@@ -88,11 +87,6 @@ func main() {
server.Start()
}
func initSql() {
sqlstore.NewEngine()
sqlstore.EnsureAdminUser()
}
func listenToSystemSignals(server models.GrafanaServer) {
signalChan := make(chan os.Signal, 1)
ignoreChan := make(chan os.Signal, 1)
......
......@@ -9,6 +9,9 @@ import (
"strconv"
"time"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
"github.com/grafana/grafana/pkg/services/provisioning"
"golang.org/x/sync/errgroup"
"github.com/grafana/grafana/pkg/api"
......@@ -21,7 +24,9 @@ import (
"github.com/grafana/grafana/pkg/services/cleanup"
"github.com/grafana/grafana/pkg/services/notifications"
"github.com/grafana/grafana/pkg/services/search"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/social"
"github.com/grafana/grafana/pkg/tracing"
)
......@@ -54,12 +59,19 @@ func (g *GrafanaServerImpl) Start() {
g.writePIDFile()
initSql()
metrics.Init(setting.Cfg)
search.Init()
login.Init()
social.NewOAuthService()
plugins.Init()
if err := provisioning.StartUp(setting.DatasourcesPath); err != nil {
logger.Error("Failed to provision Grafana from config", "error", err)
g.Shutdown(1, "Startup failed")
return
}
closer, err := tracing.Init(setting.Cfg)
if err != nil {
g.log.Error("Tracing settings is not valid", "error", err)
......@@ -87,6 +99,11 @@ func (g *GrafanaServerImpl) Start() {
g.startHttpServer()
}
func initSql() {
sqlstore.NewEngine()
sqlstore.EnsureAdminUser()
}
func (g *GrafanaServerImpl) initLogging() {
err := setting.NewConfigContext(&setting.CommandLineArgs{
Config: *configFile,
......
......@@ -363,6 +363,7 @@ type scenarioContext struct {
respJson map[string]interface{}
handlerFunc handlerFunc
defaultHandler macaron.Handler
url string
req *http.Request
}
......
......@@ -123,23 +123,22 @@ func Recovery() macaron.Handler {
c.Data["ErrorMsg"] = string(stack)
}
c.HTML(500, "500")
// // Lookup the current responsewriter
// val := c.GetVal(inject.InterfaceOf((*http.ResponseWriter)(nil)))
// res := val.Interface().(http.ResponseWriter)
//
// // respond with panic message while in development mode
// var body []byte
// if setting.Env == setting.DEV {
// res.Header().Set("Content-Type", "text/html")
// body = []byte(fmt.Sprintf(panicHtml, err, err, stack))
// }
//
// res.WriteHeader(http.StatusInternalServerError)
// if nil != body {
// res.Write(body)
// }
ctx, ok := c.Data["ctx"].(*Context)
if ok && ctx.IsApiRequest() {
resp := make(map[string]interface{})
resp["message"] = "Internal Server Error - Check the Grafana server logs for the detailed error message."
if c.Data["ErrorMsg"] != nil {
resp["error"] = fmt.Sprintf("%v - %v", c.Data["Title"], c.Data["ErrorMsg"])
} else {
resp["error"] = c.Data["Title"]
}
c.JSON(500, resp)
} else {
c.HTML(500, "500")
}
}
}()
......
package middleware
import (
"path/filepath"
"testing"
"github.com/go-macaron/session"
"github.com/grafana/grafana/pkg/bus"
. "github.com/smartystreets/goconvey/convey"
"gopkg.in/macaron.v1"
)
func TestRecoveryMiddleware(t *testing.T) {
Convey("Given an api route that panics", t, func() {
apiUrl := "/api/whatever"
recoveryScenario("recovery middleware should return json", apiUrl, func(sc *scenarioContext) {
sc.handlerFunc = PanicHandler
sc.fakeReq("GET", apiUrl).exec()
sc.req.Header.Add("content-type", "application/json")
So(sc.resp.Code, ShouldEqual, 500)
So(sc.respJson["message"], ShouldStartWith, "Internal Server Error - Check the Grafana server logs for the detailed error message.")
So(sc.respJson["error"], ShouldStartWith, "Server Error")
})
})
Convey("Given a non-api route that panics", t, func() {
apiUrl := "/whatever"
recoveryScenario("recovery middleware should return html", apiUrl, func(sc *scenarioContext) {
sc.handlerFunc = PanicHandler
sc.fakeReq("GET", apiUrl).exec()
So(sc.resp.Code, ShouldEqual, 500)
So(sc.resp.Header().Get("content-type"), ShouldEqual, "text/html; charset=UTF-8")
So(sc.resp.Body.String(), ShouldContainSubstring, "<title>Grafana - Error</title>")
})
})
}
func PanicHandler(c *Context) {
panic("Handler has panicked")
}
func recoveryScenario(desc string, url string, fn scenarioFunc) {
Convey(desc, func() {
defer bus.ClearBusHandlers()
sc := &scenarioContext{
url: url,
}
viewsPath, _ := filepath.Abs("../../public/views")
sc.m = macaron.New()
sc.m.Use(Recovery())
sc.m.Use(macaron.Renderer(macaron.RenderOptions{
Directory: viewsPath,
Delims: macaron.Delims{Left: "[[", Right: "]]"},
}))
sc.m.Use(GetContextHandler())
// mock out gc goroutine
startSessionGC = func() {}
sc.m.Use(Sessioner(&session.Options{}))
sc.m.Use(OrgRedirect())
sc.m.Use(AddDefaultResponseHeaders())
sc.defaultHandler = func(c *Context) {
sc.context = c
if sc.handlerFunc != nil {
sc.handlerFunc(sc.context)
}
}
sc.m.Get(url, sc.defaultHandler)
fn(sc)
})
}
......@@ -27,6 +27,7 @@ var (
ErrDataSourceNotFound = errors.New("Data source not found")
ErrDataSourceNameExists = errors.New("Data source with same name already exists")
ErrDataSourceUpdatingOldVersion = errors.New("Trying to update old version of datasource")
ErrDatasourceIsReadOnly = errors.New("Data source is readonly. Can only be updated from configuration.")
)
type DsAccess string
......@@ -50,6 +51,7 @@ type DataSource struct {
IsDefault bool
JsonData *simplejson.Json
SecureJsonData securejsondata.SecureJsonData
ReadOnly bool
Created time.Time
Updated time.Time
......@@ -109,6 +111,7 @@ type AddDataSourceCommand struct {
IsDefault bool `json:"isDefault"`
JsonData *simplejson.Json `json:"jsonData"`
SecureJsonData map[string]string `json:"secureJsonData"`
ReadOnly bool `json:"readOnly"`
OrgId int64 `json:"-"`
......@@ -132,6 +135,7 @@ type UpdateDataSourceCommand struct {
JsonData *simplejson.Json `json:"jsonData"`
SecureJsonData map[string]string `json:"secureJsonData"`
Version int `json:"version"`
ReadOnly bool `json:"readOnly"`
OrgId int64 `json:"-"`
Id int64 `json:"-"`
......@@ -142,11 +146,15 @@ type UpdateDataSourceCommand struct {
type DeleteDataSourceByIdCommand struct {
Id int64
OrgId int64
DeletedDatasourcesCount int64
}
type DeleteDataSourceByNameCommand struct {
Name string
OrgId int64
DeletedDatasourcesCount int64
}
// ---------------------
......@@ -157,6 +165,10 @@ type GetDataSourcesQuery struct {
Result []*DataSource
}
type GetAllDataSourcesQuery struct {
Result []*DataSource
}
type GetDataSourceByIdQuery struct {
Id int64
OrgId int64
......
package datasources
import (
"errors"
"io/ioutil"
"path/filepath"
"strings"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
yaml "gopkg.in/yaml.v2"
)
var (
ErrInvalidConfigToManyDefault = errors.New("datasource.yaml config is invalid. Only one datasource can be marked as default")
)
func Provision(configDirectory string) error {
dc := newDatasourceProvisioner(log.New("provisioning.datasources"))
return dc.applyChanges(configDirectory)
}
type DatasourceProvisioner struct {
log log.Logger
cfgProvider configReader
}
func newDatasourceProvisioner(log log.Logger) DatasourceProvisioner {
return DatasourceProvisioner{
log: log,
cfgProvider: configReader{},
}
}
func (dc *DatasourceProvisioner) apply(cfg *DatasourcesAsConfig) error {
if err := dc.deleteDatasources(cfg.DeleteDatasources); err != nil {
return err
}
for _, ds := range cfg.Datasources {
cmd := &models.GetDataSourceByNameQuery{OrgId: ds.OrgId, Name: ds.Name}
err := bus.Dispatch(cmd)
if err != nil && err != models.ErrDataSourceNotFound {
return err
}
if err == models.ErrDataSourceNotFound {
dc.log.Info("inserting datasource from configuration ", "name", ds.Name)
insertCmd := createInsertCommand(ds)
if err := bus.Dispatch(insertCmd); err != nil {
return err
}
} else {
dc.log.Debug("updating datasource from configuration", "name", ds.Name)
updateCmd := createUpdateCommand(ds, cmd.Result.Id)
if err := bus.Dispatch(updateCmd); err != nil {
return err
}
}
}
return nil
}
func (dc *DatasourceProvisioner) applyChanges(configPath string) error {
configs, err := dc.cfgProvider.readConfig(configPath)
if err != nil {
return err
}
for _, cfg := range configs {
if err := dc.apply(cfg); err != nil {
return err
}
}
return nil
}
func (dc *DatasourceProvisioner) deleteDatasources(dsToDelete []*DeleteDatasourceConfig) error {
for _, ds := range dsToDelete {
cmd := &models.DeleteDataSourceByNameCommand{OrgId: ds.OrgId, Name: ds.Name}
if err := bus.Dispatch(cmd); err != nil {
return err
}
if cmd.DeletedDatasourcesCount > 0 {
dc.log.Info("deleted datasource based on configuration", "name", ds.Name)
}
}
return nil
}
type configReader struct{}
func (configReader) readConfig(path string) ([]*DatasourcesAsConfig, error) {
files, err := ioutil.ReadDir(path)
if err != nil {
return nil, err
}
var datasources []*DatasourcesAsConfig
for _, file := range files {
if strings.HasSuffix(file.Name(), ".yaml") || strings.HasSuffix(file.Name(), ".yml") {
filename, _ := filepath.Abs(filepath.Join(path, file.Name()))
yamlFile, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
var datasource *DatasourcesAsConfig
err = yaml.Unmarshal(yamlFile, &datasource)
if err != nil {
return nil, err
}
datasources = append(datasources, datasource)
}
}
defaultCount := 0
for _, cfg := range datasources {
for _, ds := range cfg.Datasources {
if ds.OrgId == 0 {
ds.OrgId = 1
}
if ds.IsDefault {
defaultCount++
if defaultCount > 1 {
return nil, ErrInvalidConfigToManyDefault
}
}
}
for _, ds := range cfg.DeleteDatasources {
if ds.OrgId == 0 {
ds.OrgId = 1
}
}
}
return datasources, nil
}
package datasources
import (
"testing"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
var (
logger log.Logger = log.New("fake.logger")
oneDatasourcesConfig string = ""
twoDatasourcesConfig string = "./test-configs/two-datasources"
twoDatasourcesConfigPurgeOthers string = "./test-configs/insert-two-delete-two"
doubleDatasourcesConfig string = "./test-configs/double-default"
allProperties string = "./test-configs/all-properties"
brokenYaml string = "./test-configs/broken-yaml"
fakeRepo *fakeRepository
)
func TestDatasourceAsConfig(t *testing.T) {
Convey("Testing datasource as configuration", t, func() {
fakeRepo = &fakeRepository{}
bus.ClearBusHandlers()
bus.AddHandler("test", mockDelete)
bus.AddHandler("test", mockInsert)
bus.AddHandler("test", mockUpdate)
bus.AddHandler("test", mockGet)
bus.AddHandler("test", mockGetAll)
Convey("One configured datasource", func() {
Convey("no datasource in database", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfig)
if err != nil {
t.Fatalf("applyChanges return an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 0)
So(len(fakeRepo.inserted), ShouldEqual, 2)
So(len(fakeRepo.updated), ShouldEqual, 0)
})
Convey("One datasource in database with same name", func() {
fakeRepo.loadAll = []*models.DataSource{
{Name: "Graphite", OrgId: 1, Id: 1},
}
Convey("should update one datasource", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfig)
if err != nil {
t.Fatalf("applyChanges return an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 0)
So(len(fakeRepo.inserted), ShouldEqual, 1)
So(len(fakeRepo.updated), ShouldEqual, 1)
})
})
Convey("Two datasources with is_default", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(doubleDatasourcesConfig)
Convey("should raise error", func() {
So(err, ShouldEqual, ErrInvalidConfigToManyDefault)
})
})
})
Convey("Two configured datasource and purge others ", func() {
Convey("two other datasources in database", func() {
fakeRepo.loadAll = []*models.DataSource{
{Name: "old-graphite", OrgId: 1, Id: 1},
{Name: "old-graphite2", OrgId: 1, Id: 2},
}
Convey("should have two new datasources", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfigPurgeOthers)
if err != nil {
t.Fatalf("applyChanges return an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 2)
So(len(fakeRepo.inserted), ShouldEqual, 2)
So(len(fakeRepo.updated), ShouldEqual, 0)
})
})
})
Convey("Two configured datasource and purge others = false", func() {
Convey("two other datasources in database", func() {
fakeRepo.loadAll = []*models.DataSource{
{Name: "Graphite", OrgId: 1, Id: 1},
{Name: "old-graphite2", OrgId: 1, Id: 2},
}
Convey("should have two new datasources", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(twoDatasourcesConfig)
if err != nil {
t.Fatalf("applyChanges return an error %v", err)
}
So(len(fakeRepo.deleted), ShouldEqual, 0)
So(len(fakeRepo.inserted), ShouldEqual, 1)
So(len(fakeRepo.updated), ShouldEqual, 1)
})
})
})
Convey("broken yaml should return error", func() {
_, err := configReader{}.readConfig(brokenYaml)
So(err, ShouldNotBeNil)
})
Convey("can read all properties", func() {
cfgProvider := configReader{}
cfg, err := cfgProvider.readConfig(allProperties)
if err != nil {
t.Fatalf("readConfig return an error %v", err)
}
So(len(cfg), ShouldEqual, 2)
dsCfg := cfg[0]
ds := dsCfg.Datasources[0]
So(ds.Name, ShouldEqual, "name")
So(ds.Type, ShouldEqual, "type")
So(ds.Access, ShouldEqual, models.DS_ACCESS_PROXY)
So(ds.OrgId, ShouldEqual, 2)
So(ds.Url, ShouldEqual, "url")
So(ds.User, ShouldEqual, "user")
So(ds.Password, ShouldEqual, "password")
So(ds.Database, ShouldEqual, "database")
So(ds.BasicAuth, ShouldBeTrue)
So(ds.BasicAuthUser, ShouldEqual, "basic_auth_user")
So(ds.BasicAuthPassword, ShouldEqual, "basic_auth_password")
So(ds.WithCredentials, ShouldBeTrue)
So(ds.IsDefault, ShouldBeTrue)
So(ds.Editable, ShouldBeTrue)
So(len(ds.JsonData), ShouldBeGreaterThan, 2)
So(ds.JsonData["graphiteVersion"], ShouldEqual, "1.1")
So(ds.JsonData["tlsAuth"], ShouldEqual, true)
So(ds.JsonData["tlsAuthWithCACert"], ShouldEqual, true)
So(len(ds.SecureJsonData), ShouldBeGreaterThan, 2)
So(ds.SecureJsonData["tlsCACert"], ShouldEqual, "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA==")
So(ds.SecureJsonData["tlsClientCert"], ShouldEqual, "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ==")
So(ds.SecureJsonData["tlsClientKey"], ShouldEqual, "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A==")
dstwo := cfg[1].Datasources[0]
So(dstwo.Name, ShouldEqual, "name2")
})
})
}
type fakeRepository struct {
inserted []*models.AddDataSourceCommand
deleted []*models.DeleteDataSourceByNameCommand
updated []*models.UpdateDataSourceCommand
loadAll []*models.DataSource
}
func mockDelete(cmd *models.DeleteDataSourceByNameCommand) error {
fakeRepo.deleted = append(fakeRepo.deleted, cmd)
return nil
}
func mockUpdate(cmd *models.UpdateDataSourceCommand) error {
fakeRepo.updated = append(fakeRepo.updated, cmd)
return nil
}
func mockInsert(cmd *models.AddDataSourceCommand) error {
fakeRepo.inserted = append(fakeRepo.inserted, cmd)
return nil
}
func mockGetAll(cmd *models.GetAllDataSourcesQuery) error {
cmd.Result = fakeRepo.loadAll
return nil
}
func mockGet(cmd *models.GetDataSourceByNameQuery) error {
for _, v := range fakeRepo.loadAll {
if cmd.Name == v.Name && cmd.OrgId == v.OrgId {
cmd.Result = v
return nil
}
}
return models.ErrDataSourceNotFound
}
datasources:
- name: name
type: type
access: proxy
org_id: 2
url: url
password: password
user: user
database: database
basic_auth: true
basic_auth_user: basic_auth_user
basic_auth_password: basic_auth_password
with_credentials: true
is_default: true
json_data:
graphiteVersion: "1.1"
tlsAuth: true
tlsAuthWithCACert: true
secure_json_data:
tlsCACert: "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA=="
tlsClientCert: "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ=="
tlsClientKey: "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A=="
editable: true
purge_other_datasources: true
datasources:
- name: name2
type: type2
access: proxy
org_id: 2
url: url2
#sfxzgnsxzcvnbzcvn
cvbn
cvbn
c
vbn
cvbncvbn
\ No newline at end of file
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
is_default: true
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
is_default: true
datasources:
- name: Prometheus
type: prometheus
access: proxy
url: http://localhost:9090
delete_datasources:
- name: old-graphite
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
delete_datasources:
- name: old-graphite3
datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
- name: Prometheus
type: prometheus
access: proxy
url: http://localhost:9090
package datasources
import "github.com/grafana/grafana/pkg/models"
import "github.com/grafana/grafana/pkg/components/simplejson"
type DatasourcesAsConfig struct {
Datasources []*DataSourceFromConfig `json:"datasources" yaml:"datasources"`
DeleteDatasources []*DeleteDatasourceConfig `json:"delete_datasources" yaml:"delete_datasources"`
}
type DeleteDatasourceConfig struct {
OrgId int64 `json:"org_id" yaml:"org_id"`
Name string `json:"name" yaml:"name"`
}
type DataSourceFromConfig struct {
OrgId int64 `json:"org_id" yaml:"org_id"`
Version int `json:"version" yaml:"version"`
Name string `json:"name" yaml:"name"`
Type string `json:"type" yaml:"type"`
Access string `json:"access" yaml:"access"`
Url string `json:"url" yaml:"url"`
Password string `json:"password" yaml:"password"`
User string `json:"user" yaml:"user"`
Database string `json:"database" yaml:"database"`
BasicAuth bool `json:"basic_auth" yaml:"basic_auth"`
BasicAuthUser string `json:"basic_auth_user" yaml:"basic_auth_user"`
BasicAuthPassword string `json:"basic_auth_password" yaml:"basic_auth_password"`
WithCredentials bool `json:"with_credentials" yaml:"with_credentials"`
IsDefault bool `json:"is_default" yaml:"is_default"`
JsonData map[string]interface{} `json:"json_data" yaml:"json_data"`
SecureJsonData map[string]string `json:"secure_json_data" yaml:"secure_json_data"`
Editable bool `json:"editable" yaml:"editable"`
}
func createInsertCommand(ds *DataSourceFromConfig) *models.AddDataSourceCommand {
jsonData := simplejson.New()
if len(ds.JsonData) > 0 {
for k, v := range ds.JsonData {
jsonData.Set(k, v)
}
}
return &models.AddDataSourceCommand{
OrgId: ds.OrgId,
Name: ds.Name,
Type: ds.Type,
Access: models.DsAccess(ds.Access),
Url: ds.Url,
Password: ds.Password,
User: ds.User,
Database: ds.Database,
BasicAuth: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
BasicAuthPassword: ds.BasicAuthPassword,
WithCredentials: ds.WithCredentials,
IsDefault: ds.IsDefault,
JsonData: jsonData,
SecureJsonData: ds.SecureJsonData,
ReadOnly: !ds.Editable,
}
}
func createUpdateCommand(ds *DataSourceFromConfig, id int64) *models.UpdateDataSourceCommand {
jsonData := simplejson.New()
if len(ds.JsonData) > 0 {
for k, v := range ds.JsonData {
jsonData.Set(k, v)
}
}
return &models.UpdateDataSourceCommand{
Id: id,
OrgId: ds.OrgId,
Name: ds.Name,
Type: ds.Type,
Access: models.DsAccess(ds.Access),
Url: ds.Url,
Password: ds.Password,
User: ds.User,
Database: ds.Database,
BasicAuth: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
BasicAuthPassword: ds.BasicAuthPassword,
WithCredentials: ds.WithCredentials,
IsDefault: ds.IsDefault,
JsonData: jsonData,
SecureJsonData: ds.SecureJsonData,
ReadOnly: !ds.Editable,
}
}
package provisioning
import (
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/services/provisioning/datasources"
)
var (
logger log.Logger = log.New("services.provisioning")
)
func StartUp(datasourcePath string) error {
return datasources.Provision(datasourcePath)
}
......@@ -13,6 +13,7 @@ import (
func init() {
bus.AddHandler("sql", GetDataSources)
bus.AddHandler("sql", GetAllDataSources)
bus.AddHandler("sql", AddDataSource)
bus.AddHandler("sql", DeleteDataSourceById)
bus.AddHandler("sql", DeleteDataSourceByName)
......@@ -54,10 +55,19 @@ func GetDataSources(query *m.GetDataSourcesQuery) error {
return sess.Find(&query.Result)
}
func GetAllDataSources(query *m.GetAllDataSourcesQuery) error {
sess := x.Limit(1000, 0).Asc("name")
query.Result = make([]*m.DataSource, 0)
return sess.Find(&query.Result)
}
func DeleteDataSourceById(cmd *m.DeleteDataSourceByIdCommand) error {
return inTransaction(func(sess *DBSession) error {
var rawSql = "DELETE FROM data_source WHERE id=? and org_id=?"
_, err := sess.Exec(rawSql, cmd.Id, cmd.OrgId)
result, err := sess.Exec(rawSql, cmd.Id, cmd.OrgId)
affected, _ := result.RowsAffected()
cmd.DeletedDatasourcesCount = affected
return err
})
}
......@@ -65,7 +75,9 @@ func DeleteDataSourceById(cmd *m.DeleteDataSourceByIdCommand) error {
func DeleteDataSourceByName(cmd *m.DeleteDataSourceByNameCommand) error {
return inTransaction(func(sess *DBSession) error {
var rawSql = "DELETE FROM data_source WHERE name=? and org_id=?"
_, err := sess.Exec(rawSql, cmd.Name, cmd.OrgId)
result, err := sess.Exec(rawSql, cmd.Name, cmd.OrgId)
affected, _ := result.RowsAffected()
cmd.DeletedDatasourcesCount = affected
return err
})
}
......@@ -98,6 +110,7 @@ func AddDataSource(cmd *m.AddDataSourceCommand) error {
Created: time.Now(),
Updated: time.Now(),
Version: 1,
ReadOnly: cmd.ReadOnly,
}
if _, err := sess.Insert(ds); err != nil {
......@@ -143,12 +156,14 @@ func UpdateDataSource(cmd *m.UpdateDataSourceCommand) error {
JsonData: cmd.JsonData,
SecureJsonData: securejsondata.GetEncryptedJsonData(cmd.SecureJsonData),
Updated: time.Now(),
ReadOnly: cmd.ReadOnly,
Version: cmd.Version + 1,
}
sess.UseBool("is_default")
sess.UseBool("basic_auth")
sess.UseBool("with_credentials")
sess.UseBool("read_only")
var updateSession *xorm.Session
if cmd.Version != 0 {
......
......@@ -45,6 +45,7 @@ func TestDataAccess(t *testing.T) {
Access: m.DS_ACCESS_DIRECT,
Url: "http://test",
Database: "site",
ReadOnly: true,
})
So(err, ShouldBeNil)
......@@ -59,6 +60,7 @@ func TestDataAccess(t *testing.T) {
So(ds.OrgId, ShouldEqual, 10)
So(ds.Database, ShouldEqual, "site")
So(ds.ReadOnly, ShouldBeTrue)
})
Convey("Given a datasource", func() {
......
......@@ -126,4 +126,8 @@ func addDataSourceMigration(mg *Migrator) {
Sqlite(setVersionToOneWhereZero).
Postgres(setVersionToOneWhereZero).
Mysql(setVersionToOneWhereZero))
mg.AddMigration("Add read_only data column", NewAddColumnMigration(tableV2, &Column{
Name: "read_only", Type: DB_Bool, Nullable: true,
}))
}
......@@ -50,11 +50,12 @@ var (
BuildStamp int64
// Paths
LogsPath string
HomePath string
DataPath string
PluginsPath string
CustomInitPath = "conf/custom.ini"
LogsPath string
HomePath string
DataPath string
PluginsPath string
DatasourcesPath string
CustomInitPath = "conf/custom.ini"
// Log settings.
LogModes []string
......@@ -470,6 +471,7 @@ func NewConfigContext(args *CommandLineArgs) error {
Env = Cfg.Section("").Key("app_mode").MustString("development")
InstanceName = Cfg.Section("").Key("instance_name").MustString("unknown_instance_name")
PluginsPath = makeAbsolute(Cfg.Section("paths").Key("plugins").String(), HomePath)
DatasourcesPath = makeAbsolute(Cfg.Section("paths").Key("datasources").String(), HomePath)
server := Cfg.Section("server")
AppUrl, AppSubUrl = parseAppUrlAndSubUrl(server)
......@@ -661,5 +663,6 @@ func LogConfigurationInfo() {
logger.Info("Path Data", "path", DataPath)
logger.Info("Path Logs", "path", LogsPath)
logger.Info("Path Plugins", "path", PluginsPath)
logger.Info("Path Datasources", "path", DatasourcesPath)
logger.Info("App mode " + Env)
}
......@@ -2,9 +2,11 @@ package influxdb
import (
"strconv"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
)
type InfluxdbQueryParser struct{}
......@@ -37,13 +39,7 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data
return nil, err
}
interval := model.Get("interval").MustString("")
if interval == "" && dsInfo.JsonData != nil {
dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
if dsInterval != "" {
interval = dsInterval
}
}
parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond*1)
return &Query{
Measurement: measurement,
......@@ -53,7 +49,7 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data
Tags: tags,
Selects: selects,
RawQuery: rawQuery,
Interval: interval,
Interval: parsedInterval,
Alias: alias,
UseRawQuery: useRawQuery,
}, nil
......
......@@ -2,6 +2,7 @@ package influxdb
import (
"testing"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
......@@ -115,7 +116,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
So(len(res.GroupBy), ShouldEqual, 3)
So(len(res.Selects), ShouldEqual, 3)
So(len(res.Tags), ShouldEqual, 2)
So(res.Interval, ShouldEqual, ">20s")
So(res.Interval, ShouldEqual, time.Second*20)
So(res.Alias, ShouldEqual, "serie alias")
})
......@@ -174,7 +175,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
So(len(res.GroupBy), ShouldEqual, 2)
So(len(res.Selects), ShouldEqual, 1)
So(len(res.Tags), ShouldEqual, 0)
So(res.Interval, ShouldEqual, ">10s")
So(res.Interval, ShouldEqual, time.Second*10)
})
})
}
package influxdb
import "time"
type Query struct {
Measurement string
Policy string
......@@ -10,8 +12,7 @@ type Query struct {
RawQuery string
UseRawQuery bool
Alias string
Interval string
Interval time.Duration
}
type Tag struct {
......
......@@ -29,10 +29,8 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
res += query.renderGroupBy(queryContext)
}
interval, err := getDefinedInterval(query, queryContext)
if err != nil {
return "", err
}
calculator := tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{})
interval := calculator.Calculate(queryContext.TimeRange, query.Interval)
res = strings.Replace(res, "$timeFilter", query.renderTimeFilter(queryContext), -1)
res = strings.Replace(res, "$interval", interval.Text, -1)
......@@ -41,29 +39,6 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
return res, nil
}
func getDefinedInterval(query *Query, queryContext *tsdb.TsdbQuery) (*tsdb.Interval, error) {
defaultInterval := tsdb.CalculateInterval(queryContext.TimeRange)
if query.Interval == "" {
return &defaultInterval, nil
}
setInterval := strings.Replace(strings.Replace(query.Interval, "<", "", 1), ">", "", 1)
parsedSetInterval, err := time.ParseDuration(setInterval)
if err != nil {
return nil, err
}
if strings.Contains(query.Interval, ">") {
if defaultInterval.Value > parsedSetInterval {
return &defaultInterval, nil
}
}
return &tsdb.Interval{Value: parsedSetInterval, Text: setInterval}, nil
}
func (query *Query) renderTags() []string {
var res []string
for i, tag := range query.Tags {
......
......@@ -2,6 +2,7 @@ package influxdb
import (
"testing"
"time"
"strings"
......@@ -38,7 +39,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
Measurement: "cpu",
Policy: "policy",
GroupBy: []*QueryPart{groupBy1, groupBy3},
Interval: "10s",
Interval: time.Second * 10,
}
rawQuery, err := query.Build(queryContext)
......@@ -52,7 +53,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
Measurement: "cpu",
GroupBy: []*QueryPart{groupBy1, groupBy2, groupBy3},
Tags: []*Tag{tag1, tag2},
Interval: "5s",
Interval: time.Second * 5,
}
rawQuery, err := query.Build(queryContext)
......@@ -64,7 +65,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
query := &Query{
Selects: []*Select{{*qp1, *qp2, *mathPartDivideBy100}},
Measurement: "cpu",
Interval: "5s",
Interval: time.Second * 5,
}
rawQuery, err := query.Build(queryContext)
......@@ -76,7 +77,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
query := &Query{
Selects: []*Select{{*qp1, *qp2, *mathPartDivideByIntervalMs}},
Measurement: "cpu",
Interval: "5s",
Interval: time.Second * 5,
}
rawQuery, err := query.Build(queryContext)
......@@ -117,7 +118,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
Measurement: "cpu",
Policy: "policy",
GroupBy: []*QueryPart{groupBy1, groupBy3},
Interval: "10s",
Interval: time.Second * 10,
RawQuery: "Raw query",
UseRawQuery: true,
}
......
......@@ -2,14 +2,18 @@ package tsdb
import (
"fmt"
"strings"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
)
var (
defaultRes int64 = 1500
minInterval time.Duration = 1 * time.Millisecond
year time.Duration = time.Hour * 24 * 365
day time.Duration = time.Hour * 24 * 365
defaultRes int64 = 1500
defaultMinInterval time.Duration = 1 * time.Millisecond
year time.Duration = time.Hour * 24 * 365
day time.Duration = time.Hour * 24
)
type Interval struct {
......@@ -17,14 +21,68 @@ type Interval struct {
Value time.Duration
}
func CalculateInterval(timerange *TimeRange) Interval {
interval := time.Duration((timerange.MustGetTo().UnixNano() - timerange.MustGetFrom().UnixNano()) / defaultRes)
type intervalCalculator struct {
minInterval time.Duration
}
type IntervalCalculator interface {
Calculate(timeRange *TimeRange, minInterval time.Duration) Interval
}
type IntervalOptions struct {
MinInterval time.Duration
}
func NewIntervalCalculator(opt *IntervalOptions) *intervalCalculator {
if opt == nil {
opt = &IntervalOptions{}
}
calc := &intervalCalculator{}
if opt.MinInterval == 0 {
calc.minInterval = defaultMinInterval
} else {
calc.minInterval = opt.MinInterval
}
return calc
}
func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.Duration) Interval {
to := timerange.MustGetTo().UnixNano()
from := timerange.MustGetFrom().UnixNano()
interval := time.Duration((to - from) / defaultRes)
if interval < minInterval {
return Interval{Text: formatDuration(minInterval), Value: interval}
return Interval{Text: formatDuration(minInterval), Value: minInterval}
}
rounded := roundInterval(interval)
return Interval{Text: formatDuration(rounded), Value: rounded}
}
func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) {
interval := queryModel.Get("interval").MustString("")
if interval == "" && dsInfo.JsonData != nil {
dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
if dsInterval != "" {
interval = dsInterval
}
}
if interval == "" {
return defaultInterval, nil
}
interval = strings.Replace(strings.Replace(interval, "<", "", 1), ">", "", 1)
parsedInterval, err := time.ParseDuration(interval)
if err != nil {
return time.Duration(0), err
}
return Interval{Text: formatDuration(roundInterval(interval)), Value: interval}
return parsedInterval, nil
}
func formatDuration(inter time.Duration) string {
......
......@@ -14,31 +14,33 @@ func TestInterval(t *testing.T) {
HomePath: "../../",
})
calculator := NewIntervalCalculator(&IntervalOptions{})
Convey("for 5min", func() {
tr := NewTimeRange("5m", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "200ms")
})
Convey("for 15min", func() {
tr := NewTimeRange("15m", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "500ms")
})
Convey("for 30min", func() {
tr := NewTimeRange("30m", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "1s")
})
Convey("for 1h", func() {
tr := NewTimeRange("1h", "now")
interval := CalculateInterval(tr)
interval := calculator.Calculate(tr, time.Millisecond*1)
So(interval.Text, ShouldEqual, "2s")
})
......@@ -51,6 +53,7 @@ func TestInterval(t *testing.T) {
So(formatDuration(time.Second*61), ShouldEqual, "1m")
So(formatDuration(time.Millisecond*30), ShouldEqual, "30ms")
So(formatDuration(time.Hour*23), ShouldEqual, "23h")
So(formatDuration(time.Hour*24), ShouldEqual, "1d")
So(formatDuration(time.Hour*24*367), ShouldEqual, "1y")
})
})
......
......@@ -4,6 +4,7 @@ import (
"container/list"
"context"
"fmt"
"net/url"
"strconv"
"time"
......@@ -52,7 +53,7 @@ func generateConnectionString(datasource *models.DataSource) string {
}
sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full")
return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", datasource.User, password, datasource.Url, datasource.Database, sslmode)
return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", url.PathEscape(datasource.User), url.PathEscape(password), url.PathEscape(datasource.Url), url.PathEscape(datasource.Database), url.QueryEscape(sslmode))
}
func (e *PostgresQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
......@@ -186,7 +187,7 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co
case float64:
timestamp = columnValue * 1000
case time.Time:
timestamp = float64(columnValue.Unix() * 1000)
timestamp = float64(columnValue.UnixNano() / 1e6)
default:
return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp")
}
......
......@@ -48,14 +48,16 @@ func NewPrometheusExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, e
}
var (
plog log.Logger
legendFormat *regexp.Regexp
plog log.Logger
legendFormat *regexp.Regexp
intervalCalculator tsdb.IntervalCalculator
)
func init() {
plog = log.New("tsdb.prometheus")
tsdb.RegisterTsdbQueryEndpoint("prometheus", NewPrometheusExecutor)
legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
intervalCalculator = tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Second * 1})
}
func (e *PrometheusExecutor) getClient(dsInfo *models.DataSource) (apiv1.API, error) {
......@@ -88,7 +90,7 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
return nil, err
}
query, err := parseQuery(tsdbQuery.Queries, tsdbQuery)
query, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery)
if err != nil {
return nil, err
}
......@@ -138,7 +140,7 @@ func formatLegend(metric model.Metric, query *PrometheusQuery) string {
return string(result)
}
func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*PrometheusQuery, error) {
func parseQuery(dsInfo *models.DataSource, queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*PrometheusQuery, error) {
queryModel := queries[0]
expr, err := queryModel.Model.Get("expr").String()
......@@ -146,11 +148,6 @@ func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*Prometheu
return nil, err
}
step, err := queryModel.Model.Get("step").Int64()
if err != nil {
return nil, err
}
format := queryModel.Model.Get("legendFormat").MustString("")
start, err := queryContext.TimeRange.ParseFrom()
......@@ -163,9 +160,18 @@ func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*Prometheu
return nil, err
}
dsInterval, err := tsdb.GetIntervalFrom(dsInfo, queryModel.Model, time.Second*15)
if err != nil {
return nil, err
}
intervalFactor := queryModel.Model.Get("intervalFactor").MustInt64(1)
interval := intervalCalculator.Calculate(queryContext.TimeRange, dsInterval)
step := time.Duration(int64(interval.Value) * intervalFactor)
return &PrometheusQuery{
Expr: expr,
Step: time.Second * time.Duration(step),
Step: step,
LegendFormat: format,
Start: start,
End: end,
......
......@@ -2,13 +2,21 @@ package prometheus
import (
"testing"
"time"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/components/simplejson"
p "github.com/prometheus/common/model"
. "github.com/smartystreets/goconvey/convey"
)
func TestPrometheus(t *testing.T) {
Convey("Prometheus", t, func() {
dsInfo := &models.DataSource{
JsonData: simplejson.New(),
}
Convey("converting metric name", func() {
metric := map[p.LabelName]p.LabelValue{
......@@ -36,5 +44,108 @@ func TestPrometheus(t *testing.T) {
So(formatLegend(metric, query), ShouldEqual, `http_request_total{app="backend", device="mobile"}`)
})
Convey("parsing query model with step", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("12h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Second*30)
})
})
Convey("parsing query model without step parameter", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("48h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Minute*2)
})
Convey("with 1h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("1h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Second*15)
})
})
Convey("parsing query model with intervalFactor", func() {
Convey("high intervalFactor", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 10,
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("48h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Minute*20)
})
})
Convey("low intervalFactor", func() {
json := `{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
}`
jsonModel, _ := simplejson.NewJson([]byte(json))
queryContext := &tsdb.TsdbQuery{}
queryModels := []*tsdb.Query{
{Model: jsonModel},
}
Convey("with 48h time range", func() {
queryContext.TimeRange = tsdb.NewTimeRange("48h", "now")
model, err := parseQuery(dsInfo, queryModels, queryContext)
So(err, ShouldBeNil)
So(model.Step, ShouldEqual, time.Minute*2)
})
})
})
})
}
......@@ -57,12 +57,13 @@ func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSour
}
engine, err := xorm.NewEngine(driverName, cnnstr)
engine.SetMaxOpenConns(10)
engine.SetMaxIdleConns(10)
if err != nil {
return err
}
engine.SetMaxOpenConns(10)
engine.SetMaxIdleConns(10)
engineCache.cache[dsInfo.Id] = engine
e.XormEngine = engine
......
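The reordering above is the whole fix: xorm.NewEngine can return a nil engine together with an error, so the connection pool must be configured only after the error check. The corrected shape, as a fragment mirroring the hunk:

```Go
engine, err := xorm.NewEngine(driverName, cnnstr)
if err != nil {
	return err // engine may be nil here; touching it would panic
}
engine.SetMaxOpenConns(10) // configure the pool only on the success path
engine.SetMaxIdleConns(10)
```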
......@@ -63,7 +63,9 @@
<span ng-click="ctrl.filterByTag(tag, $event)" ng-repeat="tag in row.tags" tag-color-from-name="tag" class="label label-tag">
{{tag}}
</span>
<i class="fa" ng-class="{'fa-star': row.isStarred, 'fa-star-o': !row.isStarred}"></i>
<span ng-click="ctrl.starDashboard(row, $event)">
<i class="fa" ng-class="{'fa-star': row.isStarred, 'fa-star-o': !row.isStarred}"></i>
</span>
</span>
<span class="search-result-link">
......
......@@ -19,7 +19,7 @@ export class SearchCtrl {
openCompleted: boolean;
/** @ngInject */
constructor($scope, private $location, private $timeout, private backendSrv, public contextSrv, $rootScope) {
constructor($scope, private $location, private $timeout, private backendSrv, private dashboardSrv, public contextSrv, $rootScope) {
$rootScope.onAppEvent('show-dash-search', this.openSearch.bind(this), $scope);
$rootScope.onAppEvent('hide-dash-search', this.closeSearch.bind(this), $scope);
}
......@@ -161,6 +161,15 @@ export class SearchCtrl {
this.searchDashboards();
}
starDashboard(row, evt) {
this.dashboardSrv.starDashboard(row.id, row.isStarred).then(newState => {
row.isStarred = newState;
});
if (evt) {
evt.stopPropagation();
evt.preventDefault();
}
}
}
export function searchDirective() {
......
......@@ -475,6 +475,7 @@ kbn.valueFormats.wpm = kbn.formatBuilders.simpleCountUnit('wpm');
// Energy
kbn.valueFormats.watt = kbn.formatBuilders.decimalSIPrefix('W');
kbn.valueFormats.kwatt = kbn.formatBuilders.decimalSIPrefix('W', 1);
kbn.valueFormats.mwatt = kbn.formatBuilders.decimalSIPrefix('W', -1);
kbn.valueFormats.kwattm = kbn.formatBuilders.decimalSIPrefix('W/Min', 1);
kbn.valueFormats.voltamp = kbn.formatBuilders.decimalSIPrefix('VA');
kbn.valueFormats.kvoltamp = kbn.formatBuilders.decimalSIPrefix('VA', 1);
......@@ -486,8 +487,10 @@ kbn.valueFormats.joule = kbn.formatBuilders.decimalSIPrefix('J');
kbn.valueFormats.ev = kbn.formatBuilders.decimalSIPrefix('eV');
kbn.valueFormats.amp = kbn.formatBuilders.decimalSIPrefix('A');
kbn.valueFormats.kamp = kbn.formatBuilders.decimalSIPrefix('A', 1);
kbn.valueFormats.mamp = kbn.formatBuilders.decimalSIPrefix('A', -1);
kbn.valueFormats.volt = kbn.formatBuilders.decimalSIPrefix('V');
kbn.valueFormats.kvolt = kbn.formatBuilders.decimalSIPrefix('V', 1);
kbn.valueFormats.mvolt = kbn.formatBuilders.decimalSIPrefix('V', -1);
kbn.valueFormats.dBm = kbn.formatBuilders.decimalSIPrefix('dBm');
kbn.valueFormats.ohm = kbn.formatBuilders.decimalSIPrefix('Ω');
......@@ -891,6 +894,7 @@ kbn.getUnitFormats = function() {
submenu: [
{ text: 'millimetre (mm)', value: 'lengthmm' },
{ text: 'meter (m)', value: 'lengthm' },
{ text: 'feet (ft)', value: 'lengthft' },
{ text: 'kilometer (km)', value: 'lengthkm' },
{ text: 'mile (mi)', value: 'lengthmi' },
],
......@@ -934,21 +938,24 @@ kbn.getUnitFormats = function() {
{
text: 'energy',
submenu: [
{ text: 'watt (W)', value: 'watt' },
{ text: 'kilowatt (kW)', value: 'kwatt' },
{ text: 'volt-ampere (VA)', value: 'voltamp' },
{ text: 'kilovolt-ampere (kVA)', value: 'kvoltamp' },
{ text: 'volt-ampere reactive (var)', value: 'voltampreact' },
{ text: 'kilovolt-ampere reactive (kvar)', value: 'kvoltampreact' },
{ text: 'watt-hour (Wh)', value: 'watth' },
{ text: 'kilowatt-hour (kWh)', value: 'kwatth' },
{ text: 'kilowatt-min (kWm)', value: 'kwattm' },
{ text: 'joule (J)', value: 'joule' },
{ text: 'electron volt (eV)', value: 'ev' },
{ text: 'Watt (W)', value: 'watt' },
{ text: 'Kilowatt (kW)', value: 'kwatt' },
{ text: 'Milliwatt (mW)', value: 'mwatt' },
{ text: 'Volt-ampere (VA)', value: 'voltamp' },
{ text: 'Kilovolt-ampere (kVA)', value: 'kvoltamp' },
{ text: 'Volt-ampere reactive (var)', value: 'voltampreact' },
{ text: 'Kilovolt-ampere reactive (kvar)', value: 'kvoltampreact' },
{ text: 'Watt-hour (Wh)', value: 'watth' },
{ text: 'Kilowatt-hour (kWh)', value: 'kwatth' },
{ text: 'Kilowatt-min (kWm)', value: 'kwattm' },
{ text: 'Joule (J)', value: 'joule' },
{ text: 'Electron volt (eV)', value: 'ev' },
{ text: 'Ampere (A)', value: 'amp' },
{ text: 'Kiloampere (kA)', value: 'kamp' },
{ text: 'Milliampere (mA)', value: 'mamp' },
{ text: 'Volt (V)', value: 'volt' },
{ text: 'Kilovolt (kV)', value: 'kvolt' },
{ text: 'Millivolt (mV)', value: 'mvolt' },
{ text: 'Decibel-milliwatt (dBm)', value: 'dBm' },
{ text: 'Ohm (Ω)', value: 'ohm' }
],
......
......@@ -131,6 +131,27 @@ export class DashboardSrv {
modalClass: 'modal--narrow'
});
}
starDashboard(dashboardId, isStarred) {
let promise;
if (isStarred) {
promise = this.backendSrv.delete('/api/user/stars/dashboard/' + dashboardId).then(() => {
return false;
});
} else {
promise = this.backendSrv.post('/api/user/stars/dashboard/' + dashboardId).then(() => {
return true;
});
}
return promise.then(res => {
if (this.dash && this.dash.id === dashboardId) {
this.dash.meta.isStarred = res;
}
return res;
});
}
}
coreModule.service('dashboardSrv', DashboardSrv);
......
......@@ -49,14 +49,9 @@ export class DashNavCtrl {
}
starDashboard() {
if (this.dashboard.meta.isStarred) {
return this.backendSrv.delete('/api/user/stars/dashboard/' + this.dashboard.id).then(() => {
this.dashboard.meta.isStarred = false;
});
}
this.backendSrv.post('/api/user/stars/dashboard/' + this.dashboard.id).then(() => {
this.dashboard.meta.isStarred = true;
this.dashboardSrv.starDashboard(this.dashboard.id, this.dashboard.meta.isStarred)
.then(newState => {
this.dashboard.meta.isStarred = newState;
});
}
......
......@@ -149,6 +149,10 @@ export class DataSourceEditCtrl {
return;
}
if (this.current.readOnly) {
return;
}
if (this.current.id) {
return this.backendSrv.put('/api/datasources/' + this.current.id, this.current).then((result) => {
this.current = result.datasource;
......
......@@ -6,6 +6,8 @@
<h1 ng-show="ctrl.isNew">Add data source</h1>
<h1 ng-hide="ctrl.isNew">Edit data source</h1>
<div ng-if="ctrl.current.readOnly" class="grafana-info-box span8">Disclaimer. This datasource was added by config and cannot be modified using the UI. Please contact your server admin to update this datasource.</div>
<div class="page-header-tabs" ng-show="ctrl.hasDashboards">
<ul class="gf-tabs">
<li class="gf-tabs-item">
......@@ -29,20 +31,20 @@
<div class="gf-form-inline">
<div class="gf-form max-width-30">
<span class="gf-form-label width-7">Name</span>
<input class="gf-form-input max-width-23" type="text" ng-model="ctrl.current.name" placeholder="name" required>
<input class="gf-form-input max-width-23" type="text" ng-model="ctrl.current.name" placeholder="name" ng-disabled="ctrl.current.readOnly" required>
<info-popover offset="0px -135px" mode="right-absolute">
The name is used when you select the data source in panels.
The <em>Default</em> data source is preselected in new
panels.
</info-popover>
</div>
<gf-form-switch class="gf-form" label="Default" checked="ctrl.current.isDefault" switch-class="max-width-6"></gf-form-switch>
<gf-form-switch class="gf-form" label="Default" checked="ctrl.current.isDefault" ng-disabled="ctrl.current.readOnly" switch-class="max-width-6"></gf-form-switch>
</div>
<div class="gf-form max-width-30">
<span class="gf-form-label width-7">Type</span>
<div class="gf-form-select-wrapper max-width-23">
<select class="gf-form-input" ng-model="ctrl.current.type" ng-options="v.id as v.name for v in ctrl.types" ng-change="ctrl.userChangedType()"></select>
<select class="gf-form-input" ng-model="ctrl.current.type" ng-disabled="ctrl.current.readOnly" ng-options="v.id as v.name for v in ctrl.types" ng-change="ctrl.userChangedType()"></select>
</div>
</div>
</div>
......@@ -71,9 +73,9 @@
</div>
<div class="gf-form-button-row">
<button type="submit" class="btn btn-success width-6" ng-show="ctrl.isNew" ng-click="ctrl.saveChanges()">Add</button>
<button type="submit" class="btn btn-success width-8" ng-show="!ctrl.isNew" ng-click="ctrl.saveChanges()">Save &amp; Test</button>
<button type="submit" class="btn btn-danger width-6" ng-show="!ctrl.isNew" ng-click="ctrl.delete()">
<button type="submit" class="btn btn-success width-6" ng-disabled="ctrl.current.readOnly" ng-show="ctrl.isNew" ng-click="ctrl.saveChanges()">Add</button>
<button type="submit" class="btn btn-success width-8" ng-disabled="ctrl.current.readOnly" ng-show="!ctrl.isNew" ng-click="ctrl.saveChanges()">Save &amp; Test</button>
<button type="submit" class="btn btn-danger width-6" ng-disabled="ctrl.current.readOnly" ng-show="!ctrl.isNew" ng-click="ctrl.delete()">
Delete
</button>
<a class="btn btn-link" href="datasources">Cancel</a>
......
......@@ -333,7 +333,7 @@ function (angular, _, moment, dateMath, kbn, templatingVariable) {
};
return backendSrv.datasourceRequest(options).then(function(result) {
return result;
return result.data;
});
};
......
......@@ -68,7 +68,7 @@ describe('CloudWatchDatasource', function() {
beforeEach(function() {
ctx.backendSrv.datasourceRequest = function(params) {
requestParams = params.data;
return ctx.$q.when(response);
return ctx.$q.when({data: response});
};
});
......@@ -213,7 +213,7 @@ describe('CloudWatchDatasource', function() {
beforeEach(function() {
ctx.backendSrv.datasourceRequest = function(params) {
requestParams = params.data;
return ctx.$q.when(response);
return ctx.$q.when({data: response});
};
});
......@@ -235,7 +235,7 @@ describe('CloudWatchDatasource', function() {
setupCallback();
ctx.backendSrv.datasourceRequest = args => {
scenario.request = args.data;
return ctx.$q.when(scenario.requestResponse);
return ctx.$q.when({data: scenario.requestResponse});
};
ctx.ds.metricFindQuery(query).then(args => {
scenario.result = args;
......
......@@ -19,6 +19,7 @@ export class PrometheusDatasource {
basicAuth: any;
withCredentials: any;
metricsNameCache: any;
interval: string;
/** @ngInject */
constructor(instanceSettings,
......@@ -34,6 +35,7 @@ export class PrometheusDatasource {
this.directUrl = instanceSettings.directUrl;
this.basicAuth = instanceSettings.basicAuth;
this.withCredentials = instanceSettings.withCredentials;
this.interval = instanceSettings.jsonData.timeInterval || '15s';
}
_request(method, url, requestId?) {
......
<datasource-http-settings current="ctrl.current" suggest-url="http://localhost:9090">
</datasource-http-settings>
<div class="gf-form-group">
<div class="gf-form-inline">
<div class="gf-form">
<span class="gf-form-label">Scrape interval</span>
<input type="text" class="gf-form-input width-6" ng-model="ctrl.current.jsonData.timeInterval" spellcheck='false' placeholder="15s"></input>
<info-popover mode="right-absolute">
Set this to your global scrape interval defined in your Prometheus config file. This will be used as a lower limit for
the Prometheus step query parameter.
</info-popover>
</div>
</div>
</div>
......@@ -5,7 +5,7 @@ import {PrometheusDatasource} from '../datasource';
describe('PrometheusDatasource', function() {
var ctx = new helpers.ServiceTestContext();
var instanceSettings = {url: 'proxied', directUrl: 'direct', user: 'test', password: 'mupp' };
var instanceSettings = {url: 'proxied', directUrl: 'direct', user: 'test', password: 'mupp', jsonData: {}};
beforeEach(angularMocks.module('grafana.core'));
beforeEach(angularMocks.module('grafana.services'));
......
......@@ -8,7 +8,7 @@ import PrometheusMetricFindQuery from '../metric_find_query';
describe('PrometheusMetricFindQuery', function() {
var ctx = new helpers.ServiceTestContext();
var instanceSettings = {url: 'proxied', directUrl: 'direct', user: 'test', password: 'mupp' };
var instanceSettings = {url: 'proxied', directUrl: 'direct', user: 'test', password: 'mupp', jsonData: {}};
beforeEach(angularMocks.module('grafana.core'));
beforeEach(angularMocks.module('grafana.services'));
......
......@@ -8,7 +8,7 @@
<span class="dashlist-title">
{{dash.title}}
</span>
<span class="dashlist-star">
<span class="dashlist-star" ng-click="ctrl.starDashboard(dash, $event)">
<i class="fa" ng-class="{'fa-star': dash.isStarred, 'fa-star-o': dash.isStarred === false}"></i>
</span>
</a>
......
......@@ -21,7 +21,7 @@ class DashListCtrl extends PanelCtrl {
};
/** @ngInject */
constructor($scope, $injector, private backendSrv) {
constructor($scope, $injector, private backendSrv, private dashboardSrv) {
super($scope, $injector);
_.defaults(this.panel, this.panelDefaults);
......@@ -105,6 +105,17 @@ class DashListCtrl extends PanelCtrl {
});
}
starDashboard(dash, evt) {
this.dashboardSrv.starDashboard(dash.id, dash.isStarred).then(newState => {
dash.isStarred = newState;
});
if (evt) {
evt.stopPropagation();
evt.preventDefault();
}
}
getRecentDashboards() {
this.groups[1].show = this.panel.recent;
if (!this.panel.recent) {
......
......@@ -13,9 +13,11 @@
padding: 7px;
background-color: $tight-form-bg;
.fa {
float: right;
padding-top: 3px;
}
.dashlist-star {
float: right;
}
.fa-star {
color: $orange;
}
......
The following files were ported to Go from C files of libyaml, and thus
are still covered by their original copyright and license:
apic.go
emitterc.go
parserc.go
readerc.go
scannerc.go
writerc.go
yamlh.go
yamlprivateh.go
Copyright (c) 2006 Kirill Simonov
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
# YAML support for the Go language
Introduction
------------
The yaml package enables Go programs to comfortably encode and decode YAML
values. It was developed within [Canonical](https://www.canonical.com) as
part of the [juju](https://juju.ubuntu.com) project, and is based on a
pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
C library to parse and generate YAML data quickly and reliably.
Compatibility
-------------
The yaml package supports most of YAML 1.1 and 1.2, including support for
anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
implemented, and base-60 floats from YAML 1.1 are purposefully not
supported since they're a poor design and are gone in YAML 1.2.
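As a quick illustration of the anchor and merge-key support mentioned above (a minimal sketch; the document and key names are invented):

```Go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v2"
)

var doc = `
defaults: &defaults
  adapter: postgres
  host: localhost
development:
  <<: *defaults
  database: dev_db
`

func main() {
	m := map[string]map[string]string{}
	if err := yaml.Unmarshal([]byte(doc), &m); err != nil {
		log.Fatalf("error: %v", err)
	}
	// "adapter" and "host" are inherited from &defaults via the merge key.
	fmt.Println(m["development"]["adapter"], m["development"]["host"])
}
```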
Installation and usage
----------------------
The import path for the package is *gopkg.in/yaml.v2*.
To install it, run:
go get gopkg.in/yaml.v2
API documentation
-----------------
If opened in a browser, the import path itself leads to the API documentation:
* [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)
API stability
-------------
The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).
License
-------
The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.
Example
-------
Some more examples can be found in the "examples" folder.
```Go
package main
import (
"fmt"
"log"
"gopkg.in/yaml.v2"
)
var data = `
a: Easy!
b:
c: 2
d: [3, 4]
`
type T struct {
A string
B struct {
RenamedC int `yaml:"c"`
D []int `yaml:",flow"`
}
}
func main() {
t := T{}
err := yaml.Unmarshal([]byte(data), &t)
if err != nil {
log.Fatalf("error: %v", err)
}
fmt.Printf("--- t:\n%v\n\n", t)
d, err := yaml.Marshal(&t)
if err != nil {
log.Fatalf("error: %v", err)
}
fmt.Printf("--- t dump:\n%s\n\n", string(d))
m := make(map[interface{}]interface{})
err = yaml.Unmarshal([]byte(data), &m)
if err != nil {
log.Fatalf("error: %v", err)
}
fmt.Printf("--- m:\n%v\n\n", m)
d, err = yaml.Marshal(&m)
if err != nil {
log.Fatalf("error: %v", err)
}
fmt.Printf("--- m dump:\n%s\n\n", string(d))
}
```
This example will generate the following output:
```
--- t:
{Easy! {2 [3 4]}}
--- t dump:
a: Easy!
b:
c: 2
d: [3, 4]
--- m:
map[a:Easy! b:map[c:2 d:[3 4]]]
--- m dump:
a: Easy!
b:
c: 2
d:
- 3
- 4
```
package yaml
import (
"encoding"
"fmt"
"reflect"
"regexp"
"sort"
"strconv"
"strings"
"time"
)
type encoder struct {
emitter yaml_emitter_t
event yaml_event_t
out []byte
flow bool
}
func newEncoder() (e *encoder) {
e = &encoder{}
e.must(yaml_emitter_initialize(&e.emitter))
yaml_emitter_set_output_string(&e.emitter, &e.out)
yaml_emitter_set_unicode(&e.emitter, true)
e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
e.emit()
e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
e.emit()
return e
}
func (e *encoder) finish() {
e.must(yaml_document_end_event_initialize(&e.event, true))
e.emit()
e.emitter.open_ended = false
e.must(yaml_stream_end_event_initialize(&e.event))
e.emit()
}
func (e *encoder) destroy() {
yaml_emitter_delete(&e.emitter)
}
func (e *encoder) emit() {
// This will internally delete the e.event value.
if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
e.must(false)
}
}
func (e *encoder) must(ok bool) {
if !ok {
msg := e.emitter.problem
if msg == "" {
msg = "unknown problem generating YAML content"
}
failf("%s", msg)
}
}
func (e *encoder) marshal(tag string, in reflect.Value) {
if !in.IsValid() {
e.nilv()
return
}
iface := in.Interface()
if m, ok := iface.(Marshaler); ok {
v, err := m.MarshalYAML()
if err != nil {
fail(err)
}
if v == nil {
e.nilv()
return
}
in = reflect.ValueOf(v)
} else if m, ok := iface.(encoding.TextMarshaler); ok {
text, err := m.MarshalText()
if err != nil {
fail(err)
}
in = reflect.ValueOf(string(text))
}
switch in.Kind() {
case reflect.Interface:
if in.IsNil() {
e.nilv()
} else {
e.marshal(tag, in.Elem())
}
case reflect.Map:
e.mapv(tag, in)
case reflect.Ptr:
if in.IsNil() {
e.nilv()
} else {
e.marshal(tag, in.Elem())
}
case reflect.Struct:
e.structv(tag, in)
case reflect.Slice:
if in.Type().Elem() == mapItemType {
e.itemsv(tag, in)
} else {
e.slicev(tag, in)
}
case reflect.String:
e.stringv(tag, in)
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
if in.Type() == durationType {
e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String()))
} else {
e.intv(tag, in)
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
e.uintv(tag, in)
case reflect.Float32, reflect.Float64:
e.floatv(tag, in)
case reflect.Bool:
e.boolv(tag, in)
default:
panic("cannot marshal type: " + in.Type().String())
}
}
func (e *encoder) mapv(tag string, in reflect.Value) {
e.mappingv(tag, func() {
keys := keyList(in.MapKeys())
sort.Sort(keys)
for _, k := range keys {
e.marshal("", k)
e.marshal("", in.MapIndex(k))
}
})
}
func (e *encoder) itemsv(tag string, in reflect.Value) {
e.mappingv(tag, func() {
slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem)
for _, item := range slice {
e.marshal("", reflect.ValueOf(item.Key))
e.marshal("", reflect.ValueOf(item.Value))
}
})
}
func (e *encoder) structv(tag string, in reflect.Value) {
sinfo, err := getStructInfo(in.Type())
if err != nil {
panic(err)
}
e.mappingv(tag, func() {
for _, info := range sinfo.FieldsList {
var value reflect.Value
if info.Inline == nil {
value = in.Field(info.Num)
} else {
value = in.FieldByIndex(info.Inline)
}
if info.OmitEmpty && isZero(value) {
continue
}
e.marshal("", reflect.ValueOf(info.Key))
e.flow = info.Flow
e.marshal("", value)
}
if sinfo.InlineMap >= 0 {
m := in.Field(sinfo.InlineMap)
if m.Len() > 0 {
e.flow = false
keys := keyList(m.MapKeys())
sort.Sort(keys)
for _, k := range keys {
if _, found := sinfo.FieldsMap[k.String()]; found {
panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String()))
}
e.marshal("", k)
e.flow = false
e.marshal("", m.MapIndex(k))
}
}
}
})
}
func (e *encoder) mappingv(tag string, f func()) {
implicit := tag == ""
style := yaml_BLOCK_MAPPING_STYLE
if e.flow {
e.flow = false
style = yaml_FLOW_MAPPING_STYLE
}
e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
e.emit()
f()
e.must(yaml_mapping_end_event_initialize(&e.event))
e.emit()
}
func (e *encoder) slicev(tag string, in reflect.Value) {
implicit := tag == ""
style := yaml_BLOCK_SEQUENCE_STYLE
if e.flow {
e.flow = false
style = yaml_FLOW_SEQUENCE_STYLE
}
e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
e.emit()
n := in.Len()
for i := 0; i < n; i++ {
e.marshal("", in.Index(i))
}
e.must(yaml_sequence_end_event_initialize(&e.event))
e.emit()
}
// isBase60Float returns whether s is in base 60 notation as defined in YAML 1.1.
//
// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
// in YAML 1.2 and by this package, but these should be marshalled quoted for
// the time being for compatibility with other parsers.
func isBase60Float(s string) (result bool) {
// Fast path.
if s == "" {
return false
}
c := s[0]
if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
return false
}
// Do the full match.
return base60float.MatchString(s)
}
// From http://yaml.org/type/float.html, except the regular expression there
// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
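// base60FloatExamples is a hypothetical illustration (not upstream code) of
// which strings the matcher above classifies as YAML 1.1 sexagesimal floats;
// stringv below double-quotes such strings so other parsers keep them intact.
func base60FloatExamples() {
	_ = isBase60Float("1:20:30.5") // true  — 1*3600 + 20*60 + 30.5 in YAML 1.1
	_ = isBase60Float("-20:30")    // true  — sign and multiple groups allowed
	_ = isBase60Float("1.5")       // false — no ':' group, an ordinary float
}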
func (e *encoder) stringv(tag string, in reflect.Value) {
var style yaml_scalar_style_t
s := in.String()
rtag, rs := resolve("", s)
if rtag == yaml_BINARY_TAG {
if tag == "" || tag == yaml_STR_TAG {
tag = rtag
s = rs.(string)
} else if tag == yaml_BINARY_TAG {
failf("explicitly tagged !!binary data must be base64-encoded")
} else {
failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
}
}
if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
} else if strings.Contains(s, "\n") {
style = yaml_LITERAL_SCALAR_STYLE
} else {
style = yaml_PLAIN_SCALAR_STYLE
}
e.emitScalar(s, "", tag, style)
}
func (e *encoder) boolv(tag string, in reflect.Value) {
var s string
if in.Bool() {
s = "true"
} else {
s = "false"
}
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
}
func (e *encoder) intv(tag string, in reflect.Value) {
s := strconv.FormatInt(in.Int(), 10)
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
}
func (e *encoder) uintv(tag string, in reflect.Value) {
s := strconv.FormatUint(in.Uint(), 10)
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
}
func (e *encoder) floatv(tag string, in reflect.Value) {
// FIXME: Handle 64 bits here.
s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
switch s {
case "+Inf":
s = ".inf"
case "-Inf":
s = "-.inf"
case "NaN":
s = ".nan"
}
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
}
func (e *encoder) nilv() {
e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
}
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
implicit := tag == ""
e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
e.emit()
}
package yaml
import (
"encoding/base64"
"math"
"regexp"
"strconv"
"strings"
"unicode/utf8"
)
type resolveMapItem struct {
value interface{}
tag string
}
var resolveTable = make([]byte, 256)
var resolveMap = make(map[string]resolveMapItem)
func init() {
t := resolveTable
t[int('+')] = 'S' // Sign
t[int('-')] = 'S'
for _, c := range "0123456789" {
t[int(c)] = 'D' // Digit
}
for _, c := range "yYnNtTfFoO~" {
t[int(c)] = 'M' // In map
}
t[int('.')] = '.' // Float (potentially in map)
var resolveMapList = []struct {
v interface{}
tag string
l []string
}{
{true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
{true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
{true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
{false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
{false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
{false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
{nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
{math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
{math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
{math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
{math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
{"<<", yaml_MERGE_TAG, []string{"<<"}},
}
m := resolveMap
for _, item := range resolveMapList {
for _, s := range item.l {
m[s] = resolveMapItem{item.v, item.tag}
}
}
}
const longTagPrefix = "tag:yaml.org,2002:"
func shortTag(tag string) string {
// TODO This can easily be made faster and produce less garbage.
if strings.HasPrefix(tag, longTagPrefix) {
return "!!" + tag[len(longTagPrefix):]
}
return tag
}
func longTag(tag string) string {
if strings.HasPrefix(tag, "!!") {
return longTagPrefix + tag[2:]
}
return tag
}
func resolvableTag(tag string) bool {
switch tag {
case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG:
return true
}
return false
}
var yamlStyleFloat = regexp.MustCompile(`^[-+]?[0-9]*\.?[0-9]+([eE][-+][0-9]+)?$`)
func resolve(tag string, in string) (rtag string, out interface{}) {
if !resolvableTag(tag) {
return tag, in
}
defer func() {
switch tag {
case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
return
}
failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))
}()
// Any data is accepted as a !!str or !!binary.
// Otherwise, the prefix is enough of a hint about what it might be.
hint := byte('N')
if in != "" {
hint = resolveTable[in[0]]
}
if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
// Handle things we can lookup in a map.
if item, ok := resolveMap[in]; ok {
return item.tag, item.value
}
// Base 60 floats are a bad idea, were dropped in YAML 1.2, and
// are purposefully unsupported here. They're still quoted on
// the way out for compatibility with other parsers, though.
switch hint {
case 'M':
// We've already checked the map above.
case '.':
// Not in the map, so maybe a normal float.
floatv, err := strconv.ParseFloat(in, 64)
if err == nil {
return yaml_FLOAT_TAG, floatv
}
case 'D', 'S':
// Int, float, or timestamp.
plain := strings.Replace(in, "_", "", -1)
intv, err := strconv.ParseInt(plain, 0, 64)
if err == nil {
if intv == int64(int(intv)) {
return yaml_INT_TAG, int(intv)
} else {
return yaml_INT_TAG, intv
}
}
uintv, err := strconv.ParseUint(plain, 0, 64)
if err == nil {
return yaml_INT_TAG, uintv
}
if yamlStyleFloat.MatchString(plain) {
floatv, err := strconv.ParseFloat(plain, 64)
if err == nil {
return yaml_FLOAT_TAG, floatv
}
}
if strings.HasPrefix(plain, "0b") {
intv, err := strconv.ParseInt(plain[2:], 2, 64)
if err == nil {
if intv == int64(int(intv)) {
return yaml_INT_TAG, int(intv)
} else {
return yaml_INT_TAG, intv
}
}
uintv, err := strconv.ParseUint(plain[2:], 2, 64)
if err == nil {
return yaml_INT_TAG, uintv
}
} else if strings.HasPrefix(plain, "-0b") {
intv, err := strconv.ParseInt(plain[3:], 2, 64)
if err == nil {
if intv == int64(int(intv)) {
return yaml_INT_TAG, -int(intv)
} else {
return yaml_INT_TAG, -intv
}
}
}
// XXX Handle timestamps here.
default:
panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
}
}
if tag == yaml_BINARY_TAG {
return yaml_BINARY_TAG, in
}
if utf8.ValidString(in) {
return yaml_STR_TAG, in
}
return yaml_BINARY_TAG, encodeBase64(in)
}
// encodeBase64 encodes s as base64 that is broken up into multiple lines
// as appropriate for the resulting length.
func encodeBase64(s string) string {
const lineLen = 70
encLen := base64.StdEncoding.EncodedLen(len(s))
lines := encLen/lineLen + 1
buf := make([]byte, encLen*2+lines)
in := buf[0:encLen]
out := buf[encLen:]
base64.StdEncoding.Encode(in, []byte(s))
k := 0
for i := 0; i < len(in); i += lineLen {
j := i + lineLen
if j > len(in) {
j = len(in)
}
k += copy(out[k:], in[i:j])
if lines > 1 {
out[k] = '\n'
k++
}
}
return string(out[:k])
}
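// encodeBase64Demo is a hypothetical illustration (not upstream code): 100
// input bytes become 136 base64 characters, emitted as a 70-character line
// followed by a 66-character line, each terminated by '\n'.
func encodeBase64Demo() (lineCount, firstLineLen int) {
	s := encodeBase64(strings.Repeat("x", 100))
	lines := strings.Split(strings.TrimRight(s, "\n"), "\n")
	return len(lines), len(lines[0]) // 2, 70
}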
package yaml
import (
"reflect"
"unicode"
)
type keyList []reflect.Value
func (l keyList) Len() int { return len(l) }
func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l keyList) Less(i, j int) bool {
a := l[i]
b := l[j]
ak := a.Kind()
bk := b.Kind()
for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() {
a = a.Elem()
ak = a.Kind()
}
for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() {
b = b.Elem()
bk = b.Kind()
}
af, aok := keyFloat(a)
bf, bok := keyFloat(b)
if aok && bok {
if af != bf {
return af < bf
}
if ak != bk {
return ak < bk
}
return numLess(a, b)
}
if ak != reflect.String || bk != reflect.String {
return ak < bk
}
ar, br := []rune(a.String()), []rune(b.String())
for i := 0; i < len(ar) && i < len(br); i++ {
if ar[i] == br[i] {
continue
}
al := unicode.IsLetter(ar[i])
bl := unicode.IsLetter(br[i])
if al && bl {
return ar[i] < br[i]
}
if al || bl {
return bl
}
var ai, bi int
var an, bn int64
for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
an = an*10 + int64(ar[ai]-'0')
}
for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ {
bn = bn*10 + int64(br[bi]-'0')
}
if an != bn {
return an < bn
}
if ai != bi {
return ai < bi
}
return ar[i] < br[i]
}
return len(ar) < len(br)
}
// keyFloat returns a float value for v if it is a number or bool,
// along with a flag reporting whether it is one.
func keyFloat(v reflect.Value) (f float64, ok bool) {
switch v.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return float64(v.Int()), true
case reflect.Float32, reflect.Float64:
return v.Float(), true
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return float64(v.Uint()), true
case reflect.Bool:
if v.Bool() {
return 1, true
}
return 0, true
}
return 0, false
}
// numLess returns whether a < b.
// a and b must necessarily have the same kind.
func numLess(a, b reflect.Value) bool {
switch a.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return a.Int() < b.Int()
case reflect.Float32, reflect.Float64:
return a.Float() < b.Float()
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return a.Uint() < b.Uint()
case reflect.Bool:
return !a.Bool() && b.Bool()
}
panic("not a number")
}
package yaml
// Set the writer error and return false.
func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
emitter.error = yaml_WRITER_ERROR
emitter.problem = problem
return false
}
// Flush the output buffer.
func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
if emitter.write_handler == nil {
panic("write handler not set")
}
// Check if the buffer is empty.
if emitter.buffer_pos == 0 {
return true
}
// If the output encoding is UTF-8, we don't need to recode the buffer.
if emitter.encoding == yaml_UTF8_ENCODING {
if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
}
emitter.buffer_pos = 0
return true
}
// Recode the buffer into the raw buffer.
var low, high int
if emitter.encoding == yaml_UTF16LE_ENCODING {
low, high = 0, 1
} else {
high, low = 1, 0
}
pos := 0
for pos < emitter.buffer_pos {
// See the "reader.c" code for more details on UTF-8 encoding. Note
// that we assume that the buffer contains a valid UTF-8 sequence.
// Read the next UTF-8 character.
octet := emitter.buffer[pos]
var w int
var value rune
switch {
case octet&0x80 == 0x00:
w, value = 1, rune(octet&0x7F)
case octet&0xE0 == 0xC0:
w, value = 2, rune(octet&0x1F)
case octet&0xF0 == 0xE0:
w, value = 3, rune(octet&0x0F)
case octet&0xF8 == 0xF0:
w, value = 4, rune(octet&0x07)
}
for k := 1; k < w; k++ {
octet = emitter.buffer[pos+k]
value = (value << 6) + (rune(octet) & 0x3F)
}
pos += w
// Write the character.
if value < 0x10000 {
var b [2]byte
b[high] = byte(value >> 8)
b[low] = byte(value & 0xFF)
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1])
} else {
// Write the character using a surrogate pair (check "reader.c").
var b [4]byte
value -= 0x10000
b[high] = byte(0xD8 + (value >> 18))
b[low] = byte((value >> 10) & 0xFF)
b[high+2] = byte(0xDC + ((value >> 8) & 0xFF))
b[low+2] = byte(value & 0xFF)
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3])
}
}
// Write the raw buffer.
if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil {
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
}
emitter.buffer_pos = 0
emitter.raw_buffer = emitter.raw_buffer[:0]
return true
}
package yaml
const (
// The size of the input raw buffer.
input_raw_buffer_size = 512
// The size of the input buffer.
// It should be possible to decode the whole raw buffer.
input_buffer_size = input_raw_buffer_size * 3
// The size of the output buffer.
output_buffer_size = 128
// The size of the output raw buffer.
// It should be possible to encode the whole output buffer.
output_raw_buffer_size = (output_buffer_size*2 + 2)
// The size of other stacks and queues.
initial_stack_size = 16
initial_queue_size = 16
initial_string_size = 16
)
// Check if the character at the specified position is an alphabetical
// character, a digit, '_', or '-'.
func is_alpha(b []byte, i int) bool {
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-'
}
// Check if the character at the specified position is a digit.
func is_digit(b []byte, i int) bool {
return b[i] >= '0' && b[i] <= '9'
}
// Get the value of a digit.
func as_digit(b []byte, i int) int {
return int(b[i]) - '0'
}
// Check if the character at the specified position is a hex-digit.
func is_hex(b []byte, i int) bool {
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f'
}
// Get the value of a hex-digit.
func as_hex(b []byte, i int) int {
bi := b[i]
if bi >= 'A' && bi <= 'F' {
return int(bi) - 'A' + 10
}
if bi >= 'a' && bi <= 'f' {
return int(bi) - 'a' + 10
}
return int(bi) - '0'
}
// Check if the character is ASCII.
func is_ascii(b []byte, i int) bool {
return b[i] <= 0x7F
}
// Check if the character at the specified position can be printed unescaped.
func is_printable(b []byte, i int) bool {
return ((b[i] == 0x0A) || // . == #x0A
(b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
(b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF
(b[i] > 0xC2 && b[i] < 0xED) ||
(b[i] == 0xED && b[i+1] < 0xA0) ||
(b[i] == 0xEE) ||
(b[i] == 0xEF && // #xE000 <= . <= #xFFFD
!(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
!(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))))
}
// Check if the character at the specified position is NUL.
func is_z(b []byte, i int) bool {
return b[i] == 0x00
}
// Check if the beginning of the buffer is a BOM.
func is_bom(b []byte, i int) bool {
return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
}
// Check if the character at the specified position is space.
func is_space(b []byte, i int) bool {
return b[i] == ' '
}
// Check if the character at the specified position is tab.
func is_tab(b []byte, i int) bool {
return b[i] == '\t'
}
// Check if the character at the specified position is blank (space or tab).
func is_blank(b []byte, i int) bool {
//return is_space(b, i) || is_tab(b, i)
return b[i] == ' ' || b[i] == '\t'
}
// Check if the character at the specified position is a line break.
func is_break(b []byte, i int) bool {
return (b[i] == '\r' || // CR (#xD)
b[i] == '\n' || // LF (#xA)
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029)
}
func is_crlf(b []byte, i int) bool {
return b[i] == '\r' && b[i+1] == '\n'
}
// Check if the character is a line break or NUL.
func is_breakz(b []byte, i int) bool {
//return is_break(b, i) || is_z(b, i)
return ( // is_break:
b[i] == '\r' || // CR (#xD)
b[i] == '\n' || // LF (#xA)
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
// is_z:
b[i] == 0)
}
// Check if the character is a line break, space, or NUL.
func is_spacez(b []byte, i int) bool {
//return is_space(b, i) || is_breakz(b, i)
return ( // is_space:
b[i] == ' ' ||
// is_breakz:
b[i] == '\r' || // CR (#xD)
b[i] == '\n' || // LF (#xA)
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
b[i] == 0)
}
// Check if the character is a line break, space, tab, or NUL.
func is_blankz(b []byte, i int) bool {
//return is_blank(b, i) || is_breakz(b, i)
return ( // is_blank:
b[i] == ' ' || b[i] == '\t' ||
// is_breakz:
b[i] == '\r' || // CR (#xD)
b[i] == '\n' || // LF (#xA)
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
b[i] == 0)
}
// Determine the width of the character.
func width(b byte) int {
// Don't replace these by a switch without first
// confirming that it is being inlined.
if b&0x80 == 0x00 {
return 1
}
if b&0xE0 == 0xC0 {
return 2
}
if b&0xF0 == 0xE0 {
return 3
}
if b&0xF8 == 0xF0 {
return 4
}
return 0
}
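// widthDemo is a hypothetical illustration (not upstream code): the leading
// byte of a UTF-8 sequence encodes the sequence length that width reports.
func widthDemo() {
	_ = width('a')  // 1: 0xxxxxxx (ASCII)
	_ = width(0xC3) // 2: 110xxxxx, e.g. first byte of "é"
	_ = width(0xE2) // 3: 1110xxxx, e.g. first byte of "€"
	_ = width(0xF0) // 4: 11110xxx, e.g. first byte of "😀"
}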
......@@ -712,6 +712,12 @@
"path": "gopkg.in/macaron.v1",
"revision": "a325110f8b392bce3e5cdeb8c44bf98078ada3be",
"revisionTime": "2017-02-13T09:12:08Z"
},
{
"checksumSHA1": "RDJpJQwkF012L6m/2BJizyOksNw=",
"path": "gopkg.in/yaml.v2",
"revision": "eb3733d160e74a9c7e442f435eb3bea458e1d19f",
"revisionTime": "2017-08-12T16:00:11Z"
}
],
"rootPath": "github.com/grafana/grafana"
......