Commit 65b0365a by ying-jeanne Committed by GitHub

Cloud Monitoring: Convert datasource to use Dataframes (#29830)

* Convert Cloud Monitoring (Stackdriver) Datasource to use Dataframes #29830

* add deeplink into config

* omggggggggggggggg this deeplink works!

* move unit to the backend part

* remove unit from frontend

* only set the config fields[1] for deeplink and unit

* refactor + fix some tests

* remove frontend test for unit

* adding backend test for unit mapping

* resolve review

* rewrite unit logic to do exactly the same as the frontend filter

* refactor
parent 382c75d0
......@@ -15,8 +15,8 @@ import (
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
......@@ -34,11 +34,26 @@ var (
)
var (
matchAllCap = regexp.MustCompile("(.)([A-Z][a-z]*)")
legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
metricNameFormat = regexp.MustCompile(`([\w\d_]+)\.(googleapis\.com|io)/(.+)`)
wildcardRegexRe = regexp.MustCompile(`[-\/^$+?.()|[\]{}]`)
alignmentPeriodRe = regexp.MustCompile("[0-9]+")
matchAllCap = regexp.MustCompile("(.)([A-Z][a-z]*)")
legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
metricNameFormat = regexp.MustCompile(`([\w\d_]+)\.(googleapis\.com|io)/(.+)`)
wildcardRegexRe = regexp.MustCompile(`[-\/^$+?.()|[\]{}]`)
alignmentPeriodRe = regexp.MustCompile("[0-9]+")
cloudMonitoringUnitMappings = map[string]string{
"bit": "bits",
"By": "bytes",
"s": "s",
"min": "m",
"h": "h",
"d": "d",
"us": "µs",
"ms": "ms",
"ns": "ns",
"percent": "percent",
"MiBy": "mbytes",
"By/s": "Bps",
"GBy": "decgbytes",
}
)
const (
......@@ -202,17 +217,13 @@ func (e *CloudMonitoringExecutor) executeTimeSeriesQuery(ctx context.Context, ts
return nil, err
}
unit := e.resolvePanelUnitFromQueries(queries)
for _, query := range queries {
queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery)
if err != nil {
return nil, err
}
err = e.parseResponse(queryRes, resp, query)
if err != nil {
queryRes.Error = err
}
result.Results[query.RefID] = queryRes
resourceType := ""
for _, s := range resp.TimeSeries {
......@@ -221,16 +232,48 @@ func (e *CloudMonitoringExecutor) executeTimeSeriesQuery(ctx context.Context, ts
break
}
query.Params.Set("resourceType", resourceType)
dl := ""
if len(resp.TimeSeries) > 0 {
dl = query.buildDeepLink()
err = e.parseResponse(queryRes, resp, query)
if err != nil {
queryRes.Error = err
}
queryRes.Meta.Set("deepLink", dl)
if len(unit) > 0 {
frames, _ := queryRes.Dataframes.Decoded()
for i := range frames {
if frames[i].Fields[1].Config == nil {
frames[i].Fields[1].Config = &data.FieldConfig{}
}
frames[i].Fields[1].Config.Unit = unit
}
queryRes.Dataframes = tsdb.NewDecodedDataFrames(frames)
}
result.Results[query.RefID] = queryRes
}
return result, nil
}
// resolvePanelUnitFromQueries returns the Grafana unit shared by all queries,
// or "" when the queries disagree on a unit or the Cloud Monitoring unit has
// no Grafana equivalent in cloudMonitoringUnitMappings.
func (e *CloudMonitoringExecutor) resolvePanelUnitFromQueries(queries []*cloudMonitoringQuery) string {
	if len(queries) == 0 {
		return ""
	}
	unit := queries[0].Unit
	// Every query must agree on the unit; otherwise no single panel unit applies.
	// Ranging over queries[1:] is a no-op for a single query, so no extra guard is needed.
	for _, query := range queries[1:] {
		if query.Unit != unit {
			return ""
		}
	}
	// Map the Cloud Monitoring unit to its Grafana counterpart, if one exists.
	// An empty unit simply misses the map, so no length check is required.
	if val, ok := cloudMonitoringUnitMappings[unit]; ok {
		return val
	}
	return ""
}
func (e *CloudMonitoringExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*cloudMonitoringQuery, error) {
cloudMonitoringQueries := []*cloudMonitoringQuery{}
......@@ -286,7 +329,7 @@ func (e *CloudMonitoringExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*cl
target = params.Encode()
sq.Target = target
sq.Params = params
sq.Unit = q.MetricQuery.Unit
if setting.Env == setting.Dev {
slog.Debug("CloudMonitoring request", "params", params)
}
......@@ -507,9 +550,8 @@ func (e *CloudMonitoringExecutor) unmarshalResponse(res *http.Response) (cloudMo
}
func handleDistributionSeries(series timeSeries, defaultMetricName string, seriesLabels map[string]string,
query *cloudMonitoringQuery, queryRes *tsdb.QueryResult) {
points := make([]tsdb.TimePoint, 0)
for i := len(series.Points) - 1; i >= 0; i-- {
query *cloudMonitoringQuery, queryRes *tsdb.QueryResult, frame *data.Frame) {
for i := 0; i < len(series.Points); i++ {
point := series.Points[i]
value := point.Value.DoubleValue
......@@ -527,27 +569,27 @@ func handleDistributionSeries(series timeSeries, defaultMetricName string, serie
value = 0
}
}
points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
frame.SetRow(len(series.Points)-1-i, point.Interval.EndTime, value)
}
metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, seriesLabels, nil, query)
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: metricName,
Points: points,
})
dataField := frame.Fields[1]
dataField.Name = metricName
}
func (e *CloudMonitoringExecutor) parseResponse(queryRes *tsdb.QueryResult, data cloudMonitoringResponse, query *cloudMonitoringQuery) error {
func (e *CloudMonitoringExecutor) parseResponse(queryRes *tsdb.QueryResult, cmr cloudMonitoringResponse, query *cloudMonitoringQuery) error {
labels := make(map[string]map[string]bool)
for _, series := range data.TimeSeries {
seriesLabels := make(map[string]string)
frames := data.Frames{}
for _, series := range cmr.TimeSeries {
seriesLabels := data.Labels{}
defaultMetricName := series.Metric.Type
labels["resource.type"] = map[string]bool{series.Resource.Type: true}
seriesLabels["resource.type"] = series.Resource.Type
frame := data.NewFrameOfFieldTypes("", len(series.Points), data.FieldTypeTime, data.FieldTypeFloat64)
frame.RefID = query.RefID
for key, value := range series.Metric.Labels {
if _, ok := labels["metric.label."+key]; !ok {
labels["metric.label."+key] = map[string]bool{}
......@@ -602,10 +644,11 @@ func (e *CloudMonitoringExecutor) parseResponse(queryRes *tsdb.QueryResult, data
// reverse the order to be ascending
if series.ValueType != "DISTRIBUTION" {
handleDistributionSeries(series, defaultMetricName, seriesLabels, query, queryRes)
handleDistributionSeries(
series, defaultMetricName, seriesLabels, query, queryRes, frame)
frames = append(frames, frame)
} else {
buckets := make(map[int]*tsdb.TimeSeries)
buckets := make(map[int]*data.Frame)
for i := len(series.Points) - 1; i >= 0; i-- {
point := series.Points[i]
if len(point.Value.DistributionValue.BucketCounts) == 0 {
......@@ -622,34 +665,56 @@ func (e *CloudMonitoringExecutor) parseResponse(queryRes *tsdb.QueryResult, data
// https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, nil, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})
frameName := formatLegendKeys(series.Metric.Type, defaultMetricName, nil, additionalLabels, query)
valueField.Name = frameName
buckets[i] = &data.Frame{
Name: frameName,
Fields: []*data.Field{
timeField,
valueField,
},
RefID: query.RefID,
}
if maxKey < i {
maxKey = i
}
}
buckets[i].Points = append(buckets[i].Points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
buckets[i].AppendRow(point.Interval.EndTime, value)
}
// fill empty bucket
for i := 0; i < maxKey; i++ {
if _, ok := buckets[i]; !ok {
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, seriesLabels, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
additionalLabels := data.Labels{"bucket": bucketBound}
timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})
frameName := formatLegendKeys(series.Metric.Type, defaultMetricName, seriesLabels, additionalLabels, query)
valueField.Name = frameName
buckets[i] = &data.Frame{
Name: frameName,
Fields: []*data.Field{
timeField,
valueField,
},
RefID: query.RefID,
}
}
}
}
for i := 0; i < len(buckets); i++ {
queryRes.Series = append(queryRes.Series, buckets[i])
frames = append(frames, buckets[i])
}
}
}
if len(cmr.TimeSeries) > 0 {
frames = addConfigData(frames, query)
}
queryRes.Dataframes = tsdb.NewDecodedDataFrames(frames)
labelsByKey := make(map[string][]string)
for key, values := range labels {
......@@ -660,10 +725,25 @@ func (e *CloudMonitoringExecutor) parseResponse(queryRes *tsdb.QueryResult, data
queryRes.Meta.Set("labels", labelsByKey)
queryRes.Meta.Set("groupBys", query.GroupBys)
return nil
}
// addConfigData attaches a "View in Metrics Explorer" deep link to the value
// field of every frame so users can jump from the panel straight to the
// corresponding query in Google Cloud Monitoring.
func addConfigData(frames data.Frames, query *cloudMonitoringQuery) data.Frames {
	dl := query.buildDeepLink()
	// The link is identical for every frame; build it once outside the loop.
	deepLink := data.DataLink{
		Title:       "View in Metrics Explorer",
		TargetBlank: true,
		URL:         dl,
	}
	for i := range frames {
		// Frames are built as [time, value]; skip anything malformed rather
		// than panicking on a missing value field.
		if len(frames[i].Fields) < 2 {
			continue
		}
		if frames[i].Fields[1].Config == nil {
			frames[i].Fields[1].Config = &data.FieldConfig{}
		}
		frames[i].Fields[1].Config.Links = append(frames[i].Fields[1].Config.Links, deepLink)
	}
	return frames
}
// toSnakeCase converts a mixed-caps identifier to lower snake_case by
// inserting an underscore before each capitalized word and lowercasing.
func toSnakeCase(str string) string {
	snake := matchAllCap.ReplaceAllString(str, "${1}_${2}")
	return strings.ToLower(snake)
}
......
......@@ -13,14 +13,12 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
func TestCloudMonitoring(t *testing.T) {
Convey("Google Cloud Monitoring", t, func() {
executor := &CloudMonitoringExecutor{}
Convey("Parse migrated queries from frontend and build Google Cloud Monitoring API queries", func() {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
tsdbQuery := &tsdb.TsdbQuery{
......@@ -589,20 +587,20 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 1)
So(res.Series[0].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count")
So(len(res.Series[0].Points), ShouldEqual, 3)
frames, _ := res.Dataframes.Decoded()
So(len(frames), ShouldEqual, 1)
So(frames[0].Fields[1].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count")
So(frames[0].Fields[1].Len(), ShouldEqual, 3)
Convey("timestamps should be in ascending order", func() {
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 0.05)
So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1536670020000))
So(frames[0].Fields[1].At(0), ShouldEqual, 0.05)
So(frames[0].Fields[0].At(0), ShouldEqual, time.Unix(int64(1536670020000/1000), 0))
So(res.Series[0].Points[1][0].Float64, ShouldEqual, 1.05)
So(res.Series[0].Points[1][1].Float64, ShouldEqual, int64(1536670080000))
So(frames[0].Fields[1].At(1), ShouldEqual, 1.05)
So(frames[0].Fields[0].At(1), ShouldEqual, time.Unix(int64(1536670080000/1000), 0))
So(res.Series[0].Points[2][0].Float64, ShouldEqual, 1.0666666666667)
So(res.Series[0].Points[2][1].Float64, ShouldEqual, int64(1536670260000))
So(frames[0].Fields[1].At(2), ShouldEqual, 1.0666666666667)
So(frames[0].Fields[0].At(2), ShouldEqual, time.Unix(int64(1536670260000/1000), 0))
})
})
......@@ -615,19 +613,20 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
frames, _ := res.Dataframes.Decoded()
Convey("Should add labels to metric name", func() {
So(len(res.Series), ShouldEqual, 3)
So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1")
So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1")
So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1")
So(len(frames), ShouldEqual, 3)
So(frames[0].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1")
So(frames[1].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1")
So(frames[2].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1")
})
Convey("Should parse to time series", func() {
So(len(res.Series[0].Points), ShouldEqual, 3)
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 9.8566497180145)
So(res.Series[0].Points[1][0].Float64, ShouldEqual, 9.7323568146676)
So(res.Series[0].Points[2][0].Float64, ShouldEqual, 9.7730520330369)
So(frames[0].Fields[1].Len(), ShouldEqual, 3)
So(frames[0].Fields[1].At(0), ShouldEqual, 9.8566497180145)
So(frames[0].Fields[1].At(1), ShouldEqual, 9.7323568146676)
So(frames[0].Fields[1].At(2), ShouldEqual, 9.7730520330369)
})
Convey("Should add meta for labels to the response", func() {
......@@ -657,12 +656,12 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
frames, _ := res.Dataframes.Decoded()
Convey("Should add instance name and zone labels to metric name", func() {
So(len(res.Series), ShouldEqual, 3)
So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a")
So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b")
So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b")
So(len(frames), ShouldEqual, 3)
So(frames[0].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a")
So(frames[1].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b")
So(frames[2].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b")
})
})
......@@ -677,12 +676,12 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
frames, _ := res.Dataframes.Decoded()
Convey("Should use alias by formatting and only show instance name", func() {
So(len(res.Series), ShouldEqual, 3)
So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a")
So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b")
So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b")
So(len(frames), ShouldEqual, 3)
So(frames[0].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a")
So(frames[1].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b")
So(frames[2].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b")
})
})
......@@ -690,12 +689,12 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
frames, _ := res.Dataframes.Decoded()
Convey("Should use alias by formatting and only show instance name", func() {
So(len(res.Series), ShouldEqual, 3)
So(res.Series[0].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
So(res.Series[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
So(res.Series[2].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
So(len(frames), ShouldEqual, 3)
So(frames[0].Fields[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
So(frames[1].Fields[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
So(frames[2].Fields[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
})
})
})
......@@ -709,41 +708,41 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{AliasBy: "{{bucket}}"}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 11)
frames, _ := res.Dataframes.Decoded()
So(len(frames), ShouldEqual, 11)
for i := 0; i < 11; i++ {
if i == 0 {
So(res.Series[i].Name, ShouldEqual, "0")
So(frames[i].Fields[1].Name, ShouldEqual, "0")
} else {
So(res.Series[i].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10))
So(frames[i].Fields[1].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10))
}
So(len(res.Series[i].Points), ShouldEqual, 3)
So(frames[i].Fields[0].Len(), ShouldEqual, 3)
}
Convey("timestamps should be in ascending order", func() {
So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1536668940000))
So(res.Series[0].Points[1][1].Float64, ShouldEqual, int64(1536669000000))
So(res.Series[0].Points[2][1].Float64, ShouldEqual, int64(1536669060000))
So(frames[0].Fields[0].At(0), ShouldEqual, time.Unix(int64(1536668940000/1000), 0))
So(frames[0].Fields[0].At(1), ShouldEqual, time.Unix(int64(1536669000000/1000), 0))
So(frames[0].Fields[0].At(2), ShouldEqual, time.Unix(int64(1536669060000/1000), 0))
})
Convey("bucket bounds should be correct", func() {
So(res.Series[0].Name, ShouldEqual, "0")
So(res.Series[1].Name, ShouldEqual, "1")
So(res.Series[2].Name, ShouldEqual, "2")
So(res.Series[3].Name, ShouldEqual, "4")
So(res.Series[4].Name, ShouldEqual, "8")
So(frames[0].Fields[1].Name, ShouldEqual, "0")
So(frames[1].Fields[1].Name, ShouldEqual, "1")
So(frames[2].Fields[1].Name, ShouldEqual, "2")
So(frames[3].Fields[1].Name, ShouldEqual, "4")
So(frames[4].Fields[1].Name, ShouldEqual, "8")
})
Convey("value should be correct", func() {
So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1)
So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1)
So(res.Series[10].Points[0][0].Float64, ShouldEqual, 1)
So(res.Series[8].Points[1][0].Float64, ShouldEqual, 0)
So(res.Series[9].Points[1][0].Float64, ShouldEqual, 0)
So(res.Series[10].Points[1][0].Float64, ShouldEqual, 1)
So(res.Series[8].Points[2][0].Float64, ShouldEqual, 0)
So(res.Series[9].Points[2][0].Float64, ShouldEqual, 1)
So(res.Series[10].Points[2][0].Float64, ShouldEqual, 0)
So(frames[8].Fields[1].At(0), ShouldEqual, 1)
So(frames[9].Fields[1].At(0), ShouldEqual, 1)
So(frames[10].Fields[1].At(0), ShouldEqual, 1)
So(frames[8].Fields[1].At(1), ShouldEqual, 0)
So(frames[9].Fields[1].At(1), ShouldEqual, 0)
So(frames[10].Fields[1].At(1), ShouldEqual, 1)
So(frames[8].Fields[1].At(2), ShouldEqual, 0)
So(frames[9].Fields[1].At(2), ShouldEqual, 1)
So(frames[10].Fields[1].At(2), ShouldEqual, 0)
})
})
......@@ -756,34 +755,34 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{AliasBy: "{{bucket}}"}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 33)
frames, _ := res.Dataframes.Decoded()
So(len(frames), ShouldEqual, 33)
for i := 0; i < 33; i++ {
if i == 0 {
So(res.Series[i].Name, ShouldEqual, "0")
So(frames[i].Fields[1].Name, ShouldEqual, "0")
}
So(len(res.Series[i].Points), ShouldEqual, 2)
So(frames[i].Fields[1].Len(), ShouldEqual, 2)
}
Convey("timestamps should be in ascending order", func() {
So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1550859086000))
So(res.Series[0].Points[1][1].Float64, ShouldEqual, int64(1550859146000))
So(frames[0].Fields[0].At(0), ShouldEqual, time.Unix(int64(1550859086000/1000), 0))
So(frames[0].Fields[0].At(1), ShouldEqual, time.Unix(int64(1550859146000/1000), 0))
})
Convey("bucket bounds should be correct", func() {
So(res.Series[0].Name, ShouldEqual, "0")
So(res.Series[1].Name, ShouldEqual, "0.01")
So(res.Series[2].Name, ShouldEqual, "0.05")
So(res.Series[3].Name, ShouldEqual, "0.1")
So(frames[0].Fields[1].Name, ShouldEqual, "0")
So(frames[1].Fields[1].Name, ShouldEqual, "0.01")
So(frames[2].Fields[1].Name, ShouldEqual, "0.05")
So(frames[3].Fields[1].Name, ShouldEqual, "0.1")
})
Convey("value should be correct", func() {
So(res.Series[8].Points[0][0].Float64, ShouldEqual, 381)
So(res.Series[9].Points[0][0].Float64, ShouldEqual, 212)
So(res.Series[10].Points[0][0].Float64, ShouldEqual, 56)
So(res.Series[8].Points[1][0].Float64, ShouldEqual, 375)
So(res.Series[9].Points[1][0].Float64, ShouldEqual, 213)
So(res.Series[10].Points[1][0].Float64, ShouldEqual, 56)
So(frames[8].Fields[1].At(0), ShouldEqual, 381)
So(frames[9].Fields[1].At(0), ShouldEqual, 212)
So(frames[10].Fields[1].At(0), ShouldEqual, 56)
So(frames[8].Fields[1].At(1), ShouldEqual, 375)
So(frames[9].Fields[1].At(1), ShouldEqual, 213)
So(frames[10].Fields[1].At(1), ShouldEqual, 56)
})
})
......@@ -797,8 +796,8 @@ func TestCloudMonitoring(t *testing.T) {
err = executor.parseResponse(res, data, query)
labels := res.Meta.Get("labels").Interface().(map[string][]string)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 3)
frames, _ := res.Dataframes.Decoded()
So(len(frames), ShouldEqual, 3)
Convey("and systemlabel contains key with array of string", func() {
So(len(labels["metadata.system_labels.test"]), ShouldEqual, 5)
......@@ -835,11 +834,12 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{AliasBy: "{{metadata.system_labels.test}}"}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 3)
fmt.Println(res.Series[0].Name)
So(res.Series[0].Name, ShouldEqual, "value1, value2")
So(res.Series[1].Name, ShouldEqual, "value1, value2, value3")
So(res.Series[2].Name, ShouldEqual, "value1, value2, value4, value5")
frames, _ := res.Dataframes.Decoded()
So(len(frames), ShouldEqual, 3)
fmt.Println(frames[0].Fields[1].Name)
So(frames[0].Fields[1].Name, ShouldEqual, "value1, value2")
So(frames[1].Fields[1].Name, ShouldEqual, "value1, value2, value3")
So(frames[2].Fields[1].Name, ShouldEqual, "value1, value2, value4, value5")
})
Convey("and systemlabel contains key with array of string2", func() {
......@@ -847,9 +847,9 @@ func TestCloudMonitoring(t *testing.T) {
query := &cloudMonitoringQuery{AliasBy: "{{metadata.system_labels.test2}}"}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 3)
fmt.Println(res.Series[0].Name)
So(res.Series[2].Name, ShouldEqual, "testvalue")
frames, _ := res.Dataframes.Decoded()
So(len(frames), ShouldEqual, 3)
So(frames[2].Fields[1].Name, ShouldEqual, "testvalue")
})
})
......@@ -868,8 +868,9 @@ func TestCloudMonitoring(t *testing.T) {
AliasBy: "{{project}} - {{service}} - {{slo}} - {{selector}}",
}
err = executor.parseResponse(res, data, query)
frames, _ := res.Dataframes.Decoded()
So(err, ShouldBeNil)
So(res.Series[0].Name, ShouldEqual, "test-proj - test-service - test-slo - select_slo_compliance")
So(frames[0].Fields[1].Name, ShouldEqual, "test-proj - test-service - test-slo - select_slo_compliance")
})
})
......@@ -887,8 +888,67 @@ func TestCloudMonitoring(t *testing.T) {
Slo: "test-slo",
}
err = executor.parseResponse(res, data, query)
frames, _ := res.Dataframes.Decoded()
So(err, ShouldBeNil)
So(res.Series[0].Name, ShouldEqual, "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")")
So(frames[0].Fields[1].Name, ShouldEqual, "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")")
})
})
})
Convey("Parse cloud monitoring unit", func() {
Convey("when there is only one query", func() {
Convey("and cloud monitoring unit does not have a corresponding grafana unit", func() {
queries := []*cloudMonitoringQuery{
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service",
Slo: "test-slo", Unit: "megaseconds"}}
unit := executor.resolvePanelUnitFromQueries(queries)
So(unit, ShouldEqual, "")
})
Convey("and cloud monitoring unit has a corresponding grafana unit", func() {
for key, element := range cloudMonitoringUnitMappings {
queries := []*cloudMonitoringQuery{
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service",
Slo: "test-slo", Unit: key}}
unit := executor.resolvePanelUnitFromQueries(queries)
So(unit, ShouldEqual, element)
}
})
})
Convey("when there are more than one query", func() {
Convey("and all target units are the same", func() {
for key, element := range cloudMonitoringUnitMappings {
queries := []*cloudMonitoringQuery{
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service1",
Slo: "test-slo", Unit: key},
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service2",
Slo: "test-slo", Unit: key},
}
unit := executor.resolvePanelUnitFromQueries(queries)
So(unit, ShouldEqual, element)
}
})
Convey("and all target units are the same but does not have grafana mappings", func() {
queries := []*cloudMonitoringQuery{
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service1",
Slo: "test-slo", Unit: "megaseconds"},
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service2",
Slo: "test-slo", Unit: "megaseconds"},
}
unit := executor.resolvePanelUnitFromQueries(queries)
So(unit, ShouldEqual, "")
})
Convey("and all target units are not the same", func() {
queries := []*cloudMonitoringQuery{
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service1",
Slo: "test-slo", Unit: "bit"},
{ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service2",
Slo: "test-slo", Unit: "min"},
}
unit := executor.resolvePanelUnitFromQueries(queries)
So(unit, ShouldEqual, "")
})
})
})
......
......@@ -16,6 +16,7 @@ type (
Selector string
Service string
Slo string
Unit string
}
metricQuery struct {
......@@ -28,6 +29,7 @@ type (
Filters []string
AliasBy string
View string
Unit string
}
sloQuery struct {
......
......@@ -246,22 +246,6 @@ export const alignmentPeriods = [
{ text: '1w', value: '+604800s' },
];
export const cloudMonitoringUnitMappings = {
bit: 'bits',
By: 'bytes',
s: 's',
min: 'm',
h: 'h',
d: 'd',
us: 'µs',
ms: 'ms',
ns: 'ns',
percent: 'percent',
MiBy: 'mbytes',
'By/s': 'Bps',
GBy: 'decgbytes',
};
export const systemLabels = [
'metadata.system_labels.cloud_account',
'metadata.system_labels.name',
......
......@@ -2,26 +2,28 @@ import _ from 'lodash';
import {
DataQueryRequest,
DataQueryResponseData,
DataSourceApi,
DataSourceInstanceSettings,
ScopedVars,
SelectableValue,
toDataFrame,
DataQueryResponse,
} from '@grafana/data';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { CloudMonitoringOptions, CloudMonitoringQuery, Filter, MetricDescriptor, QueryType } from './types';
import { cloudMonitoringUnitMappings } from './constants';
import API, { PostResponse } from './api';
import API from './api';
import { DataSourceWithBackend } from '@grafana/runtime';
import { CloudMonitoringVariableSupport } from './variables';
import { catchError, map, mergeMap } from 'rxjs/operators';
import { from, Observable, of, throwError } from 'rxjs';
export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonitoringQuery, CloudMonitoringOptions> {
export default class CloudMonitoringDatasource extends DataSourceWithBackend<
CloudMonitoringQuery,
CloudMonitoringOptions
> {
api: API;
authenticationType: string;
intervalMs: number;
constructor(
private instanceSettings: DataSourceInstanceSettings<CloudMonitoringOptions>,
......@@ -31,7 +33,6 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
super(instanceSettings);
this.authenticationType = instanceSettings.jsonData.authenticationType || 'jwt';
this.api = new API(`${instanceSettings.url!}/cloudmonitoring/v3/projects/`);
this.variables = new CloudMonitoringVariableSupport(this);
}
......@@ -39,52 +40,12 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
return this.templateSrv.getVariables().map(v => `$${v.name}`);
}
query(options: DataQueryRequest<CloudMonitoringQuery>): Observable<DataQueryResponseData> {
return this.getTimeSeries(options).pipe(
map(data => {
if (!data.results) {
return { data: [] };
}
const result: DataQueryResponseData[] = [];
const values = Object.values(data.results);
for (const queryRes of values) {
if (!queryRes.series) {
continue;
}
const unit = this.resolvePanelUnitFromTargets(options.targets);
for (const series of queryRes.series) {
let timeSerie: any = {
target: series.name,
datapoints: series.points,
refId: queryRes.refId,
meta: queryRes.meta,
};
if (unit) {
timeSerie = { ...timeSerie, unit };
}
const df = toDataFrame(timeSerie);
for (const field of df.fields) {
if (queryRes.meta?.deepLink && queryRes.meta?.deepLink.length > 0) {
field.config.links = [
{
url: queryRes.meta?.deepLink,
title: 'View in Metrics Explorer',
targetBlank: true,
},
];
}
}
result.push(df);
}
}
return { data: result };
})
);
query(request: DataQueryRequest<CloudMonitoringQuery>): Observable<DataQueryResponse> {
  // Upgrade any legacy query shapes and attach the request interval before
  // handing the request off to the backend datasource implementation.
  const migratedTargets = request.targets.map(target => ({
    ...this.migrateQuery(target),
    intervalMs: request.intervalMs,
  }));
  request.targets = migratedTargets;
  return super.query(request);
}
async annotationQuery(options: any) {
......@@ -134,33 +95,32 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
.toPromise();
}
getTimeSeries(options: DataQueryRequest<CloudMonitoringQuery>): Observable<PostResponse> {
const queries = options.targets
.map(this.migrateQuery)
.filter(this.shouldRunQuery)
.map(q => this.prepareTimeSeriesQuery(q, options.scopedVars))
.map(q => ({ ...q, intervalMs: options.intervalMs, type: 'timeSeriesQuery' }));
if (!queries.length) {
return of({ results: [] });
}
return from(this.ensureGCEDefaultProject()).pipe(
mergeMap(() => {
return this.api.post({
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries,
});
}),
map(({ data }) => {
return data;
})
);
applyTemplateVariables(
  { metricQuery, refId, queryType, sloQuery }: CloudMonitoringQuery,
  scopedVars: ScopedVars
): Record<string, any> {
  // Interpolate template variables into the query and shape it for the backend.
  // Fall back to the default project when none is set on the query.
  const projectName = this.templateSrv.replace(
    metricQuery.projectName ? metricQuery.projectName : this.getDefaultProject(),
    scopedVars
  );

  const interpolatedMetricQuery = {
    ...this.interpolateProps(metricQuery, scopedVars),
    projectName,
    filters: this.interpolateFilters(metricQuery.filters || [], scopedVars),
    groupBys: this.interpolateGroupBys(metricQuery.groupBys || [], scopedVars),
    view: metricQuery.view || 'FULL',
  };

  return {
    datasourceId: this.id,
    refId,
    intervalMs: this.intervalMs,
    type: 'timeSeriesQuery',
    queryType,
    metricQuery: interpolatedMetricQuery,
    sloQuery: sloQuery && this.interpolateProps(sloQuery, scopedVars),
  };
}
async getLabels(metricType: string, refId: string, projectName: string, groupBys?: string[]) {
return this.getTimeSeries({
const options = {
targets: [
{
refId,
......@@ -176,8 +136,26 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
},
],
range: this.timeSrv.timeRange(),
} as DataQueryRequest<CloudMonitoringQuery>)
} as DataQueryRequest<CloudMonitoringQuery>;
const queries = options.targets;
if (!queries.length) {
return of({ results: [] }).toPromise();
}
return from(this.ensureGCEDefaultProject())
.pipe(
mergeMap(() => {
return this.api.post({
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries,
});
}),
map(({ data }) => {
return data;
}),
map(response => {
const result = response.results[refId];
return result && result.meta ? result.meta.labels : {};
......@@ -311,9 +289,11 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
migrateQuery(query: CloudMonitoringQuery): CloudMonitoringQuery {
if (!query.hasOwnProperty('metricQuery')) {
const { hide, refId, datasource, key, queryType, maxLines, metric, ...rest } = query as any;
const { hide, refId, datasource, key, queryType, maxLines, metric, intervalMs, type, ...rest } = query as any;
return {
refId,
intervalMs,
type,
hide,
queryType: QueryType.METRICS,
metricQuery: {
......@@ -334,7 +314,7 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
}, {} as T);
}
shouldRunQuery(query: CloudMonitoringQuery): boolean {
filterQuery(query: CloudMonitoringQuery): boolean {
if (query.hide) {
return false;
}
......@@ -349,30 +329,8 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
return !!metricType;
}
prepareTimeSeriesQuery(
{ metricQuery, refId, queryType, sloQuery }: CloudMonitoringQuery,
scopedVars: ScopedVars
): CloudMonitoringQuery {
return {
datasourceId: this.id,
refId,
queryType,
metricQuery: {
...this.interpolateProps(metricQuery, scopedVars),
projectName: this.templateSrv.replace(
metricQuery.projectName ? metricQuery.projectName : this.getDefaultProject(),
scopedVars
),
filters: this.interpolateFilters(metricQuery.filters || [], scopedVars),
groupBys: this.interpolateGroupBys(metricQuery.groupBys || [], scopedVars),
view: metricQuery.view || 'FULL',
},
sloQuery: sloQuery && this.interpolateProps(sloQuery, scopedVars),
};
}
interpolateVariablesInQueries(queries: CloudMonitoringQuery[], scopedVars: ScopedVars): CloudMonitoringQuery[] {
return queries.map(query => this.prepareTimeSeriesQuery(query, scopedVars));
return queries.map(query => this.applyTemplateVariables(query, scopedVars) as CloudMonitoringQuery);
}
interpolateFilters(filters: string[], scopedVars: ScopedVars) {
......@@ -409,15 +367,4 @@ export default class CloudMonitoringDatasource extends DataSourceApi<CloudMonito
});
return interpolatedGroupBys;
}
resolvePanelUnitFromTargets(targets: any) {
let unit;
if (targets.length > 0 && targets.every((t: any) => t.unit === targets[0].unit)) {
if (cloudMonitoringUnitMappings.hasOwnProperty(targets[0].unit!)) {
// @ts-ignore
unit = cloudMonitoringUnitMappings[targets[0].unit];
}
}
return unit;
}
}
......@@ -213,51 +213,6 @@ describe('CloudMonitoringDataSource', () => {
});
});
});
describe('unit parsing', () => {
const { ds } = getTestcontext();
describe('when theres only one target', () => {
describe('and the cloud monitoring unit does nott have a corresponding grafana unit', () => {
it('should return undefined', () => {
const res = ds.resolvePanelUnitFromTargets([{ unit: 'megaseconds' }]);
expect(res).toBeUndefined();
});
});
describe('and the cloud monitoring unit has a corresponding grafana unit', () => {
it('should return bits', () => {
const res = ds.resolvePanelUnitFromTargets([{ unit: 'bit' }]);
expect(res).toEqual('bits');
});
});
});
describe('when theres more than one target', () => {
describe('and all target units are the same', () => {
it('should return bits', () => {
const res = ds.resolvePanelUnitFromTargets([{ unit: 'bit' }, { unit: 'bit' }]);
expect(res).toEqual('bits');
});
});
describe('and all target units are the same but does not have grafana mappings', () => {
it('should return the default value of undefined', () => {
const res = ds.resolvePanelUnitFromTargets([{ unit: 'megaseconds' }, { unit: 'megaseconds' }]);
expect(res).toBeUndefined();
});
});
describe('and all target units are not the same', () => {
it('should return the default value of undefined', () => {
const res = ds.resolvePanelUnitFromTargets([{ unit: 'bit' }, { unit: 'min' }]);
expect(res).toBeUndefined();
});
});
});
});
});
function initTemplateSrv(values: any, multi = false) {
......
......@@ -94,6 +94,8 @@ export interface CloudMonitoringQuery extends DataQuery {
queryType: QueryType;
metricQuery: MetricQuery;
sloQuery?: SLOQuery;
intervalMs: number;
type: string;
}
export interface CloudMonitoringOptions extends DataSourceJsonData {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment