Commit 9240d850 by Daniel Lee, committed by GitHub

Merge pull request #15623 from grafana/stackdriver-bounds

stackdriver: fix for float64 bounds for distribution metrics
parents 3a6f4983 d1e249a8
@@ -336,6 +336,8 @@ func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (Stackdriver
return StackdriverResponse{}, err
}
// slog.Info("stackdriver", "response", string(body))
if res.StatusCode/100 != 2 {
slog.Error("Request failed", "status", res.Status, "body", string(body))
return StackdriverResponse{}, fmt.Errorf(string(body))
@@ -559,7 +561,7 @@ func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
} else if bucketOptions.ExponentialBuckets != nil {
bucketBound = strconv.FormatInt(int64(bucketOptions.ExponentialBuckets.Scale*math.Pow(bucketOptions.ExponentialBuckets.GrowthFactor, float64(n-1))), 10)
} else if bucketOptions.ExplicitBuckets != nil {
- bucketBound = strconv.FormatInt(bucketOptions.ExplicitBuckets.Bounds[(n-1)], 10)
+ bucketBound = fmt.Sprintf("%g", bucketOptions.ExplicitBuckets.Bounds[n])
}
return bucketBound
}
...
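For context, here is a minimal standalone sketch (not part of the commit) of what the calcBucketBound change addresses: explicit Stackdriver bucket bounds can be fractional, so converting them through int64 and strconv.FormatInt truncates them to whole numbers, while formatting the float64 value with "%g" preserves it. The bounds slice here is copied from the new test fixture further below.

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// First few explicit bounds from the new test fixture.
	bounds := []float64{0, 0.01, 0.05, 0.1}

	// Old approach (bounds handled as int64): the fractional part is lost,
	// so a bound of 0.05 would have been rendered as "0".
	fmt.Println(strconv.FormatInt(int64(bounds[2]), 10)) // prints 0

	// New approach: format the float64 bound directly.
	fmt.Printf("%g\n", bounds[2]) // prints 0.05
}
```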
@@ -344,8 +344,8 @@ func TestStackdriver(t *testing.T) {
})
})
- Convey("when data from query is distribution", func() {
- data, err := loadTestFile("./test-data/3-series-response-distribution.json")
+ Convey("when data from query is distribution with exponential bounds", func() {
+ data, err := loadTestFile("./test-data/3-series-response-distribution-exponential.json")
So(err, ShouldBeNil)
So(len(data.TimeSeries), ShouldEqual, 1)
@@ -370,6 +370,14 @@ func TestStackdriver(t *testing.T) {
So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536669060000)
})
Convey("bucket bounds should be correct", func() {
So(res.Series[0].Name, ShouldEqual, "0")
So(res.Series[1].Name, ShouldEqual, "1")
So(res.Series[2].Name, ShouldEqual, "2")
So(res.Series[3].Name, ShouldEqual, "4")
So(res.Series[4].Name, ShouldEqual, "8")
})
Convey("value should be correct", func() { Convey("value should be correct", func() {
So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1) So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1)
So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1) So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1)
@@ -383,6 +391,45 @@ func TestStackdriver(t *testing.T) {
})
})
Convey("when data from query is distribution with explicit bounds", func() {
data, err := loadTestFile("./test-data/4-series-response-distribution-explicit.json")
So(err, ShouldBeNil)
So(len(data.TimeSeries), ShouldEqual, 1)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
query := &StackdriverQuery{AliasBy: "{{bucket}}"}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 33)
for i := 0; i < 33; i++ {
if i == 0 {
So(res.Series[i].Name, ShouldEqual, "0")
}
So(len(res.Series[i].Points), ShouldEqual, 2)
}
Convey("timestamps should be in ascending order", func() {
So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1550859086000)
So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1550859146000)
})
Convey("bucket bounds should be correct", func() {
So(res.Series[0].Name, ShouldEqual, "0")
So(res.Series[1].Name, ShouldEqual, "0.01")
So(res.Series[2].Name, ShouldEqual, "0.05")
So(res.Series[3].Name, ShouldEqual, "0.1")
})
Convey("value should be correct", func() {
So(res.Series[8].Points[0][0].Float64, ShouldEqual, 381)
So(res.Series[9].Points[0][0].Float64, ShouldEqual, 212)
So(res.Series[10].Points[0][0].Float64, ShouldEqual, 56)
So(res.Series[8].Points[1][0].Float64, ShouldEqual, 375)
So(res.Series[9].Points[1][0].Float64, ShouldEqual, 213)
So(res.Series[10].Points[1][0].Float64, ShouldEqual, 56)
})
})
})
Convey("when interpolating filter wildcards", func() {
...
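The exponential-bounds test above expects series names 0, 1, 2, 4, 8. A minimal sketch of how those names fall out of the exponential branch of calcBucketBound shown earlier, assuming the fixture uses scale = 1 and growthFactor = 2 and that n == 0 is the underflow bucket (the exponential test-data file itself is not part of this diff, so both are assumptions):

```go
package main

import (
	"fmt"
	"math"
	"strconv"
)

// calcExponentialBound mirrors the exponential branch of calcBucketBound:
// bound(n) = scale * growthFactor^(n-1), truncated to an integer string.
func calcExponentialBound(scale, growthFactor float64, n int) string {
	if n == 0 {
		// Assumed handling of the underflow bucket, matching the expected "0".
		return "0"
	}
	return strconv.FormatInt(int64(scale*math.Pow(growthFactor, float64(n-1))), 10)
}

func main() {
	for n := 0; n <= 4; n++ {
		fmt.Println(calcExponentialBound(1, 2, n)) // 0, 1, 2, 4, 8
	}
}
```

The JSON document that follows appears to be the new ./test-data/4-series-response-distribution-explicit.json fixture referenced by the explicit-bounds test above; its bounds (0, 0.01, 0.05, 0.1, ...) and bucket counts (381, 212, 56, ...) match the test's expected series names and values.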
{
"timeSeries": [
{
"metric": {
"type": "custom.googleapis.com\/opencensus\/grpc.io\/client\/roundtrip_latency"
},
"resource": {
"type": "global",
"labels": {
"project_id": "grafana-demo"
}
},
"metricKind": "DELTA",
"valueType": "DISTRIBUTION",
"points": [
{
"interval": {
"startTime": "2019-02-22T18:11:26Z",
"endTime": "2019-02-22T18:12:26Z"
},
"value": {
"distributionValue": {
"count": "1878",
"mean": 17.813718392255,
"sumOfSquaredDeviation": 7141630.651914,
"bucketOptions": {
"explicitBuckets": {
"bounds": [
0,
0.01,
0.05,
0.1,
0.3,
0.6,
0.8,
1,
2,
3,
4,
5,
6,
8,
10,
13,
16,
20,
25,
30,
40,
50,
65,
80,
100,
130,
160,
200,
250,
300,
400,
500,
650,
800,
1000,
2000,
5000,
10000,
20000,
50000,
100000
]
}
},
"bucketCounts": [
"0",
"0",
"0",
"0",
"8",
"403",
"297",
"184",
"375",
"213",
"56",
"31",
"15",
"13",
"4",
"1",
"5",
"2",
"8",
"13",
"26",
"13",
"45",
"48",
"61",
"10",
"3",
"6",
"7",
"4",
"7",
"12",
"8"
]
}
}
},
{
"interval": {
"startTime": "2019-02-22T18:10:26Z",
"endTime": "2019-02-22T18:11:26Z"
},
"value": {
"distributionValue": {
"count": "1887",
"mean": 17.654277577766,
"sumOfSquaredDeviation": 7082587.2133073,
"bucketOptions": {
"explicitBuckets": {
"bounds": [
0,
0.01,
0.05,
0.1,
0.3,
0.6,
0.8,
1,
2,
3,
4,
5,
6,
8,
10,
13,
16,
20,
25,
30,
40,
50,
65,
80,
100,
130,
160,
200,
250,
300,
400,
500,
650,
800,
1000,
2000,
5000,
10000,
20000,
50000,
100000
]
}
},
"bucketCounts": [
"0",
"0",
"0",
"0",
"8",
"404",
"298",
"187",
"381",
"212",
"56",
"31",
"15",
"14",
"4",
"1",
"4",
"2",
"9",
"13",
"24",
"13",
"46",
"46",
"61",
"11",
"3",
"6",
"7",
"5",
"7",
"11",
"8"
]
}
}
}
]
}
]
}
@@ -26,7 +26,7 @@ type StackdriverBucketOptions struct {
Scale float64 `json:"scale"`
} `json:"exponentialBuckets"`
ExplicitBuckets *struct {
- Bounds []int64 `json:"bounds"`
+ Bounds []float64 `json:"bounds"`
} `json:"explicitBuckets"`
}
...
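A minimal sketch (using a trimmed-down, hypothetical bucketOptions type standing in for StackdriverBucketOptions) of why the Bounds field needs to be []float64: the explicit bounds in the fixture above contain fractional values such as 0.01, which encoding/json cannot decode into an int64 slice.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// bucketOptions is a trimmed-down stand-in for StackdriverBucketOptions.
type bucketOptions struct {
	ExplicitBuckets *struct {
		Bounds []float64 `json:"bounds"`
	} `json:"explicitBuckets"`
}

func main() {
	// A fragment of the explicit-bounds fixture above.
	raw := []byte(`{"explicitBuckets":{"bounds":[0,0.01,0.05,0.1]}}`)

	var opts bucketOptions
	if err := json.Unmarshal(raw, &opts); err != nil {
		fmt.Println("unmarshal error:", err)
		return
	}
	fmt.Println(opts.ExplicitBuckets.Bounds) // [0 0.01 0.05 0.1]
	// With Bounds declared as []int64 (the old type), the same unmarshal
	// fails, since 0.01 cannot be decoded into an int64.
}
```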