Commit 6770f2e9 by Mitsuhiro Tanda

stackdriver heatmap support

parent c3a8dd28
...@@ -300,29 +300,6 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta ...@@ -300,29 +300,6 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
for _, series := range data.TimeSeries { for _, series := range data.TimeSeries {
points := make([]tsdb.TimePoint, 0) points := make([]tsdb.TimePoint, 0)
// reverse the order to be ascending
for i := len(series.Points) - 1; i >= 0; i-- {
point := series.Points[i]
value := point.Value.DoubleValue
if series.ValueType == "INT64" {
parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
if err == nil {
value = parsedValue
}
}
if series.ValueType == "BOOL" {
if point.Value.BoolValue {
value = 1
} else {
value = 0
}
}
points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
}
defaultMetricName := series.Metric.Type defaultMetricName := series.Metric.Type
for key, value := range series.Metric.Labels { for key, value := range series.Metric.Labels {
...@@ -338,18 +315,87 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta ...@@ -338,18 +315,87 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
if !containsLabel(resourceLabels[key], value) { if !containsLabel(resourceLabels[key], value) {
resourceLabels[key] = append(resourceLabels[key], value) resourceLabels[key] = append(resourceLabels[key], value)
} }
if containsLabel(query.GroupBys, "resource.label."+key) { if containsLabel(query.GroupBys, "resource.label."+key) {
defaultMetricName += " " + value defaultMetricName += " " + value
} }
} }
metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, query) // reverse the order to be ascending
if series.ValueType != "DISTRIBUTION" {
for i := len(series.Points) - 1; i >= 0; i-- {
point := series.Points[i]
value := point.Value.DoubleValue
if series.ValueType == "INT64" {
parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
if err == nil {
value = parsedValue
}
}
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ if series.ValueType == "BOOL" {
Name: metricName, if point.Value.BoolValue {
Points: points, value = 1
}) } else {
value = 0
}
}
points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
}
metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query)
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: metricName,
Points: points,
})
} else {
buckets := make(map[int]*tsdb.TimeSeries)
for i := len(series.Points) - 1; i >= 0; i-- {
point := series.Points[i]
if len(point.Value.DistributionValue.BucketCounts) == 0 {
continue
}
maxKey := 0
for i := 0; i < len(point.Value.DistributionValue.BucketCounts); i++ {
value, err := strconv.ParseFloat(point.Value.DistributionValue.BucketCounts[i], 64)
if err != nil {
continue
}
if _, ok := buckets[i]; !ok {
// set lower bounds
// https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
}
if maxKey < i {
maxKey = i
}
}
buckets[i].Points = append(buckets[i].Points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
}
// fill empty bucket
for i := 0; i < maxKey; i++ {
if _, ok := buckets[i]; !ok {
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
}
}
}
}
for i := 0; i < len(buckets); i++ {
queryRes.Series = append(queryRes.Series, buckets[i])
}
}
} }
queryRes.Meta.Set("resourceLabels", resourceLabels) queryRes.Meta.Set("resourceLabels", resourceLabels)
...@@ -368,7 +414,7 @@ func containsLabel(labels []string, newLabel string) bool { ...@@ -368,7 +414,7 @@ func containsLabel(labels []string, newLabel string) bool {
return false return false
} }
func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, query *StackdriverQuery) string { func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
if query.AliasBy == "" { if query.AliasBy == "" {
return defaultMetricName return defaultMetricName
} }
...@@ -400,6 +446,10 @@ func formatLegendKeys(metricType string, defaultMetricName string, metricLabels ...@@ -400,6 +446,10 @@ func formatLegendKeys(metricType string, defaultMetricName string, metricLabels
return []byte(val) return []byte(val)
} }
if val, exists := additionalLabels[metaPartName]; exists {
return []byte(val)
}
return in return in
}) })
...@@ -425,6 +475,22 @@ func replaceWithMetricPart(metaPartName string, metricType string) []byte { ...@@ -425,6 +475,22 @@ func replaceWithMetricPart(metaPartName string, metricType string) []byte {
return nil return nil
} }
// calcBucketBound returns the lower bound of distribution bucket n, formatted
// as a string for use as a heatmap series label. Bucket 0 is the underflow
// bucket, whose lower bound is always "0". The bound is derived from whichever
// bucket scheme is set on bucketOptions, per the Stackdriver Distribution
// definition: https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
	bucketBound := "0"
	if n == 0 {
		return bucketBound
	}
	switch {
	case bucketOptions.LinearBuckets != nil:
		// Linear scheme: bound(n) = offset + width * (n - 1).
		bucketBound = strconv.FormatInt(bucketOptions.LinearBuckets.Offset+(bucketOptions.LinearBuckets.Width*int64(n-1)), 10)
	case bucketOptions.ExponentialBuckets != nil:
		// Exponential scheme: bound(n) = scale * growthFactor^(n - 1),
		// truncated to an integer for display.
		bucketBound = strconv.FormatInt(int64(bucketOptions.ExponentialBuckets.Scale*math.Pow(bucketOptions.ExponentialBuckets.GrowthFactor, float64(n-1))), 10)
	case bucketOptions.ExplicitBuckets != nil:
		// Explicit scheme: use the configured bound. A distribution's
		// bucketCounts may contain one more entry than Bounds (the overflow
		// bucket), so clamp the index instead of panicking out of range;
		// an empty Bounds slice keeps the default "0".
		bounds := bucketOptions.ExplicitBuckets.Bounds
		if len(bounds) == 0 {
			break
		}
		idx := n - 1
		if idx >= len(bounds) {
			idx = len(bounds) - 1
		}
		bucketBound = strconv.FormatInt(bounds[idx], 10)
	}
	return bucketBound
}
func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
u, _ := url.Parse(dsInfo.Url) u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "render") u.Path = path.Join(u.Path, "render")
......
...@@ -14,6 +14,22 @@ type StackdriverQuery struct { ...@@ -14,6 +14,22 @@ type StackdriverQuery struct {
AliasBy string AliasBy string
} }
// StackdriverBucketOptions describes how the histogram bucket boundaries of a
// distribution value are defined, mirroring the bucketOptions union in the
// Stackdriver Distribution API
// (https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution).
// At most one of the three pointer fields is expected to be non-nil; nil
// fields mean that scheme was absent from the JSON response. calcBucketBound
// uses these fields to compute each bucket's lower bound.
type StackdriverBucketOptions struct {
	// LinearBuckets: lower bound of bucket n is Offset + Width*(n-1).
	LinearBuckets *struct {
		NumFiniteBuckets int64 `json:"numFiniteBuckets"`
		Width int64 `json:"width"`
		Offset int64 `json:"offset"`
	} `json:"linearBuckets"`
	// ExponentialBuckets: lower bound of bucket n is Scale * GrowthFactor^(n-1).
	ExponentialBuckets *struct {
		NumFiniteBuckets int64 `json:"numFiniteBuckets"`
		GrowthFactor float64 `json:"growthFactor"`
		Scale float64 `json:"scale"`
	} `json:"exponentialBuckets"`
	// ExplicitBuckets: lower bound of bucket n is Bounds[n-1].
	// NOTE(review): declared as int64 here although the Stackdriver API
	// documents bounds as doubles — confirm fractional bounds are not needed.
	ExplicitBuckets *struct {
		Bounds []int64 `json:"bounds"`
	} `json:"explicitBuckets"`
}
// StackdriverResponse is the data returned from the external Google Stackdriver API // StackdriverResponse is the data returned from the external Google Stackdriver API
type StackdriverResponse struct { type StackdriverResponse struct {
TimeSeries []struct { TimeSeries []struct {
...@@ -33,10 +49,26 @@ type StackdriverResponse struct { ...@@ -33,10 +49,26 @@ type StackdriverResponse struct {
EndTime time.Time `json:"endTime"` EndTime time.Time `json:"endTime"`
} `json:"interval"` } `json:"interval"`
Value struct { Value struct {
DoubleValue float64 `json:"doubleValue"` DoubleValue float64 `json:"doubleValue"`
StringValue string `json:"stringValue"` StringValue string `json:"stringValue"`
BoolValue bool `json:"boolValue"` BoolValue bool `json:"boolValue"`
IntValue string `json:"int64Value"` IntValue string `json:"int64Value"`
DistributionValue struct {
Count string `json:"count"`
Mean float64 `json:"mean"`
SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
Range struct {
Min int `json:"min"`
Max int `json:"max"`
} `json:"range"`
BucketOptions StackdriverBucketOptions `json:"bucketOptions"`
BucketCounts []string `json:"bucketCounts"`
Examplars []struct {
Value float64 `json:"value"`
Timestamp string `json:"timestamp"`
// attachments
} `json:"examplars"`
} `json:"distributionValue"`
} `json:"value"` } `json:"value"`
} `json:"points"` } `json:"points"`
} `json:"timeSeries"` } `json:"timeSeries"`
......
...@@ -19,7 +19,7 @@ export const alignOptions = [ ...@@ -19,7 +19,7 @@ export const alignOptions = [
{ {
text: 'delta', text: 'delta',
value: 'ALIGN_DELTA', value: 'ALIGN_DELTA',
valueTypes: [ValueTypes.INT64, ValueTypes.DOUBLE, ValueTypes.MONEY], valueTypes: [ValueTypes.INT64, ValueTypes.DOUBLE, ValueTypes.MONEY, ValueTypes.DISTRIBUTION],
metricKinds: [MetricKind.CUMULATIVE, MetricKind.DELTA], metricKinds: [MetricKind.CUMULATIVE, MetricKind.DELTA],
}, },
{ {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment