Commit fcb9dcb0 authored by Kyle Brandt, committed by GitHub

AzureMonitor: Types for front-end queries (#24937)

parent 139be3d7
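This commit replaces ad-hoc map lookups on the simplejson query model (applicationInsightsTarget["..."] and friends) with typed structs that the front-end JSON is decoded into. The sketch below shows that decode pattern in isolation; the struct is a trimmed copy of the azureMonitorJSONQuery type added further down, and the JSON literal is a made-up example standing in for the bytes that query.Model.Encode() produces in the datasource code.

package main

import (
	"encoding/json"
	"fmt"
)

// azureMonitorJSONQuery mirrors a trimmed subset of the typed model added in this commit.
type azureMonitorJSONQuery struct {
	AzureMonitor struct {
		Aggregation   string `json:"aggregation"`
		MetricName    string `json:"metricName"`
		ResourceGroup string `json:"resourceGroup"`
		TimeGrain     string `json:"timeGrain"`
	} `json:"azureMonitor"`
	Subscription string `json:"subscription"`
}

func main() {
	// Illustrative front-end payload only; in buildQueries the bytes come from
	// query.Model.Encode() rather than a literal.
	raw := []byte(`{
		"subscription": "abc-123",
		"azureMonitor": {
			"aggregation":   "Average",
			"metricName":    "Percentage CPU",
			"resourceGroup": "grafana",
			"timeGrain":     "auto"
		}
	}`)

	queryJSONModel := azureMonitorJSONQuery{}
	if err := json.Unmarshal(raw, &queryJSONModel); err != nil {
		// Mirrors the wrapped error returned in buildQueries.
		panic(fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err))
	}

	fmt.Println(queryJSONModel.AzureMonitor.MetricName, queryJSONModel.AzureMonitor.TimeGrain)
}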
@@ -79,32 +79,30 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
}
for _, query := range queries {
applicationInsightsTarget := query.Model.Get("appInsights").MustMap()
azlog.Debug("Application Insights", "target", applicationInsightsTarget)
rawQuery := false
if asInterface, ok := applicationInsightsTarget["rawQuery"]; ok {
if asBool, ok := asInterface.(bool); ok {
rawQuery = asBool
} else {
return nil, errors.New("'rawQuery' should be a boolean")
queryBytes, err := query.Model.Encode()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Application Insights query into JSON: %w", err)
}
} else {
return nil, errors.New("missing 'rawQuery' property")
queryJSONModel := insightsJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Application Insights query object from JSON: %w", err)
}
if rawQuery {
var rawQueryString string
if asInterface, ok := applicationInsightsTarget["rawQueryString"]; ok {
if asString, ok := asInterface.(string); ok {
rawQueryString = asString
}
insightsJSONModel := queryJSONModel.AppInsights
azlog.Debug("Application Insights", "target", insightsJSONModel)
if insightsJSONModel.RawQuery == nil {
return nil, fmt.Errorf("missing the 'rawQuery' property")
}
if rawQueryString == "" {
if *insightsJSONModel.RawQuery {
var rawQueryString string
if insightsJSONModel.RawQueryString == "" {
return nil, errors.New("rawQuery requires rawQueryString")
}
rawQueryString, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", rawQueryString))
rawQueryString, err := KqlInterpolate(query, timeRange, insightsJSONModel.RawQueryString)
if err != nil {
return nil, err
}
@@ -117,20 +115,15 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
IsRaw: true,
ApiURL: "query",
Params: params,
TimeColumnName: fmt.Sprintf("%v", applicationInsightsTarget["timeColumn"]),
ValueColumnName: fmt.Sprintf("%v", applicationInsightsTarget["valueColumn"]),
SegmentColumnName: fmt.Sprintf("%v", applicationInsightsTarget["segmentColumn"]),
TimeColumnName: insightsJSONModel.TimeColumn,
ValueColumnName: insightsJSONModel.ValueColumn,
SegmentColumnName: insightsJSONModel.SegmentColumn,
Target: params.Encode(),
})
} else {
alias := ""
if val, ok := applicationInsightsTarget["alias"]; ok {
alias = fmt.Sprintf("%v", val)
}
azureURL := fmt.Sprintf("metrics/%s", fmt.Sprintf("%v", applicationInsightsTarget["metricName"]))
timeGrain := fmt.Sprintf("%v", applicationInsightsTarget["timeGrain"])
timeGrains := applicationInsightsTarget["allowedTimeGrainsMs"]
azureURL := fmt.Sprintf("metrics/%s", insightsJSONModel.MetricName)
timeGrain := insightsJSONModel.TimeGrain
timeGrains := insightsJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains)
if err != nil {
@@ -143,16 +136,17 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
if timeGrain != "none" {
params.Add("interval", timeGrain)
}
params.Add("aggregation", fmt.Sprintf("%v", applicationInsightsTarget["aggregation"]))
params.Add("aggregation", insightsJSONModel.Aggregation)
dimension := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimension"]))
if applicationInsightsTarget["dimension"] != nil && len(dimension) > 0 && !strings.EqualFold(dimension, "none") {
dimension := strings.TrimSpace(insightsJSONModel.Dimension)
// Azure Monitor combines this and the following logic such that a dimensionFilter also requires a Dimension; should that be done here as well?
if dimension != "" && !strings.EqualFold(dimension, "none") {
params.Add("segment", dimension)
}
dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimensionFilter"]))
if applicationInsightsTarget["dimensionFilter"] != nil && len(dimensionFilter) > 0 {
params.Add("filter", fmt.Sprintf("%v", dimensionFilter))
dimensionFilter := strings.TrimSpace(insightsJSONModel.DimensionFilter)
if dimensionFilter != "" {
params.Add("filter", dimensionFilter)
}
applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
@@ -160,7 +154,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
IsRaw: false,
ApiURL: azureURL,
Params: params,
Alias: alias,
Alias: insightsJSONModel.Alias,
Target: params.Encode(),
})
}
......
package azuremonitor
import (
"encoding/json"
"fmt"
"io/ioutil"
"testing"
@@ -92,7 +91,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
"allowedTimeGrainsMs": []interface{}{"auto", json.Number("60000"), json.Number("300000")},
"allowedTimeGrainsMs": []int64{60000, 300000},
},
})
tsdbQuery.Queries[0].IntervalMs = 400000
......
@@ -72,20 +72,31 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRa
azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
for _, query := range queries {
azureLogAnalyticsTarget := query.Model.Get("azureLogAnalytics").MustMap()
queryBytes, err := query.Model.Encode()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Log Analytics query into JSON: %w", err)
}
queryJSONModel := logJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
}
azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
azlog.Debug("AzureLogAnalytics", "target", azureLogAnalyticsTarget)
resultFormat := fmt.Sprintf("%v", azureLogAnalyticsTarget["resultFormat"])
resultFormat := azureLogAnalyticsTarget.ResultFormat
if resultFormat == "" {
resultFormat = "time_series"
}
urlComponents := map[string]string{}
urlComponents["workspace"] = fmt.Sprintf("%v", azureLogAnalyticsTarget["workspace"])
urlComponents["workspace"] = azureLogAnalyticsTarget.Workspace
apiURL := fmt.Sprintf("%s/query", urlComponents["workspace"])
params := url.Values{}
rawQuery, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", azureLogAnalyticsTarget["query"]), "TimeGenerated")
rawQuery, err := KqlInterpolate(query, timeRange, azureLogAnalyticsTarget.Query, "TimeGenerated")
if err != nil {
return nil, err
}
......
@@ -36,6 +36,8 @@ var (
defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
)
const azureMonitorAPIVersion = "2018-01-01"
// executeTimeSeriesQuery does the following:
// 1. builds the AzureMonitor URL and querystring for each query
// 2. executes each query by calling the Azure Monitor API
@@ -81,31 +83,38 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
for _, query := range queries {
var target string
queryBytes, err := query.Model.Encode()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Monitor query into JSON: %w", err)
}
azureMonitorTarget := query.Model.Get("azureMonitor").MustMap()
queryJSONModel := azureMonitorJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
urlComponents := map[string]string{}
urlComponents["subscription"] = fmt.Sprintf("%v", query.Model.Get("subscription").MustString())
urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"])
urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"])
urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"])
urlComponents["subscription"] = queryJSONModel.Subscription
urlComponents["resourceGroup"] = azJSONModel.ResourceGroup
urlComponents["metricDefinition"] = azJSONModel.MetricDefinition
urlComponents["resourceName"] = azJSONModel.ResourceName
ub := urlBuilder{
DefaultSubscription: query.DataSource.JsonData.Get("subscriptionId").MustString(),
Subscription: urlComponents["subscription"],
ResourceGroup: urlComponents["resourceGroup"],
MetricDefinition: urlComponents["metricDefinition"],
ResourceName: urlComponents["resourceName"],
Subscription: queryJSONModel.Subscription,
ResourceGroup: queryJSONModel.AzureMonitor.ResourceGroup,
MetricDefinition: azJSONModel.MetricDefinition,
ResourceName: azJSONModel.ResourceName,
}
azureURL := ub.Build()
alias := ""
if val, ok := azureMonitorTarget["alias"]; ok {
alias = fmt.Sprintf("%v", val)
}
alias := azJSONModel.Alias
timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])
timeGrains := azureMonitorTarget["allowedTimeGrainsMs"]
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains)
if err != nil {
@@ -114,18 +123,18 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
}
params := url.Values{}
params.Add("api-version", "2018-01-01")
params.Add("api-version", azureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"]))
params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"]))
params.Add("metricnamespace", fmt.Sprintf("%v", azureMonitorTarget["metricNamespace"]))
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName) // MetricName or MetricNames ?
params.Add("metricnamespace", azJSONModel.MetricNamespace)
dimension := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimension"]))
dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"]))
if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && len(dimension) > 0 && len(dimensionFilter) > 0 && dimension != "None" {
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
if dimension != "" && dimensionFilter != "" && dimension != "None" {
params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
params.Add("top", fmt.Sprintf("%v", azureMonitorTarget["top"]))
params.Add("top", azJSONModel.Top)
}
target = params.Encode()
......
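For reference, the querystring these params produce (and that ends up in the query target via params.Encode()) can be reproduced with net/url alone. The values below are illustrative, loosely following the test fixture ("Percentage CPU", PT5M, Average); the metric namespace is a made-up placeholder.

package main

import (
	"fmt"
	"net/url"
	"time"
)

func main() {
	startTime := time.Date(2020, 5, 1, 0, 0, 0, 0, time.UTC)
	endTime := startTime.Add(time.Hour)

	params := url.Values{}
	params.Add("api-version", "2018-01-01")
	params.Add("timespan", fmt.Sprintf("%v/%v", startTime.Format(time.RFC3339), endTime.Format(time.RFC3339)))
	params.Add("interval", "PT5M")
	params.Add("aggregation", "Average")
	params.Add("metricnames", "Percentage CPU")
	params.Add("metricnamespace", "Microsoft.Compute/virtualMachines") // placeholder namespace

	// Encode sorts the keys and percent-encodes the values, which is the string stored as the target.
	fmt.Println(params.Encode())
}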
@@ -104,7 +104,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Azure Monitor",
"allowedTimeGrainsMs": []interface{}{"auto", json.Number("60000"), json.Number("300000")},
"allowedTimeGrainsMs": []int64{60000, 300000},
},
})
tsdbQuery.Queries[0].IntervalMs = 400000
......
package azuremonitor
import "encoding/json"
// setAutoTimeGrain tries to find the closest interval to the query's intervalMs value
// if the metric has a limited set of possible intervals/time grains then use those
// instead of the default list of intervals
func setAutoTimeGrain(intervalMs int64, timeGrains interface{}) (string, error) {
// parses array of numbers from the timeGrains json field
allowedTimeGrains := []int64{}
tgs, ok := timeGrains.([]interface{})
if ok {
for _, v := range tgs {
jsonNumber, ok := v.(json.Number)
if ok {
tg, err := jsonNumber.Int64()
if err == nil {
allowedTimeGrains = append(allowedTimeGrains, tg)
}
}
}
}
autoInterval := findClosestAllowedIntervalMS(intervalMs, allowedTimeGrains)
func setAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) {
autoInterval := findClosestAllowedIntervalMS(intervalMs, timeGrains)
tg := &TimeGrain{}
autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval)
if err != nil {
......
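With timeGrains now passed as []int64, the json.Number parsing in the old body above is no longer needed. For orientation, selecting the closest allowed interval could look roughly like the sketch below; closestAllowedIntervalMS is a hypothetical stand-in for the package's findClosestAllowedIntervalMS (whose body is not part of this diff), and it assumes the allowed list is sorted ascending, like defaultAllowedIntervalsMS.

package main

import "fmt"

// Default intervals in milliseconds, as declared in the datasource package.
var defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}

// closestAllowedIntervalMS is an assumed reimplementation for illustration: it picks the
// largest allowed interval that does not exceed intervalMs, falling back to the first
// (smallest, if sorted ascending) allowed value when intervalMs is below all of them.
func closestAllowedIntervalMS(intervalMs int64, allowed []int64) int64 {
	if len(allowed) == 0 {
		allowed = defaultAllowedIntervalsMS
	}
	closest := allowed[0]
	for _, a := range allowed {
		if a <= intervalMs && a > closest {
			closest = a
		}
	}
	return closest
}

func main() {
	// e.g. with the test's allowedTimeGrainsMs of []int64{60000, 300000} and an
	// IntervalMs of 400000, this sketch selects 300000 ms (5 minutes).
	fmt.Println(closestAllowedIntervalMS(400000, []int64{60000, 300000}))
}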
@@ -90,3 +90,51 @@ type column struct {
Name string `json:"name"`
Type string `json:"type"`
}
// azureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
type azureMonitorJSONQuery struct {
AzureMonitor struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimension string `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
Format string `json:"format"`
MetricDefinition string `json:"metricDefinition"`
MetricName string `json:"metricName"`
MetricNamespace string `json:"metricNamespace"`
ResourceGroup string `json:"resourceGroup"`
ResourceName string `json:"resourceName"`
TimeGrain string `json:"timeGrain"`
Top string `json:"top"`
} `json:"azureMonitor"`
Subscription string `json:"subscription"`
}
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
type insightsJSONQuery struct {
AppInsights struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimension string `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
MetricName string `json:"metricName"`
RawQuery *bool `json:"rawQuery"`
RawQueryString string `json:"rawQueryString"`
TimeGrain string `json:"timeGrain"`
TimeColumn string `json:"timeColumn"`
ValueColumn string `json:"valueColumn"`
SegmentColumn string `json:"segmentColumn"`
} `json:"appInsights"`
Raw *bool `json:"raw"`
}
// logJSONQuery is the frontend JSON query model for an Azure Log Analytics query.
type logJSONQuery struct {
AzureLogAnalytics struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
Workspace string `json:"workspace"`
} `json:"azureLogAnalytics"`
}
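In insightsJSONQuery, RawQuery is a *bool rather than a bool so buildQueries can distinguish an omitted property (nil, which triggers the "missing the 'rawQuery' property" error) from an explicit false. A small self-contained illustration of that distinction, using made-up sample JSON:

package main

import (
	"encoding/json"
	"fmt"
)

type appInsightsQuery struct {
	AppInsights struct {
		RawQuery       *bool  `json:"rawQuery"`
		RawQueryString string `json:"rawQueryString"`
	} `json:"appInsights"`
}

func describe(raw string) {
	q := appInsightsQuery{}
	if err := json.Unmarshal([]byte(raw), &q); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	switch {
	case q.AppInsights.RawQuery == nil:
		fmt.Println("missing the 'rawQuery' property") // the error case in buildQueries
	case *q.AppInsights.RawQuery:
		fmt.Println("raw KQL mode, query:", q.AppInsights.RawQueryString)
	default:
		fmt.Println("metric mode")
	}
}

func main() {
	describe(`{"appInsights": {}}`)                                               // property omitted -> nil
	describe(`{"appInsights": {"rawQuery": false}}`)                              // explicit false
	describe(`{"appInsights": {"rawQuery": true, "rawQueryString": "requests"}}`) // raw query
}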