Commit ef61a64c by Kyle Brandt Committed by GitHub

Azure Monitor: Log Analytics response to data frames (#25297)

Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
parent c3549f84
......@@ -6,6 +6,7 @@ import {
DataQuery,
DataSourceJsonData,
ScopedVars,
DataFrame,
} from '@grafana/data';
import { Observable, from, of } from 'rxjs';
import { config } from '..';
......@@ -109,17 +110,35 @@ export class DataSourceWithBackend<
requestId,
})
.then((rsp: any) => {
return toDataQueryResponse(rsp);
const dqs = toDataQueryResponse(rsp);
if (this.processResponse) {
return this.processResponse(dqs);
}
return dqs;
})
.catch(err => {
err.isHandled = true; // Avoid extra popup warning
return toDataQueryResponse(err);
const dqs = toDataQueryResponse(err);
if (this.processResponse) {
return this.processResponse(dqs);
}
return dqs;
});
return from(req);
}
/**
* Optionally augment the response before returning the results to the caller
*/
processResponse?(res: DataQueryResponse): Promise<DataQueryResponse>;
/**
* Optionally process the results for display
*/
processDataFrameResult?(frame: DataFrame, idx: number): Promise<DataFrame>;
/**
* Override to skip executing a query
*
* @virtual
......
package azuremonitor
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/url"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
......@@ -83,235 +79,6 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
}
}
// TestParsingAzureLogAnalyticsResponses verifies that Log Analytics API
// responses are parsed into the Grafana time series format and that the
// metadata attached to each result serializes to the expected JSON.
func TestParsingAzureLogAnalyticsResponses(t *testing.T) {
	datasource := &AzureLogAnalyticsDatasource{}
	tests := []struct {
		name     string
		testFile string
		query    string
		series   tsdb.TimeSeriesSlice
		meta     string
		Err      require.ErrorAssertionFunc
	}{
		{
			name:     "Response with single series should be parsed into the Grafana time series format",
			testFile: "loganalytics/1-log-analytics-response-metrics-single-series.json",
			query:    "test query",
			series: tsdb.TimeSeriesSlice{
				&tsdb.TimeSeries{
					Name: "grafana-vm",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(1.1), null.FloatFrom(1587323766000)},
						{null.FloatFrom(2.2), null.FloatFrom(1587323776000)},
						{null.FloatFrom(3.3), null.FloatFrom(1587323786000)},
					},
				},
			},
			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
			Err:  require.NoError,
		},
		{
			name:     "Response with multiple series should be parsed into the Grafana time series format",
			testFile: "loganalytics/2-log-analytics-response-metrics-multiple-series.json",
			query:    "test query",
			series: tsdb.TimeSeriesSlice{
				&tsdb.TimeSeries{
					Name: "Processor",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(0.75), null.FloatFrom(1587418800000)},
						{null.FloatFrom(1.0055555555555555), null.FloatFrom(1587419100000)},
						{null.FloatFrom(0.7407407407407407), null.FloatFrom(1587419400000)},
					},
				},
				&tsdb.TimeSeries{
					Name: "Logical Disk",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(16090.551851851851), null.FloatFrom(1587418800000)},
						{null.FloatFrom(16090.537037037036), null.FloatFrom(1587419100000)},
						{null.FloatFrom(16090.586419753086), null.FloatFrom(1587419400000)},
					},
				},
				&tsdb.TimeSeries{
					Name: "Memory",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(702.0666666666667), null.FloatFrom(1587418800000)},
						{null.FloatFrom(700.5888888888888), null.FloatFrom(1587419100000)},
						{null.FloatFrom(703.1111111111111), null.FloatFrom(1587419400000)},
					},
				},
			},
			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"ObjectName","type":"string"},{"name":"avg_CounterValue","type":"real"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
			Err:  require.NoError,
		},
		{
			name:     "Response with no metric name column should use the value column name as the series name",
			testFile: "loganalytics/3-log-analytics-response-metrics-no-metric-column.json",
			query:    "test query",
			series: tsdb.TimeSeriesSlice{
				&tsdb.TimeSeries{
					Name: "avg_CounterValue",
					Points: tsdb.TimeSeriesPoints{
						{null.FloatFrom(1), null.FloatFrom(1587323766000)},
						{null.FloatFrom(2), null.FloatFrom(1587323776000)},
						{null.FloatFrom(3), null.FloatFrom(1587323786000)},
					},
				},
			},
			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"avg_CounterValue","type":"int"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
			Err:  require.NoError,
		},
		{
			name:     "Response with no time column should return no data",
			testFile: "loganalytics/4-log-analytics-response-metrics-no-time-column.json",
			query:    "test query",
			series:   nil,
			meta:     `{"columns":[{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
			Err:      require.NoError,
		},
		{
			name:     "Response with no value column should return no data",
			testFile: "loganalytics/5-log-analytics-response-metrics-no-value-column.json",
			query:    "test query",
			series:   nil,
			meta:     `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
			Err:      require.NoError,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Fail fast on fixture-loading problems instead of silently
			// comparing against an empty response (the error was previously
			// discarded with `_`).
			data, err := loadLogAnalyticsTestFile(tt.testFile)
			require.NoError(t, err)

			model := simplejson.NewFromAny(map[string]interface{}{
				"subscriptionId": "1234",
				"azureLogAnalytics": map[string]interface{}{
					"workspace": "aworkspace",
				},
			})
			params := url.Values{}
			params.Add("query", tt.query)

			series, meta, err := datasource.parseToTimeSeries(data, model, params)
			tt.Err(t, err)

			if diff := cmp.Diff(tt.series, series, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}

			// Named jsonMeta so the local does not shadow the encoding/json
			// package; the marshal error is now checked as well.
			jsonMeta, err := json.Marshal(meta)
			require.NoError(t, err)
			if diff := cmp.Diff(tt.meta, string(jsonMeta), cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
// TestParsingAzureLogAnalyticsTableResponses verifies that Log Analytics API
// responses are parsed into the Grafana table format and that the metadata
// attached to each result serializes to the expected JSON.
func TestParsingAzureLogAnalyticsTableResponses(t *testing.T) {
	datasource := &AzureLogAnalyticsDatasource{}
	tests := []struct {
		name     string
		testFile string
		query    string
		tables   []*tsdb.Table
		meta     string
		Err      require.ErrorAssertionFunc
	}{
		{
			name:     "Table data should be parsed into the table format Response",
			testFile: "loganalytics/6-log-analytics-response-table.json",
			query:    "test query",
			tables: []*tsdb.Table{
				{
					Columns: []tsdb.TableColumn{
						{Text: "TenantId"},
						{Text: "Computer"},
						{Text: "ObjectName"},
						{Text: "CounterName"},
						{Text: "InstanceName"},
						{Text: "Min"},
						{Text: "Max"},
						{Text: "SampleCount"},
						{Text: "CounterValue"},
						{Text: "TimeGenerated"},
					},
					Rows: []tsdb.RowValues{
						{
							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
							string("grafana-vm"),
							string("Memory"),
							string("Available MBytes Memory"),
							string("Memory"),
							nil,
							nil,
							nil,
							float64(2040),
							string("2020-04-23T11:46:03.857Z"),
						},
						{
							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
							string("grafana-vm"),
							string("Memory"),
							string("Available MBytes Memory"),
							string("Memory"),
							nil,
							nil,
							nil,
							float64(2066),
							string("2020-04-23T11:46:13.857Z"),
						},
						{
							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
							string("grafana-vm"),
							string("Memory"),
							string("Available MBytes Memory"),
							string("Memory"),
							nil,
							nil,
							nil,
							float64(2066),
							string("2020-04-23T11:46:23.857Z"),
						},
					},
				},
			},
			meta: `{"columns":[{"name":"TenantId","type":"string"},{"name":"Computer","type":"string"},{"name":"ObjectName","type":"string"},{"name":"CounterName","type":"string"},` +
				`{"name":"InstanceName","type":"string"},{"name":"Min","type":"real"},{"name":"Max","type":"real"},{"name":"SampleCount","type":"int"},{"name":"CounterValue","type":"real"},` +
				`{"name":"TimeGenerated","type":"datetime"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
			Err: require.NoError,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Fail fast on fixture-loading problems instead of silently
			// comparing against an empty response (the error was previously
			// discarded with `_`).
			data, err := loadLogAnalyticsTestFile(tt.testFile)
			require.NoError(t, err)

			model := simplejson.NewFromAny(map[string]interface{}{
				"subscriptionId": "1234",
				"azureLogAnalytics": map[string]interface{}{
					"workspace": "aworkspace",
				},
			})
			params := url.Values{}
			params.Add("query", tt.query)

			tables, meta, err := datasource.parseToTables(data, model, params)
			tt.Err(t, err)

			if diff := cmp.Diff(tt.tables, tables, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}

			// Named jsonMeta so the local does not shadow the encoding/json
			// package; the marshal error is now checked as well.
			jsonMeta, err := json.Marshal(meta)
			require.NoError(t, err)
			if diff := cmp.Diff(tt.meta, string(jsonMeta), cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
func TestPluginRoutes(t *testing.T) {
datasource := &AzureLogAnalyticsDatasource{}
plugin := &plugins.DataSourcePlugin{
......@@ -389,15 +156,3 @@ func TestPluginRoutes(t *testing.T) {
}
}
// loadLogAnalyticsTestFile reads a JSON fixture from the testdata directory
// and decodes it into an AzureLogAnalyticsResponse.
func loadLogAnalyticsTestFile(name string) (AzureLogAnalyticsResponse, error) {
	var res AzureLogAnalyticsResponse
	raw, err := ioutil.ReadFile(filepath.Join("testdata", name))
	if err != nil {
		return res, err
	}
	if err := json.Unmarshal(raw, &res); err != nil {
		return res, err
	}
	return res, nil
}
package azuremonitor
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// LogTableToFrame converts an AzureLogAnalyticsTable to a data.Frame.
func LogTableToFrame(table *AzureLogAnalyticsTable) (*data.Frame, error) {
converterFrame, err := converterFrameForTable(table)
if err != nil {
return nil, err
}
for rowIdx, row := range table.Rows {
for fieldIdx, field := range row {
err = converterFrame.Set(fieldIdx, rowIdx, field)
if err != nil {
return nil, err
}
}
}
return converterFrame.Frame, nil
}
// converterFrameForTable builds a data.FrameInputConverter for the given
// table: one field converter per column (selected by the column's analytics
// type), with the column names applied to the frame and the raw Azure column
// types recorded in the frame metadata.
// Returns an error for any column type not present in converterMap.
func converterFrameForTable(t *AzureLogAnalyticsTable) (*data.FrameInputConverter, error) {
	// Preallocate to the known column count to avoid repeated growth
	// (converters was previously an empty slice grown by append).
	converters := make([]data.FieldConverter, 0, len(t.Columns))
	colNames := make([]string, len(t.Columns))
	colTypes := make([]string, len(t.Columns)) // for metadata
	for i, col := range t.Columns {
		colNames[i] = col.Name
		colTypes[i] = col.Type
		converter, ok := converterMap[col.Type]
		if !ok {
			return nil, fmt.Errorf("unsupported analytics column type %v", col.Type)
		}
		converters = append(converters, converter)
	}
	fic, err := data.NewFrameInputConverter(converters, len(t.Rows))
	if err != nil {
		return nil, err
	}
	if err := fic.Frame.SetFieldNames(colNames...); err != nil {
		return nil, err
	}
	fic.Frame.Meta = &data.FrameMeta{
		Custom: map[string]interface{}{"azureColumnTypes": colTypes},
	}
	return fic, nil
}
// converterMap maps an Azure Log Analytics column type to the field
// converter used to build the corresponding data.Frame field. Types with
// no dedicated frame representation (guid, timespan, dynamic) are carried
// through as nullable strings.
var converterMap = map[string]data.FieldConverter{
	"string":   stringConverter,
	"guid":     stringConverter,
	"timespan": stringConverter,
	"dynamic":  stringConverter,
	"datetime": timeConverter,
	"int":      intConverter,
	"long":     longConverter,
	"real":     realConverter,
	"bool":     boolConverter,
}
// stringConverter produces nullable-string field values. A nil input cell
// maps to a nil *string; any non-string value is rejected with an error.
var stringConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableString,
	Converter: func(v interface{}) (interface{}, error) {
		if v == nil {
			return (*string)(nil), nil
		}
		s, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected string but got %T", v)
		}
		return &s, nil
	},
}
// timeConverter produces nullable-time field values by parsing RFC 3339
// timestamps (with nanoseconds). A nil input cell maps to a nil *time.Time.
var timeConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableTime,
	Converter: func(v interface{}) (interface{}, error) {
		if v == nil {
			return (*time.Time)(nil), nil
		}
		s, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected string but got %T", v)
		}
		parsed, err := time.Parse(time.RFC3339Nano, s)
		if err != nil {
			return nil, err
		}
		return &parsed, nil
	},
}
// realConverter produces nullable-float64 field values from the "real"
// analytics column type. Input cells must be json.Number (the response is
// decoded with Decoder.UseNumber); a nil cell maps to a nil *float64.
var realConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableFloat64,
	Converter: func(v interface{}) (interface{}, error) {
		var af *float64
		if v == nil {
			return af, nil
		}
		jN, ok := v.(json.Number)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
		}
		f, err := jN.Float64()
		if err != nil {
			return nil, err
		}
		// err is provably nil here; return nil explicitly rather than the
		// stale err variable.
		return &f, nil
	},
}
// boolConverter produces nullable-bool field values. A nil input cell maps
// to a nil *bool; any non-bool value is rejected with an error.
var boolConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableBool,
	Converter: func(v interface{}) (interface{}, error) {
		if v == nil {
			return (*bool)(nil), nil
		}
		b, ok := v.(bool)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected bool but got %T", v)
		}
		return &b, nil
	},
}
// intConverter produces nullable-int32 field values from the "int"
// analytics column type. Input cells must be json.Number; a nil cell maps
// to a nil *int32. Values outside the int32 range are reported as errors.
var intConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableInt32,
	Converter: func(v interface{}) (interface{}, error) {
		var ai *int32
		if v == nil {
			return ai, nil
		}
		jN, ok := v.(json.Number)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
		}
		// ParseInt with bitSize 32 guards against overflow before the
		// narrowing conversion below. (The redundant `var err error`
		// declaration was removed; := declares err here.)
		iv, err := strconv.ParseInt(jN.String(), 10, 32)
		if err != nil {
			return nil, err
		}
		i32 := int32(iv)
		return &i32, nil
	},
}
// longConverter produces nullable-int64 field values from the "long"
// analytics column type. Input cells must be json.Number; a nil cell maps
// to a nil *int64.
var longConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableInt64,
	Converter: func(v interface{}) (interface{}, error) {
		var ai *int64
		if v == nil {
			return ai, nil
		}
		jN, ok := v.(json.Number)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
		}
		out, err := jN.Int64()
		if err != nil {
			return nil, err
		}
		// err is provably nil here; return nil explicitly rather than the
		// stale err variable.
		return &out, nil
	},
}
package azuremonitor
import (
"encoding/json"
"os"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
)
// TestLogTableToFrame checks LogTableToFrame against JSON fixtures covering a
// single-series metrics response, a multi-column table response, and a table
// exercising every supported analytics column type.
func TestLogTableToFrame(t *testing.T) {
	tests := []struct {
		name     string
		testFile string
		// expectedFrame builds the frame the fixture should decode to,
		// including the azureColumnTypes metadata set by converterFrameForTable.
		expectedFrame func() *data.Frame
	}{
		{
			name:     "single series",
			testFile: "loganalytics/1-log-analytics-response-metrics-single-series.json",
			expectedFrame: func() *data.Frame {
				frame := data.NewFrame("",
					data.NewField("TimeGenerated", nil, []*time.Time{
						pointer.Time(time.Date(2020, 4, 19, 19, 16, 6, 5e8, time.UTC)),
						pointer.Time(time.Date(2020, 4, 19, 19, 16, 16, 5e8, time.UTC)),
						pointer.Time(time.Date(2020, 4, 19, 19, 16, 26, 5e8, time.UTC)),
					}),
					data.NewField("Computer", nil, []*string{
						pointer.String("grafana-vm"),
						pointer.String("grafana-vm"),
						pointer.String("grafana-vm"),
					}),
					data.NewField("avg_CounterValue", nil, []*float64{
						pointer.Float64(1.1),
						pointer.Float64(2.2),
						pointer.Float64(3.3),
					}),
				)
				frame.Meta = &data.FrameMeta{
					Custom: map[string]interface{}{"azureColumnTypes": []string{"datetime", "string", "real"}},
				}
				return frame
			},
		},
		{
			name:     "response table",
			testFile: "loganalytics/6-log-analytics-response-table.json",
			expectedFrame: func() *data.Frame {
				frame := data.NewFrame("",
					data.NewField("TenantId", nil, []*string{
						pointer.String("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
						pointer.String("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
						pointer.String("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
					}),
					data.NewField("Computer", nil, []*string{
						pointer.String("grafana-vm"),
						pointer.String("grafana-vm"),
						pointer.String("grafana-vm"),
					}),
					data.NewField("ObjectName", nil, []*string{
						pointer.String("Memory"),
						pointer.String("Memory"),
						pointer.String("Memory"),
					}),
					data.NewField("CounterName", nil, []*string{
						pointer.String("Available MBytes Memory"),
						pointer.String("Available MBytes Memory"),
						pointer.String("Available MBytes Memory"),
					}),
					data.NewField("InstanceName", nil, []*string{
						pointer.String("Memory"),
						pointer.String("Memory"),
						pointer.String("Memory"),
					}),
					// Null cells in the fixture become nil pointers.
					data.NewField("Min", nil, []*float64{nil, nil, nil}),
					data.NewField("Max", nil, []*float64{nil, nil, nil}),
					data.NewField("SampleCount", nil, []*int32{nil, nil, nil}),
					data.NewField("CounterValue", nil, []*float64{
						pointer.Float64(2040),
						pointer.Float64(2066),
						pointer.Float64(2066),
					}),
					data.NewField("TimeGenerated", nil, []*time.Time{
						pointer.Time(time.Date(2020, 4, 23, 11, 46, 3, 857e6, time.UTC)),
						pointer.Time(time.Date(2020, 4, 23, 11, 46, 13, 857e6, time.UTC)),
						pointer.Time(time.Date(2020, 4, 23, 11, 46, 23, 857e6, time.UTC)),
					}),
				)
				frame.Meta = &data.FrameMeta{
					Custom: map[string]interface{}{"azureColumnTypes": []string{"string", "string", "string",
						"string", "string", "real", "real", "int", "real", "datetime"}},
				}
				return frame
			},
		},
		{
			name:     "all supported field types",
			testFile: "loganalytics/7-log-analytics-all-types-table.json",
			expectedFrame: func() *data.Frame {
				// guid, dynamic, and timespan columns are represented as strings.
				frame := data.NewFrame("",
					data.NewField("XBool", nil, []*bool{pointer.Bool(true)}),
					data.NewField("XString", nil, []*string{pointer.String("Grafana")}),
					data.NewField("XDateTime", nil, []*time.Time{pointer.Time(time.Date(2006, 1, 2, 22, 4, 5, 1*1e8, time.UTC))}),
					data.NewField("XDynamic", nil, []*string{pointer.String(`[{"person":"Daniel"},{"cats":23},{"diagnosis":"cat problem"}]`)}),
					data.NewField("XGuid", nil, []*string{pointer.String("74be27de-1e4e-49d9-b579-fe0b331d3642")}),
					data.NewField("XInt", nil, []*int32{pointer.Int32(2147483647)}),
					data.NewField("XLong", nil, []*int64{pointer.Int64(9223372036854775807)}),
					data.NewField("XReal", nil, []*float64{pointer.Float64(1.797693134862315708145274237317043567981e+308)}),
					data.NewField("XTimeSpan", nil, []*string{pointer.String("00:00:00.0000001")}),
				)
				frame.Meta = &data.FrameMeta{
					Custom: map[string]interface{}{"azureColumnTypes": []string{"bool", "string", "datetime",
						"dynamic", "guid", "int", "long", "real", "timespan"}},
				}
				return frame
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			res, err := loadLogAnalyticsTestFileWithNumber(tt.testFile)
			require.NoError(t, err)
			frame, err := LogTableToFrame(&res.Tables[0])
			require.NoError(t, err)
			if diff := cmp.Diff(tt.expectedFrame(), frame, data.FrameTestCompareOptions()...); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
// loadLogAnalyticsTestFileWithNumber decodes a testdata fixture while
// preserving numeric fidelity: UseNumber keeps JSON numbers as json.Number
// instead of float64, matching what the column converters expect.
func loadLogAnalyticsTestFileWithNumber(name string) (AzureLogAnalyticsResponse, error) {
	// Named res (not data) so the local does not shadow the imported
	// data package.
	var res AzureLogAnalyticsResponse
	f, err := os.Open(filepath.Join("testdata", name))
	if err != nil {
		return res, err
	}
	defer f.Close()
	dec := json.NewDecoder(f)
	dec.UseNumber()
	if err := dec.Decode(&res); err != nil {
		return res, err
	}
	return res, nil
}
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "XBool",
"type": "bool"
},
{
"name": "XString",
"type": "string"
},
{
"name": "XDateTime",
"type": "datetime"
},
{
"name": "XDynamic",
"type": "dynamic"
},
{
"name": "XGuid",
"type": "guid"
},
{
"name": "XInt",
"type": "int"
},
{
"name": "XLong",
"type": "long"
},
{
"name": "XReal",
"type": "real"
},
{
"name": "XTimeSpan",
"type": "timespan"
}
],
"rows": [
[
true,
"Grafana",
"2006-01-02T22:04:05.1Z",
"[{\"person\":\"Daniel\"},{\"cats\":23},{\"diagnosis\":\"cat problem\"}]",
"74be27de-1e4e-49d9-b579-fe0b331d3642",
2147483647,
9223372036854775807,
1.7976931348623157e+308,
"00:00:00.0000001"
]
]
}
]
}
\ No newline at end of file
......@@ -78,19 +78,6 @@ type AzureLogAnalyticsTable struct {
Rows [][]interface{} `json:"rows"`
}
type metadata struct {
Columns []column `json:"columns"`
Subscription string `json:"subscription"`
Workspace string `json:"workspace"`
Query string `json:"query"`
EncodedQuery string `json:"encodedQuery"`
}
type column struct {
Name string `json:"name"`
Type string `json:"type"`
}
// azureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
type azureMonitorJSONQuery struct {
AzureMonitor struct {
......
......@@ -3,17 +3,18 @@ import { DataFrame, toUtc, getFrameDisplayName } from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
const templateSrv = new TemplateSrv();
jest.mock('@grafana/runtime', () => ({
...jest.requireActual('@grafana/runtime'),
getBackendSrv: () => backendSrv,
getTemplateSrv: () => templateSrv,
}));
describe('AppInsightsDatasource', () => {
const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest');
const ctx: any = {
templateSrv: new TemplateSrv(),
};
const ctx: any = {};
beforeEach(() => {
jest.clearAllMocks();
......@@ -22,7 +23,7 @@ describe('AppInsightsDatasource', () => {
url: 'http://appinsightsapi',
};
ctx.ds = new Datasource(ctx.instanceSettings, ctx.templateSrv);
ctx.ds = new Datasource(ctx.instanceSettings);
});
describe('When performing testDatasource', () => {
......
import { TimeSeries, toDataFrame } from '@grafana/data';
import { DataQueryRequest, DataQueryResponseData, DataSourceInstanceSettings } from '@grafana/data';
import { getBackendSrv } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { getBackendSrv, getTemplateSrv } from '@grafana/runtime';
import _ from 'lodash';
import TimegrainConverter from '../time_grain_converter';
......@@ -20,8 +19,7 @@ export default class AppInsightsDatasource {
applicationId: string;
logAnalyticsColumns: { [key: string]: LogAnalyticsColumn[] } = {};
/** @ngInject */
constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>, private templateSrv: TemplateSrv) {
constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
this.id = instanceSettings.id;
this.applicationId = instanceSettings.jsonData.appInsightsAppId || '';
......@@ -66,7 +64,7 @@ export default class AppInsightsDatasource {
raw: false,
appInsights: {
rawQuery: true,
rawQueryString: this.templateSrv.replace(item.rawQueryString, options.scopedVars),
rawQueryString: getTemplateSrv().replace(item.rawQueryString, options.scopedVars),
timeColumn: item.timeColumn,
valueColumn: item.valueColumn,
segmentColumn: item.segmentColumn,
......@@ -91,17 +89,19 @@ export default class AppInsightsDatasource {
item.dimensionFilter = item.filter;
}
const templateSrv = getTemplateSrv();
return {
type: 'timeSeriesQuery',
raw: false,
appInsights: {
rawQuery: false,
timeGrain: this.templateSrv.replace((item.timeGrain || '').toString(), options.scopedVars),
timeGrain: templateSrv.replace((item.timeGrain || '').toString(), options.scopedVars),
allowedTimeGrainsMs: item.allowedTimeGrainsMs,
metricName: this.templateSrv.replace(item.metricName, options.scopedVars),
aggregation: this.templateSrv.replace(item.aggregation, options.scopedVars),
dimension: this.templateSrv.replace(item.dimension, options.scopedVars),
dimensionFilter: this.templateSrv.replace(item.dimensionFilter, options.scopedVars),
metricName: templateSrv.replace(item.metricName, options.scopedVars),
aggregation: templateSrv.replace(item.aggregation, options.scopedVars),
dimension: templateSrv.replace(item.dimension, options.scopedVars),
dimensionFilter: templateSrv.replace(item.dimensionFilter, options.scopedVars),
alias: item.alias,
format: target.format,
},
......@@ -198,7 +198,7 @@ export default class AppInsightsDatasource {
const appInsightsGroupByQuery = query.match(/^AppInsightsGroupBys\(([^\)]+?)(,\s?([^,]+?))?\)/i);
if (appInsightsGroupByQuery) {
const metricName = appInsightsGroupByQuery[1];
return this.getGroupBys(this.templateSrv.replace(metricName));
return this.getGroupBys(getTemplateSrv().replace(metricName));
}
return undefined;
......
......@@ -2,12 +2,15 @@ import AzureMonitorDatasource from '../datasource';
import FakeSchemaData from './__mocks__/schema';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { KustoSchema, AzureLogsVariable } from '../types';
import { toUtc, getFrameDisplayName } from '@grafana/data';
import { toUtc } from '@grafana/data';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
const templateSrv = new TemplateSrv();
jest.mock('@grafana/runtime', () => ({
...jest.requireActual('@grafana/runtime'),
getBackendSrv: () => backendSrv,
getTemplateSrv: () => templateSrv,
}));
describe('AzureLogAnalyticsDatasource', () => {
......@@ -18,9 +21,7 @@ describe('AzureLogAnalyticsDatasource', () => {
datasourceRequestMock.mockImplementation(jest.fn());
});
const ctx: any = {
templateSrv: new TemplateSrv(),
};
const ctx: any = {};
beforeEach(() => {
ctx.instanceSettings = {
......@@ -28,7 +29,7 @@ describe('AzureLogAnalyticsDatasource', () => {
url: 'http://azureloganalyticsapi',
};
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, ctx.templateSrv);
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
});
describe('When the config option "Same as Azure Monitor" has been chosen', () => {
......@@ -67,7 +68,7 @@ describe('AzureLogAnalyticsDatasource', () => {
ctx.instanceSettings.jsonData.tenantId = 'xxx';
ctx.instanceSettings.jsonData.clientId = 'xxx';
ctx.instanceSettings.jsonData.azureLogAnalyticsSameAs = true;
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, ctx.templateSrv);
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
datasourceRequestMock.mockImplementation((options: { url: string }) => {
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
......@@ -119,112 +120,6 @@ describe('AzureLogAnalyticsDatasource', () => {
});
});
describe('When performing query', () => {
const options = {
range: {
from: toUtc('2017-08-22T20:00:00Z'),
to: toUtc('2017-08-22T23:59:00Z'),
},
rangeRaw: {
from: 'now-4h',
to: 'now',
},
targets: [
{
apiVersion: '2016-09-01',
refId: 'A',
queryType: 'Azure Log Analytics',
azureLogAnalytics: {
resultFormat: 'time_series',
query:
'AzureActivity | where TimeGenerated > ago(2h) ' +
'| summarize count() by Category, bin(TimeGenerated, 5min) ' +
'| project TimeGenerated, Category, count_ | order by TimeGenerated asc',
},
},
],
};
const response = {
results: {
A: {
refId: 'A',
meta: {
columns: ['TimeGenerated', 'Computer', 'avg_CounterValue'],
subscription: 'xxx',
workspace: 'aaaa-1111-bbbb-2222',
query:
'Perf\r\n| where ObjectName == "Memory" and CounterName == "Available MBytes Memory"\n| where TimeGenerated >= datetime(\'2020-04-23T09:15:20Z\') and TimeGenerated <= datetime(\'2020-04-23T09:20:20Z\')\n| where 1 == 1\n| summarize avg(CounterValue) by bin(TimeGenerated, 1m), Computer \n| order by TimeGenerated asc',
encodedQuery: 'gzipped_base64_encoded_query',
},
series: [
{
name: 'grafana-vm',
points: [
[2017.25, 1587633300000],
[2048, 1587633360000],
[2048.3333333333335, 1587633420000],
[2049, 1587633480000],
[2049, 1587633540000],
[2049, 1587633600000],
],
},
],
},
},
};
const workspacesResponse = {
value: [
{
properties: {
customerId: 'aaaa-1111-bbbb-2222',
},
id:
'/subscriptions/44693801-6ee6-49de-9b2d-9106972f9572/resourcegroups/defaultresourcegroup/providers/microsoft.operationalinsights/workspaces/aworkspace',
name: 'aworkspace',
type: 'Microsoft.OperationalInsights/workspaces',
},
],
};
describe('in time series format', () => {
describe('and the data is valid (has time, metric and value columns)', () => {
beforeEach(() => {
datasourceRequestMock.mockImplementation((options: { url: string }) => {
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > 0) {
return Promise.resolve({ data: workspacesResponse, status: 200 });
} else {
expect(options.url).toContain('/api/tsdb/query');
return Promise.resolve({ data: response, status: 200 });
}
});
});
it('should return a list of datapoints', () => {
return ctx.ds.query(options).then((results: any) => {
expect(results.data.length).toBe(1);
expect(getFrameDisplayName(results.data[0])).toEqual('grafana-vm');
expect(results.data[0].fields.length).toBe(2);
expect(results.data[0].name).toBe('grafana-vm');
expect(results.data[0].fields[0].name).toBe('Time');
expect(results.data[0].fields[1].name).toBe('Value');
expect(results.data[0].fields[0].values.toArray().length).toBe(6);
expect(results.data[0].fields[0].values.get(0)).toEqual(1587633300000);
expect(results.data[0].fields[1].values.get(0)).toEqual(2017.25);
expect(results.data[0].fields[0].values.get(1)).toEqual(1587633360000);
expect(results.data[0].fields[1].values.get(1)).toEqual(2048);
expect(results.data[0].fields[0].config.links[0].title).toEqual('View in Azure Portal');
expect(results.data[0].fields[0].config.links[0].targetBlank).toBe(true);
expect(results.data[0].fields[0].config.links[0].url).toEqual(
'https://portal.azure.com/#blade/Microsoft_OperationsManagementSuite_Workspace/AnalyticsBlade/initiator/AnalyticsShareLinkToQuery/isQueryEditorVisible/true/scope/%7B%22resources%22%3A%5B%7B%22resourceId%22%3A%22%2Fsubscriptions%2Fxxx%2Fresourcegroups%2Fdefaultresourcegroup%2Fproviders%2Fmicrosoft.operationalinsights%2Fworkspaces%2Faworkspace%22%7D%5D%7D/query/gzipped_base64_encoded_query/isQueryBase64Compressed/true/timespanInIsoFormat/P1D'
);
});
});
});
});
});
describe('When performing getSchema', () => {
beforeEach(() => {
datasourceRequestMock.mockImplementation((options: { url: string }) => {
......
......@@ -2,13 +2,19 @@ import _ from 'lodash';
import LogAnalyticsQuerystringBuilder from '../log_analytics/querystring_builder';
import ResponseParser from './response_parser';
import { AzureMonitorQuery, AzureDataSourceJsonData, AzureLogsVariable } from '../types';
import { TimeSeries, toDataFrame } from '@grafana/data';
import { DataQueryRequest, DataQueryResponseData, DataSourceInstanceSettings } from '@grafana/data';
import { getBackendSrv } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
export default class AzureLogAnalyticsDatasource {
id: number;
import {
DataQueryResponse,
ScopedVars,
DataSourceInstanceSettings,
QueryResultMeta,
MetricFindValue,
} from '@grafana/data';
import { getBackendSrv, getTemplateSrv, DataSourceWithBackend } from '@grafana/runtime';
export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
AzureMonitorQuery,
AzureDataSourceJsonData
> {
url: string;
baseUrl: string;
applicationId: string;
......@@ -17,12 +23,8 @@ export default class AzureLogAnalyticsDatasource {
subscriptionId: string;
cache: Map<string, any>;
/** @ngInject */
constructor(
private instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>,
private templateSrv: TemplateSrv
) {
this.id = instanceSettings.id;
constructor(private instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
super(instanceSettings);
this.cache = new Map();
switch (this.instanceSettings.jsonData.cloudName) {
......@@ -88,7 +90,7 @@ export default class AzureLogAnalyticsDatasource {
}
getWorkspaceList(subscription: string): Promise<any> {
const subscriptionId = this.templateSrv.replace(subscription || this.subscriptionId);
const subscriptionId = getTemplateSrv().replace(subscription || this.subscriptionId);
const workspaceListUrl =
this.azureMonitorUrl +
......@@ -100,103 +102,70 @@ export default class AzureLogAnalyticsDatasource {
if (!workspace) {
return Promise.resolve();
}
const url = `${this.baseUrl}/${this.templateSrv.replace(workspace, {})}/metadata`;
const url = `${this.baseUrl}/${getTemplateSrv().replace(workspace, {})}/metadata`;
return this.doRequest(url).then((response: any) => {
return new ResponseParser(response.data).parseSchemaResult();
});
}
async query(options: DataQueryRequest<AzureMonitorQuery>) {
const queries = _.filter(options.targets, item => {
return item.hide !== true;
}).map(target => {
const item = target.azureLogAnalytics;
let workspace = this.templateSrv.replace(item.workspace, options.scopedVars);
if (!workspace && this.defaultOrFirstWorkspace) {
workspace = this.defaultOrFirstWorkspace;
}
filterQuery(item: AzureMonitorQuery): boolean {
return item.hide !== true && !!item.azureLogAnalytics;
}
const subscriptionId = this.templateSrv.replace(target.subscription || this.subscriptionId, options.scopedVars);
const query = this.templateSrv.replace(item.query, options.scopedVars, this.interpolateVariable);
applyTemplateVariables(target: AzureMonitorQuery, scopedVars: ScopedVars): Record<string, any> {
const item = target.azureLogAnalytics;
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
format: target.format,
queryType: 'Azure Log Analytics',
subscriptionId: subscriptionId,
azureLogAnalytics: {
resultFormat: item.resultFormat,
query: query,
workspace: workspace,
},
};
});
const templateSrv = getTemplateSrv();
let workspace = templateSrv.replace(item.workspace, scopedVars);
if (!queries || queries.length === 0) {
return [];
if (!workspace && this.defaultOrFirstWorkspace) {
workspace = this.defaultOrFirstWorkspace;
}
const { data } = await getBackendSrv().datasourceRequest({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries,
const subscriptionId = templateSrv.replace(target.subscription || this.subscriptionId, scopedVars);
const query = templateSrv.replace(item.query, scopedVars, this.interpolateVariable);
return {
refId: target.refId,
format: target.format,
queryType: 'Azure Log Analytics',
subscriptionId: subscriptionId,
azureLogAnalytics: {
resultFormat: item.resultFormat,
query: query,
workspace: workspace,
},
});
};
}
const result: DataQueryResponseData[] = [];
if (data.results) {
const results: any[] = Object.values(data.results);
for (let queryRes of results) {
for (let series of queryRes.series || []) {
const timeSeries: TimeSeries = {
target: series.name,
datapoints: series.points,
refId: queryRes.refId,
meta: queryRes.meta,
};
const df = toDataFrame(timeSeries);
if (queryRes.meta.encodedQuery && queryRes.meta.encodedQuery.length > 0) {
const url = await this.buildDeepLink(queryRes);
if (url.length > 0) {
for (const field of df.fields) {
field.config.links = [
{
url: url,
title: 'View in Azure Portal',
targetBlank: true,
},
];
}
/**
 * Post-process backend results: for every data frame whose meta carries an
 * encoded query, attach a "View in Azure Portal" deep link to each field.
 *
 * Reconstructed: this span was invalid interleaved diff residue (lines of the
 * removed time-series conversion loop were mixed in); only the post-commit
 * implementation is kept.
 *
 * @param res the response produced by toDataQueryResponse
 * @returns the same response, with field links added where applicable
 */
async processResponse(res: DataQueryResponse): Promise<DataQueryResponse> {
  if (res.data) {
    for (const df of res.data) {
      const encodedQuery = df.meta?.custom?.encodedQuery;
      if (encodedQuery && encodedQuery.length > 0) {
        // buildDeepLink needs workspace details, hence the await per frame.
        const url = await this.buildDeepLink(df.meta);
        if (url?.length) {
          for (const field of df.fields) {
            field.config.links = [
              {
                url: url,
                title: 'View in Azure Portal',
                targetBlank: true,
              },
            ];
          }
        }
      }
    }
  }
  return res;
}
private async buildDeepLink(queryRes: any) {
const base64Enc = encodeURIComponent(queryRes.meta.encodedQuery);
const workspaceId = queryRes.meta.workspace;
const subscription = queryRes.meta.subscription;
private async buildDeepLink(meta: QueryResultMeta) {
const base64Enc = encodeURIComponent(meta.custom.encodedQuery);
const workspaceId = meta.custom.workspace;
const subscription = meta.custom.subscription;
const details = await this.getWorkspaceDetails(workspaceId);
if (!details.workspace || !details.resourceGroup) {
......@@ -235,7 +204,7 @@ export default class AzureLogAnalyticsDatasource {
};
}
metricFindQuery(query: string) {
metricFindQuery(query: string): Promise<MetricFindValue[]> {
const workspacesQuery = query.match(/^workspaces\(\)/i);
if (workspacesQuery) {
return this.getWorkspaces(this.subscriptionId);
......@@ -268,12 +237,12 @@ export default class AzureLogAnalyticsDatasource {
throw { message: err.error.data.error.message };
}
});
});
}) as Promise<MetricFindValue[]>; // ??
}
private buildQuery(query: string, options: any, workspace: any) {
const querystringBuilder = new LogAnalyticsQuerystringBuilder(
this.templateSrv.replace(query, {}, this.interpolateVariable),
getTemplateSrv().replace(query, {}, this.interpolateVariable),
options,
'TimeGenerated'
);
......@@ -382,10 +351,10 @@ export default class AzureLogAnalyticsDatasource {
}
}
testDatasource() {
testDatasource(): Promise<any> {
const validationError = this.isValidConfig();
if (validationError) {
return validationError;
return Promise.resolve(validationError);
}
return this.getDefaultOrFirstWorkspace()
......
......@@ -30,7 +30,7 @@ describe('AzureMonitorDatasource', () => {
jsonData: { subscriptionId: '9935389e-9122-4ef9-95f9-1513dd24753f' },
cloudName: 'azuremonitor',
} as unknown) as DataSourceInstanceSettings<AzureDataSourceJsonData>;
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, templateSrv);
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
});
describe('When performing testDatasource', () => {
......
......@@ -10,7 +10,6 @@ import {
DataQueryResponse,
DataQueryResponseData,
} from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { Observable } from 'rxjs';
export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDataSourceJsonData> {
......@@ -18,12 +17,11 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
appInsightsDatasource: AppInsightsDatasource;
azureLogAnalyticsDatasource: AzureLogAnalyticsDatasource;
/** @ngInject */
/**
 * Reconstructed from interleaved diff residue: the old TemplateSrv-injecting
 * constructor lines were mixed with the new ones; only the post-commit
 * implementation is kept.
 *
 * Child datasources now resolve the template service via getTemplateSrv(),
 * so no TemplateSrv is injected here.
 */
constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
  super(instanceSettings);
  this.azureMonitorDatasource = new AzureMonitorDatasource(instanceSettings);
  this.appInsightsDatasource = new AppInsightsDatasource(instanceSettings);
  this.azureLogAnalyticsDatasource = new AzureLogAnalyticsDatasource(instanceSettings);
}
query(options: DataQueryRequest<AzureMonitorQuery>): Promise<DataQueryResponse> | Observable<DataQueryResponseData> {
......@@ -44,10 +42,13 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
}
if (azureLogAnalyticsOptions.targets.length > 0) {
const alaPromise = this.azureLogAnalyticsDatasource.query(azureLogAnalyticsOptions);
if (alaPromise) {
promises.push(alaPromise);
const obs = this.azureLogAnalyticsDatasource.query(azureLogAnalyticsOptions);
if (!promises.length) {
return obs; // return the observable directly
}
// NOTE: this only includes the data!
// When all three query types are ready to be observable, they should all use observable
promises.push(obs.toPromise().then(r => r.data));
}
if (azureMonitorOptions.targets.length > 0) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment