Commit fcb9dcb0 (unverified) in nexpie-grafana-theme
Authored May 26, 2020 by Kyle Brandt; committed by GitHub on May 26, 2020
Parent: 139be3d7

AzureMonitor: Types for front-end queries (#24937)

Showing 7 changed files with 130 additions and 86 deletions (+130 / -86):

- pkg/tsdb/azuremonitor/applicationinsights-datasource.go (+31 / -37)
- pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go (+1 / -2)
- pkg/tsdb/azuremonitor/azure-log-analytics-datasource.go (+15 / -4)
- pkg/tsdb/azuremonitor/azuremonitor-datasource.go (+32 / -23)
- pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go (+1 / -1)
- pkg/tsdb/azuremonitor/azuremonitor-time.go (+2 / -19)
- pkg/tsdb/azuremonitor/types.go (+48 / -0)
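The change follows the same pattern in all three datasources: instead of pulling fields out of the simplejson query model one at a time with `MustMap()` and `fmt.Sprintf("%v", ...)`, each datasource re-encodes the model to raw JSON and unmarshals it into a typed struct (the structs are added in types.go below). A minimal, self-contained sketch of that before/after pattern — the struct, field set, and payload here are simplified stand-ins, not Grafana's actual `tsdb.Query`/simplejson types:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Toy stand-in for the typed frontend query models added in types.go.
type azureMonitorQuery struct {
	AzureMonitor struct {
		MetricName  string `json:"metricName"`
		Aggregation string `json:"aggregation"`
	} `json:"azureMonitor"`
	Subscription string `json:"subscription"`
}

func main() {
	// A simplified example of what the frontend sends as the query model.
	raw := []byte(`{
		"subscription": "sub-id",
		"azureMonitor": {"metricName": "Percentage CPU", "aggregation": "Average"}
	}`)

	// Before: untyped access, every field goes through fmt.Sprintf("%v", ...).
	var untyped map[string]interface{}
	_ = json.Unmarshal(raw, &untyped)
	am := untyped["azureMonitor"].(map[string]interface{})
	fmt.Println(fmt.Sprintf("%v", am["metricName"])) // "Percentage CPU", but "<nil>" if the key is missing

	// After: decode once into a typed struct and use the fields directly.
	q := azureMonitorQuery{}
	if err := json.Unmarshal(raw, &q); err != nil {
		fmt.Println("failed to decode query:", err)
		return
	}
	fmt.Println(q.AzureMonitor.MetricName, q.AzureMonitor.Aggregation)
}
```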
pkg/tsdb/azuremonitor/applicationinsights-datasource.go

```diff
@@ -79,32 +79,30 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
 	}

 	for _, query := range queries {
-		applicationInsightsTarget := query.Model.Get("appInsights").MustMap()
-		azlog.Debug("Application Insights", "target", applicationInsightsTarget)
-
-		rawQuery := false
-		if asInterface, ok := applicationInsightsTarget["rawQuery"]; ok {
-			if asBool, ok := asInterface.(bool); ok {
-				rawQuery = asBool
-			} else {
-				return nil, errors.New("'rawQuery' should be a boolean")
-			}
-		} else {
-			return nil, errors.New("missing 'rawQuery' property")
+		queryBytes, err := query.Model.Encode()
+		if err != nil {
+			return nil, fmt.Errorf("failed to re-encode the Azure Application Insights query into JSON: %w", err)
+		}
+
+		queryJSONModel := insightsJSONQuery{}
+		err = json.Unmarshal(queryBytes, &queryJSONModel)
+		if err != nil {
+			return nil, fmt.Errorf("failed to decode the Azure Application Insights query object from JSON: %w", err)
 		}

-		if rawQuery {
-			var rawQueryString string
-			if asInterface, ok := applicationInsightsTarget["rawQueryString"]; ok {
-				if asString, ok := asInterface.(string); ok {
-					rawQueryString = asString
-				}
-			}
-			if rawQueryString == "" {
+		insightsJSONModel := queryJSONModel.AppInsights
+		azlog.Debug("Application Insights", "target", insightsJSONModel)
+
+		if insightsJSONModel.RawQuery == nil {
+			return nil, fmt.Errorf("missing the 'rawQuery' property")
+		}
+
+		if *insightsJSONModel.RawQuery {
+			var rawQueryString string
+			if insightsJSONModel.RawQueryString == "" {
 				return nil, errors.New("rawQuery requires rawQueryString")
 			}

-			rawQueryString, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", rawQueryString))
+			rawQueryString, err := KqlInterpolate(query, timeRange, insightsJSONModel.RawQueryString)
 			if err != nil {
 				return nil, err
 			}
@@ -117,20 +115,15 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
 				IsRaw:             true,
 				ApiURL:            "query",
 				Params:            params,
-				TimeColumnName:    fmt.Sprintf("%v", applicationInsightsTarget["timeColumn"]),
-				ValueColumnName:   fmt.Sprintf("%v", applicationInsightsTarget["valueColumn"]),
-				SegmentColumnName: fmt.Sprintf("%v", applicationInsightsTarget["segmentColumn"]),
+				TimeColumnName:    insightsJSONModel.TimeColumn,
+				ValueColumnName:   insightsJSONModel.ValueColumn,
+				SegmentColumnName: insightsJSONModel.SegmentColumn,
 				Target:            params.Encode(),
 			})
 		} else {
-			alias := ""
-			if val, ok := applicationInsightsTarget["alias"]; ok {
-				alias = fmt.Sprintf("%v", val)
-			}
-
-			azureURL := fmt.Sprintf("metrics/%s", fmt.Sprintf("%v", applicationInsightsTarget["metricName"]))
-			timeGrain := fmt.Sprintf("%v", applicationInsightsTarget["timeGrain"])
-			timeGrains := applicationInsightsTarget["allowedTimeGrainsMs"]
+			azureURL := fmt.Sprintf("metrics/%s", insightsJSONModel.MetricName)
+			timeGrain := insightsJSONModel.TimeGrain
+			timeGrains := insightsJSONModel.AllowedTimeGrainsMs
 			if timeGrain == "auto" {
 				timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains)
 				if err != nil {
@@ -143,16 +136,17 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
 			if timeGrain != "none" {
 				params.Add("interval", timeGrain)
 			}
-			params.Add("aggregation", fmt.Sprintf("%v", applicationInsightsTarget["aggregation"]))
+			params.Add("aggregation", insightsJSONModel.Aggregation)

-			dimension := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimension"]))
-			if applicationInsightsTarget["dimension"] != nil && len(dimension) > 0 && !strings.EqualFold(dimension, "none") {
+			dimension := strings.TrimSpace(insightsJSONModel.Dimension)
+			// Azure Monitor combines this and the following logic such that if dimensionFilter, must also Dimension, should that be done here as well?
+			if dimension != "" && !strings.EqualFold(dimension, "none") {
 				params.Add("segment", dimension)
 			}

-			dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimensionFilter"]))
-			if applicationInsightsTarget["dimensionFilter"] != nil && len(dimensionFilter) > 0 {
-				params.Add("filter", fmt.Sprintf("%v", dimensionFilter))
+			dimensionFilter := strings.TrimSpace(insightsJSONModel.DimensionFilter)
+			if dimensionFilter != "" {
+				params.Add("filter", dimensionFilter)
 			}

 			applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
@@ -160,7 +154,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
 				IsRaw:  false,
 				ApiURL: azureURL,
 				Params: params,
-				Alias:  alias,
+				Alias:  insightsJSONModel.Alias,
 				Target: params.Encode(),
 			})
 		}
```
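One detail worth noting in this file: `rawQuery` moves from a required map entry to a pointer field (`RawQuery *bool`), so "property absent" (nil) can still be rejected while `false` stays a legal value. A small hedged sketch of that three-state check, with hypothetical helper and type names rather than the actual datasource code:

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

type appInsightsQuery struct {
	RawQuery       *bool  `json:"rawQuery"`       // nil = property absent; otherwise true/false
	RawQueryString string `json:"rawQueryString"`
}

// validate mirrors the shape of the checks in the diff: the property must be
// present, and when it is true a query string must also be supplied.
func validate(q appInsightsQuery) error {
	if q.RawQuery == nil {
		return errors.New("missing the 'rawQuery' property")
	}
	if *q.RawQuery && q.RawQueryString == "" {
		return errors.New("rawQuery requires rawQueryString")
	}
	return nil
}

func main() {
	for _, payload := range []string{
		`{}`,                          // missing -> error
		`{"rawQuery": false}`,         // present and false -> ok
		`{"rawQuery": true}`,          // true without a query string -> error
		`{"rawQuery": true, "rawQueryString": "requests | count"}`, // ok
	} {
		var q appInsightsQuery
		_ = json.Unmarshal([]byte(payload), &q)
		fmt.Printf("%-60s -> %v\n", payload, validate(q))
	}
}
```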
pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go

```diff
 package azuremonitor

 import (
-	"encoding/json"
 	"fmt"
 	"io/ioutil"
 	"testing"
@@ -92,7 +91,7 @@ func TestApplicationInsightsDatasource(t *testing.T) {
 					"metricName":          "Percentage CPU",
 					"alias":               "testalias",
 					"queryType":           "Application Insights",
-					"allowedTimeGrainsMs": []interface{}{"auto", json.Number("60000"), json.Number("300000")},
+					"allowedTimeGrainsMs": []int64{60000, 300000},
 				},
 			})
 			tsdbQuery.Queries[0].IntervalMs = 400000
```
pkg/tsdb/azuremonitor/azure-log-analytics-datasource.go

```diff
@@ -72,20 +72,31 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRa
 	azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}

 	for _, query := range queries {
-		azureLogAnalyticsTarget := query.Model.Get("azureLogAnalytics").MustMap()
+		queryBytes, err := query.Model.Encode()
+		if err != nil {
+			return nil, fmt.Errorf("failed to re-encode the Azure Log Analytics query into JSON: %w", err)
+		}
+
+		queryJSONModel := logJSONQuery{}
+		err = json.Unmarshal(queryBytes, &queryJSONModel)
+		if err != nil {
+			return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
+		}
+
+		azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
 		azlog.Debug("AzureLogAnalytics", "target", azureLogAnalyticsTarget)

-		resultFormat := fmt.Sprintf("%v", azureLogAnalyticsTarget["resultFormat"])
+		resultFormat := azureLogAnalyticsTarget.ResultFormat
 		if resultFormat == "" {
 			resultFormat = "time_series"
 		}

 		urlComponents := map[string]string{}
-		urlComponents["workspace"] = fmt.Sprintf("%v", azureLogAnalyticsTarget["workspace"])
+		urlComponents["workspace"] = azureLogAnalyticsTarget.Workspace
 		apiURL := fmt.Sprintf("%s/query", urlComponents["workspace"])

 		params := url.Values{}
-		rawQuery, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", azureLogAnalyticsTarget["query"]), "TimeGenerated")
+		rawQuery, err := KqlInterpolate(query, timeRange, azureLogAnalyticsTarget.Query, "TimeGenerated")
 		if err != nil {
 			return nil, err
 		}
```
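This hunk is a clear illustration of why the typed model is safer: with the old `MustMap()` approach, a `resultFormat` key that the frontend never set passes through `fmt.Sprintf("%v", ...)` and comes out as the string "<nil>", so the `== ""` fallback to "time_series" could not trigger; with the typed struct the missing field is simply the zero value "". A tiny standalone sketch of that difference (the struct here is a simplified local stand-in):

```go
package main

import (
	"encoding/json"
	"fmt"
)

type logAnalyticsQuery struct {
	Workspace    string `json:"workspace"`
	ResultFormat string `json:"resultFormat"`
}

func main() {
	raw := []byte(`{"workspace": "ws-1"}`) // resultFormat not set by the frontend

	// Old style: the missing key yields a nil interface value, which formats as "<nil>".
	var m map[string]interface{}
	_ = json.Unmarshal(raw, &m)
	fmt.Printf("untyped resultFormat: %q\n", fmt.Sprintf("%v", m["resultFormat"])) // "<nil>"

	// New style: the zero value is an empty string, so the `if resultFormat == ""`
	// default to "time_series" in the diff actually fires.
	var q logAnalyticsQuery
	_ = json.Unmarshal(raw, &q)
	resultFormat := q.ResultFormat
	if resultFormat == "" {
		resultFormat = "time_series"
	}
	fmt.Printf("typed resultFormat:   %q\n", resultFormat) // "time_series"
}
```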
pkg/tsdb/azuremonitor/azuremonitor-datasource.go

```diff
@@ -36,6 +36,8 @@ var (
 	defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
 )

+const azureMonitorAPIVersion = "2018-01-01"
+
 // executeTimeSeriesQuery does the following:
 // 1. build the AzureMonitor url and querystring for each query
 // 2. executes each query by calling the Azure Monitor API
@@ -81,31 +83,38 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
 	for _, query := range queries {
 		var target string

-		azureMonitorTarget := query.Model.Get("azureMonitor").MustMap()
+		queryBytes, err := query.Model.Encode()
+		if err != nil {
+			return nil, fmt.Errorf("failed to re-encode the Azure Monitor query into JSON: %w", err)
+		}
+
+		queryJSONModel := azureMonitorJSONQuery{}
+		err = json.Unmarshal(queryBytes, &queryJSONModel)
+		if err != nil {
+			return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
+		}
+
+		azJSONModel := queryJSONModel.AzureMonitor

 		urlComponents := map[string]string{}
-		urlComponents["subscription"] = fmt.Sprintf("%v", query.Model.Get("subscription").MustString())
-		urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"])
-		urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"])
-		urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"])
+		urlComponents["subscription"] = queryJSONModel.Subscription
+		urlComponents["resourceGroup"] = azJSONModel.ResourceGroup
+		urlComponents["metricDefinition"] = azJSONModel.MetricDefinition
+		urlComponents["resourceName"] = azJSONModel.ResourceName

 		ub := urlBuilder{
 			DefaultSubscription: query.DataSource.JsonData.Get("subscriptionId").MustString(),
-			Subscription:        urlComponents["subscription"],
-			ResourceGroup:       urlComponents["resourceGroup"],
-			MetricDefinition:    urlComponents["metricDefinition"],
-			ResourceName:        urlComponents["resourceName"],
+			Subscription:        queryJSONModel.Subscription,
+			ResourceGroup:       queryJSONModel.AzureMonitor.ResourceGroup,
+			MetricDefinition:    azJSONModel.MetricDefinition,
+			ResourceName:        azJSONModel.ResourceName,
 		}
 		azureURL := ub.Build()

-		alias := ""
-		if val, ok := azureMonitorTarget["alias"]; ok {
-			alias = fmt.Sprintf("%v", val)
-		}
+		alias := azJSONModel.Alias

-		timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])
-		timeGrains := azureMonitorTarget["allowedTimeGrainsMs"]
+		timeGrain := azJSONModel.TimeGrain
+		timeGrains := azJSONModel.AllowedTimeGrainsMs
 		if timeGrain == "auto" {
 			timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains)
 			if err != nil {
@@ -114,18 +123,18 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
 		}

 		params := url.Values{}
-		params.Add("api-version", "2018-01-01")
+		params.Add("api-version", azureMonitorAPIVersion)
 		params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
 		params.Add("interval", timeGrain)
-		params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"]))
-		params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"]))
-		params.Add("metricnamespace", fmt.Sprintf("%v", azureMonitorTarget["metricNamespace"]))
+		params.Add("aggregation", azJSONModel.Aggregation)
+		params.Add("metricnames", azJSONModel.MetricName) // MetricName or MetricNames ?
+		params.Add("metricnamespace", azJSONModel.MetricNamespace)

-		dimension := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimension"]))
-		dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"]))
-		if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && len(dimension) > 0 && len(dimensionFilter) > 0 && dimension != "None" {
+		dimension := strings.TrimSpace(azJSONModel.Dimension)
+		dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
+		if dimension != "" && dimensionFilter != "" && dimension != "None" {
 			params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
-			params.Add("top", fmt.Sprintf("%v", azureMonitorTarget["top"]))
+			params.Add("top", azJSONModel.Top)
 		}

 		target = params.Encode()
```
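The `params`/`target` handling around these lines is plain `net/url` query-string building. For reference, a standalone sketch of what `target = params.Encode()` produces for a metrics query like the ones above; the concrete values here are made up for illustration:

```go
package main

import (
	"fmt"
	"net/url"
	"time"
)

func main() {
	startTime := time.Date(2020, 5, 26, 10, 0, 0, 0, time.UTC)
	endTime := startTime.Add(6 * time.Hour)

	params := url.Values{}
	params.Add("api-version", "2018-01-01")
	params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
	params.Add("interval", "PT1M")
	params.Add("aggregation", "Average")
	params.Add("metricnames", "Percentage CPU")

	// Encode sorts the keys and URL-escapes the values, e.g.
	// aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&timespan=...
	target := params.Encode()
	fmt.Println(target)
}
```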
pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go

```diff
@@ -104,7 +104,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 					"metricName":          "Percentage CPU",
 					"alias":               "testalias",
 					"queryType":           "Azure Monitor",
-					"allowedTimeGrainsMs": []interface{}{"auto", json.Number("60000"), json.Number("300000")},
+					"allowedTimeGrainsMs": []int64{60000, 300000},
 				},
 			})
 			tsdbQuery.Queries[0].IntervalMs = 400000
```
pkg/tsdb/azuremonitor/azuremonitor-time.go

```diff
 package azuremonitor

-import "encoding/json"
-
 // setAutoTimeGrain tries to find the closest interval to the query's intervalMs value
 // if the metric has a limited set of possible intervals/time grains then use those
 // instead of the default list of intervals
-func setAutoTimeGrain(intervalMs int64, timeGrains interface{}) (string, error) {
-	// parses array of numbers from the timeGrains json field
-	allowedTimeGrains := []int64{}
-	tgs, ok := timeGrains.([]interface{})
-	if ok {
-		for _, v := range tgs {
-			jsonNumber, ok := v.(json.Number)
-			if ok {
-				tg, err := jsonNumber.Int64()
-				if err == nil {
-					allowedTimeGrains = append(allowedTimeGrains, tg)
-				}
-			}
-		}
-	}
-
-	autoInterval := findClosestAllowedIntervalMS(intervalMs, allowedTimeGrains)
+func setAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) {
+	autoInterval := findClosestAllowedIntervalMS(intervalMs, timeGrains)

 	tg := &TimeGrain{}
 	autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval)
 	if err != nil {
```
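Because `timeGrains` is now a plain `[]int64` decoded from the JSON model, the whole `json.Number` parsing loop disappears and the slice goes straight into `findClosestAllowedIntervalMS`. The sketch below is a hypothetical closest-interval picker, not Grafana's actual `findClosestAllowedIntervalMS` implementation, just to show the kind of selection `setAutoTimeGrain` delegates:

```go
package main

import "fmt"

// closestAllowedIntervalMS is a hypothetical stand-in for Grafana's
// findClosestAllowedIntervalMS. It assumes the allowed list is sorted
// ascending (as defaultAllowedIntervalsMS is) and picks the smallest allowed
// interval that is >= the requested one, falling back to the largest value.
func closestAllowedIntervalMS(intervalMs int64, allowed []int64) int64 {
	if len(allowed) == 0 {
		return intervalMs
	}
	best := allowed[len(allowed)-1]
	for _, a := range allowed {
		if a >= intervalMs && a < best {
			best = a
		}
	}
	return best
}

func main() {
	allowed := []int64{60000, 300000} // as in the updated tests
	fmt.Println(closestAllowedIntervalMS(400000, allowed)) // 300000 (falls back to the largest)
	fmt.Println(closestAllowedIntervalMS(90000, allowed))  // 300000
}
```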
pkg/tsdb/azuremonitor/types.go

```diff
@@ -90,3 +90,51 @@ type column struct {
 	Name string `json:"name"`
 	Type string `json:"type"`
 }
+
+// azureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
+type azureMonitorJSONQuery struct {
+	AzureMonitor struct {
+		Aggregation         string  `json:"aggregation"`
+		Alias               string  `json:"alias"`
+		AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
+		Dimension           string  `json:"dimension"`
+		DimensionFilter     string  `json:"dimensionFilter"`
+		Format              string  `json:"format"`
+		MetricDefinition    string  `json:"metricDefinition"`
+		MetricName          string  `json:"metricName"`
+		MetricNamespace     string  `json:"metricNamespace"`
+		ResourceGroup       string  `json:"resourceGroup"`
+		ResourceName        string  `json:"resourceName"`
+		TimeGrain           string  `json:"timeGrain"`
+		Top                 string  `json:"top"`
+	} `json:"azureMonitor"`
+	Subscription string `json:"subscription"`
+}
+
+// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
+type insightsJSONQuery struct {
+	AppInsights struct {
+		Aggregation         string  `json:"aggregation"`
+		Alias               string  `json:"alias"`
+		AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
+		Dimension           string  `json:"dimension"`
+		DimensionFilter     string  `json:"dimensionFilter"`
+		MetricName          string  `json:"metricName"`
+		RawQuery            *bool   `json:"rawQuery"`
+		RawQueryString      string  `json:"rawQueryString"`
+		TimeGrain           string  `json:"timeGrain"`
+		TimeColumn          string  `json:"timeColumn"`
+		ValueColumn         string  `json:"valueColumn"`
+		SegmentColumn       string  `json:"segmentColumn"`
+	} `json:"appInsights"`
+	Raw *bool `json:"raw"`
+}
+
+// logJSONQuery is the frontend JSON query model for an Azure Log Analytics query.
+type logJSONQuery struct {
+	AzureLogAnalytics struct {
+		Query        string `json:"query"`
+		ResultFormat string `json:"resultFormat"`
+		Workspace    string `json:"workspace"`
+	} `json:"azureLogAnalytics"`
+}
```
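These structs map one-to-one onto the JSON that the frontend query editor sends, which is what makes the `query.Model.Encode()` → `json.Unmarshal` pattern in the datasources possible. A short sketch of decoding a made-up panel query into the Application Insights model; the payload below is an assumed example and the struct is a trimmed local copy of `insightsJSONQuery`, not an import from the package:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed local copy of the relevant part of insightsJSONQuery from types.go.
type insightsJSONQuery struct {
	AppInsights struct {
		MetricName     string `json:"metricName"`
		Alias          string `json:"alias"`
		RawQuery       *bool  `json:"rawQuery"`
		RawQueryString string `json:"rawQueryString"`
		TimeGrain      string `json:"timeGrain"`
	} `json:"appInsights"`
}

func main() {
	// A made-up example of what the frontend might send for a metrics query.
	payload := []byte(`{
		"appInsights": {
			"metricName": "requests/count",
			"alias": "{{ metric }}",
			"rawQuery": false,
			"timeGrain": "auto"
		}
	}`)

	q := insightsJSONQuery{}
	if err := json.Unmarshal(payload, &q); err != nil {
		fmt.Println("failed to decode the Application Insights query:", err)
		return
	}
	fmt.Printf("metric=%s timeGrain=%s rawQuery=%v\n",
		q.AppInsights.MetricName, q.AppInsights.TimeGrain, *q.AppInsights.RawQuery)
}
```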