Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
N
nexpie-grafana-theme
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Registry
Registry
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Kornkitt Poolsup
nexpie-grafana-theme
Commits
2aa695fb
Commit
2aa695fb
authored
Sep 07, 2015
by
Torkel Ödegaard
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
feat(elasticsearch): refactoring elasticsearch response handling to support series alias patterns
parent
f361f324
Show whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
142 additions
and
51 deletions
+142
-51
public/app/plugins/datasource/elasticsearch/elasticResponse.js
+77
-41
public/test/specs/elasticsearch-querybuilder-specs.js
+0
-1
public/test/specs/elasticsearch-response-specs.js
+65
-9
No files found.
public/app/plugins/datasource/elasticsearch/elasticResponse.js
View file @
2aa695fb
define
([
define
([
"lodash"
],
],
function
()
{
function
(
_
)
{
'use strict'
;
'use strict'
;
function
ElasticResponse
(
targets
,
response
)
{
function
ElasticResponse
(
targets
,
response
)
{
...
@@ -10,66 +11,106 @@ function () {
...
@@ -10,66 +11,106 @@ function () {
// This is quite complex
// This is quite complex
// neeed to recurise down the nested buckets to build series
// neeed to recurise down the nested buckets to build series
ElasticResponse
.
prototype
.
processBuckets
=
function
(
aggs
,
target
,
series
,
level
,
parentName
)
{
ElasticResponse
.
prototype
.
processBuckets
=
function
(
aggs
,
target
,
series
,
level
)
{
var
seriesName
,
value
,
metric
,
i
,
y
,
bucket
,
aggDef
,
esAgg
;
var
value
,
metric
,
i
,
y
,
bucket
,
aggDef
,
esAgg
,
nestedSeries
;
function
addMetricPoint
(
seriesName
,
value
,
time
)
{
var
current
=
series
[
seriesName
];
if
(
!
current
)
{
current
=
series
[
seriesName
]
=
{
target
:
seriesName
,
datapoints
:
[]};
}
current
.
datapoints
.
push
([
value
,
time
]);
}
aggDef
=
target
.
bucketAggs
[
level
];
aggDef
=
target
.
bucketAggs
[
level
];
esAgg
=
aggs
[
aggDef
.
id
];
esAgg
=
aggs
[
aggDef
.
id
];
if
(
level
<
target
.
bucketAggs
.
length
-
1
)
{
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
bucket
=
esAgg
.
buckets
[
i
];
bucket
=
esAgg
.
buckets
[
i
];
nestedSeries
=
{
prop
:
{
key
:
bucket
.
key
,
field
:
aggDef
.
field
},
series
:
[]};
series
.
push
(
nestedSeries
);
this
.
processBuckets
(
bucket
,
target
,
nestedSeries
.
series
,
level
+
1
);
}
return
;
}
// if last agg collect series
if
(
level
===
target
.
bucketAggs
.
length
-
1
)
{
for
(
y
=
0
;
y
<
target
.
metrics
.
length
;
y
++
)
{
for
(
y
=
0
;
y
<
target
.
metrics
.
length
;
y
++
)
{
metric
=
target
.
metrics
[
y
];
metric
=
target
.
metrics
[
y
];
seriesName
=
parentName
;
switch
(
metric
.
type
)
{
switch
(
metric
.
type
)
{
case
'count'
:
{
case
'count'
:
{
seriesName
+=
' count'
;
var
countSeries
=
{
datapoints
:
[],
metric
:
'count'
};
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
bucket
=
esAgg
.
buckets
[
i
];
value
=
bucket
.
doc_count
;
value
=
bucket
.
doc_count
;
addMetricPoint
(
seriesName
,
value
,
bucket
.
key
);
countSeries
.
datapoints
.
push
([
value
,
bucket
.
key
]);
}
series
.
push
(
countSeries
);
break
;
break
;
}
}
case
'percentiles'
:
{
case
'percentiles'
:
{
var
values
=
bucket
[
metric
.
id
].
values
;
// for (i = 0; i < esAgg.buckets.length; i++) {
for
(
var
prop
in
values
)
{
// bucket = esAgg.buckets[i];
addMetricPoint
(
seriesName
+
' '
+
prop
,
values
[
prop
],
bucket
.
key
);
// var values = bucket[metric.id].values;
}
// for (var prop in values) {
// addMetricPoint(seriesName + ' ' + prop, values[prop], bucket.key);
// }
// }
break
;
break
;
}
}
case
'extended_stats'
:
{
case
'extended_stats'
:
{
var
stats
=
bucket
[
metric
.
id
];
//
var stats = bucket[metric.id];
stats
.
std_deviation_bounds_upper
=
stats
.
std_deviation_bounds
.
upper
;
//
stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
stats
.
std_deviation_bounds_lower
=
stats
.
std_deviation_bounds
.
lower
;
//
stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
//
for
(
var
statName
in
metric
.
meta
)
{
//
for (var statName in metric.meta) {
if
(
metric
.
meta
[
statName
])
{
//
if (metric.meta[statName]) {
addMetricPoint
(
seriesName
+
' '
+
statName
,
stats
[
statName
],
bucket
.
key
);
//
addMetricPoint(seriesName + ' ' + statName, stats[statName], bucket.key);
}
//
}
}
//
}
break
;
break
;
}
}
default
:
{
default
:
{
seriesName
+=
' '
+
metric
.
field
+
' '
+
metric
.
type
;
var
newSeries
=
{
datapoints
:
[],
metric
:
metric
.
type
+
' '
+
metric
.
field
};
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
bucket
=
esAgg
.
buckets
[
i
];
value
=
bucket
[
metric
.
id
].
value
;
value
=
bucket
[
metric
.
id
].
value
;
addMetricPoint
(
seriesName
,
value
,
bucket
.
key
);
newSeries
.
datapoints
.
push
([
value
,
bucket
.
key
]);
}
series
.
push
(
newSeries
);
break
;
break
;
}
}
}
}
}
}
};
ElasticResponse
.
prototype
.
_getSeriesName
=
function
(
props
,
metric
,
alias
)
{
if
(
alias
)
{
return
alias
;
}
var
propKeys
=
_
.
keys
(
props
);
if
(
propKeys
.
length
===
0
)
{
return
metric
;
}
}
else
{
this
.
processBuckets
(
bucket
,
target
,
series
,
level
+
1
,
parentName
+
' '
+
bucket
.
key
);
var
name
=
''
;
for
(
var
propName
in
props
)
{
name
+=
props
[
propName
]
+
' '
;
}
if
(
propKeys
.
length
===
1
)
{
return
name
.
trim
();
}
return
name
.
trim
()
+
' '
+
metric
;
};
ElasticResponse
.
prototype
.
_collectSeriesFromTree
=
function
(
seriesTree
,
props
,
seriesList
,
alias
)
{
console
.
log
(
'props: '
,
props
);
for
(
var
i
=
0
;
i
<
seriesTree
.
length
;
i
++
)
{
var
series
=
seriesTree
[
i
];
if
(
series
.
datapoints
)
{
series
.
target
=
this
.
_getSeriesName
(
props
,
series
.
metric
,
alias
);
seriesList
.
push
(
series
);
}
else
{
props
=
_
.
clone
(
props
);
props
[
series
.
prop
.
field
]
=
series
.
prop
.
key
;
this
.
_collectSeriesFromTree
(
series
.
series
,
props
,
seriesList
);
}
}
}
}
};
};
...
@@ -85,15 +126,10 @@ function () {
...
@@ -85,15 +126,10 @@ function () {
var
aggregations
=
response
.
aggregations
;
var
aggregations
=
response
.
aggregations
;
var
target
=
this
.
targets
[
i
];
var
target
=
this
.
targets
[
i
];
var
querySeries
=
{}
;
var
seriesTree
=
[]
;
this
.
processBuckets
(
aggregations
,
target
,
querySeries
,
0
,
target
.
refId
);
this
.
processBuckets
(
aggregations
,
target
,
seriesTree
,
0
,
''
);
this
.
_collectSeriesFromTree
(
seriesTree
,
{},
series
,
''
);
for
(
var
prop
in
querySeries
)
{
if
(
querySeries
.
hasOwnProperty
(
prop
))
{
series
.
push
(
querySeries
[
prop
]);
}
}
}
}
return
{
data
:
series
};
return
{
data
:
series
};
...
...
public/test/specs/elasticsearch-querybuilder-specs.js
View file @
2aa695fb
...
@@ -35,7 +35,6 @@ define([
...
@@ -35,7 +35,6 @@ define([
expect
(
query
.
aggs
[
"2"
].
aggs
[
"3"
].
date_histogram
.
field
).
to
.
be
(
"@timestamp"
);
expect
(
query
.
aggs
[
"2"
].
aggs
[
"3"
].
date_histogram
.
field
).
to
.
be
(
"@timestamp"
);
});
});
it
(
'with select field'
,
function
()
{
it
(
'with select field'
,
function
()
{
var
query
=
builder
.
build
({
var
query
=
builder
.
build
({
metrics
:
[{
type
:
'avg'
,
field
:
'@value'
,
id
:
'1'
}],
metrics
:
[{
type
:
'avg'
,
field
:
'@value'
,
id
:
'1'
}],
...
...
public/test/specs/elasticsearch-response-specs.js
View file @
2aa695fb
...
@@ -40,6 +40,7 @@ define([
...
@@ -40,6 +40,7 @@ define([
it
(
'should return 1 series'
,
function
()
{
it
(
'should return 1 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
1
);
expect
(
result
.
data
.
length
).
to
.
be
(
1
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'count'
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
...
@@ -86,7 +87,7 @@ define([
...
@@ -86,7 +87,7 @@ define([
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
"
A value avg
"
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
"
avg value
"
);
expect
(
result
.
data
[
1
].
datapoints
[
0
][
0
]).
to
.
be
(
88
);
expect
(
result
.
data
[
1
].
datapoints
[
0
][
0
]).
to
.
be
(
88
);
expect
(
result
.
data
[
1
].
datapoints
[
1
][
0
]).
to
.
be
(
99
);
expect
(
result
.
data
[
1
].
datapoints
[
1
][
0
]).
to
.
be
(
99
);
});
});
...
@@ -139,12 +140,12 @@ define([
...
@@ -139,12 +140,12 @@ define([
it
(
'should return 2 series'
,
function
()
{
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
A server1 count
'
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
server1
'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
A server2 count
'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
server2
'
);
});
});
});
});
describe
(
'with percentiles '
,
function
()
{
describe
.
skip
(
'with percentiles '
,
function
()
{
var
result
;
var
result
;
beforeEach
(
function
()
{
beforeEach
(
function
()
{
...
@@ -180,15 +181,15 @@ define([
...
@@ -180,15 +181,15 @@ define([
it
(
'should return 2 series'
,
function
()
{
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
A
75'
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'75'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
A
90'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'90'
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
3.3
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
3.3
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
expect
(
result
.
data
[
1
].
datapoints
[
1
][
0
]).
to
.
be
(
4.5
);
expect
(
result
.
data
[
1
].
datapoints
[
1
][
0
]).
to
.
be
(
4.5
);
});
});
});
});
describe
(
'with extended_stats '
,
function
()
{
describe
.
skip
(
'with extended_stats '
,
function
()
{
var
result
;
var
result
;
beforeEach
(
function
()
{
beforeEach
(
function
()
{
...
@@ -224,8 +225,8 @@ define([
...
@@ -224,8 +225,8 @@ define([
it
(
'should return 2 series'
,
function
()
{
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
A
max'
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'max'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
A
std_deviation_bounds_upper'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'std_deviation_bounds_upper'
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10.2
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10.2
);
expect
(
result
.
data
[
0
].
datapoints
[
1
][
0
]).
to
.
be
(
7.2
);
expect
(
result
.
data
[
0
].
datapoints
[
1
][
0
]).
to
.
be
(
7.2
);
...
@@ -235,5 +236,60 @@ define([
...
@@ -235,5 +236,60 @@ define([
});
});
});
});
describe
.
skip
(
'single group by with alias pattern'
,
function
()
{
var
result
;
beforeEach
(
function
()
{
targets
=
[{
refId
:
'A'
,
metrics
:
[{
type
:
'count'
,
id
:
'1'
}],
alias
:
'[[_@host]] $_metric and!'
,
bucketAggs
:
[
{
type
:
'terms'
,
field
:
'@host'
,
id
:
'2'
},
{
type
:
'date_histogram'
,
field
:
'@timestamp'
,
id
:
'3'
}
],
}];
response
=
{
responses
:
[{
aggregations
:
{
"2"
:
{
buckets
:
[
{
"3"
:
{
buckets
:
[
{
doc_count
:
1
,
key
:
1000
},
{
doc_count
:
3
,
key
:
2000
}
]
},
doc_count
:
4
,
key
:
'server1'
,
},
{
"3"
:
{
buckets
:
[
{
doc_count
:
2
,
key
:
1000
},
{
doc_count
:
8
,
key
:
2000
}
]
},
doc_count
:
10
,
key
:
'server2'
,
},
]
}
}
}]
};
result
=
new
ElasticResponse
(
targets
,
response
).
getTimeSeries
();
});
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'server1 count and!'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'server2 count and!'
);
});
});
});
});
});
});
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment