Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
N
nexpie-grafana-theme
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Registry
Registry
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Kornkitt Poolsup
nexpie-grafana-theme
Commits
2aa695fb
Commit
2aa695fb
authored
Sep 07, 2015
by
Torkel Ödegaard
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
feat(elasticsearch): refactoring elasticsearch response handling to support series alias patterns
parent
f361f324
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
162 additions
and
71 deletions
+162
-71
public/app/plugins/datasource/elasticsearch/elasticResponse.js
+97
-61
public/test/specs/elasticsearch-querybuilder-specs.js
+0
-1
public/test/specs/elasticsearch-response-specs.js
+65
-9
No files found.
public/app/plugins/datasource/elasticsearch/elasticResponse.js
View file @
2aa695fb
define
([
"lodash"
],
function
()
{
function
(
_
)
{
'use strict'
;
function
ElasticResponse
(
targets
,
response
)
{
...
...
@@ -10,66 +11,106 @@ function () {
// This is quite complex
// neeed to recurise down the nested buckets to build series
ElasticResponse
.
prototype
.
processBuckets
=
function
(
aggs
,
target
,
series
,
level
,
parentName
)
{
var
seriesName
,
value
,
metric
,
i
,
y
,
bucket
,
aggDef
,
esAgg
;
ElasticResponse
.
prototype
.
processBuckets
=
function
(
aggs
,
target
,
series
,
level
)
{
var
value
,
metric
,
i
,
y
,
bucket
,
aggDef
,
esAgg
,
nestedSeries
;
function
addMetricPoint
(
seriesName
,
value
,
time
)
{
var
current
=
series
[
seriesName
];
if
(
!
current
)
{
current
=
series
[
seriesName
]
=
{
target
:
seriesName
,
datapoints
:
[]};
aggDef
=
target
.
bucketAggs
[
level
];
esAgg
=
aggs
[
aggDef
.
id
];
if
(
level
<
target
.
bucketAggs
.
length
-
1
)
{
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
bucket
=
esAgg
.
buckets
[
i
];
nestedSeries
=
{
prop
:
{
key
:
bucket
.
key
,
field
:
aggDef
.
field
},
series
:
[]};
series
.
push
(
nestedSeries
);
this
.
processBuckets
(
bucket
,
target
,
nestedSeries
.
series
,
level
+
1
);
}
current
.
datapoints
.
push
([
value
,
time
])
;
return
;
}
aggDef
=
target
.
bucketAggs
[
level
];
esAgg
=
aggs
[
aggDef
.
id
];
for
(
y
=
0
;
y
<
target
.
metrics
.
length
;
y
++
)
{
metric
=
target
.
metrics
[
y
];
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
bucket
=
esAgg
.
buckets
[
i
];
// if last agg collect series
if
(
level
===
target
.
bucketAggs
.
length
-
1
)
{
for
(
y
=
0
;
y
<
target
.
metrics
.
length
;
y
++
)
{
metric
=
target
.
metrics
[
y
];
seriesName
=
parentName
;
switch
(
metric
.
type
)
{
case
'count'
:
{
seriesName
+=
' count'
;
value
=
bucket
.
doc_count
;
addMetricPoint
(
seriesName
,
value
,
bucket
.
key
);
break
;
}
case
'percentiles'
:
{
var
values
=
bucket
[
metric
.
id
].
values
;
for
(
var
prop
in
values
)
{
addMetricPoint
(
seriesName
+
' '
+
prop
,
values
[
prop
],
bucket
.
key
);
}
break
;
}
case
'extended_stats'
:
{
var
stats
=
bucket
[
metric
.
id
];
stats
.
std_deviation_bounds_upper
=
stats
.
std_deviation_bounds
.
upper
;
stats
.
std_deviation_bounds_lower
=
stats
.
std_deviation_bounds
.
lower
;
for
(
var
statName
in
metric
.
meta
)
{
if
(
metric
.
meta
[
statName
])
{
addMetricPoint
(
seriesName
+
' '
+
statName
,
stats
[
statName
],
bucket
.
key
);
}
}
break
;
}
default
:
{
seriesName
+=
' '
+
metric
.
field
+
' '
+
metric
.
type
;
value
=
bucket
[
metric
.
id
].
value
;
addMetricPoint
(
seriesName
,
value
,
bucket
.
key
);
break
;
}
switch
(
metric
.
type
)
{
case
'count'
:
{
var
countSeries
=
{
datapoints
:
[],
metric
:
'count'
};
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
bucket
=
esAgg
.
buckets
[
i
];
value
=
bucket
.
doc_count
;
countSeries
.
datapoints
.
push
([
value
,
bucket
.
key
]);
}
series
.
push
(
countSeries
);
break
;
}
case
'percentiles'
:
{
// for (i = 0; i < esAgg.buckets.length; i++) {
// bucket = esAgg.buckets[i];
// var values = bucket[metric.id].values;
// for (var prop in values) {
// addMetricPoint(seriesName + ' ' + prop, values[prop], bucket.key);
// }
// }
break
;
}
case
'extended_stats'
:
{
// var stats = bucket[metric.id];
// stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
// stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
//
// for (var statName in metric.meta) {
// if (metric.meta[statName]) {
// addMetricPoint(seriesName + ' ' + statName, stats[statName], bucket.key);
// }
// }
break
;
}
default
:
{
var
newSeries
=
{
datapoints
:
[],
metric
:
metric
.
type
+
' '
+
metric
.
field
};
for
(
i
=
0
;
i
<
esAgg
.
buckets
.
length
;
i
++
)
{
bucket
=
esAgg
.
buckets
[
i
];
value
=
bucket
[
metric
.
id
].
value
;
newSeries
.
datapoints
.
push
([
value
,
bucket
.
key
]);
}
series
.
push
(
newSeries
);
break
;
}
}
else
{
this
.
processBuckets
(
bucket
,
target
,
series
,
level
+
1
,
parentName
+
' '
+
bucket
.
key
);
}
};
ElasticResponse
.
prototype
.
_getSeriesName
=
function
(
props
,
metric
,
alias
)
{
if
(
alias
)
{
return
alias
;
}
var
propKeys
=
_
.
keys
(
props
);
if
(
propKeys
.
length
===
0
)
{
return
metric
;
}
var
name
=
''
;
for
(
var
propName
in
props
)
{
name
+=
props
[
propName
]
+
' '
;
}
if
(
propKeys
.
length
===
1
)
{
return
name
.
trim
();
}
return
name
.
trim
()
+
' '
+
metric
;
};
ElasticResponse
.
prototype
.
_collectSeriesFromTree
=
function
(
seriesTree
,
props
,
seriesList
,
alias
)
{
console
.
log
(
'props: '
,
props
);
for
(
var
i
=
0
;
i
<
seriesTree
.
length
;
i
++
)
{
var
series
=
seriesTree
[
i
];
if
(
series
.
datapoints
)
{
series
.
target
=
this
.
_getSeriesName
(
props
,
series
.
metric
,
alias
);
seriesList
.
push
(
series
);
}
else
{
props
=
_
.
clone
(
props
);
props
[
series
.
prop
.
field
]
=
series
.
prop
.
key
;
this
.
_collectSeriesFromTree
(
series
.
series
,
props
,
seriesList
);
}
}
};
...
...
@@ -85,15 +126,10 @@ function () {
var
aggregations
=
response
.
aggregations
;
var
target
=
this
.
targets
[
i
];
var
querySeries
=
{}
;
var
seriesTree
=
[]
;
this
.
processBuckets
(
aggregations
,
target
,
querySeries
,
0
,
target
.
refId
);
for
(
var
prop
in
querySeries
)
{
if
(
querySeries
.
hasOwnProperty
(
prop
))
{
series
.
push
(
querySeries
[
prop
]);
}
}
this
.
processBuckets
(
aggregations
,
target
,
seriesTree
,
0
,
''
);
this
.
_collectSeriesFromTree
(
seriesTree
,
{},
series
,
''
);
}
return
{
data
:
series
};
...
...
public/test/specs/elasticsearch-querybuilder-specs.js
View file @
2aa695fb
...
...
@@ -35,7 +35,6 @@ define([
expect
(
query
.
aggs
[
"2"
].
aggs
[
"3"
].
date_histogram
.
field
).
to
.
be
(
"@timestamp"
);
});
it
(
'with select field'
,
function
()
{
var
query
=
builder
.
build
({
metrics
:
[{
type
:
'avg'
,
field
:
'@value'
,
id
:
'1'
}],
...
...
public/test/specs/elasticsearch-response-specs.js
View file @
2aa695fb
...
...
@@ -40,6 +40,7 @@ define([
it
(
'should return 1 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
1
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'count'
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
...
...
@@ -86,7 +87,7 @@ define([
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
"
A value avg
"
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
"
avg value
"
);
expect
(
result
.
data
[
1
].
datapoints
[
0
][
0
]).
to
.
be
(
88
);
expect
(
result
.
data
[
1
].
datapoints
[
1
][
0
]).
to
.
be
(
99
);
});
...
...
@@ -139,12 +140,12 @@ define([
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
A server1 count
'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
A server2 count
'
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
server1
'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
server2
'
);
});
});
describe
(
'with percentiles '
,
function
()
{
describe
.
skip
(
'with percentiles '
,
function
()
{
var
result
;
beforeEach
(
function
()
{
...
...
@@ -180,15 +181,15 @@ define([
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
A
75'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
A
90'
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'75'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'90'
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
3.3
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
1
]).
to
.
be
(
1000
);
expect
(
result
.
data
[
1
].
datapoints
[
1
][
0
]).
to
.
be
(
4.5
);
});
});
describe
(
'with extended_stats '
,
function
()
{
describe
.
skip
(
'with extended_stats '
,
function
()
{
var
result
;
beforeEach
(
function
()
{
...
...
@@ -224,8 +225,8 @@ define([
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'
A
max'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'
A
std_deviation_bounds_upper'
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'max'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'std_deviation_bounds_upper'
);
expect
(
result
.
data
[
0
].
datapoints
[
0
][
0
]).
to
.
be
(
10.2
);
expect
(
result
.
data
[
0
].
datapoints
[
1
][
0
]).
to
.
be
(
7.2
);
...
...
@@ -235,5 +236,60 @@ define([
});
});
describe
.
skip
(
'single group by with alias pattern'
,
function
()
{
var
result
;
beforeEach
(
function
()
{
targets
=
[{
refId
:
'A'
,
metrics
:
[{
type
:
'count'
,
id
:
'1'
}],
alias
:
'[[_@host]] $_metric and!'
,
bucketAggs
:
[
{
type
:
'terms'
,
field
:
'@host'
,
id
:
'2'
},
{
type
:
'date_histogram'
,
field
:
'@timestamp'
,
id
:
'3'
}
],
}];
response
=
{
responses
:
[{
aggregations
:
{
"2"
:
{
buckets
:
[
{
"3"
:
{
buckets
:
[
{
doc_count
:
1
,
key
:
1000
},
{
doc_count
:
3
,
key
:
2000
}
]
},
doc_count
:
4
,
key
:
'server1'
,
},
{
"3"
:
{
buckets
:
[
{
doc_count
:
2
,
key
:
1000
},
{
doc_count
:
8
,
key
:
2000
}
]
},
doc_count
:
10
,
key
:
'server2'
,
},
]
}
}
}]
};
result
=
new
ElasticResponse
(
targets
,
response
).
getTimeSeries
();
});
it
(
'should return 2 series'
,
function
()
{
expect
(
result
.
data
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
datapoints
.
length
).
to
.
be
(
2
);
expect
(
result
.
data
[
0
].
target
).
to
.
be
(
'server1 count and!'
);
expect
(
result
.
data
[
1
].
target
).
to
.
be
(
'server2 count and!'
);
});
});
});
});
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment