Commit 934d93ad (Unverified)
Authored Feb 17, 2020 by Andrej Ocenas, committed by GitHub on Feb 17, 2020
Elastic: Map level field based on config. (#22182)

* Map level field based on config.
* Fix type

Parent: 10fbabfb
Showing 3 changed files with 193 additions and 127 deletions:

  public/app/core/logs_model.ts                                                +1    -1
  public/app/plugins/datasource/elasticsearch/elastic_response.ts              +115  -69
  public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts   +77   -57
public/app/core/logs_model.ts

@@ -312,7 +312,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
       const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
       let logLevel = LogLevel.unknown;
-      if (logLevelField) {
+      if (logLevelField && logLevelField.values.get(j)) {
         logLevel = getLogLevelFromKey(logLevelField.values.get(j));
       } else if (seriesLogLevel) {
         logLevel = seriesLogLevel;
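The one-line change above only maps the row's value when it is actually present, so an empty value now falls back to the level detected for the whole series. A small self-contained sketch of that behaviour (illustrative only, not the Grafana source; LogLevel, getLogLevelFromKey and resolveLevel are re-declared here as assumptions):

// Sketch: why the added `&& logLevelField.values.get(j)` guard matters.
enum LogLevel {
  debug = 'debug',
  info = 'info',
  error = 'error',
  unknown = 'unknown',
}

// Assumed simplification of getLogLevelFromKey: unrecognised keys become `unknown`.
const getLogLevelFromKey = (key: string): LogLevel => {
  switch (key.toLowerCase()) {
    case 'debug':
      return LogLevel.debug;
    case 'info':
      return LogLevel.info;
    case 'error':
      return LogLevel.error;
    default:
      return LogLevel.unknown;
  }
};

// Hypothetical helper mirroring the patched branch in logSeriesToLogsModel.
const resolveLevel = (rowValue: string | undefined, seriesLogLevel?: LogLevel): LogLevel => {
  if (rowValue) {
    return getLogLevelFromKey(rowValue); // map only when the row actually has a value
  } else if (seriesLogLevel) {
    return seriesLogLevel; // empty row values fall back to the series-wide level
  }
  return LogLevel.unknown;
};

resolveLevel('error');           // LogLevel.error
resolveLevel('', LogLevel.info); // LogLevel.info rather than mapping the empty string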
public/app/plugins/datasource/elasticsearch/elastic_response.ts

@@ -416,7 +416,6 @@ export class ElasticResponse {
   getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
     const dataFrame: DataFrame[] = [];
-    const docs: any[] = [];

     for (let n = 0; n < this.response.responses.length; n++) {
       const response = this.response.responses[n];
@@ -424,78 +423,18 @@ export class ElasticResponse {
         throw this.getErrorFromElasticResponse(this.response, response.error);
       }

-      // We keep a list of all props so that we can create all the fields in the dataFrame, this can lead
-      // to wide sparse dataframes in case the scheme is different per document.
-      let propNames: string[] = [];
-
-      for (const hit of response.hits.hits) {
-        const flattened = hit._source ? flatten(hit._source, null) : {};
-        const doc = {
-          _id: hit._id,
-          _type: hit._type,
-          _index: hit._index,
-          _source: { ...flattened },
-          ...flattened,
-        };
-
-        for (const propName of Object.keys(doc)) {
-          if (propNames.indexOf(propName) === -1) {
-            propNames.push(propName);
-          }
-        }
-
-        docs.push(doc);
-      }
+      const { propNames, docs } = flattenHits(response.hits.hits);

       if (docs.length > 0) {
-        propNames = propNames.sort();
-        const series = new MutableDataFrame({ fields: [] });
-
-        series.addField({
-          name: this.targets[0].timeField,
-          type: FieldType.time,
-        });
-
-        if (logMessageField) {
-          series.addField({
-            name: logMessageField,
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return v || '';
-          };
-        } else {
-          series.addField({
-            name: '_source',
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return JSON.stringify(v, null, 2);
-          };
-        }
-
-        if (logLevelField) {
-          series.addField({
-            name: 'level',
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return v || '';
-          };
-        }
-
-        for (const propName of propNames) {
-          if (propName === this.targets[0].timeField || propName === '_source') {
-            continue;
-          }
-
-          series.addField({
-            name: propName,
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return v || '';
-          };
-        }
+        const series = createEmptyDataFrame(propNames, this.targets[0].timeField, logMessageField, logLevelField);

         // Add a row for each document
         for (const doc of docs) {
+          if (logLevelField) {
+            // Remap level field based on the datasource config. This field is then used in explore to figure out the
+            // log level. We may rewrite some actual data in the level field if they are different.
+            doc['level'] = doc[logLevelField];
+          }
           series.add(doc);
         }
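The added block inside the row loop is the core of the change: whatever field the datasource configuration names (logLevelField) is copied onto each document's level property before the row is added to the frame, and it may overwrite a pre-existing level value. A minimal plain-TypeScript sketch of that remap (the remapLevel helper and sample rows are hypothetical, not the Grafana code, which mutates the doc in place):

type LogRow = Record<string, any>;

// Copy the configured level field onto `level`; the docs are already flattened,
// so nested fields arrive as dotted keys such as 'fields.lvl'.
const remapLevel = (rows: LogRow[], logLevelField?: string): LogRow[] =>
  logLevelField ? rows.map(row => ({ ...row, level: row[logLevelField] })) : rows;

const rows: LogRow[] = [
  { message: 'hello, i am a message', level: 'debug', 'fields.lvl': 'debug' },
  { message: 'hello, i am also message', level: 'error', 'fields.lvl': 'info' },
];

remapLevel(rows, 'fields.lvl').map(r => r.level); // ['debug', 'info'] - the second row's 'error' is overwritten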
@@ -522,3 +461,110 @@ export class ElasticResponse {
     return { data: dataFrame };
   }
 }
+
+type Doc = {
+  _id: string;
+  _type: string;
+  _index: string;
+  _source?: any;
+};
+
+/**
+ * Flatten the docs from response mainly the _source part which can be nested. This flattens it so that it is one level
+ * deep and the keys are: `level1Name.level2Name...`. Also returns list of all properties from all the docs (not all
+ * docs have to have the same keys).
+ * @param hits
+ */
+const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames: string[] } => {
+  const docs: any[] = [];
+  // We keep a list of all props so that we can create all the fields in the dataFrame, this can lead
+  // to wide sparse dataframes in case the scheme is different per document.
+  let propNames: string[] = [];
+
+  for (const hit of hits) {
+    const flattened = hit._source ? flatten(hit._source, null) : {};
+    const doc = {
+      _id: hit._id,
+      _type: hit._type,
+      _index: hit._index,
+      _source: { ...flattened },
+      ...flattened,
+    };
+
+    for (const propName of Object.keys(doc)) {
+      if (propNames.indexOf(propName) === -1) {
+        propNames.push(propName);
+      }
+    }
+
+    docs.push(doc);
+  }
+
+  propNames.sort();
+  return { docs, propNames };
+};
+
+/**
+ * Create empty dataframe but with created fields. Fields are based from propNames (should be from the response) and
+ * also from configuration specified fields for message, time, and level.
+ * @param propNames
+ * @param timeField
+ * @param logMessageField
+ * @param logLevelField
+ */
+const createEmptyDataFrame = (
+  propNames: string[],
+  timeField: string,
+  logMessageField?: string,
+  logLevelField?: string
+): MutableDataFrame => {
+  const series = new MutableDataFrame({ fields: [] });
+
+  series.addField({
+    name: timeField,
+    type: FieldType.time,
+  });
+
+  if (logMessageField) {
+    series.addField({
+      name: logMessageField,
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return v || '';
+    };
+  } else {
+    series.addField({
+      name: '_source',
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return JSON.stringify(v, null, 2);
+    };
+  }
+
+  if (logLevelField) {
+    series.addField({
+      name: 'level',
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return v || '';
+    };
+  }
+
+  const fieldNames = series.fields.map(field => field.name);
+
+  for (const propName of propNames) {
+    // Do not duplicate fields. This can mean that we will shadow some fields.
+    if (fieldNames.includes(propName)) {
+      continue;
+    }
+
+    series.addField({
+      name: propName,
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return v || '';
+    };
+  }
+
+  return series;
+};
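The new flattenHits helper leans on app/core/utils/flatten to collapse a nested _source into single-level dot-notation keys, and it collects the union of keys across all hits so createEmptyDataFrame can declare every field up front (documents missing a key simply leave that column sparse). A rough standalone sketch of the flattening and key-union idea, assuming a simple recursive flatten (the real util may differ, e.g. in how it treats arrays; flattenObj below is a stand-in):

// Stand-in for app/core/utils/flatten, illustration only.
const flattenObj = (obj: Record<string, any>, prefix = ''): Record<string, any> =>
  Object.entries(obj).reduce((acc, [key, value]) => {
    const name = prefix ? `${prefix}.${key}` : key;
    if (value && typeof value === 'object' && !Array.isArray(value)) {
      Object.assign(acc, flattenObj(value, name));
    } else {
      acc[name] = value;
    }
    return acc;
  }, {} as Record<string, any>);

const hits = [
  { _source: { '@timestamp': '2019-06-24T09:51:19.765Z', message: 'hi', fields: { lvl: 'debug' } } },
  { _source: { '@timestamp': '2019-06-24T09:52:19.765Z', host: 'web-1' } },
];

// Union of keys across all docs - the propNames that drive createEmptyDataFrame.
const propNames = Array.from(new Set(hits.flatMap(hit => Object.keys(flattenObj(hit._source))))).sort();
// ['@timestamp', 'fields.lvl', 'host', 'message']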
public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts

-import { DataFrameView, KeyValue, MutableDataFrame } from '@grafana/data';
+import { DataFrameView, FieldCache, KeyValue, MutableDataFrame } from '@grafana/data';
 import { ElasticResponse } from '../elastic_response';
 import flatten from 'app/core/utils/flatten';

 describe('ElasticResponse', () => {
   let targets;
@@ -827,71 +828,76 @@ describe('ElasticResponse', () => {
   });

   describe('simple logs query and count', () => {
-    beforeEach(() => {
-      targets = [
-        {
-          refId: 'A',
-          metrics: [{ type: 'count', id: '1' }],
-          bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
-          context: 'explore',
-          interval: '10s',
-          isLogsQuery: true,
-          key: 'Q-1561369883389-0.7611823271062786-0',
-          liveStreaming: false,
-          maxDataPoints: 1620,
-          query: '',
-          timeField: '@timestamp',
-        },
-      ];
-      response = {
-        responses: [
-          {
-            aggregations: {
-              '2': {
-                buckets: [
-                  {
-                    doc_count: 10,
-                    key: 1000,
-                  },
-                  {
-                    doc_count: 15,
-                    key: 2000,
-                  },
-                ],
-              },
-            },
-            hits: {
-              hits: [
-                {
-                  _id: 'fdsfs',
-                  _type: '_doc',
-                  _index: 'mock-index',
-                  _source: {
-                    '@timestamp': '2019-06-24T09:51:19.765Z',
-                    host: 'djisaodjsoad',
-                    message: 'hello, i am a message',
-                  },
-                },
-                {
-                  _id: 'kdospaidopa',
-                  _type: '_doc',
-                  _index: 'mock-index',
-                  _source: {
-                    '@timestamp': '2019-06-24T09:52:19.765Z',
-                    host: 'dsalkdakdop',
-                    message: 'hello, i am also message',
-                  },
-                },
-              ],
-            },
-          },
-        ],
-      };
-      result = new ElasticResponse(targets, response).getLogs();
-    });
+    const targets: any = [
+      {
+        refId: 'A',
+        metrics: [{ type: 'count', id: '1' }],
+        bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
+        context: 'explore',
+        interval: '10s',
+        isLogsQuery: true,
+        key: 'Q-1561369883389-0.7611823271062786-0',
+        liveStreaming: false,
+        maxDataPoints: 1620,
+        query: '',
+        timeField: '@timestamp',
+      },
+    ];
+    const response = {
+      responses: [
+        {
+          aggregations: {
+            '2': {
+              buckets: [
+                {
+                  doc_count: 10,
+                  key: 1000,
+                },
+                {
+                  doc_count: 15,
+                  key: 2000,
+                },
+              ],
+            },
+          },
+          hits: {
+            hits: [
+              {
+                _id: 'fdsfs',
+                _type: '_doc',
+                _index: 'mock-index',
+                _source: {
+                  '@timestamp': '2019-06-24T09:51:19.765Z',
+                  host: 'djisaodjsoad',
+                  message: 'hello, i am a message',
+                  level: 'debug',
+                  fields: {
+                    lvl: 'debug',
+                  },
+                },
+              },
+              {
+                _id: 'kdospaidopa',
+                _type: '_doc',
+                _index: 'mock-index',
+                _source: {
+                  '@timestamp': '2019-06-24T09:52:19.765Z',
+                  host: 'dsalkdakdop',
+                  message: 'hello, i am also message',
+                  level: 'error',
+                  fields: {
+                    lvl: 'info',
+                  },
+                },
+              },
+            ],
+          },
+        },
+      ],
+    };

     it('should return histogram aggregation and documents', () => {
+      const result = new ElasticResponse(targets, response).getLogs();
       expect(result.data.length).toBe(2);
       const logResults = result.data[0] as MutableDataFrame;
       const fields = logResults.fields.map(f => {

@@ -911,7 +917,7 @@ describe('ElasticResponse', () => {
       expect(r._id).toEqual(response.responses[0].hits.hits[i]._id);
       expect(r._type).toEqual(response.responses[0].hits.hits[i]._type);
       expect(r._index).toEqual(response.responses[0].hits.hits[i]._index);
-      expect(r._source).toEqual(response.responses[0].hits.hits[i]._source);
+      expect(r._source).toEqual(flatten(response.responses[0].hits.hits[i]._source, null));
     }

     // Make a map from the histogram results

@@ -927,5 +933,19 @@ describe('ElasticResponse', () => {
         expect(hist[bucket.key]).toEqual(bucket.doc_count);
       });
     });
+
+    it('should map levels field', () => {
+      const result = new ElasticResponse(targets, response).getLogs(undefined, 'level');
+      const fieldCache = new FieldCache(result.data[0]);
+      const field = fieldCache.getFieldByName('level');
+      expect(field.values.toArray()).toEqual(['debug', 'error']);
+    });
+
+    it('should re map levels field to new field', () => {
+      const result = new ElasticResponse(targets, response).getLogs(undefined, 'fields.lvl');
+      const fieldCache = new FieldCache(result.data[0]);
+      const field = fieldCache.getFieldByName('level');
+      expect(field.values.toArray()).toEqual(['debug', 'info']);
+    });
   });
 });