Commit 9ebd99d5 by Ivana Huckova, committed by GitHub

Explore: Create unique ids and deduplicate Loki logs (#21493)

parent 8616f1be
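
In short: every Loki log row now gets a deterministic id, an md5 hash of its timestamp, serialized labels, and line, and rows that hash to the same id are collapsed before display. A minimal sketch of the idea, assuming the md5 and lodash packages added in this commit (the row shape and sample values are hypothetical; the real implementations are createUid() in the Loki result transformer and deduplicateLogRowsById() in app/core/utils/explore, both below):

import _ from 'lodash';
import md5 from 'md5';

// Hypothetical, pared-down row shape; the real LogRowModel carries many more fields.
interface Row {
  uid: string;
  entry: string;
}

// Same recipe as createUid() below: hash timestamp + labels + line,
// so the same entry always produces the same id.
const createUid = (ts: string, labels: string, line: string): string => md5(`${ts}_${labels}_${line}`);

const makeRow = (ts: string, labels: string, line: string): Row => ({
  uid: createUid(ts, labels, line),
  entry: line,
});

const rows: Row[] = [
  makeRow('1970-01-01T00:00:00Z', 'foo="bar"', 'WARN boooo 1'),
  makeRow('1970-01-01T00:00:01Z', 'foo="bar"', 'WARN boooo 2'),
  makeRow('1970-01-01T00:00:01Z', 'foo="bar"', 'WARN boooo 2'), // duplicate entry
];

// _.uniqBy keeps the first row seen for each uid, dropping the duplicate.
console.log(_.uniqBy(rows, 'uid').length); // 2
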
......@@ -205,6 +205,7 @@
"@grafana/slate-react": "0.22.9-grafana",
"@reduxjs/toolkit": "1.2.1",
"@torkelo/react-select": "2.4.1",
"@types/md5": "^2.1.33",
"@types/react-loadable": "5.5.2",
"@types/react-virtualized-auto-sizer": "1.0.0",
"angular": "1.6.9",
......@@ -230,6 +231,7 @@
"lodash": "4.17.15",
"lru-cache": "^5.1.1",
"marked": "0.6.2",
"md5": "^2.2.1",
"memoize-one": "5.1.1",
"moment": "2.24.0",
"mousetrap": "1.6.3",
......
......@@ -307,6 +307,11 @@ describe('dataFrameToLogsModel', () => {
level: 'dbug',
},
},
{
name: 'id',
type: FieldType.string,
values: ['0'],
},
],
}),
toDataFrame({
......@@ -327,6 +332,11 @@ describe('dataFrameToLogsModel', () => {
level: 'err',
},
},
{
name: 'id',
type: FieldType.string,
values: ['1', '2'],
},
],
}),
];
......@@ -383,6 +393,11 @@ describe('dataFrameToLogsModel', () => {
level: 'dbug',
},
},
{
name: 'id',
type: FieldType.string,
values: ['0'],
},
],
}),
toDataFrame({
......@@ -402,6 +417,11 @@ describe('dataFrameToLogsModel', () => {
level: 'dbug',
},
},
{
name: 'id',
type: FieldType.string,
values: ['1'],
},
],
}),
toDataFrame({
......@@ -422,6 +442,11 @@ describe('dataFrameToLogsModel', () => {
level: 'err',
},
},
{
name: 'id',
type: FieldType.string,
values: ['2', '3'],
},
],
}),
];
......@@ -477,4 +502,98 @@ describe('dataFrameToLogsModel', () => {
const logsModel = dataFrameToLogsModel(series, 0, 'utc');
expect(logsModel.rows[0].uid).toBe('0');
});
it('given multiple series with equal ids should return expected logs model', () => {
const series: DataFrame[] = [
toDataFrame({
fields: [
{
name: 'ts',
type: FieldType.time,
values: ['1970-01-01T00:00:00Z'],
},
{
name: 'line',
type: FieldType.string,
values: ['WARN boooo 1'],
labels: {
foo: 'bar',
baz: '1',
level: 'dbug',
},
},
{
name: 'id',
type: FieldType.string,
values: ['0'],
},
],
}),
toDataFrame({
fields: [
{
name: 'ts',
type: FieldType.time,
values: ['1970-01-01T00:00:01Z'],
},
{
name: 'line',
type: FieldType.string,
values: ['WARN boooo 2'],
labels: {
foo: 'bar',
baz: '2',
level: 'dbug',
},
},
{
name: 'id',
type: FieldType.string,
values: ['1'],
},
],
}),
toDataFrame({
fields: [
{
name: 'ts',
type: FieldType.time,
values: ['1970-01-01T00:00:01Z'],
},
{
name: 'line',
type: FieldType.string,
values: ['WARN boooo 2'],
labels: {
foo: 'bar',
baz: '2',
level: 'dbug',
},
},
{
name: 'id',
type: FieldType.string,
values: ['1'],
},
],
}),
];
const logsModel = dataFrameToLogsModel(series, 0, 'utc');
expect(logsModel.hasUniqueLabels).toBeTruthy();
expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([
{
entry: 'WARN boooo 1',
labels: { foo: 'bar' },
logLevel: LogLevel.debug,
uniqueLabels: { baz: '1' },
},
{
entry: 'WARN boooo 2',
labels: { foo: 'bar' },
logLevel: LogLevel.debug,
uniqueLabels: { baz: '2' },
},
]);
});
});
......@@ -28,7 +28,7 @@ import {
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
import { hasAnsiCodes } from 'app/core/utils/text';
import { sortInAscendingOrder } from 'app/core/utils/explore';
import { sortInAscendingOrder, deduplicateLogRowsById } from 'app/core/utils/explore';
import { getGraphSeriesModel } from 'app/plugins/panel/graph2/getGraphSeriesModel';
export const LogLevelColor = {
......@@ -341,6 +341,8 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
}
}
const deduplicatedLogRows = deduplicateLogRowsById(rows);
// Meta data to display in status
const meta: LogsMetaItem[] = [];
if (_.size(commonLabels) > 0) {
......@@ -356,7 +358,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
if (limits.length > 0) {
meta.push({
label: 'Limit',
value: `${limits[0].meta.limit} (${rows.length} returned)`,
value: `${limits[0].meta.limit} (${deduplicatedLogRows.length} returned)`,
kind: LogsMetaKind.String,
});
}
......@@ -364,7 +366,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
return {
hasUniqueLabels,
meta,
rows,
rows: deduplicatedLogRows,
};
}
......
......@@ -534,3 +534,7 @@ export function getIntervals(range: TimeRange, lowLimit: string, resolution: num
return kbn.calculateInterval(range, resolution, lowLimit);
}
export function deduplicateLogRowsById(rows: LogRowModel[]) {
return _.uniqBy(rows, 'uid');
}
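
For reference, lodash's uniqBy keeps the first element encountered per key, so when overlapping batches arrive (for example during live tailing) the earliest copy of a row wins. A small illustration with hypothetical rows:

import _ from 'lodash';

const rows = [
  { uid: 'a', entry: 'first copy' },
  { uid: 'b', entry: 'another line' },
  { uid: 'a', entry: 'second copy' }, // same uid as the first row, dropped
];

console.log(_.uniqBy(rows, 'uid').map(r => r.entry));
// -> [ 'first copy', 'another line' ]
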
......@@ -105,7 +105,6 @@ export class ResultProcessor {
const newResults = dataFrameToLogsModel(this.dataFrames, this.intervalMs, this.timeZone);
const sortOrder = refreshIntervalToSortOrder(this.state.refreshInterval);
const sortedNewResults = sortLogsResult(newResults, sortOrder);
const rows = sortedNewResults.rows;
const series = sortedNewResults.series;
return { ...sortedNewResults, rows, series };
......
......@@ -50,7 +50,7 @@ describe('Live Stream Tests', () => {
const last = { ...view.get(view.length - 1) };
expect(last).toEqual({
ts: '2019-08-28T20:50:40.118944705Z',
id: '2019-08-28T20:50:40.118944705Z_{filename="/var/log/sntpc.log", job="varlogs"}',
id: '81d963f31c276ad2ea1af38b38436237',
line: 'Kittens',
labels: { filename: '/var/log/sntpc.log' },
});
......
......@@ -55,10 +55,10 @@ describe('loki result transformer', () => {
expect(data[0].fields[1].labels['foo']).toEqual('bar');
expect(data[0].fields[0].values.get(0)).toEqual(legacyStreamResult[0].entries[0].ts);
expect(data[0].fields[1].values.get(0)).toEqual(legacyStreamResult[0].entries[0].line);
expect(data[0].fields[2].values.get(0)).toEqual('1970-01-01T00:00:00Z_{foo="bar"}');
expect(data[0].fields[2].values.get(0)).toEqual('2764544e18dbc3fcbeee21a573e8cd1b');
expect(data[1].fields[0].values.get(0)).toEqual(legacyStreamResult[1].entries[0].ts);
expect(data[1].fields[1].values.get(0)).toEqual(legacyStreamResult[1].entries[0].line);
expect(data[1].fields[2].values.get(0)).toEqual('1970-01-01T00:00:00Z_{bar="foo"}');
expect(data[1].fields[2].values.get(0)).toEqual('55b7a68547c4c1c88827f13f3cb680ed');
});
});
......@@ -97,10 +97,10 @@ describe('loki result transformer', () => {
expect(data[0].fields[1].labels['foo']).toEqual('bar');
expect(data[0].fields[0].values.get(0)).toEqual(legacyStreamResult[0].entries[0].ts);
expect(data[0].fields[1].values.get(0)).toEqual(legacyStreamResult[0].entries[0].line);
expect(data[0].fields[2].values.get(0)).toEqual('1970-01-01T00:00:00Z_{foo="bar"}');
expect(data[0].fields[2].values.get(0)).toEqual('dc1e83aa5cd718b42a3cff50fa7e3a6a');
expect(data[1].fields[0].values.get(0)).toEqual(legacyStreamResult[1].entries[0].ts);
expect(data[1].fields[1].values.get(0)).toEqual(legacyStreamResult[1].entries[0].line);
expect(data[1].fields[2].values.get(0)).toEqual('1970-01-01T00:00:00Z_{bar="foo"}');
expect(data[1].fields[2].values.get(0)).toEqual('952fa23552daebbb5747c4e52fb9497d');
});
});
......@@ -139,7 +139,7 @@ describe('loki result transformer', () => {
ts: '1970-01-01T00:00:00Z',
line: "foo: 'bar'",
labels: { foo: 'bar' },
id: '1970-01-01T00:00:00Z_{foo="bar"}',
id: '2764544e18dbc3fcbeee21a573e8cd1b',
});
});
});
......
import _ from 'lodash';
import md5 from 'md5';
import {
parseLabels,
......@@ -45,6 +46,7 @@ export function legacyLogStreamToDataFrame(
if (!labels && stream.labels) {
labels = parseLabels(stream.labels);
}
const times = new ArrayVector<string>([]);
const lines = new ArrayVector<string>([]);
const uids = new ArrayVector<string>([]);
......@@ -53,7 +55,7 @@ export function legacyLogStreamToDataFrame(
const ts = entry.ts || entry.timestamp;
times.add(ts);
lines.add(entry.line);
uids.add(`${ts}_${stream.labels}`);
uids.add(createUid(ts, stream.labels, entry.line));
}
if (reverse) {
......@@ -80,17 +82,17 @@ export function lokiStreamResultToDataFrame(stream: LokiStreamResult, reverse?:
const uids = new ArrayVector<string>([]);
for (const [ts, line] of stream.values) {
const labelsString = Object.entries(labels)
.map(([key, val]) => `${key}="${val}"`)
.join('');
times.add(
dateTime(Number.parseFloat(ts) / 1e6)
.utc()
.format()
);
lines.add(line);
uids.add(
`${ts}_{${Object.entries(labels)
.map(([key, val]) => `${key}="${val}"`)
.join('')}}`
);
uids.add(createUid(ts, labelsString, line));
}
if (reverse) {
......@@ -145,7 +147,7 @@ export function appendLegacyResponseToBufferedData(response: LokiLegacyStreamRes
data.values.ts.add(ts);
data.values.line.add(entry.line);
data.values.labels.add(unique);
data.values.id.add(`${ts}_${stream.labels}`);
data.values.id.add(createUid(ts, stream.labels, entry.line));
}
}
}
......@@ -174,18 +176,22 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
// Add each line
for (const [ts, line] of stream.values) {
const uniqueLabelsString = Object.entries(unique)
.map(([key, val]) => `${key}="${val}"`)
.join('');
data.values.ts.add(parseInt(ts, 10) / 1e6);
data.values.line.add(line);
data.values.labels.add(unique);
data.values.id.add(
`${ts}_${Object.entries(unique)
.map(([key, val]) => `${key}=${val}`)
.join('')}`
);
data.values.id.add(createUid(ts, uniqueLabelsString, line));
}
}
}
function createUid(ts: string, labelsString: string, line: string): string {
return md5(`${ts}_${labelsString}_${line}`);
}
function lokiMatrixToTimeSeries(matrixResult: LokiMatrixResult, options: TransformerOptions): TimeSeries {
return {
target: createMetricLabel(matrixResult.metric, options),
......
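
The expected ids in the tests above are plain md5 hex digests of the `${ts}_${labelsString}_${line}` input. A quick sanity check (the input string here is illustrative; the exact labelsString format depends on which transformer produced the row):

import md5 from 'md5';

// md5() returns a 32-character lowercase hex digest that is stable across runs,
// which is what makes these ids usable for deduplication.
const id = md5('1970-01-01T00:00:00Z_foo="bar"_hello world');
console.log(id.length); // 32
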
......@@ -4298,6 +4298,13 @@
resolved "https://registry.yarnpkg.com/@types/marked/-/marked-0.6.5.tgz#3cf2a56ef615dad24aaf99784ef90a9eba4e29d8"
integrity sha512-6kBKf64aVfx93UJrcyEZ+OBM5nGv4RLsI6sR1Ar34bpgvGVRoyTgpxn4ZmtxOM5aDTAaaznYuYUH8bUX3Nk3YA==
"@types/md5@^2.1.33":
version "2.1.33"
resolved "https://registry.yarnpkg.com/@types/md5/-/md5-2.1.33.tgz#8c8dba30df4ad0e92296424f08c4898dd808e8df"
integrity sha512-8+X960EtKLoSblhauxLKy3zzotagjoj3Jt1Tx9oaxUdZEPIBl+mkrUz6PNKpzJgkrKSN9YgkWTA29c0KnLshmA==
dependencies:
"@types/node" "*"
"@types/mime@*":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@types/mime/-/mime-2.0.1.tgz#dc488842312a7f075149312905b5e3c0b054c79d"
......@@ -6902,6 +6909,11 @@ chardet@^0.7.0:
resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==
charenc@~0.0.1:
version "0.0.2"
resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
integrity sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=
check-more-types@2.24.0:
version "2.24.0"
resolved "https://registry.yarnpkg.com/check-more-types/-/check-more-types-2.24.0.tgz#1420ffb10fd444dcfc79b43891bbfffd32a84600"
......@@ -7861,6 +7873,11 @@ cross-spawn@^5.0.1:
shebang-command "^1.2.0"
which "^1.2.9"
crypt@~0.0.1:
version "0.0.2"
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
integrity sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=
crypto-browserify@^3.11.0:
version "3.12.0"
resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec"
......@@ -12121,7 +12138,7 @@ is-boolean-object@^1.0.0:
resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.0.0.tgz#98f8b28030684219a95f375cfbd88ce3405dff93"
integrity sha1-mPiygDBoQhmpXzdc+9iM40Bd/5M=
is-buffer@^1.0.2, is-buffer@^1.1.5:
is-buffer@^1.0.2, is-buffer@^1.1.5, is-buffer@~1.1.1:
version "1.1.6"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
......@@ -14340,6 +14357,15 @@ md5.js@^1.3.4:
inherits "^2.0.1"
safe-buffer "^5.1.2"
md5@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/md5/-/md5-2.2.1.tgz#53ab38d5fe3c8891ba465329ea23fac0540126f9"
integrity sha1-U6s41f48iJG6RlMp6iP6wFQBJvk=
dependencies:
charenc "~0.0.1"
crypt "~0.0.1"
is-buffer "~1.1.1"
mdast-squeeze-paragraphs@^3.0.0:
version "3.0.5"
resolved "https://registry.yarnpkg.com/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-3.0.5.tgz#f428b6b944f8faef454db9b58f170c4183cb2e61"
......