Commit 0fda3c4f by Andrej Ocenas Committed by GitHub

Explore: Fix context view in logs, where some rows may have been filtered out. (#21729)

* Fix timestamp formats and use uid to filter context rows

* Remove timestamps from tests
parent ed140346
......@@ -56,7 +56,6 @@ export interface LogRowModel {
logLevel: LogLevel;
raw: string;
searchWords?: string[];
timestamp: string; // ISO with nanosec precision
timeFromNow: string;
timeEpochMs: number;
timeLocal: string;
......
......@@ -20,7 +20,6 @@ const setup = (propOverrides?: Partial<Props>, rowOverrides?: Partial<LogRowMode
hasAnsi: false,
entry: '',
raw: '',
timestamp: '',
uid: '0',
labels: {},
...(rowOverrides || {}),
......
......@@ -3,12 +3,13 @@ import { getRowContexts } from './LogRowContextProvider';
describe('getRowContexts', () => {
describe('when called with a DataFrame and results are returned', () => {
it('then the result should be in correct format', async () => {
it('then the result should be in correct format and filtered', async () => {
const firstResult = new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'ts', type: FieldType.time, values: [3, 2, 1] },
{ name: 'line', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
{ name: 'id', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
],
});
const secondResult = new MutableDataFrame({
......@@ -16,25 +17,38 @@ describe('getRowContexts', () => {
fields: [
{ name: 'ts', type: FieldType.time, values: [6, 5, 4] },
{ name: 'line', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
{ name: 'id', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
],
});
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) {
called = true;
return Promise.resolve({ data: [firstResult] });
}
return Promise.resolve({ data: [secondResult] });
};
const result = await getRowContexts(getRowContextMock, row, 10);
expect(result).toEqual({ data: [[['3', '2']], [['6', '5', '4']]], errors: ['', ''] });
});
it('then the result should be in correct format and filtered without uid', async () => {
const firstResult = new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'ts', type: FieldType.time, values: [3, 2, 1] },
{ name: 'line', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
],
});
const secondResult = new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'ts', type: FieldType.time, values: [6, 5, 4] },
{ name: 'line', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
],
});
let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) {
......@@ -46,7 +60,7 @@ describe('getRowContexts', () => {
const result = await getRowContexts(getRowContextMock, row, 10);
expect(result).toEqual({ data: [[['3', '2', '1']], [['6', '5', '4']]], errors: ['', ''] });
expect(result).toEqual({ data: [[['3', '2', '1']], [['6', '5']]], errors: ['', ''] });
});
});
......@@ -54,23 +68,6 @@ describe('getRowContexts', () => {
it('then the result should be in correct format', async () => {
const firstError = new Error('Error 1');
const secondError = new Error('Error 2');
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
};
let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) {
......@@ -86,3 +83,19 @@ describe('getRowContexts', () => {
});
});
});
// Shared LogRowModel fixture for the getRowContexts tests. The context-filtering logic matches rows
// primarily by uid ('1') and falls back to timeEpochMs (4) when the frame has no 'id' field, so those
// two values are the ones the assertions above depend on.
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
// NOTE(review): double cast silences strictNullChecks for a field the tests never read — acceptable in a fixture.
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
uid: '1',
};
import { LogRowModel, toDataFrame, Field } from '@grafana/data';
import { useState, useEffect } from 'react';
import { LogRowModel, toDataFrame, Field, FieldCache } from '@grafana/data';
import React, { useState, useEffect } from 'react';
import flatten from 'lodash/flatten';
import useAsync from 'react-use/lib/useAsync';
......@@ -45,7 +45,8 @@ export const getRowContexts = async (
limit,
}),
getRowContext(row, {
limit: limit + 1, // Lets add one more to the limit as we're filtering out one row see comment below
// The start time is inclusive so we will get the one row we are using as context entry
limit: limit + 1,
direction: 'FORWARD',
}),
];
......@@ -62,16 +63,33 @@ export const getRowContexts = async (
const data: any[] = [];
for (let index = 0; index < dataResult.data.length; index++) {
const dataFrame = toDataFrame(dataResult.data[index]);
const timestampField: Field<string> = dataFrame.fields.filter(field => field.name === 'ts')[0];
const fieldCache = new FieldCache(dataFrame);
const timestampField: Field<string> = fieldCache.getFieldByName('ts')!;
const idField: Field<string> | undefined = fieldCache.getFieldByName('id');
for (let fieldIndex = 0; fieldIndex < timestampField.values.length; fieldIndex++) {
const timestamp = timestampField.values.get(fieldIndex);
// We need to filter out the row we're basing our search from because of how start/end params work in Loki API
// see https://github.com/grafana/loki/issues/597#issuecomment-506408980
// the alternative to create our own add 1 nanosecond method to the a timestamp string would be quite complex
if (timestamp === row.timestamp) {
continue;
// TODO: this filtering is datasource dependent, so it will make sense to move it there so the API
// returns the correct list of lines, handling inclusive ranges or filtering out the correct line in the
// datasource.
// Filter out the row that is the one used as a focal point for the context as we will get it in one of the
// requests.
if (idField) {
// For Loki this means we filter only the one row. Issue is we could have other rows logged at the same
// ns which came before but they come in the response that search for logs after. This means right now
// we will show those as if they came after. This is not strictly correct but seems better than losing them,
// and making this correct would mean quite a bit of complexity to shuffle things around and messing up
// counts.
if (idField.values.get(fieldIndex) === row.uid) {
continue;
}
} else {
// Fallback to timestamp. This should not happen right now as this feature is implemented only for Loki,
// which has an ID. Later this branch could be used in other datasources, but mind that this could also filter
// out logs which were logged with the same timestamp, which can be a problem depending on the precision.
if (parseInt(timestampField.values.get(fieldIndex), 10) === row.timeEpochMs) {
continue;
}
}
const lineField: Field<string> = dataFrame.fields.filter(field => field.name === 'line')[0];
......
......@@ -109,7 +109,6 @@ const makeLog = (overrides: Partial<LogRowModel>): LogRowModel => {
hasAnsi: false,
labels: {},
raw: entry,
timestamp: '',
timeFromNow: '',
timeEpochMs: 1,
timeLocal: '',
......
......@@ -223,7 +223,6 @@ describe('dataFrameToLogsModel', () => {
expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([
{
timestamp: '2019-04-26T09:28:11.352440161Z',
entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'info',
......@@ -231,7 +230,6 @@ describe('dataFrameToLogsModel', () => {
uid: 'foo',
},
{
timestamp: '2019-04-26T14:42:50.991981292Z',
entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'error',
......
......@@ -328,14 +328,13 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
timeFromNow: time.fromNow(),
timeEpochMs: time.valueOf(),
timeLocal: time.format(logTimeFormat),
timeUtc: toUtc(ts).format(logTimeFormat),
timeUtc: toUtc(time.valueOf()).format(logTimeFormat),
uniqueLabels,
hasAnsi,
searchWords,
entry: hasAnsi ? ansicolor.strip(message) : message,
raw: message,
labels: stringField.labels,
timestamp: ts,
uid: idField ? idField.values.get(j) : j.toString(),
});
}
......
......@@ -15,7 +15,15 @@ import {
} from './explore';
import { ExploreUrlState, ExploreMode } from 'app/types/explore';
import store from 'app/core/store';
import { DataQueryError, LogsDedupStrategy, LogsModel, LogLevel, dateTime, MutableDataFrame } from '@grafana/data';
import {
DataQueryError,
LogsDedupStrategy,
LogsModel,
LogLevel,
dateTime,
MutableDataFrame,
LogRowModel,
} from '@grafana/data';
import { RefreshPicker } from '@grafana/ui';
const DEFAULT_EXPLORE_STATE: ExploreUrlState = {
......@@ -372,11 +380,10 @@ describe('refreshIntervalToSortOrder', () => {
});
describe('sortLogsResult', () => {
const firstRow = {
const firstRow: LogRowModel = {
rowIndex: 0,
entryFieldIndex: 0,
dataFrame: new MutableDataFrame(),
timestamp: '2019-01-01T21:00:0.0000000Z',
entry: '',
hasAnsi: false,
labels: {},
......@@ -389,17 +396,16 @@ describe('sortLogsResult', () => {
uid: '1',
};
const sameAsFirstRow = firstRow;
const secondRow = {
const secondRow: LogRowModel = {
rowIndex: 1,
entryFieldIndex: 0,
dataFrame: new MutableDataFrame(),
timestamp: '2019-01-01T22:00:0.0000000Z',
entry: '',
hasAnsi: false,
labels: {},
logLevel: LogLevel.info,
raw: '',
timeEpochMs: 0,
timeEpochMs: 10,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
......
......@@ -474,11 +474,11 @@ export const getRefIds = (value: any): string[] => {
};
export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp < b.timestamp) {
if (a.timeEpochMs < b.timeEpochMs) {
return -1;
}
if (a.timestamp > b.timestamp) {
if (a.timeEpochMs > b.timeEpochMs) {
return 1;
}
......@@ -486,11 +486,11 @@ export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
};
const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp > b.timestamp) {
if (a.timeEpochMs > b.timeEpochMs) {
return -1;
}
if (a.timestamp < b.timestamp) {
if (a.timeEpochMs < b.timeEpochMs) {
return 1;
}
......
......@@ -70,7 +70,6 @@ const makeLog = (overides: Partial<LogRowModel>): LogRowModel => {
hasAnsi: false,
labels: {},
raw: entry,
timestamp: '',
timeFromNow: '',
timeEpochMs: 1,
timeLocal: '',
......
......@@ -187,7 +187,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 300,
uid: '2',
uniqueLabels: {},
},
......@@ -205,7 +204,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 200,
uid: '1',
uniqueLabels: {},
},
......@@ -223,7 +221,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 100,
uid: '0',
uniqueLabels: {},
},
......
import LokiDatasource, { RangeQueryOptions } from './datasource';
import { LokiQuery, LokiResultType, LokiResponse, LokiLegacyStreamResponse } from './types';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
import { AnnotationQueryRequest, DataSourceApi, DataFrame, dateTime, TimeRange } from '@grafana/data';
import { AnnotationQueryRequest, DataSourceApi, DataFrame, dateTime, TimeRange, FieldCache } from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { CustomVariable } from 'app/features/templating/custom_variable';
import { makeMockLokiDatasource } from './mocks';
......@@ -211,7 +211,8 @@ describe('LokiDatasource', () => {
const res = await ds.query(options).toPromise();
const dataFrame = res.data[0] as DataFrame;
expect(dataFrame.fields[1].values.get(0)).toBe('hello');
const fieldCache = new FieldCache(dataFrame);
expect(fieldCache.getFieldByName('line').values.get(0)).toBe('hello');
expect(dataFrame.meta.limit).toBe(20);
expect(dataFrame.meta.searchWords).toEqual(['foo']);
});
......
......@@ -4,7 +4,7 @@ import { Observable, from, merge, of, iif, defer } from 'rxjs';
import { map, filter, catchError, switchMap, mergeMap } from 'rxjs/operators';
// Services & Utils
import { dateMath } from '@grafana/data';
import { DataFrame, dateMath, FieldCache } from '@grafana/data';
import { addLabelToSelector, keepSelectorFilters } from 'app/plugins/datasource/prometheus/add_label_to_query';
import { DatasourceRequestOptions } from 'app/core/services/backend_srv';
import { getBackendSrv } from '@grafana/runtime';
......@@ -466,7 +466,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return Math.ceil(date.valueOf() * 1e6);
}
getLogRowContext = (row: LogRowModel, options?: LokiContextQueryOptions) => {
getLogRowContext = (row: LogRowModel, options?: LokiContextQueryOptions): Promise<{ data: DataFrame[] }> => {
const target = this.prepareLogRowContextQueryTarget(
row,
(options && options.limit) || 10,
......@@ -518,8 +518,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
.map(label => `${label}="${row.labels[label]}"`)
.join(',');
const contextTimeBuffer = 2 * 60 * 60 * 1000 * 1e6; // 2h buffer
const timeEpochNs = row.timeEpochMs * 1e6;
const contextTimeBuffer = 2 * 60 * 60 * 1000; // 2h buffer
const commonTargetOptions = {
limit,
query: `{${query}}`,
......@@ -527,18 +526,27 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
direction,
};
const fieldCache = new FieldCache(row.dataFrame);
const nsField = fieldCache.getFieldByName('tsNs')!;
const nsTimestamp = nsField.values.get(row.rowIndex);
if (direction === 'BACKWARD') {
return {
...commonTargetOptions,
start: timeEpochNs - contextTimeBuffer,
end: timeEpochNs, // using RFC3339Nano format to avoid precision loss
// convert to ns; we lose some precision here but it is not that important at the far points of the context
start: row.timeEpochMs - contextTimeBuffer + '000000',
end: nsTimestamp,
direction,
};
} else {
return {
...commonTargetOptions,
start: timeEpochNs, // start param in Loki API is inclusive so we'll have to filter out the row that this request is based from
end: timeEpochNs + contextTimeBuffer,
// start param in Loki API is inclusive so we'll have to filter out the row that this request is based from
// and any others that were logged in the same ns but before the row. Right now these rows will be lost
// because they are before, but came in the response that should return only rows after.
start: nsTimestamp,
// convert to ns; we lose some precision here but it is not that important at the far points of the context
end: row.timeEpochMs + contextTimeBuffer + '000000',
};
}
};
......
......@@ -67,6 +67,7 @@ export class LiveStreams {
const data = new CircularDataFrame({ capacity: target.size });
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
data.addField({ name: 'tsNs', type: FieldType.time, config: { title: 'Time ns' } });
data.addField({ name: 'line', type: FieldType.string }).labels = parseLabels(target.query);
data.addField({ name: 'labels', type: FieldType.other }); // The labels for each line
data.addField({ name: 'id', type: FieldType.string });
......
......@@ -28,13 +28,13 @@ const streamResult: LokiStreamResult[] = [
stream: {
foo: 'bar',
},
values: [['1970-01-01T00:00:00Z', "foo: 'bar'"]],
values: [['1579857562021616000', "foo: 'bar'"]],
},
{
stream: {
bar: 'foo',
},
values: [['1970-01-01T00:00:00Z', "bar: 'foo'"]],
values: [['1579857562031616000', "bar: 'foo'"]],
},
];
......@@ -95,12 +95,12 @@ describe('loki result transformer', () => {
expect(data.length).toBe(2);
expect(data[0].fields[1].labels['foo']).toEqual('bar');
expect(data[0].fields[0].values.get(0)).toEqual(legacyStreamResult[0].entries[0].ts);
expect(data[0].fields[1].values.get(0)).toEqual(legacyStreamResult[0].entries[0].line);
expect(data[0].fields[2].values.get(0)).toEqual('dc1e83aa5cd718b42a3cff50fa7e3a6a');
expect(data[1].fields[0].values.get(0)).toEqual(legacyStreamResult[1].entries[0].ts);
expect(data[1].fields[1].values.get(0)).toEqual(legacyStreamResult[1].entries[0].line);
expect(data[1].fields[2].values.get(0)).toEqual('952fa23552daebbb5747c4e52fb9497d');
expect(data[0].fields[0].values.get(0)).toEqual('2020-01-24T09:19:22.021Z');
expect(data[0].fields[1].values.get(0)).toEqual(streamResult[0].values[0][1]);
expect(data[0].fields[2].values.get(0)).toEqual('2b431b8a98b80b3b2c2f4cd2444ae6cb');
expect(data[1].fields[0].values.get(0)).toEqual('2020-01-24T09:19:22.031Z');
expect(data[1].fields[1].values.get(0)).toEqual(streamResult[1].values[0][1]);
expect(data[1].fields[2].values.get(0)).toEqual('75d73d66cff40f9d1a1f2d5a0bf295d0');
});
});
......
......@@ -10,9 +10,9 @@ import {
ArrayVector,
MutableDataFrame,
findUniqueLabels,
dateTime,
FieldConfig,
DataFrameView,
dateTime,
} from '@grafana/data';
import templateSrv from 'app/features/templating/template_srv';
import TableModel from 'app/core/table_model';
......@@ -35,7 +35,7 @@ import { formatQuery, getHighlighterExpressionsFromQuery } from './query_utils';
import { of } from 'rxjs';
/**
* Transforms LokiLogStream structure into a dataFrame. Used when doing standard queries.
* Transforms LokiLogStream structure into a dataFrame. Used when doing standard queries and older version of Loki.
*/
export function legacyLogStreamToDataFrame(
stream: LokiLegacyStreamResult,
......@@ -48,67 +48,80 @@ export function legacyLogStreamToDataFrame(
}
const times = new ArrayVector<string>([]);
const timesNs = new ArrayVector<string>([]);
const lines = new ArrayVector<string>([]);
const uids = new ArrayVector<string>([]);
for (const entry of stream.entries) {
const ts = entry.ts || entry.timestamp;
// iso string with nano precision, will be truncated but is parse-able
times.add(ts);
// So this matches the new format. We are losing precision here, which sucks, but there is no easy way to keep it and this
// is for old pre 1.0.0 version Loki so probably does not affect that much.
timesNs.add(dateTime(ts).valueOf() + '000000');
lines.add(entry.line);
uids.add(createUid(ts, stream.labels, entry.line));
}
if (reverse) {
times.buffer = times.buffer.reverse();
lines.buffer = lines.buffer.reverse();
}
return {
refId,
fields: [
{ name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
{ name: 'line', type: FieldType.string, config: {}, values: lines, labels }, // Line
{ name: 'id', type: FieldType.string, config: {}, values: uids },
],
length: times.length,
};
return constructDataFrame(times, timesNs, lines, uids, labels, reverse, refId);
}
/**
* Transforms LokiStreamResult structure into a dataFrame. Used when doing standard queries and newer version of Loki.
*/
export function lokiStreamResultToDataFrame(stream: LokiStreamResult, reverse?: boolean, refId?: string): DataFrame {
const labels: Labels = stream.stream;
const labelsString = Object.entries(labels)
.map(([key, val]) => `${key}="${val}"`)
.sort()
.join('');
const times = new ArrayVector<string>([]);
const timesNs = new ArrayVector<string>([]);
const lines = new ArrayVector<string>([]);
const uids = new ArrayVector<string>([]);
for (const [ts, line] of stream.values) {
const labelsString = Object.entries(labels)
.map(([key, val]) => `${key}="${val}"`)
.join('');
times.add(
dateTime(Number.parseFloat(ts) / 1e6)
.utc()
.format()
);
// num ns epoch in string, we convert it to iso string here so it matches old format
times.add(new Date(parseInt(ts.substr(0, ts.length - 6), 10)).toISOString());
timesNs.add(ts);
lines.add(line);
uids.add(createUid(ts, labelsString, line));
}
if (reverse) {
times.buffer = times.buffer.reverse();
lines.buffer = lines.buffer.reverse();
}
return constructDataFrame(times, timesNs, lines, uids, labels, reverse, refId);
}
return {
/**
* Constructs dataFrame with supplied fields and other data. Also makes sure it is properly reversed if needed.
*/
function constructDataFrame(
times: ArrayVector<string>,
timesNs: ArrayVector<string>,
lines: ArrayVector<string>,
uids: ArrayVector<string>,
labels: Labels,
reverse?: boolean,
refId?: string
) {
const dataFrame = {
refId,
fields: [
{ name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
{ name: 'line', type: FieldType.string, config: {}, values: lines, labels }, // Line
{ name: 'id', type: FieldType.string, config: {}, values: uids },
{ name: 'tsNs', type: FieldType.time, config: { title: 'Time ns' }, values: timesNs }, // Time
],
length: times.length,
};
if (reverse) {
const mutableDataFrame = new MutableDataFrame(dataFrame);
mutableDataFrame.reverse();
return mutableDataFrame;
}
return dataFrame;
}
/**
......@@ -173,17 +186,18 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
for (const stream of streams) {
// Find unique labels
const unique = findUniqueLabels(stream.stream, baseLabels);
const allLabelsString = Object.entries(stream.stream)
.map(([key, val]) => `${key}="${val}"`)
.sort()
.join('');
// Add each line
for (const [ts, line] of stream.values) {
const uniqueLabelsString = Object.entries(unique)
.map(([key, val]) => `${key}="${val}"`)
.join('');
data.values.ts.add(parseInt(ts, 10) / 1e6);
data.values.ts.add(ts.substr(0, ts.length - 6));
data.values.tsNs.add(ts);
data.values.line.add(line);
data.values.labels.add(unique);
data.values.id.add(createUid(ts, uniqueLabelsString, line));
data.values.id.add(createUid(ts, allLabelsString, line));
}
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment