Commit 0fda3c4f by Andrej Ocenas Committed by GitHub

Explore: Fix context view in logs, where some rows may have been filtered out. (#21729)

* Fix timestamp formats and use uid to filter context rows

* Remove timestamps from tests
parent ed140346
...@@ -56,7 +56,6 @@ export interface LogRowModel { ...@@ -56,7 +56,6 @@ export interface LogRowModel {
logLevel: LogLevel; logLevel: LogLevel;
raw: string; raw: string;
searchWords?: string[]; searchWords?: string[];
timestamp: string; // ISO with nanosec precision
timeFromNow: string; timeFromNow: string;
timeEpochMs: number; timeEpochMs: number;
timeLocal: string; timeLocal: string;
......
...@@ -20,7 +20,6 @@ const setup = (propOverrides?: Partial<Props>, rowOverrides?: Partial<LogRowMode ...@@ -20,7 +20,6 @@ const setup = (propOverrides?: Partial<Props>, rowOverrides?: Partial<LogRowMode
hasAnsi: false, hasAnsi: false,
entry: '', entry: '',
raw: '', raw: '',
timestamp: '',
uid: '0', uid: '0',
labels: {}, labels: {},
...(rowOverrides || {}), ...(rowOverrides || {}),
......
...@@ -3,12 +3,13 @@ import { getRowContexts } from './LogRowContextProvider'; ...@@ -3,12 +3,13 @@ import { getRowContexts } from './LogRowContextProvider';
describe('getRowContexts', () => { describe('getRowContexts', () => {
describe('when called with a DataFrame and results are returned', () => { describe('when called with a DataFrame and results are returned', () => {
it('then the result should be in correct format', async () => { it('then the result should be in correct format and filtered', async () => {
const firstResult = new MutableDataFrame({ const firstResult = new MutableDataFrame({
refId: 'B', refId: 'B',
fields: [ fields: [
{ name: 'ts', type: FieldType.time, values: [3, 2, 1] }, { name: 'ts', type: FieldType.time, values: [3, 2, 1] },
{ name: 'line', type: FieldType.string, values: ['3', '2', '1'], labels: {} }, { name: 'line', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
{ name: 'id', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
], ],
}); });
const secondResult = new MutableDataFrame({ const secondResult = new MutableDataFrame({
...@@ -16,25 +17,38 @@ describe('getRowContexts', () => { ...@@ -16,25 +17,38 @@ describe('getRowContexts', () => {
fields: [ fields: [
{ name: 'ts', type: FieldType.time, values: [6, 5, 4] }, { name: 'ts', type: FieldType.time, values: [6, 5, 4] },
{ name: 'line', type: FieldType.string, values: ['6', '5', '4'], labels: {} }, { name: 'line', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
{ name: 'id', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
], ],
}); });
const row: LogRowModel = { let called = false;
entryFieldIndex: 0, const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
rowIndex: 0, if (!called) {
dataFrame: new MutableDataFrame(), called = true;
entry: '4', return Promise.resolve({ data: [firstResult] });
labels: (null as any) as Labels, }
hasAnsi: false, return Promise.resolve({ data: [secondResult] });
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
}; };
const result = await getRowContexts(getRowContextMock, row, 10);
expect(result).toEqual({ data: [[['3', '2']], [['6', '5', '4']]], errors: ['', ''] });
});
it('then the result should be in correct format and filtered without uid', async () => {
const firstResult = new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'ts', type: FieldType.time, values: [3, 2, 1] },
{ name: 'line', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
],
});
const secondResult = new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'ts', type: FieldType.time, values: [6, 5, 4] },
{ name: 'line', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
],
});
let called = false; let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => { const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) { if (!called) {
...@@ -46,7 +60,7 @@ describe('getRowContexts', () => { ...@@ -46,7 +60,7 @@ describe('getRowContexts', () => {
const result = await getRowContexts(getRowContextMock, row, 10); const result = await getRowContexts(getRowContextMock, row, 10);
expect(result).toEqual({ data: [[['3', '2', '1']], [['6', '5', '4']]], errors: ['', ''] }); expect(result).toEqual({ data: [[['3', '2', '1']], [['6', '5']]], errors: ['', ''] });
}); });
}); });
...@@ -54,23 +68,6 @@ describe('getRowContexts', () => { ...@@ -54,23 +68,6 @@ describe('getRowContexts', () => {
it('then the result should be in correct format', async () => { it('then the result should be in correct format', async () => {
const firstError = new Error('Error 1'); const firstError = new Error('Error 1');
const secondError = new Error('Error 2'); const secondError = new Error('Error 2');
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
};
let called = false; let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => { const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) { if (!called) {
...@@ -86,3 +83,19 @@ describe('getRowContexts', () => { ...@@ -86,3 +83,19 @@ describe('getRowContexts', () => {
}); });
}); });
}); });
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
uid: '1',
};
import { LogRowModel, toDataFrame, Field } from '@grafana/data'; import { LogRowModel, toDataFrame, Field, FieldCache } from '@grafana/data';
import { useState, useEffect } from 'react'; import React, { useState, useEffect } from 'react';
import flatten from 'lodash/flatten'; import flatten from 'lodash/flatten';
import useAsync from 'react-use/lib/useAsync'; import useAsync from 'react-use/lib/useAsync';
...@@ -45,7 +45,8 @@ export const getRowContexts = async ( ...@@ -45,7 +45,8 @@ export const getRowContexts = async (
limit, limit,
}), }),
getRowContext(row, { getRowContext(row, {
limit: limit + 1, // Lets add one more to the limit as we're filtering out one row see comment below // The start time is inclusive so we will get the one row we are using as context entry
limit: limit + 1,
direction: 'FORWARD', direction: 'FORWARD',
}), }),
]; ];
...@@ -62,16 +63,33 @@ export const getRowContexts = async ( ...@@ -62,16 +63,33 @@ export const getRowContexts = async (
const data: any[] = []; const data: any[] = [];
for (let index = 0; index < dataResult.data.length; index++) { for (let index = 0; index < dataResult.data.length; index++) {
const dataFrame = toDataFrame(dataResult.data[index]); const dataFrame = toDataFrame(dataResult.data[index]);
const timestampField: Field<string> = dataFrame.fields.filter(field => field.name === 'ts')[0]; const fieldCache = new FieldCache(dataFrame);
const timestampField: Field<string> = fieldCache.getFieldByName('ts')!;
const idField: Field<string> | undefined = fieldCache.getFieldByName('id');
for (let fieldIndex = 0; fieldIndex < timestampField.values.length; fieldIndex++) { for (let fieldIndex = 0; fieldIndex < timestampField.values.length; fieldIndex++) {
const timestamp = timestampField.values.get(fieldIndex); // TODO: this filtering is datasource dependent so it will make sense to move it there so the API is
// to return correct list of lines handling inclusive ranges or how to filter the correct line on the
// We need to filter out the row we're basing our search from because of how start/end params work in Loki API // datasource.
// see https://github.com/grafana/loki/issues/597#issuecomment-506408980
// the alternative to create our own add 1 nanosecond method to the a timestamp string would be quite complex // Filter out the row that is the one used as a focal point for the context as we will get it in one of the
if (timestamp === row.timestamp) { // requests.
continue; if (idField) {
// For Loki this means we filter only the one row. Issue is we could have other rows logged at the same
// ns which came before but they come in the response that search for logs after. This means right now
// we will show those as if they came after. This is not strictly correct but seems better than losing them
// and making this correct would mean quite a bit of complexity to shuffle things around and messing up
// counts.
if (idField.values.get(fieldIndex) === row.uid) {
continue;
}
} else {
// Fallback to timestamp. This should not happen right now as this feature is implemented only for Loki
// and that has ID. Later this branch could be used in other DS but mind that this could also filter out
// logs which were logged in the same timestamp and that can be a problem depending on the precision.
if (parseInt(timestampField.values.get(fieldIndex), 10) === row.timeEpochMs) {
continue;
}
} }
const lineField: Field<string> = dataFrame.fields.filter(field => field.name === 'line')[0]; const lineField: Field<string> = dataFrame.fields.filter(field => field.name === 'line')[0];
......
...@@ -109,7 +109,6 @@ const makeLog = (overrides: Partial<LogRowModel>): LogRowModel => { ...@@ -109,7 +109,6 @@ const makeLog = (overrides: Partial<LogRowModel>): LogRowModel => {
hasAnsi: false, hasAnsi: false,
labels: {}, labels: {},
raw: entry, raw: entry,
timestamp: '',
timeFromNow: '', timeFromNow: '',
timeEpochMs: 1, timeEpochMs: 1,
timeLocal: '', timeLocal: '',
......
...@@ -223,7 +223,6 @@ describe('dataFrameToLogsModel', () => { ...@@ -223,7 +223,6 @@ describe('dataFrameToLogsModel', () => {
expect(logsModel.rows).toHaveLength(2); expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([ expect(logsModel.rows).toMatchObject([
{ {
timestamp: '2019-04-26T09:28:11.352440161Z',
entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server', entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' }, labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'info', logLevel: 'info',
...@@ -231,7 +230,6 @@ describe('dataFrameToLogsModel', () => { ...@@ -231,7 +230,6 @@ describe('dataFrameToLogsModel', () => {
uid: 'foo', uid: 'foo',
}, },
{ {
timestamp: '2019-04-26T14:42:50.991981292Z',
entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7', entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' }, labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'error', logLevel: 'error',
......
...@@ -328,14 +328,13 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi ...@@ -328,14 +328,13 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
timeFromNow: time.fromNow(), timeFromNow: time.fromNow(),
timeEpochMs: time.valueOf(), timeEpochMs: time.valueOf(),
timeLocal: time.format(logTimeFormat), timeLocal: time.format(logTimeFormat),
timeUtc: toUtc(ts).format(logTimeFormat), timeUtc: toUtc(time.valueOf()).format(logTimeFormat),
uniqueLabels, uniqueLabels,
hasAnsi, hasAnsi,
searchWords, searchWords,
entry: hasAnsi ? ansicolor.strip(message) : message, entry: hasAnsi ? ansicolor.strip(message) : message,
raw: message, raw: message,
labels: stringField.labels, labels: stringField.labels,
timestamp: ts,
uid: idField ? idField.values.get(j) : j.toString(), uid: idField ? idField.values.get(j) : j.toString(),
}); });
} }
......
...@@ -15,7 +15,15 @@ import { ...@@ -15,7 +15,15 @@ import {
} from './explore'; } from './explore';
import { ExploreUrlState, ExploreMode } from 'app/types/explore'; import { ExploreUrlState, ExploreMode } from 'app/types/explore';
import store from 'app/core/store'; import store from 'app/core/store';
import { DataQueryError, LogsDedupStrategy, LogsModel, LogLevel, dateTime, MutableDataFrame } from '@grafana/data'; import {
DataQueryError,
LogsDedupStrategy,
LogsModel,
LogLevel,
dateTime,
MutableDataFrame,
LogRowModel,
} from '@grafana/data';
import { RefreshPicker } from '@grafana/ui'; import { RefreshPicker } from '@grafana/ui';
const DEFAULT_EXPLORE_STATE: ExploreUrlState = { const DEFAULT_EXPLORE_STATE: ExploreUrlState = {
...@@ -372,11 +380,10 @@ describe('refreshIntervalToSortOrder', () => { ...@@ -372,11 +380,10 @@ describe('refreshIntervalToSortOrder', () => {
}); });
describe('sortLogsResult', () => { describe('sortLogsResult', () => {
const firstRow = { const firstRow: LogRowModel = {
rowIndex: 0, rowIndex: 0,
entryFieldIndex: 0, entryFieldIndex: 0,
dataFrame: new MutableDataFrame(), dataFrame: new MutableDataFrame(),
timestamp: '2019-01-01T21:00:0.0000000Z',
entry: '', entry: '',
hasAnsi: false, hasAnsi: false,
labels: {}, labels: {},
...@@ -389,17 +396,16 @@ describe('sortLogsResult', () => { ...@@ -389,17 +396,16 @@ describe('sortLogsResult', () => {
uid: '1', uid: '1',
}; };
const sameAsFirstRow = firstRow; const sameAsFirstRow = firstRow;
const secondRow = { const secondRow: LogRowModel = {
rowIndex: 1, rowIndex: 1,
entryFieldIndex: 0, entryFieldIndex: 0,
dataFrame: new MutableDataFrame(), dataFrame: new MutableDataFrame(),
timestamp: '2019-01-01T22:00:0.0000000Z',
entry: '', entry: '',
hasAnsi: false, hasAnsi: false,
labels: {}, labels: {},
logLevel: LogLevel.info, logLevel: LogLevel.info,
raw: '', raw: '',
timeEpochMs: 0, timeEpochMs: 10,
timeFromNow: '', timeFromNow: '',
timeLocal: '', timeLocal: '',
timeUtc: '', timeUtc: '',
......
...@@ -474,11 +474,11 @@ export const getRefIds = (value: any): string[] => { ...@@ -474,11 +474,11 @@ export const getRefIds = (value: any): string[] => {
}; };
export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => { export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp < b.timestamp) { if (a.timeEpochMs < b.timeEpochMs) {
return -1; return -1;
} }
if (a.timestamp > b.timestamp) { if (a.timeEpochMs > b.timeEpochMs) {
return 1; return 1;
} }
...@@ -486,11 +486,11 @@ export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => { ...@@ -486,11 +486,11 @@ export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
}; };
const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => { const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp > b.timestamp) { if (a.timeEpochMs > b.timeEpochMs) {
return -1; return -1;
} }
if (a.timestamp < b.timestamp) { if (a.timeEpochMs < b.timeEpochMs) {
return 1; return 1;
} }
......
...@@ -70,7 +70,6 @@ const makeLog = (overides: Partial<LogRowModel>): LogRowModel => { ...@@ -70,7 +70,6 @@ const makeLog = (overides: Partial<LogRowModel>): LogRowModel => {
hasAnsi: false, hasAnsi: false,
labels: {}, labels: {},
raw: entry, raw: entry,
timestamp: '',
timeFromNow: '', timeFromNow: '',
timeEpochMs: 1, timeEpochMs: 1,
timeLocal: '', timeLocal: '',
......
...@@ -187,7 +187,6 @@ describe('ResultProcessor', () => { ...@@ -187,7 +187,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked', timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked', timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked', timeUtc: 'format() jest mocked',
timestamp: 300,
uid: '2', uid: '2',
uniqueLabels: {}, uniqueLabels: {},
}, },
...@@ -205,7 +204,6 @@ describe('ResultProcessor', () => { ...@@ -205,7 +204,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked', timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked', timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked', timeUtc: 'format() jest mocked',
timestamp: 200,
uid: '1', uid: '1',
uniqueLabels: {}, uniqueLabels: {},
}, },
...@@ -223,7 +221,6 @@ describe('ResultProcessor', () => { ...@@ -223,7 +221,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked', timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked', timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked', timeUtc: 'format() jest mocked',
timestamp: 100,
uid: '0', uid: '0',
uniqueLabels: {}, uniqueLabels: {},
}, },
......
import LokiDatasource, { RangeQueryOptions } from './datasource'; import LokiDatasource, { RangeQueryOptions } from './datasource';
import { LokiQuery, LokiResultType, LokiResponse, LokiLegacyStreamResponse } from './types'; import { LokiQuery, LokiResultType, LokiResponse, LokiLegacyStreamResponse } from './types';
import { getQueryOptions } from 'test/helpers/getQueryOptions'; import { getQueryOptions } from 'test/helpers/getQueryOptions';
import { AnnotationQueryRequest, DataSourceApi, DataFrame, dateTime, TimeRange } from '@grafana/data'; import { AnnotationQueryRequest, DataSourceApi, DataFrame, dateTime, TimeRange, FieldCache } from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv'; import { TemplateSrv } from 'app/features/templating/template_srv';
import { CustomVariable } from 'app/features/templating/custom_variable'; import { CustomVariable } from 'app/features/templating/custom_variable';
import { makeMockLokiDatasource } from './mocks'; import { makeMockLokiDatasource } from './mocks';
...@@ -211,7 +211,8 @@ describe('LokiDatasource', () => { ...@@ -211,7 +211,8 @@ describe('LokiDatasource', () => {
const res = await ds.query(options).toPromise(); const res = await ds.query(options).toPromise();
const dataFrame = res.data[0] as DataFrame; const dataFrame = res.data[0] as DataFrame;
expect(dataFrame.fields[1].values.get(0)).toBe('hello'); const fieldCache = new FieldCache(dataFrame);
expect(fieldCache.getFieldByName('line').values.get(0)).toBe('hello');
expect(dataFrame.meta.limit).toBe(20); expect(dataFrame.meta.limit).toBe(20);
expect(dataFrame.meta.searchWords).toEqual(['foo']); expect(dataFrame.meta.searchWords).toEqual(['foo']);
}); });
......
...@@ -4,7 +4,7 @@ import { Observable, from, merge, of, iif, defer } from 'rxjs'; ...@@ -4,7 +4,7 @@ import { Observable, from, merge, of, iif, defer } from 'rxjs';
import { map, filter, catchError, switchMap, mergeMap } from 'rxjs/operators'; import { map, filter, catchError, switchMap, mergeMap } from 'rxjs/operators';
// Services & Utils // Services & Utils
import { dateMath } from '@grafana/data'; import { DataFrame, dateMath, FieldCache } from '@grafana/data';
import { addLabelToSelector, keepSelectorFilters } from 'app/plugins/datasource/prometheus/add_label_to_query'; import { addLabelToSelector, keepSelectorFilters } from 'app/plugins/datasource/prometheus/add_label_to_query';
import { DatasourceRequestOptions } from 'app/core/services/backend_srv'; import { DatasourceRequestOptions } from 'app/core/services/backend_srv';
import { getBackendSrv } from '@grafana/runtime'; import { getBackendSrv } from '@grafana/runtime';
...@@ -466,7 +466,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> { ...@@ -466,7 +466,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return Math.ceil(date.valueOf() * 1e6); return Math.ceil(date.valueOf() * 1e6);
} }
getLogRowContext = (row: LogRowModel, options?: LokiContextQueryOptions) => { getLogRowContext = (row: LogRowModel, options?: LokiContextQueryOptions): Promise<{ data: DataFrame[] }> => {
const target = this.prepareLogRowContextQueryTarget( const target = this.prepareLogRowContextQueryTarget(
row, row,
(options && options.limit) || 10, (options && options.limit) || 10,
...@@ -518,8 +518,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> { ...@@ -518,8 +518,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
.map(label => `${label}="${row.labels[label]}"`) .map(label => `${label}="${row.labels[label]}"`)
.join(','); .join(',');
const contextTimeBuffer = 2 * 60 * 60 * 1000 * 1e6; // 2h buffer const contextTimeBuffer = 2 * 60 * 60 * 1000; // 2h buffer
const timeEpochNs = row.timeEpochMs * 1e6;
const commonTargetOptions = { const commonTargetOptions = {
limit, limit,
query: `{${query}}`, query: `{${query}}`,
...@@ -527,18 +526,27 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> { ...@@ -527,18 +526,27 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
direction, direction,
}; };
const fieldCache = new FieldCache(row.dataFrame);
const nsField = fieldCache.getFieldByName('tsNs')!;
const nsTimestamp = nsField.values.get(row.rowIndex);
if (direction === 'BACKWARD') { if (direction === 'BACKWARD') {
return { return {
...commonTargetOptions, ...commonTargetOptions,
start: timeEpochNs - contextTimeBuffer, // convert to ns, we lose some precision here but it is not that important at the far points of the context
end: timeEpochNs, // using RFC3339Nano format to avoid precision loss start: row.timeEpochMs - contextTimeBuffer + '000000',
end: nsTimestamp,
direction, direction,
}; };
} else { } else {
return { return {
...commonTargetOptions, ...commonTargetOptions,
start: timeEpochNs, // start param in Loki API is inclusive so we'll have to filter out the row that this request is based from // start param in Loki API is inclusive so we'll have to filter out the row that this request is based from
end: timeEpochNs + contextTimeBuffer, // and any other that were logged in the same ns but before the row. Right now these rows will be lost
// because they are before but came in the response that should return only rows after.
start: nsTimestamp,
// convert to ns, we lose some precision here but it is not that important at the far points of the context
end: row.timeEpochMs + contextTimeBuffer + '000000',
}; };
} }
}; };
......
...@@ -67,6 +67,7 @@ export class LiveStreams { ...@@ -67,6 +67,7 @@ export class LiveStreams {
const data = new CircularDataFrame({ capacity: target.size }); const data = new CircularDataFrame({ capacity: target.size });
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } }); data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
data.addField({ name: 'tsNs', type: FieldType.time, config: { title: 'Time ns' } });
data.addField({ name: 'line', type: FieldType.string }).labels = parseLabels(target.query); data.addField({ name: 'line', type: FieldType.string }).labels = parseLabels(target.query);
data.addField({ name: 'labels', type: FieldType.other }); // The labels for each line data.addField({ name: 'labels', type: FieldType.other }); // The labels for each line
data.addField({ name: 'id', type: FieldType.string }); data.addField({ name: 'id', type: FieldType.string });
......
...@@ -28,13 +28,13 @@ const streamResult: LokiStreamResult[] = [ ...@@ -28,13 +28,13 @@ const streamResult: LokiStreamResult[] = [
stream: { stream: {
foo: 'bar', foo: 'bar',
}, },
values: [['1970-01-01T00:00:00Z', "foo: 'bar'"]], values: [['1579857562021616000', "foo: 'bar'"]],
}, },
{ {
stream: { stream: {
bar: 'foo', bar: 'foo',
}, },
values: [['1970-01-01T00:00:00Z', "bar: 'foo'"]], values: [['1579857562031616000', "bar: 'foo'"]],
}, },
]; ];
...@@ -95,12 +95,12 @@ describe('loki result transformer', () => { ...@@ -95,12 +95,12 @@ describe('loki result transformer', () => {
expect(data.length).toBe(2); expect(data.length).toBe(2);
expect(data[0].fields[1].labels['foo']).toEqual('bar'); expect(data[0].fields[1].labels['foo']).toEqual('bar');
expect(data[0].fields[0].values.get(0)).toEqual(legacyStreamResult[0].entries[0].ts); expect(data[0].fields[0].values.get(0)).toEqual('2020-01-24T09:19:22.021Z');
expect(data[0].fields[1].values.get(0)).toEqual(legacyStreamResult[0].entries[0].line); expect(data[0].fields[1].values.get(0)).toEqual(streamResult[0].values[0][1]);
expect(data[0].fields[2].values.get(0)).toEqual('dc1e83aa5cd718b42a3cff50fa7e3a6a'); expect(data[0].fields[2].values.get(0)).toEqual('2b431b8a98b80b3b2c2f4cd2444ae6cb');
expect(data[1].fields[0].values.get(0)).toEqual(legacyStreamResult[1].entries[0].ts); expect(data[1].fields[0].values.get(0)).toEqual('2020-01-24T09:19:22.031Z');
expect(data[1].fields[1].values.get(0)).toEqual(legacyStreamResult[1].entries[0].line); expect(data[1].fields[1].values.get(0)).toEqual(streamResult[1].values[0][1]);
expect(data[1].fields[2].values.get(0)).toEqual('952fa23552daebbb5747c4e52fb9497d'); expect(data[1].fields[2].values.get(0)).toEqual('75d73d66cff40f9d1a1f2d5a0bf295d0');
}); });
}); });
......
...@@ -10,9 +10,9 @@ import { ...@@ -10,9 +10,9 @@ import {
ArrayVector, ArrayVector,
MutableDataFrame, MutableDataFrame,
findUniqueLabels, findUniqueLabels,
dateTime,
FieldConfig, FieldConfig,
DataFrameView, DataFrameView,
dateTime,
} from '@grafana/data'; } from '@grafana/data';
import templateSrv from 'app/features/templating/template_srv'; import templateSrv from 'app/features/templating/template_srv';
import TableModel from 'app/core/table_model'; import TableModel from 'app/core/table_model';
...@@ -35,7 +35,7 @@ import { formatQuery, getHighlighterExpressionsFromQuery } from './query_utils'; ...@@ -35,7 +35,7 @@ import { formatQuery, getHighlighterExpressionsFromQuery } from './query_utils';
import { of } from 'rxjs'; import { of } from 'rxjs';
/** /**
* Transforms LokiLogStream structure into a dataFrame. Used when doing standard queries. * Transforms LokiLogStream structure into a dataFrame. Used when doing standard queries and older version of Loki.
*/ */
export function legacyLogStreamToDataFrame( export function legacyLogStreamToDataFrame(
stream: LokiLegacyStreamResult, stream: LokiLegacyStreamResult,
...@@ -48,67 +48,80 @@ export function legacyLogStreamToDataFrame( ...@@ -48,67 +48,80 @@ export function legacyLogStreamToDataFrame(
} }
const times = new ArrayVector<string>([]); const times = new ArrayVector<string>([]);
const timesNs = new ArrayVector<string>([]);
const lines = new ArrayVector<string>([]); const lines = new ArrayVector<string>([]);
const uids = new ArrayVector<string>([]); const uids = new ArrayVector<string>([]);
for (const entry of stream.entries) { for (const entry of stream.entries) {
const ts = entry.ts || entry.timestamp; const ts = entry.ts || entry.timestamp;
// iso string with nano precision, will be truncated but is parse-able
times.add(ts); times.add(ts);
// So this matches new format, we are losing precision here, which sucks but no easy way to keep it and this
// is for old pre 1.0.0 version Loki so probably does not affect that much.
timesNs.add(dateTime(ts).valueOf() + '000000');
lines.add(entry.line); lines.add(entry.line);
uids.add(createUid(ts, stream.labels, entry.line)); uids.add(createUid(ts, stream.labels, entry.line));
} }
if (reverse) { return constructDataFrame(times, timesNs, lines, uids, labels, reverse, refId);
times.buffer = times.buffer.reverse();
lines.buffer = lines.buffer.reverse();
}
return {
refId,
fields: [
{ name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
{ name: 'line', type: FieldType.string, config: {}, values: lines, labels }, // Line
{ name: 'id', type: FieldType.string, config: {}, values: uids },
],
length: times.length,
};
} }
/**
* Transforms LokiStreamResult structure into a dataFrame. Used when doing standard queries and newer version of Loki.
*/
export function lokiStreamResultToDataFrame(stream: LokiStreamResult, reverse?: boolean, refId?: string): DataFrame { export function lokiStreamResultToDataFrame(stream: LokiStreamResult, reverse?: boolean, refId?: string): DataFrame {
const labels: Labels = stream.stream; const labels: Labels = stream.stream;
const labelsString = Object.entries(labels)
.map(([key, val]) => `${key}="${val}"`)
.sort()
.join('');
const times = new ArrayVector<string>([]); const times = new ArrayVector<string>([]);
const timesNs = new ArrayVector<string>([]);
const lines = new ArrayVector<string>([]); const lines = new ArrayVector<string>([]);
const uids = new ArrayVector<string>([]); const uids = new ArrayVector<string>([]);
for (const [ts, line] of stream.values) { for (const [ts, line] of stream.values) {
const labelsString = Object.entries(labels) // num ns epoch in string, we convert it to iso string here so it matches old format
.map(([key, val]) => `${key}="${val}"`) times.add(new Date(parseInt(ts.substr(0, ts.length - 6), 10)).toISOString());
.join(''); timesNs.add(ts);
times.add(
dateTime(Number.parseFloat(ts) / 1e6)
.utc()
.format()
);
lines.add(line); lines.add(line);
uids.add(createUid(ts, labelsString, line)); uids.add(createUid(ts, labelsString, line));
} }
if (reverse) { return constructDataFrame(times, timesNs, lines, uids, labels, reverse, refId);
times.buffer = times.buffer.reverse(); }
lines.buffer = lines.buffer.reverse();
}
return { /**
* Constructs dataFrame with supplied fields and other data. Also makes sure it is properly reversed if needed.
*/
function constructDataFrame(
times: ArrayVector<string>,
timesNs: ArrayVector<string>,
lines: ArrayVector<string>,
uids: ArrayVector<string>,
labels: Labels,
reverse?: boolean,
refId?: string
) {
const dataFrame = {
refId, refId,
fields: [ fields: [
{ name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time { name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
{ name: 'line', type: FieldType.string, config: {}, values: lines, labels }, // Line { name: 'line', type: FieldType.string, config: {}, values: lines, labels }, // Line
{ name: 'id', type: FieldType.string, config: {}, values: uids }, { name: 'id', type: FieldType.string, config: {}, values: uids },
{ name: 'tsNs', type: FieldType.time, config: { title: 'Time ns' }, values: timesNs }, // Time
], ],
length: times.length, length: times.length,
}; };
if (reverse) {
const mutableDataFrame = new MutableDataFrame(dataFrame);
mutableDataFrame.reverse();
return mutableDataFrame;
}
return dataFrame;
} }
/** /**
...@@ -173,17 +186,18 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M ...@@ -173,17 +186,18 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
for (const stream of streams) { for (const stream of streams) {
// Find unique labels // Find unique labels
const unique = findUniqueLabels(stream.stream, baseLabels); const unique = findUniqueLabels(stream.stream, baseLabels);
const allLabelsString = Object.entries(stream.stream)
.map(([key, val]) => `${key}="${val}"`)
.sort()
.join('');
// Add each line // Add each line
for (const [ts, line] of stream.values) { for (const [ts, line] of stream.values) {
const uniqueLabelsString = Object.entries(unique) data.values.ts.add(ts.substr(0, ts.length - 6));
.map(([key, val]) => `${key}="${val}"`) data.values.tsNs.add(ts);
.join('');
data.values.ts.add(parseInt(ts, 10) / 1e6);
data.values.line.add(line); data.values.line.add(line);
data.values.labels.add(unique); data.values.labels.add(unique);
data.values.id.add(createUid(ts, uniqueLabelsString, line)); data.values.id.add(createUid(ts, allLabelsString, line));
} }
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment