Commit 0490dbb8 by Andrej Ocenas Committed by GitHub

Explore: Generate log row uid (#18994)

parent ccba986b
import { Field, DataFrame, FieldType, guessFieldTypeForField } from '../index';
interface FieldWithIndex extends Field {
export interface FieldWithIndex extends Field {
index: number;
}
......
......@@ -48,6 +48,7 @@ export interface LogRowModel {
timeEpochMs: number;
timeLocal: string;
timeUtc: string;
uid: string;
uniqueLabels?: Labels;
}
......
......@@ -33,6 +33,7 @@ describe('getRowContexts', () => {
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
};
let called = false;
......@@ -65,6 +66,7 @@ describe('getRowContexts', () => {
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
};
let called = false;
......
......@@ -105,7 +105,7 @@ class UnThemedLogRows extends PureComponent<Props, State> {
!deferLogs && // Only inject highlighterExpression in the first set for performance reasons
firstRows.map((row, index) => (
<LogRow
key={index}
key={row.uid}
getRows={getRows}
getRowContext={getRowContext}
highlighterExpressions={highlighterExpressions}
......@@ -122,7 +122,7 @@ class UnThemedLogRows extends PureComponent<Props, State> {
renderAll &&
lastRows.map((row, index) => (
<LogRow
key={PREVIEW_LIMIT + index}
key={row.uid}
getRows={getRows}
getRowContext={getRowContext}
row={row}
......
......@@ -21,6 +21,7 @@ import {
NullValueMode,
toDataFrame,
FieldCache,
FieldWithIndex,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
import { hasAnsiCodes } from 'app/core/utils/text';
......@@ -249,6 +250,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
const timeField = fieldCache.getFirstFieldOfType(FieldType.time);
const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
const logLevelField = fieldCache.getFieldByName('level');
const idField = getIdField(fieldCache);
let seriesLogLevel: LogLevel | undefined = undefined;
if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
......@@ -291,6 +293,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
raw: message,
labels: series.labels,
timestamp: ts,
uid: idField ? idField.values.get(j) : j.toString(),
});
}
}
......@@ -321,3 +324,14 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
rows,
};
}
/**
 * Looks up a field that can serve as a per-row identifier.
 * Candidate names are tried in order; the first field present in the cache wins.
 *
 * @param fieldCache Cache to query for candidate fields by name.
 * @returns The matching field (with its index), or undefined when no candidate exists.
 */
function getIdField(fieldCache: FieldCache): FieldWithIndex | undefined {
  const candidateNames = ['id'];
  // Resolve each candidate name and keep the first one that actually exists.
  const match = candidateNames
    .map(name => fieldCache.getFieldByName(name))
    .find(field => !!field);
  return match;
}
......@@ -217,6 +217,11 @@ describe('dataFrameToLogsModel', () => {
't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
],
},
{
name: 'id',
type: FieldType.string,
values: ['foo', 'bar'],
},
],
meta: {
limit: 1000,
......@@ -233,6 +238,7 @@ describe('dataFrameToLogsModel', () => {
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'info',
uniqueLabels: {},
uid: 'foo',
},
{
timestamp: '2019-04-26T14:42:50.991981292Z',
......@@ -240,6 +246,7 @@ describe('dataFrameToLogsModel', () => {
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'error',
uniqueLabels: {},
uid: 'bar',
},
]);
......@@ -367,7 +374,6 @@ describe('dataFrameToLogsModel', () => {
kind: LogsMetaKind.LabelsMap,
});
});
//
it('given multiple series with equal times should return expected logs model', () => {
const series: DataFrame[] = [
toDataFrame({
......@@ -459,4 +465,26 @@ describe('dataFrameToLogsModel', () => {
},
]);
});
// Verifies the uid fallback path: this series deliberately has no 'id' field,
// so each row's uid should default to the row's index within the series,
// stringified ('0' for the first row).
it('should fallback to row index if no id', () => {
const series: DataFrame[] = [
toDataFrame({
labels: { foo: 'bar' },
fields: [
{
name: 'ts',
type: FieldType.time,
values: ['1970-01-01T00:00:00Z'],
},
{
name: 'line',
type: FieldType.string,
values: ['WARN boooo 1'],
},
],
}),
];
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.rows[0].uid).toBe('0');
});
});
......@@ -383,6 +383,7 @@ describe('sortLogsResult', () => {
timeFromNow: '',
timeLocal: '',
timeUtc: '',
uid: '1',
};
const sameAsFirstRow = firstRow;
const secondRow = {
......@@ -396,6 +397,7 @@ describe('sortLogsResult', () => {
timeFromNow: '',
timeLocal: '',
timeUtc: '',
uid: '2',
};
describe('when called with SortOrder.Descending', () => {
......
import React, { PureComponent } from 'react';
import { css, cx } from 'emotion';
import tinycolor from 'tinycolor2';
import { last } from 'lodash';
import { Themeable, withTheme, GrafanaTheme, getLogRowStyles } from '@grafana/ui';
import { LogsModel, LogRowModel, TimeZone } from '@grafana/data';
......@@ -21,7 +20,7 @@ const getStyles = (theme: GrafanaTheme) => ({
margin-top: auto !important;
}
`,
logsRowFresh: css`
logsRowFade: css`
label: logs-row-fade;
color: ${theme.colors.text};
background-color: ${tinycolor(theme.colors.blueLight)
......@@ -39,9 +38,6 @@ const getStyles = (theme: GrafanaTheme) => ({
}
}
`,
logsRowOld: css`
label: logs-row-old;
`,
logsRowsIndicator: css`
font-size: ${theme.typography.size.md};
padding-top: ${theme.spacing.sm};
......@@ -64,7 +60,6 @@ export interface Props extends Themeable {
interface State {
logsResultToRender?: LogsModel;
lastTimestamp: number;
}
class LiveLogs extends PureComponent<Props, State> {
......@@ -76,7 +71,6 @@ class LiveLogs extends PureComponent<Props, State> {
super(props);
this.state = {
logsResultToRender: props.logsResult,
lastTimestamp: 0,
};
}
......@@ -106,10 +100,6 @@ class LiveLogs extends PureComponent<Props, State> {
// our state, but we do not show the updates, this allows us start again showing correct result after resuming
// without creating a gap in the log results.
logsResultToRender: nextProps.logsResult,
lastTimestamp:
state.logsResultToRender && last(state.logsResultToRender.rows)
? last(state.logsResultToRender.rows).timeEpochMs
: 0,
};
} else {
return null;
......@@ -141,15 +131,6 @@ class LiveLogs extends PureComponent<Props, State> {
return rowsToRender;
};
/**
* Check if row is fresh so we can apply special styling. This is bit naive and does not take into account rows
* which arrive out of order. Because loki datasource sends full data instead of deltas we need to compare the
* data and this is easier than doing some intersection of some uuid of each row (which we do not have now anyway)
*/
isFresh = (row: LogRowModel): boolean => {
return row.timeEpochMs > this.state.lastTimestamp;
};
render() {
const { theme, timeZone, onPause, onResume, isPaused } = this.props;
const styles = getStyles(theme);
......@@ -163,23 +144,20 @@ class LiveLogs extends PureComponent<Props, State> {
className={cx(['logs-rows', styles.logsRowsLive])}
ref={this.scrollContainerRef}
>
{this.rowsToRender().map((row: LogRowModel, index) => {
{this.rowsToRender().map((row: LogRowModel) => {
return (
<div
className={cx(logsRow, this.isFresh(row) ? styles.logsRowFresh : styles.logsRowOld)}
key={`${row.timeEpochMs}-${index}`}
>
<div className={cx(logsRow, styles.logsRowFade)} key={row.uid}>
{showUtc && (
<div className={cx([logsRowLocalTime])} title={`Local: ${row.timeLocal} (${row.timeFromNow})`}>
<div className={cx(logsRowLocalTime)} title={`Local: ${row.timeLocal} (${row.timeFromNow})`}>
{row.timeUtc}
</div>
)}
{!showUtc && (
<div className={cx([logsRowLocalTime])} title={`${row.timeUtc} (${row.timeFromNow})`}>
<div className={cx(logsRowLocalTime)} title={`${row.timeUtc} (${row.timeFromNow})`}>
{row.timeLocal}
</div>
)}
<div className={cx([logsRowMessage])}>{row.entry}</div>
<div className={cx(logsRowMessage)}>{row.entry}</div>
</div>
);
})}
......
......@@ -156,6 +156,7 @@ describe('ResultProcessor', () => {
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 300,
uid: '2',
uniqueLabels: {},
},
{
......@@ -170,6 +171,7 @@ describe('ResultProcessor', () => {
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 200,
uid: '1',
uniqueLabels: {},
},
{
......@@ -184,6 +186,7 @@ describe('ResultProcessor', () => {
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 100,
uid: '0',
uniqueLabels: {},
},
],
......
......@@ -168,6 +168,16 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return series;
};
/**
* Runs live queries which in this case means creating a websocket and listening on it for new logs.
* This returns a slightly different dataFrame than runQueries: a single dataFrame is returned even when there are
* multiple Loki streams. Only the labels common to all streams are set on dataFrame.labels; the per-row unique
* labels live in an additional dataFrame.fields.labels field.
*
* @param options
* @param observer Callback that will be called with new data. It is optional only because this function is invoked
* even when no live targets are defined in the options, in which case it is a no-op and the observer is never called.
*/
runLiveQuery = (options: DataQueryRequest<LokiQuery>, target: LokiQuery): Observable<DataQueryResponse> => {
const liveTarget = this.prepareLiveTarget(target, options);
const stream = this.streams.getStream(liveTarget);
......
......@@ -50,6 +50,7 @@ describe('Live Stream Tests', () => {
const last = { ...view.get(view.length - 1) };
expect(last).toEqual({
ts: '2019-08-28T20:50:40.118944705Z',
id: '2019-08-28T20:50:40.118944705Z_{filename="/var/log/sntpc.log", job="varlogs"}',
line: 'Kittens',
labels: { filename: '/var/log/sntpc.log' },
});
......
......@@ -31,6 +31,7 @@ export class LiveStreams {
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
data.addField({ name: 'line', type: FieldType.string });
data.addField({ name: 'labels', type: FieldType.other });
data.addField({ name: 'id', type: FieldType.string });
stream = webSocket(target.url).pipe(
finalize(() => {
......
import { logStreamToDataFrame } from './result_transformer';
import { logStreamToDataFrame, appendResponseToBufferedData } from './result_transformer';
import { FieldType, MutableDataFrame } from '@grafana/data';
import { LokiLogsStream } from './types';
describe('convert loki response to DataFrame', () => {
const streams = [
{
labels: '{foo="bar"}',
entries: [
{
line: "foo: 'bar'",
ts: '1970-01-01T00:00:00Z',
},
],
},
{
labels: '{bar="foo"}',
entries: [
{
line: "bar: 'foo'",
ts: '1970-01-01T00:00:00Z',
},
],
},
];
const streams: LokiLogsStream[] = [
{
labels: '{foo="bar"}',
entries: [
{
line: "foo: 'bar'",
ts: '1970-01-01T00:00:00Z',
},
],
},
{
labels: '{bar="foo"}',
entries: [
{
line: "bar: 'foo'",
ts: '1970-01-01T00:00:00Z',
},
],
},
];
describe('logStreamToDataFrame', () => {
it('converts streams to series', () => {
const data = streams.map(stream => logStreamToDataFrame(stream));
......@@ -28,7 +31,27 @@ describe('convert loki response to DataFrame', () => {
expect(data[0].labels['foo']).toEqual('bar');
expect(data[0].fields[0].values.get(0)).toEqual(streams[0].entries[0].ts);
expect(data[0].fields[1].values.get(0)).toEqual(streams[0].entries[0].line);
expect(data[0].fields[2].values.get(0)).toEqual('1970-01-01T00:00:00Z_{foo="bar"}');
expect(data[1].fields[0].values.get(0)).toEqual(streams[1].entries[0].ts);
expect(data[1].fields[1].values.get(0)).toEqual(streams[1].entries[0].line);
expect(data[1].fields[2].values.get(0)).toEqual('1970-01-01T00:00:00Z_{bar="foo"}');
});
});
// Exercises appendResponseToBufferedData against a MutableDataFrame pre-seeded
// with the ts/line/labels/id fields the function appends into. The expected id
// is `${ts}_${stream.labels}`, matching how the transformer derives row uids.
describe('appendResponseToBufferedData', () => {
it('appends response', () => {
const data = new MutableDataFrame();
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
data.addField({ name: 'line', type: FieldType.string });
data.addField({ name: 'labels', type: FieldType.other });
data.addField({ name: 'id', type: FieldType.string });
appendResponseToBufferedData({ streams }, data);
expect(data.get(0)).toEqual({
ts: '1970-01-01T00:00:00Z',
line: "foo: 'bar'",
labels: { foo: 'bar' },
id: '1970-01-01T00:00:00Z_{foo="bar"}',
});
});
});
......@@ -19,10 +19,13 @@ export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean,
}
const times = new ArrayVector<string>([]);
const lines = new ArrayVector<string>([]);
const uids = new ArrayVector<string>([]);
for (const entry of stream.entries) {
times.add(entry.ts || entry.timestamp);
const ts = entry.ts || entry.timestamp;
times.add(ts);
lines.add(entry.line);
uids.add(`${ts}_${stream.labels}`);
}
if (reverse) {
......@@ -36,6 +39,7 @@ export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean,
fields: [
{ name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
{ name: 'line', type: FieldType.string, config: {}, values: lines }, // Line
{ name: 'id', type: FieldType.string, config: {}, values: uids },
],
length: times.length,
};
......@@ -45,22 +49,26 @@ export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean,
* Transform LokiResponse data and appends it to MutableDataFrame. Used for streaming where the dataFrame can be
* a CircularDataFrame creating a fixed size rolling buffer.
* TODO: Probably could be unified with the logStreamToDataFrame function.
* @param response
* @param data Needs to have ts, line, labels, id as fields
*/
export function appendResponseToBufferedData(response: LokiResponse, data: MutableDataFrame) {
// Should we do anythign with: response.dropped_entries?
// Should we do anything with: response.dropped_entries?
const streams: LokiLogsStream[] = response.streams;
if (streams && streams.length) {
for (const stream of streams) {
// Find unique labels
const labels = parseLabels(stream.labels);
const unique = findUniqueLabels(labels, data.labels);
const unique = findUniqueLabels(labels, data.labels || {});
// Add each line
for (const entry of stream.entries) {
data.values.ts.add(entry.ts || entry.timestamp);
const ts = entry.ts || entry.timestamp;
data.values.ts.add(ts);
data.values.line.add(entry.line);
data.values.labels.add(unique);
data.values.id.add(`${ts}_${stream.labels}`);
}
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment