Commit bf3313c8 by kay delaney, committed by GitHub

Datasource/Loki: Remove code dealing with legacy Loki endpoints (#23437)
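
For reference, a minimal sketch of the legacy stream shape this commit removes next to the v1 shape it keeps, based on the type definitions and test fixtures in the diff below (values are illustrative):

// Legacy shape (LokiLegacyStreamResult, removed): labels serialized as a string,
// entries carrying ISO timestamps with nanosecond precision.
const legacyStream = {
  labels: '{job="varlogs"}',
  entries: [{ ts: '2019-08-28T20:50:40.118944705Z', line: 'Kittens' }],
};

// v1 shape (LokiStreamResult, kept): labels as an object, values as
// [nanosecond-epoch string, log line] tuples.
const v1Stream = {
  stream: { job: 'varlogs' },
  values: [['1567025440118944705', 'Kittens']],
};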

parent 6715cf22
import LokiDatasource, { RangeQueryOptions } from './datasource';
import { LokiLegacyStreamResponse, LokiQuery, LokiResponse, LokiResultType } from './types';
import { LokiQuery, LokiResponse, LokiResultType } from './types';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
import {
AnnotationQueryRequest,
......@@ -29,18 +29,6 @@ describe('LokiDatasource', () => {
url: 'myloggingurl',
};
const legacyTestResp: { data: LokiLegacyStreamResponse; status: number } = {
data: {
streams: [
{
entries: [{ ts: '2019-02-01T10:27:37.498180581Z', line: 'hello' }],
labels: '{}',
},
],
},
status: 404, // for simulating legacy endpoint
};
const testResp: { data: LokiResponse } = {
data: {
data: {
......@@ -106,33 +94,12 @@ describe('LokiDatasource', () => {
});
});
describe('when running range query with fallback', () => {
let ds: LokiDatasource;
beforeEach(() => {
const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
const customSettings = { ...instanceSettings, jsonData: customData };
ds = new LokiDatasource(customSettings, templateSrvMock);
datasourceRequestMock.mockImplementation(() => Promise.resolve(legacyTestResp));
});
test('should try latest endpoint but fall back to legacy endpoint if it cannot be reached', async () => {
const options = getQueryOptions<LokiQuery>({
targets: [{ expr: '{job="grafana"}', refId: 'B' }],
exploreMode: ExploreMode.Logs,
});
ds.runLegacyQuery = jest.fn();
await ds.runRangeQueryWithFallback(options.targets[0], options).toPromise();
expect(ds.runLegacyQuery).toBeCalled();
});
});
describe('when querying', () => {
let ds: LokiDatasource;
let testLimit: any;
beforeAll(() => {
testLimit = makeLimitTest(instanceSettings, datasourceRequestMock, templateSrvMock, legacyTestResp);
testLimit = makeLimitTest(instanceSettings, datasourceRequestMock, templateSrvMock, testResp);
});
beforeEach(() => {
......@@ -149,13 +116,11 @@ describe('LokiDatasource', () => {
});
ds.runInstantQuery = jest.fn(() => of({ data: [] }));
ds.runLegacyQuery = jest.fn();
ds.runRangeQueryWithFallback = jest.fn(() => of({ data: [] }));
ds.runRangeQuery = jest.fn(() => of({ data: [] }));
await ds.query(options).toPromise();
expect(ds.runInstantQuery).toBeCalled();
expect(ds.runLegacyQuery).not.toBeCalled();
expect(ds.runRangeQueryWithFallback).toBeCalled();
expect(ds.runRangeQuery).toBeCalled();
});
test('should just run range query when in logs mode', async () => {
......@@ -165,11 +130,11 @@ describe('LokiDatasource', () => {
});
ds.runInstantQuery = jest.fn(() => of({ data: [] }));
ds.runRangeQueryWithFallback = jest.fn(() => of({ data: [] }));
ds.runRangeQuery = jest.fn(() => of({ data: [] }));
await ds.query(options).toPromise();
expect(ds.runInstantQuery).not.toBeCalled();
expect(ds.runRangeQueryWithFallback).toBeCalled();
expect(ds.runRangeQuery).toBeCalled();
});
test('should use default max lines when no limit given', () => {
......@@ -207,8 +172,8 @@ describe('LokiDatasource', () => {
datasourceRequestMock.mockImplementation(
jest
.fn()
.mockReturnValueOnce(Promise.resolve(legacyTestResp))
.mockReturnValueOnce(Promise.resolve(omit(legacyTestResp, 'status')))
.mockReturnValueOnce(Promise.resolve(testResp))
.mockReturnValueOnce(Promise.resolve(omit(testResp, 'data.status')))
);
const options = getQueryOptions<LokiQuery>({
......@@ -381,31 +346,32 @@ describe('LokiDatasource', () => {
it('should transform the loki data to annotation response', async () => {
const ds = new LokiDatasource(instanceSettings, templateSrvMock);
datasourceRequestMock.mockImplementation(
jest
.fn()
.mockReturnValueOnce(
Promise.resolve({
data: [],
status: 404,
})
)
.mockReturnValueOnce(
Promise.resolve({
jest.fn().mockReturnValueOnce(
Promise.resolve({
data: {
data: {
streams: [
resultType: LokiResultType.Stream,
result: [
{
entries: [{ ts: '2019-02-01T10:27:37.498180581Z', line: 'hello' }],
labels: '{label="value"}',
stream: {
label: 'value',
},
values: [['1549016857498000000', 'hello']],
},
{
entries: [{ ts: '2019-02-01T12:27:37.498180581Z', line: 'hello 2' }],
labels: '{label2="value2"}',
stream: {
label2: 'value2',
},
values: [['1549024057498000000', 'hello 2']],
},
],
},
})
)
status: 'success',
},
})
)
);
const query = makeAnnotationQueryRequest();
const res = await ds.annotationQuery(query);
......
......@@ -158,7 +158,7 @@ describe('Request URL', () => {
const instance = new LanguageProvider(datasourceWithLabels, { initialRange: rangeMock });
await instance.refreshLogLabels(rangeMock, true);
const expectedUrl = '/api/prom/label';
const expectedUrl = '/loki/api/v1/label';
expect(datasourceSpy).toHaveBeenCalledWith(expectedUrl, rangeToParams(rangeMock));
});
});
......
......@@ -381,7 +381,7 @@ export default class LokiLanguageProvider extends LanguageProvider {
* @param absoluteRange Absolute time range the label fetch is scoped to
*/
async fetchLogLabels(absoluteRange: AbsoluteTimeRange): Promise<any> {
const url = '/api/prom/label';
const url = '/loki/api/v1/label';
try {
this.logLabelFetchTs = Date.now();
const rangeParams = absoluteRange ? rangeToParams(absoluteRange) : {};
......@@ -442,7 +442,7 @@ export default class LokiLanguageProvider extends LanguageProvider {
}
async fetchLabelValues(key: string, absoluteRange: AbsoluteTimeRange): Promise<string[]> {
const url = `/api/prom/label/${key}/values`;
const url = `/loki/api/v1/label/${key}/values`;
let values: string[] = [];
const rangeParams: { start?: number; end?: number } = absoluteRange ? rangeToParams(absoluteRange) : {};
const { start, end } = rangeParams;
......
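
A quick sketch of the two v1 endpoints the language provider now targets (URLs as in the hunks above; the small helper below is illustrative, not code from this commit):

// Label names and label values live under the Loki v1 API.
const labelNamesUrl = '/loki/api/v1/label';
const labelValuesUrl = (key: string) => `/loki/api/v1/label/${key}/values`;

labelValuesUrl('job'); // => '/loki/api/v1/label/job/values'
// Both requests also take the optional start/end params produced by rangeToParams above.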
......@@ -3,6 +3,7 @@ import * as rxJsWebSocket from 'rxjs/webSocket';
import { LiveStreams } from './live_streams';
import { DataFrame, DataFrameView, formatLabels, Labels } from '@grafana/data';
import { noop } from 'lodash';
import { LokiTailResponse } from './types';
let fakeSocket: Subject<any>;
jest.mock('rxjs/webSocket', () => {
......@@ -17,16 +18,11 @@ describe('Live Stream Tests', () => {
jest.restoreAllMocks();
});
const msg0: any = {
const msg0: LokiTailResponse = {
streams: [
{
labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:50:40.118944705Z',
line: 'Kittens',
},
],
stream: { filename: '/var/log/sntpc.log', job: 'varlogs' },
values: [['1567025440118944705', 'Kittens']],
},
],
dropped_entries: null,
......@@ -36,21 +32,22 @@ describe('Live Stream Tests', () => {
fakeSocket = new Subject<any>();
const labels: Labels = { job: 'varlogs' };
const target = makeTarget('fake', labels);
const stream = new LiveStreams().getLegacyStream(target);
const stream = new LiveStreams().getStream(target);
expect.assertions(4);
const tests = [
(val: DataFrame[]) => {
expect(val[0].length).toEqual(7);
expect(val[0].fields[1].labels).toEqual(labels);
expect(val[0].fields[2].labels).toEqual(labels);
},
(val: DataFrame[]) => {
expect(val[0].length).toEqual(8);
const view = new DataFrameView(val[0]);
const last = { ...view.get(view.length - 1) };
expect(last).toEqual({
ts: '2019-08-28T20:50:40.118944705Z',
id: '81d963f31c276ad2ea1af38b38436237',
ts: '2019-08-28T20:50:40.118Z',
tsNs: '1567025440118944705',
id: '8c50d09800ce8dda69a2ff25405c9f65',
line: 'Kittens',
labels: { filename: '/var/log/sntpc.log' },
});
......@@ -74,21 +71,21 @@ describe('Live Stream Tests', () => {
it('returns the same subscription if the url matches existing one', () => {
fakeSocket = new Subject<any>();
const liveStreams = new LiveStreams();
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const stream2 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
const stream2 = liveStreams.getStream(makeTarget('url_to_match'));
expect(stream1).toBe(stream2);
});
it('returns new subscription when the previous unsubscribed', () => {
fakeSocket = new Subject<any>();
const liveStreams = new LiveStreams();
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
const subscription = stream1.subscribe({
next: noop,
});
subscription.unsubscribe();
const stream2 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const stream2 = liveStreams.getStream(makeTarget('url_to_match'));
expect(stream1).not.toBe(stream2);
});
......@@ -101,7 +98,7 @@ describe('Live Stream Tests', () => {
spy.and.returnValue(fakeSocket);
const liveStreams = new LiveStreams();
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const stream1 = liveStreams.getStream(makeTarget('url_to_match'));
const subscription = stream1.subscribe({
next: noop,
});
......@@ -128,78 +125,39 @@ function makeTarget(url: string, labels?: Labels) {
// Added this at the end so the top is more readable
//----------------------------------------------------------------
const initialRawResponse: any = {
const initialRawResponse: LokiTailResponse = {
streams: [
{
labels: '{filename="/var/log/docker.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:43:38.215447855Z',
line:
'2019-08-28T20:43:38Z docker time="2019-08-28T20:43:38.147149490Z" ' +
'level=debug msg="[resolver] received AAAA record \\"::1\\" for \\"localhost.\\" from udp:192.168.65.1"',
},
],
},
{
labels: '{filename="/var/log/docker.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:43:38.215450388Z',
line:
'2019-08-28T20:43:38Z docker time="2019-08-28T20:43:38.147224630Z" ' +
stream: {
filename: '/var/log/docker.log',
job: 'varlogs',
},
values: [
[
'1567025018215000000',
'level=debug msg="[resolver] received AAAA record \\"::1\\" for \\"localhost.\\" from udp:192.168.65.1"',
],
[
'1567025018215000000',
'2019-08-28T20:43:38Z docker time="2019-08-28T20:43:38.147224630Z" ' +
'level=debug msg="[resolver] received AAAA record \\"fe80::1\\" for \\"localhost.\\" from udp:192.168.65.1"',
},
],
},
{
labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:43:40.452525099Z',
line: '2019-08-28T20:43:40Z sntpc sntpc[1]: offset=-0.022171, delay=0.000463',
},
],
},
{
labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:44:10.297164454Z',
line: '2019-08-28T20:44:10Z sntpc sntpc[1]: offset=-0.022327, delay=0.000527',
},
],
},
{
labels: '{filename="/var/log/lifecycle-server.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:44:38.152248647Z',
line:
'2019-08-28T20:44:38Z lifecycle-server time="2019-08-28T20:44:38.095444834Z" ' +
],
['1567025020452000000', '2019-08-28T20:43:40Z sntpc sntpc[1]: offset=-0.022171, delay=0.000463'],
['1567025050297000000', '2019-08-28T20:44:10Z sntpc sntpc[1]: offset=-0.022327, delay=0.000527'],
[
'1567025078152000000',
'2019-08-28T20:44:38Z lifecycle-server time="2019-08-28T20:44:38.095444834Z" ' +
'level=debug msg="Name To resolve: localhost."',
},
],
},
{
labels: '{filename="/var/log/lifecycle-server.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:44:38.15225554Z',
line:
'2019-08-28T20:44:38Z lifecycle-server time="2019-08-28T20:44:38.095896074Z" ' +
],
[
'1567025078152000000',
'2019-08-28T20:44:38Z lifecycle-server time="2019-08-28T20:44:38.095896074Z" ' +
'level=debug msg="[resolver] query localhost. (A) from 172.22.0.4:53748, forwarding to udp:192.168.65.1"',
},
],
},
{
labels: '{filename="/var/log/docker.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:44:38.152271475Z',
line:
'2019-08-28T20:44:38Z docker time="2019-08-28T20:44:38.095444834Z" level=debug msg="Name To resolve: localhost."',
},
],
[
'1567025078152000000',
'2019-08-28T20:44:38Z docker time="2019-08-28T20:44:38.095444834Z" level=debug msg="Name To resolve: localhost."',
],
],
},
],
......
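
The ts/tsNs pair expected in the live-stream test above (ts: '2019-08-28T20:50:40.118Z', tsNs: '1567025440118944705') follows from truncating the nanosecond epoch string to milliseconds; a small sketch of that relationship (an assumption about the transform, not code from this commit):

const tsNs = '1567025440118944705';     // nanosecond epoch as delivered by Loki v1
const tsMs = Number(tsNs.slice(0, -6)); // drop the 6 sub-millisecond digits -> 1567025440118
new Date(tsMs).toISOString();           // '2019-08-28T20:50:40.118Z'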
import { DataFrame, FieldType, parseLabels, KeyValue, CircularDataFrame } from '@grafana/data';
import { Observable } from 'rxjs';
import { webSocket } from 'rxjs/webSocket';
import { LokiLegacyStreamResponse, LokiTailResponse } from './types';
import { LokiTailResponse } from './types';
import { finalize, map } from 'rxjs/operators';
import { appendLegacyResponseToBufferedData, appendResponseToBufferedData } from './result_transformer';
import { appendResponseToBufferedData } from './result_transformer';
/**
* Maps directly to a query in the UI (refId is key)
*/
export interface LegacyTarget {
export interface LokiLiveTarget {
query: string;
regexp: string;
url: string;
refId: string;
size: number;
}
export interface LiveTarget {
query: string;
delay_for?: string;
limit?: string;
start?: string;
}
/**
* Cache of websocket streams that can be returned as observable. In case there already is a stream for particular
* target it is returned and on subscription returns the latest dataFrame.
......@@ -30,35 +22,7 @@ export interface LiveTarget {
export class LiveStreams {
private streams: KeyValue<Observable<DataFrame[]>> = {};
getLegacyStream(target: LegacyTarget): Observable<DataFrame[]> {
let stream = this.streams[target.url];
if (stream) {
return stream;
}
const data = new CircularDataFrame({ capacity: target.size });
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
data.addField({ name: 'line', type: FieldType.string }).labels = parseLabels(target.query);
data.addField({ name: 'labels', type: FieldType.other }); // The labels for each line
data.addField({ name: 'id', type: FieldType.string });
stream = webSocket(target.url).pipe(
finalize(() => {
delete this.streams[target.url];
}),
map((response: LokiLegacyStreamResponse) => {
appendLegacyResponseToBufferedData(response, data);
return [data];
})
);
this.streams[target.url] = stream;
return stream;
}
getStream(target: LegacyTarget): Observable<DataFrame[]> {
getStream(target: LokiLiveTarget): Observable<DataFrame[]> {
let stream = this.streams[target.url];
if (stream) {
......
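
A usage sketch for the renamed getStream entry point, assuming a hypothetical tail URL; the field values are illustrative and not taken from this commit:

import { LiveStreams, LokiLiveTarget } from './live_streams';

const target: LokiLiveTarget = {
  query: '{job="varlogs"}',
  regexp: '',
  url: 'ws://localhost:3000/loki/api/v1/tail?query=%7Bjob%3D%22varlogs%22%7D', // hypothetical
  refId: 'A',
  size: 1000,
};

// Streams are cached per URL, so repeated calls with the same target.url share one websocket.
const subscription = new LiveStreams().getStream(target).subscribe({
  next: frames => console.log(frames[0].length, 'rows buffered'),
});
subscription.unsubscribe(); // tearing down the last subscriber drops the cached stream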
import { LokiDatasource, LOKI_ENDPOINT, LEGACY_LOKI_ENDPOINT } from './datasource';
import { LokiDatasource, LOKI_ENDPOINT } from './datasource';
import { DataSourceSettings } from '@grafana/data';
import { LokiOptions } from './types';
import { createDatasourceSettings } from '../../../features/datasources/mocks';
......@@ -16,25 +16,20 @@ interface SeriesForSelector {
}
export function makeMockLokiDatasource(labelsAndValues: Labels, series?: SeriesForSelector): LokiDatasource {
const legacyLokiLabelsAndValuesEndpointRegex = /^\/api\/prom\/label\/(\w*)\/values/;
const lokiLabelsAndValuesEndpointRegex = /^\/loki\/api\/v1\/label\/(\w*)\/values/;
const lokiSeriesEndpointRegex = /^\/loki\/api\/v1\/series/;
const legacyLokiLabelsEndpoint = `${LEGACY_LOKI_ENDPOINT}/label`;
const lokiLabelsEndpoint = `${LOKI_ENDPOINT}/label`;
const labels = Object.keys(labelsAndValues);
return {
metadataRequest: (url: string, params?: { [key: string]: string }) => {
if (url === legacyLokiLabelsEndpoint || url === lokiLabelsEndpoint) {
if (url === lokiLabelsEndpoint) {
return labels;
} else {
const legacyLabelsMatch = url.match(legacyLokiLabelsAndValuesEndpointRegex);
const labelsMatch = url.match(lokiLabelsAndValuesEndpointRegex);
const seriesMatch = url.match(lokiSeriesEndpointRegex);
if (legacyLabelsMatch) {
return labelsAndValues[legacyLabelsMatch[1]] || [];
} else if (labelsMatch) {
if (labelsMatch) {
return labelsAndValues[labelsMatch[1]] || [];
} else if (seriesMatch) {
return series[params.match] || [];
......
import { CircularDataFrame, FieldCache, FieldType, MutableDataFrame } from '@grafana/data';
import { LokiLegacyStreamResult, LokiStreamResult, LokiTailResponse } from './types';
import { LokiStreamResult, LokiTailResponse } from './types';
import * as ResultTransformer from './result_transformer';
import { enhanceDataFrame } from './result_transformer';
const legacyStreamResult: LokiLegacyStreamResult[] = [
{
labels: '{foo="bar"}',
entries: [
{
line: "foo: 'bar'",
ts: '1970-01-01T00:00:00Z',
},
],
},
{
labels: '{bar="foo"}',
entries: [
{
line: "bar: 'foo'",
ts: '1970-01-01T00:00:00Z',
},
],
},
];
const streamResult: LokiStreamResult[] = [
{
stream: {
......@@ -48,48 +27,6 @@ describe('loki result transformer', () => {
jest.clearAllMocks();
});
describe('legacyLogStreamToDataFrame', () => {
it('converts streams to series', () => {
const data = legacyStreamResult.map(stream => ResultTransformer.legacyLogStreamToDataFrame(stream));
expect(data.length).toBe(2);
expect(data[0].fields[1].labels!['foo']).toEqual('bar');
expect(data[0].fields[0].values.get(0)).toEqual(legacyStreamResult[0].entries[0].ts);
expect(data[0].fields[1].values.get(0)).toEqual(legacyStreamResult[0].entries[0].line);
expect(data[0].fields[2].values.get(0)).toEqual('2764544e18dbc3fcbeee21a573e8cd1b');
expect(data[1].fields[0].values.get(0)).toEqual(legacyStreamResult[1].entries[0].ts);
expect(data[1].fields[1].values.get(0)).toEqual(legacyStreamResult[1].entries[0].line);
expect(data[1].fields[2].values.get(0)).toEqual('55b7a68547c4c1c88827f13f3cb680ed');
});
});
describe('lokiLegacyStreamsToDataframes', () => {
it('should enhance data frames', () => {
jest.spyOn(ResultTransformer, 'enhanceDataFrame');
const dataFrames = ResultTransformer.lokiLegacyStreamsToDataframes(
{ streams: legacyStreamResult },
{ refId: 'A' },
500,
{
derivedFields: [
{
matcherRegex: 'tracer=(w+)',
name: 'test',
url: 'example.com',
},
],
}
);
expect(ResultTransformer.enhanceDataFrame).toBeCalled();
dataFrames.forEach(frame => {
expect(
frame.fields.filter(field => field.name === 'test' && field.type === 'string').length
).toBeGreaterThanOrEqual(1);
});
});
});
describe('lokiStreamResultToDataFrame', () => {
it('converts streams to series', () => {
const data = streamResult.map(stream => ResultTransformer.lokiStreamResultToDataFrame(stream));
......@@ -128,22 +65,6 @@ describe('loki result transformer', () => {
});
describe('appendResponseToBufferedData', () => {
it('should append response', () => {
const data = new MutableDataFrame();
data.addField({ name: 'ts', type: FieldType.time, config: { title: 'Time' } });
data.addField({ name: 'line', type: FieldType.string });
data.addField({ name: 'labels', type: FieldType.other });
data.addField({ name: 'id', type: FieldType.string });
ResultTransformer.appendLegacyResponseToBufferedData({ streams: legacyStreamResult }, data);
expect(data.get(0)).toEqual({
ts: '1970-01-01T00:00:00Z',
line: "foo: 'bar'",
labels: { foo: 'bar' },
id: '2764544e18dbc3fcbeee21a573e8cd1b',
});
});
it('should return a dataframe with ts in iso format', () => {
const tailResponse: LokiTailResponse = {
streams: [
......
import _ from 'lodash';
import md5 from 'md5';
import { of } from 'rxjs';
import {
parseLabels,
FieldType,
TimeSeries,
Labels,
......@@ -12,18 +12,17 @@ import {
findUniqueLabels,
FieldConfig,
DataFrameView,
dateTime,
} from '@grafana/data';
import templateSrv from 'app/features/templating/template_srv';
import TableModel from 'app/core/table_model';
import { formatQuery, getHighlighterExpressionsFromQuery } from './query_utils';
import {
LokiLegacyStreamResult,
LokiRangeQueryRequest,
LokiResponse,
LokiMatrixResult,
LokiVectorResult,
TransformerOptions,
LokiLegacyStreamResponse,
LokiResultType,
LokiStreamResult,
LokiTailResponse,
......@@ -31,41 +30,6 @@ import {
LokiOptions,
} from './types';
import { formatQuery, getHighlighterExpressionsFromQuery } from './query_utils';
import { of } from 'rxjs';
/**
* Transforms LokiLogStream structure into a dataFrame. Used when doing standard queries and older version of Loki.
*/
export function legacyLogStreamToDataFrame(
stream: LokiLegacyStreamResult,
reverse?: boolean,
refId?: string
): DataFrame {
let labels: Labels = stream.parsedLabels;
if (!labels && stream.labels) {
labels = parseLabels(stream.labels);
}
const times = new ArrayVector<string>([]);
const timesNs = new ArrayVector<string>([]);
const lines = new ArrayVector<string>([]);
const uids = new ArrayVector<string>([]);
for (const entry of stream.entries) {
const ts = entry.ts || entry.timestamp;
// iso string with nano precision, will be truncated but is parse-able
times.add(ts);
// So this matches new format, we are losing precision here, which sucks but no easy way to keep it and this
// is for old pre 1.0.0 version Loki so probably does not affect that much.
timesNs.add(dateTime(ts).valueOf() + '000000');
lines.add(entry.line);
uids.add(createUid(ts, stream.labels, entry.line));
}
return constructDataFrame(times, timesNs, lines, uids, labels, reverse, refId);
}
/**
* Transforms LokiStreamResult structure into a dataFrame. Used when doing standard queries and newer version of Loki.
*/
......@@ -131,40 +95,6 @@ function constructDataFrame(
* @param response
* @param data Needs to have ts, line, labels, id as fields
*/
export function appendLegacyResponseToBufferedData(response: LokiLegacyStreamResponse, data: MutableDataFrame) {
// Should we do anything with: response.dropped_entries?
const streams: LokiLegacyStreamResult[] = response.streams;
if (!streams || !streams.length) {
return;
}
let baseLabels: Labels = {};
for (const f of data.fields) {
if (f.type === FieldType.string) {
if (f.labels) {
baseLabels = f.labels;
}
break;
}
}
for (const stream of streams) {
// Find unique labels
const labels = parseLabels(stream.labels);
const unique = findUniqueLabels(labels, baseLabels);
// Add each line
for (const entry of stream.entries) {
const ts = entry.ts || entry.timestamp;
data.values.ts.add(ts);
data.values.line.add(entry.line);
data.values.labels.add(unique);
data.values.id.add(createUid(ts, stream.labels, entry.line));
}
}
}
export function appendResponseToBufferedData(response: LokiTailResponse, data: MutableDataFrame) {
// Should we do anything with: response.dropped_entries?
......@@ -347,38 +277,6 @@ export function lokiStreamsToDataframes(
return series;
}
export function lokiLegacyStreamsToDataframes(
data: LokiLegacyStreamResult | LokiLegacyStreamResponse,
target: { refId: string; query?: string; regexp?: string },
limit: number,
config: LokiOptions,
reverse = false
): DataFrame[] {
if (Object.keys(data).length === 0) {
return [];
}
if (isLokiLogsStream(data)) {
return [legacyLogStreamToDataFrame(data, false, target.refId)];
}
const series: DataFrame[] = data.streams.map(stream => {
const dataFrame = legacyLogStreamToDataFrame(stream, reverse);
enhanceDataFrame(dataFrame, config);
return {
...dataFrame,
refId: target.refId,
meta: {
searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)),
limit,
},
};
});
return series;
}
/**
* Adds new fields and DataLinks to DataFrame based on DataSource instance config.
*/
......@@ -493,9 +391,3 @@ export function processRangeQueryResponse(
throw new Error(`Unknown result type "${(response.data as any).resultType}".`);
}
}
export function isLokiLogsStream(
data: LokiLegacyStreamResult | LokiLegacyStreamResponse
): data is LokiLegacyStreamResult {
return !data.hasOwnProperty('streams');
}
import { Labels, DataQuery, DataSourceJsonData } from '@grafana/data';
export interface LokiLegacyQueryRequest {
query: string;
limit?: number;
start?: number;
end?: number;
direction?: 'BACKWARD' | 'FORWARD';
regexp?: string;
refId: string;
}
import { DataQuery, DataSourceJsonData } from '@grafana/data';
export interface LokiInstantQueryRequest {
query: string;
......@@ -89,18 +78,6 @@ export interface LokiStreamResponse {
};
}
export interface LokiLegacyStreamResult {
labels: string;
entries: LokiLogsStreamEntry[];
search?: string;
parsedLabels?: Labels;
uniqueLabels?: Labels;
}
export interface LokiLegacyStreamResponse {
streams: LokiLegacyStreamResult[];
}
export interface LokiTailResponse {
streams: LokiStreamResult[];
dropped_entries?: Array<{
......@@ -109,14 +86,12 @@ export interface LokiTailResponse {
}>;
}
export type LokiResult = LokiVectorResult | LokiMatrixResult | LokiStreamResult | LokiLegacyStreamResult;
export type LokiResult = LokiVectorResult | LokiMatrixResult | LokiStreamResult;
export type LokiResponse = LokiVectorResponse | LokiMatrixResponse | LokiStreamResponse;
export interface LokiLogsStreamEntry {
line: string;
ts: string;
// Legacy, was renamed to ts
timestamp?: string;
}
export interface LokiExpression {
......