diff --git a/CHANGELOG.md b/CHANGELOG.md index df96628f5bba..a77cadfb76e9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # 6.1.0 (unreleased) -# 6.1.0-beta1 (unreleased) +# 6.1.0-beta1 (2019-03-27) ### New Features * **Prometheus**: adhoc filter support [#8253](https://github.com/grafana/grafana/issues/8253), thx [@mtanda](https://github.com/mtanda) diff --git a/conf/defaults.ini b/conf/defaults.ini index bb4157213917..ab0cc83cccd5 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -141,7 +141,7 @@ cookie_name = grafana_sess # If you use session in https only, default is false cookie_secure = false -# Session life time, default is 86400 +# Session life time, default is 86400 (means 86400 seconds or 24 hours) session_life_time = 86400 gc_interval_time = 86400 @@ -521,6 +521,16 @@ nodata_or_nullvalues = no_data # This limit will protect the server from render overloading and make sure notifications are sent out quickly concurrent_render_limit = 5 +# Default setting for alert calculation timeout. Default value is 30 +evaluation_timeout_seconds = 30 + +# Default setting for alert notification timeout. Default value is 30 +notification_timeout_seconds = 30 + +# Default setting for max attempts to sending alert notifications. Default value is 3 +max_attempts = 3 + + #################################### Explore ############################# [explore] # Enable the Explore section diff --git a/conf/sample.ini b/conf/sample.ini index 321c1120693e..8d3cc0c2a1cf 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -132,7 +132,7 @@ log_queries = # If you use session in https only, default is false ;cookie_secure = false -# Session life time, default is 86400 +# Session life time, default is 86400 (means 86400 seconds or 24 hours) ;session_life_time = 86400 #################################### Data proxy ########################### @@ -446,6 +446,16 @@ log_queries = # This limit will protect the server from render overloading and make sure notifications are sent out quickly ;concurrent_render_limit = 5 + +# Default setting for alert calculation timeout. Default value is 30 +;evaluation_timeout_seconds = 30 + +# Default setting for alert notification timeout. Default value is 30 +;notification_timeout_seconds = 30 + +# Default setting for max attempts to sending alert notifications. 
Default value is 3 +;max_attempts = 3 + #################################### Explore ############################# [explore] # Enable the Explore section diff --git a/docs/sources/http_api/alerting_notification_channels.md b/docs/sources/http_api/alerting_notification_channels.md index 633bd58a5c74..b8db1595aaa1 100644 --- a/docs/sources/http_api/alerting_notification_channels.md +++ b/docs/sources/http_api/alerting_notification_channels.md @@ -152,6 +152,7 @@ Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { + "uid": "new-alert-notification", // optional "name": "new alert notification", //Required "type": "email", //Required "isDefault": false, @@ -170,7 +171,7 @@ Content-Type: application/json { "id": 1, - "uid": "cIBgcSjkk", + "uid": "new-alert-notification", "name": "new alert notification", "type": "email", "isDefault": false, @@ -198,6 +199,7 @@ Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { + "uid": "new-alert-notification", // optional "name": "new alert notification", //Required "type": "email", //Required "isDefault": false, @@ -217,7 +219,7 @@ Content-Type: application/json { "id": 1, - "uid": "cIBgcSjkk", + "uid": "new-alert-notification", "name": "new alert notification", "type": "email", "isDefault": false, @@ -247,7 +249,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "id": 1, - "uid": "cIBgcSjkk", + "uid": "new-alert-notification", // optional "name": "new alert notification", //Required "type": "email", //Required "isDefault": false, @@ -267,7 +269,7 @@ Content-Type: application/json { "id": 1, - "uid": "cIBgcSjkk", + "uid": "new-alert-notification", "name": "new alert notification", "type": "email", "isDefault": false, diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index a3dc0c13cf33..ae96ac44eb76 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -650,6 +650,20 @@ Alert notifications can include images, but rendering many images at the same ti This limit will protect the server from render overloading and make sure notifications are sent out quickly. Default value is `5`. + +### evaluation_timeout_seconds + +Default setting for alert calculation timeout. Default value is `30` + +### notification_timeout_seconds + +Default setting for alert notification timeout. Default value is `30` + +### max_attempts + +Default setting for max attempts to sending alert notifications. Default value is `3` + + ## [panels] ### enable_alpha diff --git a/packages/grafana-ui/src/components/Input/Input.tsx b/packages/grafana-ui/src/components/Input/Input.tsx index b9b4e186ef20..b75a8c177605 100644 --- a/packages/grafana-ui/src/components/Input/Input.tsx +++ b/packages/grafana-ui/src/components/Input/Input.tsx @@ -72,7 +72,7 @@ export class Input extends PureComponent { const inputElementProps = this.populateEventPropsWithStatus(restProps, validationEvents); return ( -
+
{error && !hideErrorMessage && {error}}
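
The three alerting keys introduced above (`evaluation_timeout_seconds`, `notification_timeout_seconds`, `max_attempts`) replace constants that were previously hardcoded in `pkg/services/alerting/engine.go`. A minimal sketch of overriding them in a custom `grafana.ini` is shown below; it assumes the keys sit in the existing `[alerting]` section, which is where the accompanying `setting.go` change reads them from.

```ini
[alerting]
# Give slow data sources up to 60 seconds per rule evaluation (default is 30)
evaluation_timeout_seconds = 60
# Abort sending a notification after 30 seconds (default is 30)
notification_timeout_seconds = 30
# Allow up to 5 attempts before giving up (default is 3)
max_attempts = 5
```
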
diff --git a/packages/grafana-ui/src/components/Input/__snapshots__/Input.test.tsx.snap b/packages/grafana-ui/src/components/Input/__snapshots__/Input.test.tsx.snap index a79fc828de60..453e9cb59cf0 100644 --- a/packages/grafana-ui/src/components/Input/__snapshots__/Input.test.tsx.snap +++ b/packages/grafana-ui/src/components/Input/__snapshots__/Input.test.tsx.snap @@ -1,7 +1,13 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`Input renders correctly 1`] = ` -
+
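
Going back to the notification-channel API change earlier in this patch: the create and update payloads now accept an optional `uid`, and the update-by-uid handler falls back to the existing uid when the field is omitted. A hedged TypeScript sketch of calling the API with an explicit uid follows; the base URL, API key, endpoint paths, and email settings are placeholders for illustration, not values taken from this diff.

```ts
const baseUrl = 'http://localhost:3000'; // assumed local Grafana instance
const headers = {
  'Content-Type': 'application/json',
  Authorization: 'Bearer <api key>', // placeholder token
};

async function createAndRenameChannel() {
  // Create a channel with a caller-chosen uid instead of a generated one
  await fetch(`${baseUrl}/api/alert-notifications`, { // assumed create endpoint
    method: 'POST',
    headers,
    body: JSON.stringify({
      uid: 'new-alert-notification', // optional; a uid is generated when omitted
      name: 'new alert notification',
      type: 'email',
      isDefault: false,
      settings: { addresses: 'alerts@example.com' }, // illustrative email settings
    }),
  });

  // Update by uid; sending a different uid in the body changes the channel's uid,
  // while omitting it keeps the current one (see UpdateAlertNotificationWithUid)
  await fetch(`${baseUrl}/api/alert-notifications/uid/new-alert-notification`, { // assumed path
    method: 'PUT',
    headers,
    body: JSON.stringify({
      uid: 'renamed-alert-notification',
      name: 'new alert notification',
      type: 'email',
      isDefault: false,
      sendReminder: false,
    }),
  });
}

createAndRenameChannel().catch(console.error);
```
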
diff --git a/packages/grafana-ui/src/components/Switch/Switch.tsx b/packages/grafana-ui/src/components/Switch/Switch.tsx index 4f534c97a3ae..e2a256825192 100644 --- a/packages/grafana-ui/src/components/Switch/Switch.tsx +++ b/packages/grafana-ui/src/components/Switch/Switch.tsx @@ -1,6 +1,5 @@ import React, { PureComponent } from 'react'; import uniqueId from 'lodash/uniqueId'; -import { Input } from '@grafana/ui'; export interface Props { label: string; @@ -39,7 +38,7 @@ export class Switch extends PureComponent { diff --git a/packages/grafana-ui/src/components/ThresholdsEditor/__snapshots__/ThresholdsEditor.test.tsx.snap b/packages/grafana-ui/src/components/ThresholdsEditor/__snapshots__/ThresholdsEditor.test.tsx.snap index 91b2577dec29..bcdf81ed134c 100644 --- a/packages/grafana-ui/src/components/ThresholdsEditor/__snapshots__/ThresholdsEditor.test.tsx.snap +++ b/packages/grafana-ui/src/components/ThresholdsEditor/__snapshots__/ThresholdsEditor.test.tsx.snap @@ -467,7 +467,13 @@ exports[`Render should render with base threshold 1`] = ` type="text" value="Base" > -
+
{ + it('returns no labels on empty labels string', () => { + expect(parseLabels('')).toEqual({}); + expect(parseLabels('{}')).toEqual({}); + }); + + it('returns labels on labels string', () => { + expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: 'bar', baz: '42' }); + }); +}); + +describe('formatLabels()', () => { + it('returns no labels on empty label set', () => { + expect(formatLabels({})).toEqual(''); + expect(formatLabels({}, 'foo')).toEqual('foo'); + }); + + it('returns label string on label set', () => { + expect(formatLabels({ foo: 'bar', baz: '42' })).toEqual('{baz="42", foo="bar"}'); + }); +}); + +describe('findCommonLabels()', () => { + it('returns no common labels on empty sets', () => { + expect(findCommonLabels([{}])).toEqual({}); + expect(findCommonLabels([{}, {}])).toEqual({}); + }); + + it('returns no common labels on differing sets', () => { + expect(findCommonLabels([{ foo: 'bar' }, {}])).toEqual({}); + expect(findCommonLabels([{}, { foo: 'bar' }])).toEqual({}); + expect(findCommonLabels([{ baz: '42' }, { foo: 'bar' }])).toEqual({}); + expect(findCommonLabels([{ foo: '42', baz: 'bar' }, { foo: 'bar' }])).toEqual({}); + }); + + it('returns the single labels set as common labels', () => { + expect(findCommonLabels([{ foo: 'bar' }])).toEqual({ foo: 'bar' }); + }); +}); + +describe('findUniqueLabels()', () => { + it('returns no uncommon labels on empty sets', () => { + expect(findUniqueLabels({}, {})).toEqual({}); + }); + + it('returns all labels given no common labels', () => { + expect(findUniqueLabels({ foo: '"bar"' }, {})).toEqual({ foo: '"bar"' }); + }); + + it('returns all labels except the common labels', () => { + expect(findUniqueLabels({ foo: '"bar"', baz: '"42"' }, { foo: '"bar"' })).toEqual({ baz: '"42"' }); + }); +}); diff --git a/packages/grafana-ui/src/utils/labels.ts b/packages/grafana-ui/src/utils/labels.ts new file mode 100644 index 000000000000..c2a94a1aaa4d --- /dev/null +++ b/packages/grafana-ui/src/utils/labels.ts @@ -0,0 +1,75 @@ +import { Labels } from '../types/data'; + +/** + * Regexp to extract Prometheus-style labels + */ +const labelRegexp = /\b(\w+)(!?=~?)"([^"\n]*?)"/g; + +/** + * Returns a map of label keys to value from an input selector string. + * + * Example: `parseLabels('{job="foo", instance="bar"}) // {job: "foo", instance: "bar"}` + */ +export function parseLabels(labels: string): Labels { + const labelsByKey: Labels = {}; + labels.replace(labelRegexp, (_, key, operator, value) => { + labelsByKey[key] = value; + return ''; + }); + return labelsByKey; +} + +/** + * Returns a map labels that are common to the given label sets. + */ +export function findCommonLabels(labelsSets: Labels[]): Labels { + return labelsSets.reduce( + (acc, labels) => { + if (!labels) { + throw new Error('Need parsed labels to find common labels.'); + } + if (!acc) { + // Initial set + acc = { ...labels }; + } else { + // Remove incoming labels that are missing or not matching in value + Object.keys(labels).forEach(key => { + if (acc[key] === undefined || acc[key] !== labels[key]) { + delete acc[key]; + } + }); + // Remove common labels that are missing from incoming label set + Object.keys(acc).forEach(key => { + if (labels[key] === undefined) { + delete acc[key]; + } + }); + } + return acc; + }, + (undefined as unknown) as Labels + ); +} + +/** + * Returns a map of labels that are in `labels`, but not in `commonLabels`. 
+ */ +export function findUniqueLabels(labels: Labels, commonLabels: Labels): Labels { + const uncommonLabels: Labels = { ...labels }; + Object.keys(commonLabels).forEach(key => { + delete uncommonLabels[key]; + }); + return uncommonLabels; +} + +/** + * Serializes the given labels to a string. + */ +export function formatLabels(labels: Labels, defaultValue = ''): string { + if (!labels || Object.keys(labels).length === 0) { + return defaultValue; + } + const labelKeys = Object.keys(labels).sort(); + const cleanSelector = labelKeys.map(key => `${key}="${labels[key]}"`).join(', '); + return ['{', cleanSelector, '}'].join(''); +} diff --git a/packages/grafana-ui/src/utils/logs.test.ts b/packages/grafana-ui/src/utils/logs.test.ts new file mode 100644 index 000000000000..51c526b7d98a --- /dev/null +++ b/packages/grafana-ui/src/utils/logs.test.ts @@ -0,0 +1,27 @@ +import { LogLevel } from '../types/logs'; +import { getLogLevel } from './logs'; + +describe('getLoglevel()', () => { + it('returns no log level on empty line', () => { + expect(getLogLevel('')).toBe(LogLevel.unknown); + }); + + it('returns no log level on when level is part of a word', () => { + expect(getLogLevel('this is information')).toBe(LogLevel.unknown); + }); + + it('returns same log level for long and short version', () => { + expect(getLogLevel('[Warn]')).toBe(LogLevel.warning); + expect(getLogLevel('[Warning]')).toBe(LogLevel.warning); + expect(getLogLevel('[Warn]')).toBe('warning'); + }); + + it('returns log level on line contains a log level', () => { + expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn); + expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn); + }); + + it('returns first log level found', () => { + expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn); + }); +}); diff --git a/packages/grafana-ui/src/utils/logs.ts b/packages/grafana-ui/src/utils/logs.ts new file mode 100644 index 000000000000..fb8c7977e2ad --- /dev/null +++ b/packages/grafana-ui/src/utils/logs.ts @@ -0,0 +1,35 @@ +import { LogLevel } from '../types/logs'; +import { SeriesData, FieldType } from '../types/data'; + +/** + * Returns the log level of a log line. + * Parse the line for level words. If no level is found, it returns `LogLevel.unknown`. 
+ * + * Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn` + */ +export function getLogLevel(line: string): LogLevel { + if (!line) { + return LogLevel.unknown; + } + for (const key of Object.keys(LogLevel)) { + const regexp = new RegExp(`\\b${key}\\b`, 'i'); + if (regexp.test(line)) { + const level = (LogLevel as any)[key]; + if (level) { + return level; + } + } + } + return LogLevel.unknown; +} + +export function addLogLevelToSeries(series: SeriesData, lineIndex: number): SeriesData { + return { + ...series, // Keeps Tags, RefID etc + fields: [...series.fields, { name: 'LogLevel', type: FieldType.string }], + rows: series.rows.map(row => { + const line = row[lineIndex]; + return [...row, getLogLevel(line)]; + }), + }; +} diff --git a/packages/grafana-ui/src/utils/processSeriesData.test.ts b/packages/grafana-ui/src/utils/processSeriesData.test.ts index 1767a02d7289..03b1a889db25 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.test.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.test.ts @@ -1,5 +1,12 @@ -import { toSeriesData, guessFieldTypes, guessFieldTypeFromValue } from './processSeriesData'; -import { FieldType } from '../types/data'; +import { + isSeriesData, + toLegacyResponseData, + isTableData, + toSeriesData, + guessFieldTypes, + guessFieldTypeFromValue, +} from './processSeriesData'; +import { FieldType, TimeSeries } from '../types/data'; import moment from 'moment'; describe('toSeriesData', () => { @@ -63,3 +70,33 @@ describe('toSeriesData', () => { expect(norm.fields[3].type).toBe(FieldType.time); // based on name }); }); + +describe('SerisData backwards compatibility', () => { + it('converts TimeSeries to series and back again', () => { + const timeseries = { + target: 'Field Name', + datapoints: [[100, 1], [200, 2]], + }; + const series = toSeriesData(timeseries); + expect(isSeriesData(timeseries)).toBeFalsy(); + expect(isSeriesData(series)).toBeTruthy(); + + const roundtrip = toLegacyResponseData(series) as TimeSeries; + expect(isSeriesData(roundtrip)).toBeFalsy(); + expect(roundtrip.target).toBe(timeseries.target); + }); + + it('converts TableData to series and back again', () => { + const table = { + columns: [{ text: 'a', unit: 'ms' }, { text: 'b', unit: 'zz' }, { text: 'c', unit: 'yy' }], + rows: [[100, 1, 'a'], [200, 2, 'a']], + }; + const series = toSeriesData(table); + expect(isTableData(table)).toBeTruthy(); + expect(isSeriesData(series)).toBeTruthy(); + + const roundtrip = toLegacyResponseData(series) as TimeSeries; + expect(isTableData(roundtrip)).toBeTruthy(); + expect(roundtrip).toMatchObject(table); + }); +}); diff --git a/packages/grafana-ui/src/utils/processSeriesData.ts b/packages/grafana-ui/src/utils/processSeriesData.ts index e0cd065d3868..d573947a8609 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.ts @@ -35,6 +35,7 @@ function convertTimeSeriesToSeriesData(timeSeries: TimeSeries): SeriesData { }, ], rows: timeSeries.datapoints, + labels: timeSeries.tags, }; } @@ -88,7 +89,7 @@ export function guessFieldTypeFromValue(v: any): FieldType { /** * Looks at the data to guess the column type. This ignores any existing setting */ -function guessFieldTypeFromTable(series: SeriesData, index: number): FieldType | undefined { +export function guessFieldTypeFromSeries(series: SeriesData, index: number): FieldType | undefined { const column = series.fields[index]; // 1. 
Use the column name to guess @@ -128,7 +129,7 @@ export const guessFieldTypes = (series: SeriesData): SeriesData => { // Replace it with a calculated version return { ...field, - type: guessFieldTypeFromTable(series, index), + type: guessFieldTypeFromSeries(series, index), }; }), }; @@ -157,6 +158,32 @@ export const toSeriesData = (data: any): SeriesData => { throw new Error('Unsupported data format'); }; +export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData => { + const { fields, rows } = series; + + if (fields.length === 2) { + const type = guessFieldTypeFromSeries(series, 1); + if (type === FieldType.time) { + return { + target: fields[0].name || series.name, + datapoints: rows, + unit: fields[0].unit, + } as TimeSeries; + } + } + + return { + columns: fields.map(f => { + return { + text: f.name, + filterable: f.filterable, + unit: f.unit, + }; + }), + rows, + }; +}; + export function sortSeriesData(data: SeriesData, sortIndex?: number, reverse = false): SeriesData { if (isNumber(sortIndex)) { const copy = { diff --git a/pkg/models/alert_notifications.go b/pkg/models/alert_notifications.go index 3b8f071c75b5..1d445b5eb72e 100644 --- a/pkg/models/alert_notifications.go +++ b/pkg/models/alert_notifications.go @@ -54,6 +54,7 @@ type CreateAlertNotificationCommand struct { type UpdateAlertNotificationCommand struct { Id int64 `json:"id" binding:"Required"` + Uid string `json:"uid"` Name string `json:"name" binding:"Required"` Type string `json:"type" binding:"Required"` SendReminder bool `json:"sendReminder"` @@ -68,6 +69,7 @@ type UpdateAlertNotificationCommand struct { type UpdateAlertNotificationWithUidCommand struct { Uid string `json:"-"` + NewUid string `json:"uid"` Name string `json:"name" binding:"Required"` Type string `json:"type" binding:"Required"` SendReminder bool `json:"sendReminder"` diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index 22cbe2456b70..db8079583878 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -104,10 +104,6 @@ func (e *AlertingService) runJobDispatcher(grafanaCtx context.Context) error { var ( unfinishedWorkTimeout = time.Second * 5 - // TODO: Make alertTimeout and alertMaxAttempts configurable in the config file. 
- alertTimeout = time.Second * 30 - resultHandleTimeout = time.Second * 30 - alertMaxAttempts = 3 ) func (e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *Job) error { @@ -117,7 +113,7 @@ func (e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *J } }() - cancelChan := make(chan context.CancelFunc, alertMaxAttempts*2) + cancelChan := make(chan context.CancelFunc, setting.AlertingMaxAttempts*2) attemptChan := make(chan int, 1) // Initialize with first attemptID=1 @@ -161,7 +157,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel } }() - alertCtx, cancelFn := context.WithTimeout(context.Background(), alertTimeout) + alertCtx, cancelFn := context.WithTimeout(context.Background(), setting.AlertingEvaluationTimeout) cancelChan <- cancelFn span := opentracing.StartSpan("alert execution") alertCtx = opentracing.ContextWithSpan(alertCtx, span) @@ -197,7 +193,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel tlog.Error(evalContext.Error), tlog.String("message", "alerting execution attempt failed"), ) - if attemptID < alertMaxAttempts { + if attemptID < setting.AlertingMaxAttempts { span.Finish() e.log.Debug("Job Execution attempt triggered retry", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) attemptChan <- (attemptID + 1) @@ -206,7 +202,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel } // create new context with timeout for notifications - resultHandleCtx, resultHandleCancelFn := context.WithTimeout(context.Background(), resultHandleTimeout) + resultHandleCtx, resultHandleCancelFn := context.WithTimeout(context.Background(), setting.AlertingNotificationTimeout) cancelChan <- resultHandleCancelFn // override the context used for evaluation with a new context for notifications. diff --git a/pkg/services/alerting/engine_integration_test.go b/pkg/services/alerting/engine_integration_test.go index aa518baae247..3d54bdc3b4af 100644 --- a/pkg/services/alerting/engine_integration_test.go +++ b/pkg/services/alerting/engine_integration_test.go @@ -11,20 +11,22 @@ import ( "testing" "time" + "github.com/grafana/grafana/pkg/setting" . "github.com/smartystreets/goconvey/convey" ) func TestEngineTimeouts(t *testing.T) { Convey("Alerting engine timeout tests", t, func() { engine := NewEngine() + setting.AlertingNotificationTimeout = 30 * time.Second + setting.AlertingMaxAttempts = 3 engine.resultHandler = &FakeResultHandler{} job := &Job{Running: true, Rule: &Rule{}} Convey("Should trigger as many retries as needed", func() { Convey("pended alert for datasource -> result handler should be worked", func() { // reduce alert timeout to test quickly - originAlertTimeout := alertTimeout - alertTimeout = 2 * time.Second + setting.AlertingEvaluationTimeout = 30 * time.Second transportTimeoutInterval := 2 * time.Second serverBusySleepDuration := 1 * time.Second @@ -39,7 +41,7 @@ func TestEngineTimeouts(t *testing.T) { So(resultHandler.ResultHandleSucceed, ShouldEqual, true) // initialize for other tests. 
- alertTimeout = originAlertTimeout + setting.AlertingEvaluationTimeout = 2 * time.Second engine.resultHandler = &FakeResultHandler{} }) }) diff --git a/pkg/services/alerting/engine_test.go b/pkg/services/alerting/engine_test.go index 63108bbb9aae..2e2ed0c9b163 100644 --- a/pkg/services/alerting/engine_test.go +++ b/pkg/services/alerting/engine_test.go @@ -6,7 +6,9 @@ import ( "math" "testing" + "github.com/grafana/grafana/pkg/setting" . "github.com/smartystreets/goconvey/convey" + "time" ) type FakeEvalHandler struct { @@ -37,6 +39,9 @@ func (handler *FakeResultHandler) Handle(evalContext *EvalContext) error { func TestEngineProcessJob(t *testing.T) { Convey("Alerting engine job processing", t, func() { engine := NewEngine() + setting.AlertingEvaluationTimeout = 30 * time.Second + setting.AlertingNotificationTimeout = 30 * time.Second + setting.AlertingMaxAttempts = 3 engine.resultHandler = &FakeResultHandler{} job := &Job{Running: true, Rule: &Rule{}} @@ -45,9 +50,9 @@ func TestEngineProcessJob(t *testing.T) { Convey("error + not last attempt -> retry", func() { engine.evalHandler = NewFakeEvalHandler(0) - for i := 1; i < alertMaxAttempts; i++ { + for i := 1; i < setting.AlertingMaxAttempts; i++ { attemptChan := make(chan int, 1) - cancelChan := make(chan context.CancelFunc, alertMaxAttempts) + cancelChan := make(chan context.CancelFunc, setting.AlertingMaxAttempts) engine.processJob(i, attemptChan, cancelChan, job) nextAttemptID, more := <-attemptChan @@ -61,9 +66,9 @@ func TestEngineProcessJob(t *testing.T) { Convey("error + last attempt -> no retry", func() { engine.evalHandler = NewFakeEvalHandler(0) attemptChan := make(chan int, 1) - cancelChan := make(chan context.CancelFunc, alertMaxAttempts) + cancelChan := make(chan context.CancelFunc, setting.AlertingMaxAttempts) - engine.processJob(alertMaxAttempts, attemptChan, cancelChan, job) + engine.processJob(setting.AlertingMaxAttempts, attemptChan, cancelChan, job) nextAttemptID, more := <-attemptChan So(nextAttemptID, ShouldEqual, 0) @@ -74,7 +79,7 @@ func TestEngineProcessJob(t *testing.T) { Convey("no error -> no retry", func() { engine.evalHandler = NewFakeEvalHandler(1) attemptChan := make(chan int, 1) - cancelChan := make(chan context.CancelFunc, alertMaxAttempts) + cancelChan := make(chan context.CancelFunc, setting.AlertingMaxAttempts) engine.processJob(1, attemptChan, cancelChan, job) nextAttemptID, more := <-attemptChan @@ -88,7 +93,7 @@ func TestEngineProcessJob(t *testing.T) { Convey("Should trigger as many retries as needed", func() { Convey("never success -> max retries number", func() { - expectedAttempts := alertMaxAttempts + expectedAttempts := setting.AlertingMaxAttempts evalHandler := NewFakeEvalHandler(0) engine.evalHandler = evalHandler @@ -106,7 +111,7 @@ func TestEngineProcessJob(t *testing.T) { }) Convey("some errors before success -> some retries", func() { - expectedAttempts := int(math.Ceil(float64(alertMaxAttempts) / 2)) + expectedAttempts := int(math.Ceil(float64(setting.AlertingMaxAttempts) / 2)) evalHandler := NewFakeEvalHandler(expectedAttempts) engine.evalHandler = evalHandler diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index 1a717ae2b542..1a0a910a5d32 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -127,7 +127,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { renderOpts := rendering.Opts{ Width: 1000, Height: 500, - Timeout: time.Duration(float64(alertTimeout) * 0.9), + Timeout: 
time.Duration(setting.AlertingEvaluationTimeout.Seconds() * 0.9), OrgId: context.Rule.OrgId, OrgRole: m.ROLE_ADMIN, ConcurrentLimit: setting.AlertingRenderLimit, diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go index b90e3c8d20b5..7c3764191616 100644 --- a/pkg/services/sqlstore/alert_notification.go +++ b/pkg/services/sqlstore/alert_notification.go @@ -317,6 +317,10 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error { current.SendReminder = cmd.SendReminder current.DisableResolveMessage = cmd.DisableResolveMessage + if cmd.Uid != "" { + current.Uid = cmd.Uid + } + if current.SendReminder { if cmd.Frequency == "" { return m.ErrNotificationFrequencyNotFound @@ -356,8 +360,13 @@ func UpdateAlertNotificationWithUid(cmd *m.UpdateAlertNotificationWithUidCommand return fmt.Errorf("Cannot update, alert notification uid %s doesn't exist", cmd.Uid) } + if cmd.NewUid == "" { + cmd.NewUid = cmd.Uid + } + updateNotification := &m.UpdateAlertNotificationCommand{ Id: current.Id, + Uid: cmd.NewUid, Name: cmd.Name, Type: cmd.Type, SendReminder: cmd.SendReminder, diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 8c6d8c54f111..c97b85d68e5f 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -179,6 +179,10 @@ var ( AlertingErrorOrTimeout string AlertingNoDataOrNullValues string + AlertingEvaluationTimeout time.Duration + AlertingNotificationTimeout time.Duration + AlertingMaxAttempts int + // Explore UI ExploreEnabled bool @@ -760,6 +764,10 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { AlertingErrorOrTimeout = alerting.Key("error_or_timeout").MustString("alerting") AlertingNoDataOrNullValues = alerting.Key("nodata_or_nullvalues").MustString("no_data") + AlertingEvaluationTimeout = alerting.Key("evaluation_timeout_seconds").MustDuration(time.Second * 30) + AlertingNotificationTimeout = alerting.Key("notification_timeout_seconds").MustDuration(time.Second * 30) + AlertingMaxAttempts = alerting.Key("max_attempts").MustInt(3) + explore := iniFile.Section("explore") ExploreEnabled = explore.Key("enabled").MustBool(true) diff --git a/public/app/core/logs_model.ts b/public/app/core/logs_model.ts index 115ef3ee8739..9d1ca2f44b79 100644 --- a/public/app/core/logs_model.ts +++ b/public/app/core/logs_model.ts @@ -1,30 +1,8 @@ import _ from 'lodash'; -import { colors, TimeSeries } from '@grafana/ui'; +import { colors, TimeSeries, Labels, LogLevel } from '@grafana/ui'; import { getThemeColor } from 'app/core/utils/colors'; -/** - * Mapping of log level abbreviation to canonical log level. - * Supported levels are reduce to limit color variation. 
- */ -export enum LogLevel { - emerg = 'critical', - alert = 'critical', - crit = 'critical', - critical = 'critical', - warn = 'warning', - warning = 'warning', - err = 'error', - eror = 'error', - error = 'error', - info = 'info', - notice = 'info', - dbug = 'debug', - debug = 'debug', - trace = 'trace', - unknown = 'unknown', -} - export const LogLevelColor = { [LogLevel.critical]: colors[7], [LogLevel.warning]: colors[1], @@ -46,7 +24,7 @@ export interface LogRowModel { entry: string; hasAnsi: boolean; key: string; // timestamp + labels - labels: LogsStreamLabels; + labels: Labels; logLevel: LogLevel; raw: string; searchWords?: string[]; @@ -54,7 +32,7 @@ export interface LogRowModel { timeFromNow: string; timeEpochMs: number; timeLocal: string; - uniqueLabels?: LogsStreamLabels; + uniqueLabels?: Labels; } export interface LogLabelStatsModel { @@ -72,7 +50,7 @@ export enum LogsMetaKind { export interface LogsMetaItem { label: string; - value: string | number | LogsStreamLabels; + value: string | number | Labels; kind: LogsMetaKind; } @@ -88,8 +66,8 @@ export interface LogsStream { labels: string; entries: LogsStreamEntry[]; search?: string; - parsedLabels?: LogsStreamLabels; - uniqueLabels?: LogsStreamLabels; + parsedLabels?: Labels; + uniqueLabels?: Labels; } export interface LogsStreamEntry { @@ -99,10 +77,6 @@ export interface LogsStreamEntry { timestamp?: string; } -export interface LogsStreamLabels { - [key: string]: string; -} - export enum LogsDedupDescription { none = 'No de-duplication', exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.', diff --git a/public/app/features/dashboard/state/PanelModel.ts b/public/app/features/dashboard/state/PanelModel.ts index 9f54f8e98ee5..41ef56e0e016 100644 --- a/public/app/features/dashboard/state/PanelModel.ts +++ b/public/app/features/dashboard/state/PanelModel.ts @@ -6,7 +6,7 @@ import { Emitter } from 'app/core/utils/emitter'; import { getNextRefIdChar } from 'app/core/utils/query'; // Types -import { DataQuery, TimeSeries, Threshold, ScopedVars, TableData } from '@grafana/ui'; +import { DataQuery, Threshold, ScopedVars, DataQueryResponseData } from '@grafana/ui'; import { PanelPlugin } from 'app/types'; import config from 'app/core/config'; @@ -97,7 +97,7 @@ export class PanelModel { thresholds?: any; pluginVersion?: string; - snapshotData?: TimeSeries[] | [TableData]; + snapshotData?: DataQueryResponseData[]; timeFrom?: any; timeShift?: any; hideTimeOverride?: any; diff --git a/public/app/features/explore/LogLabels.tsx b/public/app/features/explore/LogLabels.tsx index 24d6e1ec23c6..f89836055c53 100644 --- a/public/app/features/explore/LogLabels.tsx +++ b/public/app/features/explore/LogLabels.tsx @@ -1,11 +1,12 @@ import React, { PureComponent } from 'react'; -import { LogsStreamLabels, LogRowModel } from 'app/core/logs_model'; +import { LogRowModel } from 'app/core/logs_model'; import { LogLabel } from './LogLabel'; +import { Labels } from '@grafana/ui'; interface Props { getRows?: () => LogRowModel[]; - labels: LogsStreamLabels; + labels: Labels; plain?: boolean; onClickLabel?: (label: string, value: string) => void; } diff --git a/public/app/features/explore/Logs.tsx b/public/app/features/explore/Logs.tsx index e5a0f7629040..486af10ff91b 100644 --- a/public/app/features/explore/Logs.tsx +++ b/public/app/features/explore/Logs.tsx @@ -2,10 +2,10 @@ import _ from 'lodash'; import React, { PureComponent } from 'react'; import * as rangeUtil from 'app/core/utils/rangeutil'; -import { RawTimeRange, 
Switch } from '@grafana/ui'; +import { RawTimeRange, Switch, LogLevel } from '@grafana/ui'; import TimeSeries from 'app/core/time_series2'; -import { LogsDedupDescription, LogsDedupStrategy, LogsModel, LogLevel, LogsMetaKind } from 'app/core/logs_model'; +import { LogsDedupDescription, LogsDedupStrategy, LogsModel, LogsMetaKind } from 'app/core/logs_model'; import ToggleButtonGroup, { ToggleButton } from 'app/core/components/ToggleButtonGroup/ToggleButtonGroup'; diff --git a/public/app/features/explore/LogsContainer.tsx b/public/app/features/explore/LogsContainer.tsx index 6c2c5cd96e2a..bb4833f42009 100644 --- a/public/app/features/explore/LogsContainer.tsx +++ b/public/app/features/explore/LogsContainer.tsx @@ -1,10 +1,10 @@ import React, { PureComponent } from 'react'; import { hot } from 'react-hot-loader'; import { connect } from 'react-redux'; -import { RawTimeRange, TimeRange } from '@grafana/ui'; +import { RawTimeRange, TimeRange, LogLevel } from '@grafana/ui'; import { ExploreId, ExploreItemState } from 'app/types/explore'; -import { LogsModel, LogsDedupStrategy, LogLevel } from 'app/core/logs_model'; +import { LogsModel, LogsDedupStrategy } from 'app/core/logs_model'; import { StoreState } from 'app/types'; import { toggleLogs, changeDedupStrategy } from './state/actions'; diff --git a/public/app/features/explore/state/actionTypes.ts b/public/app/features/explore/state/actionTypes.ts index 360e509aab46..e6c606f7117a 100644 --- a/public/app/features/explore/state/actionTypes.ts +++ b/public/app/features/explore/state/actionTypes.ts @@ -7,6 +7,7 @@ import { DataSourceSelectItem, DataSourceApi, QueryFixAction, + LogLevel, } from '@grafana/ui/src/types'; import { ExploreId, @@ -18,7 +19,6 @@ import { ExploreUIState, } from 'app/types/explore'; import { actionCreatorFactory, noPayloadActionCreatorFactory, ActionOf } from 'app/core/redux/actionCreatorFactory'; -import { LogLevel } from 'app/core/logs_model'; /** Higher order actions * diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 3e217369b158..e596d4d3856e 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -6,6 +6,7 @@ import { PanelCtrl } from 'app/features/panel/panel_ctrl'; import { getExploreUrl } from 'app/core/utils/explore'; import { applyPanelTimeOverrides, getResolution } from 'app/features/dashboard/utils/panel'; import { ContextSrv } from 'app/core/services/context_srv'; +import { toLegacyResponseData, isSeriesData } from '@grafana/ui'; class MetricsPanelCtrl extends PanelCtrl { scope: any; @@ -188,7 +189,14 @@ class MetricsPanelCtrl extends PanelCtrl { result = { data: [] }; } - this.events.emit('data-received', result.data); + // Make sure the data is TableData | TimeSeries + const data = result.data.map(v => { + if (isSeriesData(v)) { + return toLegacyResponseData(v); + } + return v; + }); + this.events.emit('data-received', data); } handleDataStream(stream) { diff --git a/public/app/features/templating/DefaultVariableQueryEditor.tsx b/public/app/features/templating/DefaultVariableQueryEditor.tsx index f1326799115f..7e69b3b8a00d 100644 --- a/public/app/features/templating/DefaultVariableQueryEditor.tsx +++ b/public/app/features/templating/DefaultVariableQueryEditor.tsx @@ -8,13 +8,13 @@ export default class DefaultVariableQueryEditor extends PureComponent) => { + this.setState({ value: event.currentTarget.value }); + }; - handleBlur(event) { - this.props.onChange(event.target.value, 
event.target.value); - } + onBlur = (event: React.FormEvent) => { + this.props.onChange(event.currentTarget.value, event.currentTarget.value); + }; render() { return ( @@ -24,8 +24,8 @@ export default class DefaultVariableQueryEditor extends PureComponent diff --git a/public/app/plugins/datasource/elasticsearch/elastic_response.ts b/public/app/plugins/datasource/elasticsearch/elastic_response.ts index 49f33e2963a2..52fecd79e8b6 100644 --- a/public/app/plugins/datasource/elasticsearch/elastic_response.ts +++ b/public/app/plugins/datasource/elasticsearch/elastic_response.ts @@ -156,6 +156,14 @@ export class ElasticResponse { } break; } + case 'percentiles': { + const percentiles = bucket[metric.id].values; + + for (const percentileName in percentiles) { + addMetricValue(values, `p${percentileName} ${metric.field}`, percentiles[percentileName]); + } + break; + } default: { let metricName = this.getMetricName(metric.type); const otherMetrics = _.filter(target.metrics, { type: metric.type }); diff --git a/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts b/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts index bedc71a0b584..1c6bcc863323 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts @@ -582,6 +582,59 @@ describe('ElasticResponse', () => { }); }); + describe('No group by time with percentiles ', () => { + let result; + + beforeEach(() => { + targets = [ + { + refId: 'A', + metrics: [{ type: 'percentiles', field: 'value', settings: { percents: [75, 90] }, id: '1' }], + bucketAggs: [{ type: 'term', field: 'id', id: '3' }], + }, + ]; + response = { + responses: [ + { + aggregations: { + '3': { + buckets: [ + { + '1': { values: { '75': 3.3, '90': 5.5 } }, + doc_count: 10, + key: 'id1', + }, + { + '1': { values: { '75': 2.3, '90': 4.5 } }, + doc_count: 15, + key: 'id2', + }, + ], + }, + }, + }, + ], + }; + + result = new ElasticResponse(targets, response).getTimeSeries(); + }); + + it('should return table', () => { + expect(result.data.length).toBe(1); + expect(result.data[0].type).toBe('table'); + expect(result.data[0].columns[0].text).toBe('id'); + expect(result.data[0].columns[1].text).toBe('p75 value'); + expect(result.data[0].columns[2].text).toBe('p90 value'); + expect(result.data[0].rows.length).toBe(2); + expect(result.data[0].rows[0][0]).toBe('id1'); + expect(result.data[0].rows[0][1]).toBe(3.3); + expect(result.data[0].rows[0][2]).toBe(5.5); + expect(result.data[0].rows[1][0]).toBe('id2'); + expect(result.data[0].rows[1][1]).toBe(2.3); + expect(result.data[0].rows[1][2]).toBe(4.5); + }); + }); + describe('Multiple metrics of same type', () => { beforeEach(() => { targets = [ diff --git a/public/app/plugins/datasource/graphite/graphite_query.ts b/public/app/plugins/datasource/graphite/graphite_query.ts index adbcde69ad75..fb3cb69f6ceb 100644 --- a/public/app/plugins/datasource/graphite/graphite_query.ts +++ b/public/app/plugins/datasource/graphite/graphite_query.ts @@ -19,6 +19,7 @@ export default class GraphiteQuery { this.datasource = datasource; this.target = target; this.templateSrv = templateSrv; + this.scopedVars = scopedVars; this.parseTarget(); this.removeTagValue = '-- remove tag --'; @@ -162,7 +163,9 @@ export default class GraphiteQuery { updateModelTarget(targets) { const wrapFunction = (target: string, func: any) => { - return func.render(target, this.templateSrv.replace); + return func.render(target, (value: 
string) => { + return this.templateSrv.replace(value, this.scopedVars); + }); }; if (!this.target.textEditor) { diff --git a/public/app/plugins/datasource/loki/result_transformer.test.ts b/public/app/plugins/datasource/loki/result_transformer.test.ts index bf4331620034..7f9c9186ea64 100644 --- a/public/app/plugins/datasource/loki/result_transformer.test.ts +++ b/public/app/plugins/datasource/loki/result_transformer.test.ts @@ -1,92 +1,6 @@ -import { LogLevel, LogsStream } from 'app/core/logs_model'; +import { LogsStream } from 'app/core/logs_model'; -import { - findCommonLabels, - findUniqueLabels, - formatLabels, - getLogLevel, - mergeStreamsToLogs, - parseLabels, -} from './result_transformer'; - -describe('getLoglevel()', () => { - it('returns no log level on empty line', () => { - expect(getLogLevel('')).toBe(LogLevel.unknown); - }); - - it('returns no log level on when level is part of a word', () => { - expect(getLogLevel('this is information')).toBe(LogLevel.unknown); - }); - - it('returns same log level for long and short version', () => { - expect(getLogLevel('[Warn]')).toBe(LogLevel.warning); - expect(getLogLevel('[Warning]')).toBe(LogLevel.warning); - expect(getLogLevel('[Warn]')).toBe('warning'); - }); - - it('returns log level on line contains a log level', () => { - expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn); - expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn); - }); - - it('returns first log level found', () => { - expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn); - }); -}); - -describe('parseLabels()', () => { - it('returns no labels on empty labels string', () => { - expect(parseLabels('')).toEqual({}); - expect(parseLabels('{}')).toEqual({}); - }); - - it('returns labels on labels string', () => { - expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: 'bar', baz: '42' }); - }); -}); - -describe('formatLabels()', () => { - it('returns no labels on empty label set', () => { - expect(formatLabels({})).toEqual(''); - expect(formatLabels({}, 'foo')).toEqual('foo'); - }); - - it('returns label string on label set', () => { - expect(formatLabels({ foo: 'bar', baz: '42' })).toEqual('{baz="42", foo="bar"}'); - }); -}); - -describe('findCommonLabels()', () => { - it('returns no common labels on empty sets', () => { - expect(findCommonLabels([{}])).toEqual({}); - expect(findCommonLabels([{}, {}])).toEqual({}); - }); - - it('returns no common labels on differing sets', () => { - expect(findCommonLabels([{ foo: 'bar' }, {}])).toEqual({}); - expect(findCommonLabels([{}, { foo: 'bar' }])).toEqual({}); - expect(findCommonLabels([{ baz: '42' }, { foo: 'bar' }])).toEqual({}); - expect(findCommonLabels([{ foo: '42', baz: 'bar' }, { foo: 'bar' }])).toEqual({}); - }); - - it('returns the single labels set as common labels', () => { - expect(findCommonLabels([{ foo: 'bar' }])).toEqual({ foo: 'bar' }); - }); -}); - -describe('findUniqueLabels()', () => { - it('returns no uncommon labels on empty sets', () => { - expect(findUniqueLabels({}, {})).toEqual({}); - }); - - it('returns all labels given no common labels', () => { - expect(findUniqueLabels({ foo: '"bar"' }, {})).toEqual({ foo: '"bar"' }); - }); - - it('returns all labels except the common labels', () => { - expect(findUniqueLabels({ foo: '"bar"', baz: '"42"' }, { foo: '"bar"' })).toEqual({ baz: '"42"' }); - }); -}); +import { mergeStreamsToLogs, logStreamToSeriesData, seriesDataToLogStream } from './result_transformer'; 
describe('mergeStreamsToLogs()', () => { it('returns empty logs given no streams', () => { @@ -201,3 +115,37 @@ describe('mergeStreamsToLogs()', () => { ]); }); }); + +describe('convert SeriesData to/from LogStream', () => { + const streams = [ + { + labels: '{foo="bar"}', + entries: [ + { + line: "foo: 'bar'", + ts: '1970-01-01T00:00:00Z', + }, + ], + }, + { + labels: '{bar="foo"}', + entries: [ + { + line: "bar: 'foo'", + ts: '1970-01-01T00:00:00Z', + }, + ], + }, + ]; + it('converts streams to series', () => { + const data = streams.map(stream => logStreamToSeriesData(stream)); + + expect(data.length).toBe(2); + expect(data[0].labels['foo']).toEqual('bar'); + expect(data[0].rows[0][0]).toEqual(streams[0].entries[0].ts); + + const roundtrip = data.map(series => seriesDataToLogStream(series)); + expect(roundtrip.length).toBe(2); + expect(roundtrip[0].labels).toEqual(streams[0].labels); + }); +}); diff --git a/public/app/plugins/datasource/loki/result_transformer.ts b/public/app/plugins/datasource/loki/result_transformer.ts index c8598387de4d..4e450b51751a 100644 --- a/public/app/plugins/datasource/loki/result_transformer.ts +++ b/public/app/plugins/datasource/loki/result_transformer.ts @@ -2,120 +2,27 @@ import ansicolor from 'vendor/ansicolor/ansicolor'; import _ from 'lodash'; import moment from 'moment'; -import { - LogLevel, - LogsMetaItem, - LogsModel, - LogRowModel, - LogsStream, - LogsStreamEntry, - LogsStreamLabels, - LogsMetaKind, -} from 'app/core/logs_model'; +import { LogsMetaItem, LogsModel, LogRowModel, LogsStream, LogsStreamEntry, LogsMetaKind } from 'app/core/logs_model'; import { hasAnsiCodes } from 'app/core/utils/text'; import { DEFAULT_MAX_LINES } from './datasource'; -/** - * Returns the log level of a log line. - * Parse the line for level words. If no level is found, it returns `LogLevel.unknown`. - * - * Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn` - */ -export function getLogLevel(line: string): LogLevel { - if (!line) { - return LogLevel.unknown; - } - let level: LogLevel; - Object.keys(LogLevel).forEach(key => { - if (!level) { - const regexp = new RegExp(`\\b${key}\\b`, 'i'); - if (regexp.test(line)) { - level = LogLevel[key]; - } - } - }); - if (!level) { - level = LogLevel.unknown; - } - return level; -} - -/** - * Regexp to extract Prometheus-style labels - */ -const labelRegexp = /\b(\w+)(!?=~?)"([^"\n]*?)"/g; - -/** - * Returns a map of label keys to value from an input selector string. - * - * Example: `parseLabels('{job="foo", instance="bar"}) // {job: "foo", instance: "bar"}` - */ -export function parseLabels(labels: string): LogsStreamLabels { - const labelsByKey: LogsStreamLabels = {}; - labels.replace(labelRegexp, (_, key, operator, value) => { - labelsByKey[key] = value; - return ''; - }); - return labelsByKey; -} - -/** - * Returns a map labels that are common to the given label sets. 
- */ -export function findCommonLabels(labelsSets: LogsStreamLabels[]): LogsStreamLabels { - return labelsSets.reduce((acc, labels) => { - if (!labels) { - throw new Error('Need parsed labels to find common labels.'); - } - if (!acc) { - // Initial set - acc = { ...labels }; - } else { - // Remove incoming labels that are missing or not matching in value - Object.keys(labels).forEach(key => { - if (acc[key] === undefined || acc[key] !== labels[key]) { - delete acc[key]; - } - }); - // Remove common labels that are missing from incoming label set - Object.keys(acc).forEach(key => { - if (labels[key] === undefined) { - delete acc[key]; - } - }); - } - return acc; - }, undefined); -} - -/** - * Returns a map of labels that are in `labels`, but not in `commonLabels`. - */ -export function findUniqueLabels(labels: LogsStreamLabels, commonLabels: LogsStreamLabels): LogsStreamLabels { - const uncommonLabels: LogsStreamLabels = { ...labels }; - Object.keys(commonLabels).forEach(key => { - delete uncommonLabels[key]; - }); - return uncommonLabels; -} - -/** - * Serializes the given labels to a string. - */ -export function formatLabels(labels: LogsStreamLabels, defaultValue = ''): string { - if (!labels || Object.keys(labels).length === 0) { - return defaultValue; - } - const labelKeys = Object.keys(labels).sort(); - const cleanSelector = labelKeys.map(key => `${key}="${labels[key]}"`).join(', '); - return ['{', cleanSelector, '}'].join(''); -} +import { + parseLabels, + SeriesData, + findUniqueLabels, + Labels, + findCommonLabels, + getLogLevel, + FieldType, + formatLabels, + guessFieldTypeFromSeries, +} from '@grafana/ui'; export function processEntry( entry: LogsStreamEntry, labels: string, - parsedLabels: LogsStreamLabels, - uniqueLabels: LogsStreamLabels, + parsedLabels: Labels, + uniqueLabels: Labels, search: string ): LogRowModel { const { line } = entry; @@ -201,3 +108,48 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_MAX_LI rows: sortedRows, }; } + +export function logStreamToSeriesData(stream: LogsStream): SeriesData { + let labels: Labels = stream.parsedLabels; + if (!labels && stream.labels) { + labels = parseLabels(stream.labels); + } + return { + labels, + fields: [{ name: 'ts', type: FieldType.time }, { name: 'line', type: FieldType.string }], + rows: stream.entries.map(entry => { + return [entry.ts || entry.timestamp, entry.line]; + }), + }; +} + +export function seriesDataToLogStream(series: SeriesData): LogsStream { + let timeIndex = -1; + let lineIndex = -1; + for (let i = 0; i < series.fields.length; i++) { + const field = series.fields[i]; + const type = field.type || guessFieldTypeFromSeries(series, i); + if (timeIndex < 0 && type === FieldType.time) { + timeIndex = i; + } + if (lineIndex < 0 && type === FieldType.string) { + lineIndex = i; + } + } + if (timeIndex < 0) { + throw new Error('Series does not have a time field'); + } + if (lineIndex < 0) { + throw new Error('Series does not have a line field'); + } + return { + labels: formatLabels(series.labels), + parsedLabels: series.labels, + entries: series.rows.map(row => { + return { + line: row[lineIndex], + ts: row[timeIndex], + }; + }), + }; +} diff --git a/public/app/plugins/datasource/stackdriver/components/__snapshots__/QueryEditor.test.tsx.snap b/public/app/plugins/datasource/stackdriver/components/__snapshots__/QueryEditor.test.tsx.snap index 9d573dc63659..942f45f41166 100644 --- a/public/app/plugins/datasource/stackdriver/components/__snapshots__/QueryEditor.test.tsx.snap +++ 
b/public/app/plugins/datasource/stackdriver/components/__snapshots__/QueryEditor.test.tsx.snap @@ -398,7 +398,13 @@ Array [ > Alias By -
+
Project -
+
{ const { showLines, showBars, showPoints } = this.props.options; const graphs: GraphSeriesXY[] = []; - for (const series of data) { - const timeColumn = getFirstTimeField(series); - if (timeColumn < 0) { - continue; - } + if (data) { + for (const series of data) { + const timeColumn = getFirstTimeField(series); + if (timeColumn < 0) { + continue; + } + + for (let i = 0; i < series.fields.length; i++) { + const field = series.fields[i]; - for (let i = 0; i < series.fields.length; i++) { - const field = series.fields[i]; - - // Show all numeric columns - if (field.type === FieldType.number) { - // Use external calculator just to make sure it works :) - const points = getFlotPairs({ - series, - xIndex: timeColumn, - yIndex: i, - nullValueMode: NullValueMode.Null, - }); - - if (points.length > 0) { - graphs.push({ - label: field.name, - data: points, - color: colors[graphs.length % colors.length], + // Show all numeric columns + if (field.type === FieldType.number) { + // Use external calculator just to make sure it works :) + const points = getFlotPairs({ + series, + xIndex: timeColumn, + yIndex: i, + nullValueMode: NullValueMode.Null, }); + + if (points.length > 0) { + graphs.push({ + label: field.name, + data: points, + color: colors[graphs.length % colors.length], + }); + } } } } } + if (graphs.length < 1) { + return ( +
+

No data found in response

+
+ ); + } + return (
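
To tie the frontend pieces of this patch together, the sketch below exercises the label, log-level, and SeriesData helpers that moved into `@grafana/ui`; the expected values mirror the new unit tests added in this diff. It assumes all of these functions are re-exported from the package index, as the imports added to the Loki result transformer and `metrics_panel_ctrl.ts` suggest.

```ts
// Assumes these helpers are re-exported from the @grafana/ui index
import {
  parseLabels,
  formatLabels,
  findCommonLabels,
  findUniqueLabels,
  getLogLevel,
  LogLevel,
  toSeriesData,
  toLegacyResponseData,
} from '@grafana/ui';

// Prometheus/Loki-style selector strings become plain label maps
const streamA = parseLabels('{job="foo", instance="bar"}'); // { job: 'foo', instance: 'bar' }
const streamB = parseLabels('{job="foo", instance="baz"}');

const common = findCommonLabels([streamA, streamB]); // { job: 'foo' }
const unique = findUniqueLabels(streamA, common);    // { instance: 'bar' }
console.log(formatLabels(common));                   // {job="foo"}

// Log-level detection now lives next to the label helpers
console.log(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad') === LogLevel.warn); // true

// Legacy TimeSeries -> SeriesData -> legacy round trip, as covered by the new tests
const timeseries = { target: 'requests', datapoints: [[100, 1], [200, 2]] };
const series = toSeriesData(timeseries);
const legacy = toLegacyResponseData(series); // { target: 'requests', datapoints: [[100, 1], [200, 2]], ... }
```
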