['settings'];
};

export type MetricsConfiguration = {
  [P in MetricAggregationType]: MetricConfiguration;
};

export type BucketsConfiguration = {
  [P in BucketAggregationType]: BucketConfiguration;
};

export type QueryTypeConfiguration = {
  [P in ElasticsearchQueryType]: { label: string };
};

export type FormatConfiguration = {
  [P in PPLFormatType]: { label: string };
};

export interface ElasticsearchAggregation {
  id: string;
  type: MetricAggregationType | BucketAggregationType;
  settings?: unknown;
  field?: string;
  hide: boolean;
}

export interface ElasticsearchQuery extends DataQuery {
  isLogsQuery?: boolean;
  alias?: string;
  query?: string;
  bucketAggs?: BucketAggregation[];
  metrics?: MetricAggregation[];
  timeField?: string;
  queryType?: ElasticsearchQueryType;
  format?: PPLFormatType;
}

export type DataLinkConfig = {
  field: string;
  url: string;
  datasourceUid?: string;
};

export enum ElasticsearchQueryType {
  Lucene = 'lucene',
  PPL = 'PPL',
}

export interface PromQuery extends DataQuery {
  expr: string;
  format?: string;
  instant?: boolean;
  range?: boolean;
  hinting?: boolean;
  interval?: string;
  intervalFactor?: number;
  legendFormat?: string;
  valueWithRefId?: boolean;
  requestId?: string;
  showingGraph?: boolean;
  showingTable?: boolean;
}

// ---- utils.ts ----
import {
  isMetricAggregationWithField,
  MetricAggregation,
} from './components/QueryEditor/MetricAggregationsEditor/aggregations';
import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';

/**
 * Human-readable label for a metric aggregation: the configured label,
 * suffixed with the field name when the aggregation targets a field.
 */
export const describeMetric = (metric: MetricAggregation) => {
  if (!isMetricAggregationWithField(metric)) {
    return metricAggregationConfig[metric.type].label;
  }

  // TODO: field might be undefined
  return `${metricAggregationConfig[metric.type].label} ${metric.field}`;
};

/**
 * Utility function to clean up aggregations settings objects.
 * It removes nullish values and empty strings, array and objects
 * recursing over nested objects (not arrays).
 * @param obj
 */
// NOTE(review): generic parameters were lost in extraction ("(obj: T): Partial");
// restored as <T extends object> / Partial<T> so the declaration type-checks.
export const removeEmpty = <T extends object>(obj: T): Partial<T> =>
  Object.entries(obj).reduce((acc, [key, value]) => {
    // Removing nullish values (null & undefined)
    if (value == null) {
      return { ...acc };
    }

    // Removing empty arrays (This won't recurse the array)
    if (Array.isArray(value) && value.length === 0) {
      return { ...acc };
    }

    // Removing empty strings
    if (value?.length === 0) {
      return { ...acc };
    }

    // Recursing over nested objects
    if (!Array.isArray(value) && typeof value === 'object') {
      const cleanObj = removeEmpty(value);

      if (Object.keys(cleanObj).length === 0) {
        return { ...acc };
      }

      return { ...acc, [key]: cleanObj };
    }

    return {
      ...acc,
      [key]: value,
    };
  }, {});

// ---- elastic_response.ts ----
import _ from 'lodash';
import flatten from './dependencies/flatten';
import * as queryDef from './query_def';
import TableModel from './dependencies/table_model';
import {
  dateTime,
  DataQueryResponse,
  DataFrame,
  toDataFrame,
  FieldType,
  MutableDataFrame,
  PreferredVisualisationType,
} from '@grafana/data';
import { ElasticsearchAggregation, ElasticsearchQuery, ElasticsearchQueryType } from './types';
import {
  ExtendedStatMetaType,
  isMetricAggregationWithField,
} from './components/QueryEditor/MetricAggregationsEditor/aggregations';
import { describeMetric } from './utils';
import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';

/**
 * Transforms raw Elasticsearch (Lucene) / PPL responses into Grafana
 * series lists, tables and data frames.
 */
export class ElasticResponse {
  // Parameter properties already assign these fields; the original body
  // re-assigned each one redundantly — removed, behavior unchanged.
  constructor(
    private targets: ElasticsearchQuery[],
    private response: any,
    private targetType: ElasticsearchQueryType = ElasticsearchQueryType.Lucene
  ) {}

  // Converts one date_histogram bucket list into time series, one series per
  // visible metric (and per percentile / extended stat where applicable).
  processMetrics(esAgg: any, target: ElasticsearchQuery, seriesList: any, props: any) {
    let newSeries: any;

    for (let y = 0; y < target.metrics!.length; y++) {
      const metric = target.metrics![y];
      if (metric.hide) {
        continue;
      }

      switch (metric.type) {
        case 'count': {
          newSeries = { datapoints: [], metric: 'count', props, refId: target.refId };
          for (let i = 0; i <
esAgg.buckets.length; i++) {
            const bucket = esAgg.buckets[i];
            const value = bucket.doc_count;
            newSeries.datapoints.push([value, bucket.key]);
          }
          seriesList.push(newSeries);
          break;
        }
        case 'percentiles': {
          if (esAgg.buckets.length === 0) {
            break;
          }

          // One series per requested percentile (p50, p95, …); the percentile
          // keys are taken from the first bucket and assumed stable across buckets.
          const firstBucket = esAgg.buckets[0];
          const percentiles = firstBucket[metric.id].values;

          for (const percentileName in percentiles) {
            newSeries = {
              datapoints: [],
              metric: 'p' + percentileName,
              props: props,
              field: metric.field,
              refId: target.refId,
            };

            for (let i = 0; i < esAgg.buckets.length; i++) {
              const bucket = esAgg.buckets[i];
              const values = bucket[metric.id].values;
              newSeries.datapoints.push([values[percentileName], bucket.key]);
            }
            seriesList.push(newSeries);
          }

          break;
        }
        case 'extended_stats': {
          // One series per stat enabled in metric.meta (avg, min, max, std bounds, …).
          for (const statName in metric.meta) {
            if (!metric.meta[statName as ExtendedStatMetaType]) {
              continue;
            }

            newSeries = {
              datapoints: [],
              metric: statName,
              props: props,
              field: metric.field,
              refId: target.refId,
            };

            for (let i = 0; i < esAgg.buckets.length; i++) {
              const bucket = esAgg.buckets[i];
              const stats = bucket[metric.id];

              // add stats that are in nested obj to top level obj
              stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
              stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;

              newSeries.datapoints.push([stats[statName], bucket.key]);
            }

            seriesList.push(newSeries);
          }

          break;
        }
        default: {
          // Simple single-value metrics (avg, sum, max, …).
          newSeries = {
            datapoints: [],
            metric: metric.type,
            metricId: metric.id,
            props: props,
            refId: target.refId,
          };

          if (isMetricAggregationWithField(metric)) {
            newSeries.field = metric.field;
          }

          for (let i = 0; i < esAgg.buckets.length; i++) {
            const bucket = esAgg.buckets[i];
            const value = bucket[metric.id];

            if (value !== undefined) {
              // normalized_value is preferred when present (e.g. derivative with unit)
              if (value.normalized_value) {
                newSeries.datapoints.push([value.normalized_value, bucket.key]);
              } else {
                newSeries.datapoints.push([value.value, bucket.key]);
              }
            }
          }
          seriesList.push(newSeries);
          break;
        }
      }
    }
  }

  // Converts a non-date-histogram terminal bucket list into table rows:
  // one row per bucket, one column per parent-bucket prop, bucket key and metric.
  processAggregationDocs(
    esAgg: any,
    aggDef: ElasticsearchAggregation,
    target: ElasticsearchQuery,
    table: any,
    props: any
  ) {
    // add columns
    if (table.columns.length === 0) {
      for (const propKey of _.keys(props)) {
        table.addColumn({ text: propKey, filterable: true });
      }
      table.addColumn({ text: aggDef.field, filterable: true });
    }

    // helper func to add values to value array
    const addMetricValue = (values: any[], metricName: string, value: any) => {
      table.addColumn({ text: metricName });
      values.push(value);
    };
    const buckets = _.isArray(esAgg.buckets) ? esAgg.buckets : [esAgg.buckets];
    for (const bucket of buckets) {
      const values = [];

      for (const propValues of _.values(props)) {
        values.push(propValues);
      }

      // add bucket key (value)
      values.push(bucket.key);

      for (const metric of target.metrics || []) {
        switch (metric.type) {
          case 'count': {
            addMetricValue(values, this.getMetricName(metric.type), bucket.doc_count);
            break;
          }
          case 'extended_stats': {
            for (const statName in metric.meta) {
              if (!metric.meta[statName as ExtendedStatMetaType]) {
                continue;
              }

              const stats = bucket[metric.id];
              // add stats that are in nested obj to top level obj
              stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
              stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;

              addMetricValue(values, this.getMetricName(statName as ExtendedStatMetaType), stats[statName]);
            }
            break;
          }
          case 'percentiles': {
            const percentiles = bucket[metric.id].values;

            for (const percentileName in percentiles) {
              addMetricValue(values, `p${percentileName} ${metric.field}`, percentiles[percentileName]);
            }
            break;
          }
          default: {
            let metricName = this.getMetricName(metric.type);
            const otherMetrics = _.filter(target.metrics, { type: metric.type });

            // if more of the same metric type include field field name in property
            if (otherMetrics.length > 1) {
              if (isMetricAggregationWithField(metric)) {
                metricName += ' ' + metric.field;
              }

              if (metric.type === 'bucket_script') {
                //Use the formula in the column name
                metricName = metric.settings?.script || '';
              }
            }

            addMetricValue(values, metricName, bucket[metric.id].value);
            break;
          }
        }
      }

      table.rows.push(values);
    }
  }

  // This is quite complex
  // need to recurse down the nested buckets to build series
  processBuckets(aggs: any, target: ElasticsearchQuery, seriesList: any, table: TableModel, props: any, depth: number) {
    let bucket, aggDef: any, esAgg, aggId;
    const maxDepth = target.bucketAggs!.length - 1;

    for (aggId in aggs) {
      aggDef = _.find(target.bucketAggs, { id: aggId });
      esAgg = aggs[aggId];

      if (!aggDef) {
        continue;
      }

      if (depth === maxDepth) {
        // Innermost level: date_histogram buckets become series, anything else rows.
        if (aggDef.type === 'date_histogram') {
          this.processMetrics(esAgg, target, seriesList, props);
        } else {
          this.processAggregationDocs(esAgg, aggDef, target, table, props);
        }
      } else {
        // Intermediate level: record this bucket's key in props and recurse.
        for (const nameIndex in esAgg.buckets) {
          bucket = esAgg.buckets[nameIndex];
          props = _.clone(props);
          if (bucket.key !== void 0) {
            props[aggDef.field] = bucket.key;
          } else {
            props['filter'] = nameIndex;
          }
          if (bucket.key_as_string) {
            props[aggDef.field] = bucket.key_as_string;
          }
          this.processBuckets(bucket, target, seriesList, table, props, depth + 1);
        }
      }
    }
  }

  // Label lookup: metric config label, then extended-stat label, else the raw id.
  private getMetricName(metric: string): string {
    const metricDef = Object.entries(metricAggregationConfig)
      .filter(([key]) => key === metric)
      .map(([_, value]) => value)[0];

    if (metricDef) {
      return metricDef.label;
    }

    const extendedStat = queryDef.extendedStats.find(e => e.value === metric);
    if (extendedStat) {
      return extendedStat.label;
    }

    return metric;
  }

  // Builds the display name for a series, honoring the target's alias template.
  private getSeriesName(series: any, target: ElasticsearchQuery, metricTypeCount: any) {
    let metricName =
this.getMetricName(series.metric);

    if (target.alias) {
      // Alias templating: {{term field}}, {{metric}}, {{field}} or any bucket prop.
      const regex = /\{\{([\s\S]+?)\}\}/g;

      return target.alias.replace(regex, (match: any, g1: any, g2: any) => {
        const group = g1 || g2;

        if (group.indexOf('term ') === 0) {
          return series.props[group.substring(5)];
        }
        if (series.props[group] !== void 0) {
          return series.props[group];
        }
        if (group === 'metric') {
          return metricName;
        }
        if (group === 'field') {
          return series.field || '';
        }

        // Unknown placeholder: keep the literal {{...}} text.
        return match;
      });
    }

    if (queryDef.isPipelineAgg(series.metric)) {
      if (series.metric && queryDef.isPipelineAggWithMultipleBucketPaths(series.metric)) {
        // bucket_script style pipeline: substitute each pipeline variable with a
        // description of the metric it references.
        const agg: any = _.find(target.metrics, { id: series.metricId });
        if (agg && agg.settings.script) {
          metricName = agg.settings.script;

          for (const pv of agg.pipelineVariables) {
            const appliedAgg: any = _.find(target.metrics, { id: pv.pipelineAgg });
            if (appliedAgg) {
              metricName = metricName.replace('params.' + pv.name, describeMetric(appliedAgg));
            }
          }
        } else {
          metricName = 'Unset';
        }
      } else {
        // Single-bucket-path pipeline: series.field holds the referenced metric id.
        const appliedAgg: any = _.find(target.metrics, { id: series.field });
        if (appliedAgg) {
          metricName += ' ' + describeMetric(appliedAgg);
        } else {
          metricName = 'Unset';
        }
      }
    } else if (series.field) {
      metricName += ' ' + series.field;
    }

    const propKeys = _.keys(series.props);
    if (propKeys.length === 0) {
      return metricName;
    }

    let name = '';
    for (const propName in series.props) {
      name += series.props[propName] + ' ';
    }

    // With a single metric type the bucket props alone are unambiguous.
    if (metricTypeCount === 1) {
      return name.trim();
    }

    return name.trim() + ' ' + metricName;
  }

  // Assigns a display name (series.target) to every series in the list.
  nameSeries(seriesList: any, target: ElasticsearchQuery) {
    const metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;

    for (let i = 0; i < seriesList.length; i++) {
      const series = seriesList[i];
      series.target = this.getSeriesName(series, target, metricTypeCount);
    }
  }

  // Converts raw document hits into a single 'docs' series of flattened documents.
  processHits(hits: { total: { value: any }; hits: any[] }, seriesList: any[], target: ElasticsearchQuery) {
    const hitsTotal = typeof hits.total === 'number' ? hits.total : hits.total.value; // <- Works with Elasticsearch 7.0+

    const series: any = {
      target: target.refId,
      type: 'docs',
      refId: target.refId,
      datapoints: [],
      total: hitsTotal,
      filterable: true,
    };
    let propName, hit, doc: any, i;

    for (i = 0; i < hits.hits.length; i++) {
      hit = hits.hits[i];
      doc = {
        _id: hit._id,
        _type: hit._type,
        _index: hit._index,
      };

      if (hit._source) {
        for (propName in hit._source) {
          doc[propName] = hit._source[propName];
        }
      }

      for (propName in hit.fields) {
        doc[propName] = hit.fields[propName];
      }
      series.datapoints.push(doc);
    }

    seriesList.push(series);
  }

  // Drops `trimEdges` points from both ends of each series when the
  // date_histogram requested edge trimming.
  trimDatapoints(aggregations: any, target: ElasticsearchQuery) {
    const histogram: any = _.find(target.bucketAggs, { type: 'date_histogram' });

    const shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
    if (shouldDropFirstAndLast) {
      const trim = histogram.settings.trimEdges;
      for (const prop in aggregations) {
        const points = aggregations[prop];
        if (points.datapoints.length > trim * 2) {
          points.datapoints = points.datapoints.slice(trim, points.datapoints.length - trim);
        }
      }
    }
  }

  // Normalizes an Elasticsearch error payload into { data, message, config? }.
  getErrorFromElasticResponse(response: any, err: any) {
    const result: any = {};
    result.data = JSON.stringify(err, null, 4);
    if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) {
      result.message = err.root_cause[0].reason;
    } else {
      result.message = err.reason || 'Unknown elastic error response';
    }

    if (response.$$config) {
      result.config = response.$$config;
    }

    return result;
  }

  // Error object thrown when a PPL response cannot be used as a time series.
  getInvalidPPLQuery(response: any) {
    const result: any = {};
    result.message = 'Invalid time series query';

    if (response.$$config) {
      result.config = response.$$config;
    }

    return result;
  }

  // Entry point for time-series rendering; dispatches on query language.
  getTimeSeries() {
    if (this.targetType === ElasticsearchQueryType.PPL)
{
      return this.processPPLResponseToSeries();
    } else if (this.targets.some(target => target.metrics?.some(metric => metric.type === 'raw_data'))) {
      // raw_data targets are rendered as data frames rather than series.
      return this.processResponseToDataFrames(false);
    }
    return this.processResponseToSeries();
  }

  // Entry point for logs rendering; dispatches on query language.
  getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
    if (this.targetType === ElasticsearchQueryType.PPL) {
      return this.processPPLResponseToDataFrames(true, logMessageField, logLevelField);
    }
    return this.processResponseToDataFrames(true, logMessageField, logLevelField);
  }

  getTable() {
    return this.processPPLResponseToDataFrames(false);
  }

  // Converts a multi-response Lucene result into data frames: document hits
  // become one frame per response, aggregations become table/series frames.
  processResponseToDataFrames(
    isLogsRequest: boolean,
    logMessageField?: string,
    logLevelField?: string
  ): DataQueryResponse {
    const dataFrame: DataFrame[] = [];

    for (let n = 0; n < this.response.responses.length; n++) {
      const response = this.response.responses[n];
      if (response.error) {
        throw this.getErrorFromElasticResponse(this.response, response.error);
      }

      if (response.hits && response.hits.hits.length > 0) {
        const { propNames, docs } = flattenHits(response.hits.hits);
        if (docs.length > 0) {
          let series = createEmptyDataFrame(
            propNames,
            this.targets[0].timeField!,
            isLogsRequest,
            this.targetType,
            logMessageField,
            logLevelField
          );

          // Add a row for each document
          for (const doc of docs) {
            if (logLevelField) {
              // Remap level field based on the datasource config. This field is then used in explore to figure out the
              // log level. We may rewrite some actual data in the level field if they are different.
              doc['level'] = doc[logLevelField];
            }

            series.add(doc);
          }
          if (isLogsRequest) {
            series = addPreferredVisualisationType(series, 'logs');
          }
          const target = this.targets[n];
          series.refId = target.refId;
          dataFrame.push(series);
        }
      }

      if (response.aggregations) {
        const aggregations = response.aggregations;
        const target = this.targets[n];
        const tmpSeriesList: any[] = [];
        const table = new TableModel();

        this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
        this.trimDatapoints(tmpSeriesList, target);
        this.nameSeries(tmpSeriesList, target);

        if (table.rows.length > 0) {
          const series = toDataFrame(table);
          series.refId = target.refId;
          dataFrame.push(series);
        }

        for (let y = 0; y < tmpSeriesList.length; y++) {
          let series = toDataFrame(tmpSeriesList[y]);

          // When log results, show aggregations only in graph. Log fields are then going to be shown in table.
          if (isLogsRequest) {
            series = addPreferredVisualisationType(series, 'graph');
          }

          series.refId = target.refId;
          dataFrame.push(series);
        }
      }
    }

    return { data: dataFrame, key: this.targets[0]?.refId };
  }

  // Converts a multi-response Lucene result into the legacy series/table list.
  processResponseToSeries = () => {
    const seriesList = [];

    for (let i = 0; i < this.response.responses.length; i++) {
      const response = this.response.responses[i];
      const target = this.targets[i];

      if (response.error) {
        throw this.getErrorFromElasticResponse(this.response, response.error);
      }

      if (response.hits && response.hits.hits.length > 0) {
        this.processHits(response.hits, seriesList, target);
      }

      if (response.aggregations) {
        const aggregations = response.aggregations;
        const target = this.targets[i];
        const tmpSeriesList: any[] = [];
        const table = new TableModel();
        table.refId = target.refId;

        this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
        this.trimDatapoints(tmpSeriesList, target);
        this.nameSeries(tmpSeriesList, target);

        for (let y = 0; y < tmpSeriesList.length; y++) {
          seriesList.push(tmpSeriesList[y]);
        }

        if (table.rows.length > 0) {
          seriesList.push(table);
        }
      }
    }

    return { data: seriesList, key: this.targets[0]?.refId };
  };

  // Converts a single PPL response into a one-series time series result;
  // throws if the response does not look like a valid time series.
  processPPLResponseToSeries = () => {
    const target = this.targets[0];
    const response = this.response;
    const seriesList = [];

    if (response.datarows.length > 0) {
      // Handle error from Elasticsearch
      if (response.error) {
        throw this.getErrorFromElasticResponse(this.response, response.error);
      }
      // Get the data points and target that will be inputted to newSeries
      const { datapoints, targetVal, invalidTS } = getPPLDatapoints(response);

      // We throw an error if the inputted query is not valid
      if (invalidTS) {
        throw this.getInvalidPPLQuery(this.response);
      }

      const newSeries = {
        datapoints,
        props: response.schema,
        refId: target.refId,
        target: targetVal,
      };
      seriesList.push(newSeries);
    }
    return { data: seriesList, key: this.targets[0]?.refId };
  };

  // Converts a PPL response (schema + datarows) into a single data frame.
  processPPLResponseToDataFrames(
    isLogsRequest: boolean,
    logMessageField?: string,
    logLevelField?: string
  ): DataQueryResponse {
    if (this.response.error) {
      throw this.getErrorFromElasticResponse(this.response, this.response.error);
    }

    const dataFrame: DataFrame[] = [];

    //map the schema into an array of string containing its name
    const schema = this.response.schema.map((a: { name: any }) => a.name);
    //combine the schema key and response value
    const response = _.map(this.response.datarows, arr => _.zipObject(schema, arr));
    //flatten the response
    const { flattenSchema, docs } = flattenResponses(response);

    if (response.length > 0) {
      let series = createEmptyDataFrame(
        flattenSchema,
        this.targets[0].timeField!,
        isLogsRequest,
        this.targetType,
        logMessageField,
        logLevelField
      );
      // Add a row for each document
      for (const doc of docs) {
        if (logLevelField) {
          //
Remap level field based on the datasource config. This field is then used in explore to figure out the\n // log level. We may rewrite some actual data in the level field if they are different.\n doc['level'] = doc[logLevelField];\n }\n series.add(doc);\n }\n if (isLogsRequest) {\n series = addPreferredVisualisationType(series, 'logs');\n }\n const target = this.targets[0];\n series.refId = target.refId;\n dataFrame.push(series);\n }\n return { data: dataFrame, key: this.targets[0]?.refId };\n }\n}\n\ntype Doc = {\n _id: string;\n _type: string;\n _index: string;\n _source?: any;\n};\n\n/**\n * Flatten the docs from response mainly the _source part which can be nested. This flattens it so that it is one level\n * deep and the keys are: `level1Name.level2Name...`. Also returns list of all properties from all the docs (not all\n * docs have to have the same keys).\n * @param hits\n */\nconst flattenHits = (hits: Doc[]): { docs: Array>; propNames: string[] } => {\n const docs: any[] = [];\n // We keep a list of all props so that we can create all the fields in the dataFrame, this can lead\n // to wide sparse dataframes in case the scheme is different per document.\n let propNames: string[] = [];\n\n for (const hit of hits) {\n const flattened = hit._source ? flatten(hit._source) : {};\n const doc = {\n _id: hit._id,\n _type: hit._type,\n _index: hit._index,\n _source: { ...flattened },\n ...flattened,\n };\n\n for (const propName of Object.keys(doc)) {\n if (propNames.indexOf(propName) === -1) {\n propNames.push(propName);\n }\n }\n\n docs.push(doc);\n }\n\n propNames.sort();\n return { docs, propNames };\n};\n\n/**\n * Flatten the response which can be nested. This flattens it so that it is one level deep and the keys are:\n * `level1Name.level2Name...`. 
Also returns list of all schemas from all the response\n * @param responses\n */\nconst flattenResponses = (responses: any): { docs: Array>; flattenSchema: string[] } => {\n const docs: any[] = [];\n // We keep a list of all schemas so that we can create all the fields in the dataFrame, this can lead\n // to wide sparse dataframes in case the scheme is different per document.\n let flattenSchema: string[] = [];\n\n for (const response of responses) {\n const doc = flatten(response);\n\n for (const schema of Object.keys(doc)) {\n if (flattenSchema.indexOf(schema) === -1) {\n flattenSchema.push(schema);\n }\n }\n docs.push(doc);\n }\n return { docs, flattenSchema };\n};\n\n/**\n * Returns the datapoints and target needed for parsing PPL time series response.\n * Also checks to ensure the query is a valid time series query\n * @param responses\n */\nconst getPPLDatapoints = (response: any): { datapoints: any; targetVal: any; invalidTS: boolean } => {\n let invalidTS = false;\n\n // We check if a valid date type is contained in the response\n const timeFieldIndex = _.findIndex(\n response.schema,\n (field: { type: string }) => field.type === 'timestamp' || field.type === 'datetime' || field.type === 'date'\n );\n\n const valueIndex = timeFieldIndex === 0 ? 1 : 0;\n\n //time series response should include a value field and timestamp\n if (\n timeFieldIndex === -1 ||\n response.datarows[0].length !== 2 ||\n typeof response.datarows[0][valueIndex] !== 'number'\n ) {\n invalidTS = true;\n }\n\n const datapoints = _.map(response.datarows, datarow => {\n const newDatarow = _.clone(datarow);\n const [timestamp] = newDatarow.splice(timeFieldIndex, 1);\n newDatarow.push(dateTime(timestamp).unix() * 1000);\n return newDatarow;\n });\n\n const targetVal = response.schema[valueIndex]?.name;\n\n return { datapoints, targetVal, invalidTS };\n};\n\n/**\n * Create empty dataframe but with created fields. 
Fields are based from propNames (should be from the response) and\n * also from configuration specified fields for message, time, and level.\n * @param propNames\n * @param timeField\n * @param logMessageField\n * @param logLevelField\n */\nconst createEmptyDataFrame = (\n propNames: string[],\n timeField: string,\n isLogsRequest: boolean,\n targetType: ElasticsearchQueryType,\n logMessageField?: string,\n logLevelField?: string\n): MutableDataFrame => {\n const series = new MutableDataFrame({ fields: [] });\n\n //PPL table response should add time field only when it is part of the query response\n if (targetType === ElasticsearchQueryType.Lucene || isLogsRequest) {\n series.addField({\n config: {\n filterable: true,\n },\n name: timeField,\n type: FieldType.time,\n });\n }\n\n if (logMessageField) {\n series.addField({\n name: logMessageField,\n type: FieldType.string,\n }).parse = (v: any) => {\n return v || '';\n };\n }\n\n if (logLevelField) {\n series.addField({\n name: 'level',\n type: FieldType.string,\n }).parse = (v: any) => {\n return v || '';\n };\n }\n\n const fieldNames = series.fields.map(field => field.name);\n\n for (const propName of propNames) {\n // Do not duplicate fields. This can mean that we will shadow some fields.\n if (fieldNames.includes(propName)) {\n continue;\n }\n // Do not add _source field (besides logs) as we are showing each _source field in table instead.\n if (!isLogsRequest && propName === '_source') {\n continue;\n }\n\n series.addField({\n config: {\n filterable: true,\n },\n name: propName,\n type: FieldType.string,\n }).parse = (v: any) => {\n return v || '';\n };\n }\n\n return series;\n};\n\nconst addPreferredVisualisationType = (series: any, type: PreferredVisualisationType) => {\n let s = series;\n s.meta\n ? 
(s.meta.preferredVisualisationType = type)
    : (s.meta = {
        preferredVisualisationType: type,
      });

  return s;
};

// ---- index_pattern.ts ----
import { toUtc, dateTime, DateTime, TimeRange } from '@grafana/data';

// Maps the UI interval names onto moment-style startOf/add units.
const intervalMap: any = {
  Hourly: { startOf: 'hour', amount: 'hours' },
  Daily: { startOf: 'day', amount: 'days' },
  Weekly: { startOf: 'isoWeek', amount: 'weeks' },
  Monthly: { startOf: 'month', amount: 'months' },
  Yearly: { startOf: 'year', amount: 'years' },
};

// Expands a date-templated index pattern (e.g. "[logstash-]YYYY.MM.DD")
// into concrete index names for a time range.
export class IndexPattern {
  private dateLocale = 'en';

  constructor(private pattern: any, private interval?: string) {}

  getIndexForToday() {
    if (this.interval) {
      return toUtc()
        .locale(this.dateLocale)
        .format(this.pattern);
    } else {
      return this.pattern;
    }
  }

  getIndexList(from?: DateTime, to?: DateTime) {
    // When no `from` or `to` is provided, we request data from 7 subsequent/previous indices
    // for the provided index pattern.
    // This is useful when requesting log context where the only time data we have is the log
    // timestamp.
    const indexOffset = 7;
    if (!this.interval) {
      return this.pattern;
    }

    const intervalInfo = intervalMap[this.interval];
    const start = dateTime(from || dateTime(to).add(-indexOffset, intervalInfo.amount))
      .utc()
      .startOf(intervalInfo.startOf);
    const endEpoch = dateTime(to || dateTime(from).add(indexOffset, intervalInfo.amount))
      .utc()
      .startOf(intervalInfo.startOf)
      .valueOf();
    const indexList = [];

    // One index name per interval step, inclusive of the end boundary.
    while (start.valueOf() <= endEpoch) {
      indexList.push(start.locale(this.dateLocale).format(this.pattern));
      start.add(1, intervalInfo.amount);
    }

    return indexList;
  }

  getPPLIndexPattern() {
    // PPL currently does not support multi-indexing through lists, so a wildcard
    // pattern is used to match all patterns and relies on the time range filter
    // to filter out the incorrect indexes.
    if (!this.interval) {
      return this.pattern;
    }

    // The static part of the pattern lives inside [...]; wildcard the dated side.
    let indexPattern = this.pattern.match(/\[(.*?)\]/)[1];

    if (this.pattern.startsWith('[')) {
      indexPattern = indexPattern + '*';
    } else if (this.pattern.endsWith(']')) {
      indexPattern = '*' + indexPattern;
    }
    return indexPattern;
  }
}

// Default range used when no explicit time range is available: last 6 hours.
export function getDefaultTimeRange(): TimeRange {
  const now = dateTime();

  return {
    from: dateTime(now).subtract(6, 'hour'),
    to: now,
    raw: { from: 'now-6h', to: 'now' },
  };
}

// ---- query_builder.ts ----
import { dateTime, dateMath } from '@grafana/data';
import {
  Filters,
  Histogram,
  DateHistogram,
  Terms,
} from './components/QueryEditor/BucketAggregationsEditor/aggregations';
import {
  isMetricAggregationWithField,
  isMetricAggregationWithSettings,
  isPipelineAggregation,
  isPipelineAggregationWithMultipleBucketPaths,
} from './components/QueryEditor/MetricAggregationsEditor/aggregations';
import { defaultBucketAgg, defaultMetricAgg, defaultPPLFormat, findMetricById } from './query_def';
import { ElasticsearchQuery, ElasticsearchQueryType } from './types';

// Builds Elasticsearch DSL query bodies from the editor's target model.
export class ElasticQueryBuilder {
  timeField: string;
  esVersion: number;

  constructor(options: { timeField: string; esVersion: number }) {
    this.timeField = options.timeField;
    this.esVersion = options.esVersion;
  }

  // Range filter on the configured time field; $timeFrom/$timeTo are
  // substituted later by the datasource.
  getRangeFilter() {
    const filter: any = {};
    filter[this.timeField] = {
      gte: '$timeFrom',
      lte: '$timeTo',
      format: 'epoch_millis',
    };

    return filter;
  }

  // Builds a terms aggregation node, including ordering and optional
  // order-by-metric sub-aggregation.
  buildTermsAgg(aggDef: Terms, queryNode: { terms?: any; aggs?: any }, target: ElasticsearchQuery) {
    let metricRef;
    queryNode.terms = { field: aggDef.field };

    if (!aggDef.settings) {
      return queryNode;
    }

    // TODO: This default should be somewhere else together with the one used in the UI
    const size = aggDef.settings?.size ? parseInt(aggDef.settings.size, 10) : 500;
    queryNode.terms.size = size === 0 ?
500 : size;\n\n if (aggDef.settings.orderBy !== void 0) {\n queryNode.terms.order = {};\n if (aggDef.settings.orderBy === '_term' && this.esVersion >= 60) {\n queryNode.terms.order['_key'] = aggDef.settings.order;\n } else {\n queryNode.terms.order[aggDef.settings.orderBy] = aggDef.settings.order;\n }\n\n // if metric ref, look it up and add it to this agg level\n metricRef = parseInt(aggDef.settings.orderBy, 10);\n if (!isNaN(metricRef)) {\n for (let metric of target.metrics || []) {\n if (metric.id === aggDef.settings.orderBy) {\n queryNode.aggs = {};\n queryNode.aggs[metric.id] = {};\n if (isMetricAggregationWithField(metric)) {\n queryNode.aggs[metric.id][metric.type] = { field: metric.field };\n }\n break;\n }\n }\n }\n }\n\n if (aggDef.settings.min_doc_count !== void 0) {\n queryNode.terms.min_doc_count = parseInt(aggDef.settings.min_doc_count, 10);\n\n if (isNaN(queryNode.terms.min_doc_count)) {\n queryNode.terms.min_doc_count = aggDef.settings.min_doc_count;\n }\n }\n\n if (aggDef.settings.missing) {\n queryNode.terms.missing = aggDef.settings.missing;\n }\n\n return queryNode;\n }\n\n getDateHistogramAgg(aggDef: DateHistogram) {\n const esAgg: any = {};\n const settings = aggDef.settings || {};\n esAgg.interval = settings.interval;\n esAgg.field = this.timeField;\n esAgg.min_doc_count = settings.min_doc_count || 0;\n esAgg.extended_bounds = { min: '$timeFrom', max: '$timeTo' };\n esAgg.format = 'epoch_millis';\n\n if (settings.offset !== '') {\n esAgg.offset = settings.offset;\n }\n\n if (esAgg.interval === 'auto') {\n esAgg.interval = '$__interval';\n }\n\n return esAgg;\n }\n\n getHistogramAgg(aggDef: Histogram) {\n const esAgg: any = {};\n const settings = aggDef.settings || {};\n esAgg.interval = settings.interval;\n esAgg.field = aggDef.field;\n esAgg.min_doc_count = settings.min_doc_count || 0;\n\n return esAgg;\n }\n\n getFiltersAgg(aggDef: Filters) {\n const filterObj: Record = {};\n\n for (let { query, label } of aggDef.settings?.filters || []) 
{\n filterObj[label || query] = {\n query_string: {\n query: query,\n analyze_wildcard: true,\n },\n };\n }\n\n return filterObj;\n }\n\n documentQuery(query: any, size: number) {\n query.size = size;\n query.sort = {};\n query.sort[this.timeField] = { order: 'desc', unmapped_type: 'boolean' };\n\n // fields field not supported on ES 5.x\n if (this.esVersion < 5) {\n query.fields = ['*', '_source'];\n }\n\n query.script_fields = {};\n return query;\n }\n\n addAdhocFilters(query: any, adhocFilters: any) {\n if (!adhocFilters) {\n return;\n }\n\n let i, filter, condition: any, queryCondition: any;\n\n for (i = 0; i < adhocFilters.length; i++) {\n filter = adhocFilters[i];\n condition = {};\n condition[filter.key] = filter.value;\n queryCondition = {};\n queryCondition[filter.key] = { query: filter.value };\n\n switch (filter.operator) {\n case '=':\n if (!query.query.bool.must) {\n query.query.bool.must = [];\n }\n query.query.bool.must.push({ match_phrase: queryCondition });\n break;\n case '!=':\n if (!query.query.bool.must_not) {\n query.query.bool.must_not = [];\n }\n query.query.bool.must_not.push({ match_phrase: queryCondition });\n break;\n case '<':\n condition[filter.key] = { lt: filter.value };\n query.query.bool.filter.push({ range: condition });\n break;\n case '>':\n condition[filter.key] = { gt: filter.value };\n query.query.bool.filter.push({ range: condition });\n break;\n case '=~':\n query.query.bool.filter.push({ regexp: condition });\n break;\n case '!~':\n query.query.bool.filter.push({\n bool: { must_not: { regexp: condition } },\n });\n break;\n }\n }\n }\n\n build(target: ElasticsearchQuery, adhocFilters?: any, queryString?: string) {\n // make sure query has defaults;\n target.metrics = target.metrics || [defaultMetricAgg()];\n target.bucketAggs = target.bucketAggs || [defaultBucketAgg()];\n target.timeField = this.timeField;\n target.queryType = ElasticsearchQueryType.Lucene;\n\n let i, j, pv, nestedAggs, metric;\n const query = {\n size: 
0,\n query: {\n bool: {\n filter: [\n { range: this.getRangeFilter() },\n {\n query_string: {\n analyze_wildcard: true,\n query: queryString,\n },\n },\n ],\n },\n },\n };\n\n this.addAdhocFilters(query, adhocFilters);\n\n // If target doesn't have bucketAggs and type is not raw_document, it is invalid query.\n if (target.bucketAggs.length === 0) {\n metric = target.metrics[0];\n\n if (!metric || !(metric.type === 'raw_document' || metric.type === 'raw_data')) {\n throw { message: 'Invalid query' };\n }\n }\n\n /* Handle document query:\n * Check if metric type is raw_document. If metric doesn't have size (or size is 0), update size to 500.\n * Otherwise it will not be a valid query and error will be thrown.\n */\n if (target.metrics?.[0]?.type === 'raw_document' || target.metrics?.[0]?.type === 'raw_data') {\n metric = target.metrics[0];\n\n // TODO: This default should be somewhere else together with the one used in the UI\n const size = metric.settings?.size ? parseInt(metric.settings.size, 10) : 500;\n\n return this.documentQuery(query, size || 500);\n }\n\n nestedAggs = query;\n\n for (i = 0; i < target.bucketAggs.length; i++) {\n const aggDef = target.bucketAggs[i];\n const esAgg: any = {};\n\n switch (aggDef.type) {\n case 'date_histogram': {\n esAgg['date_histogram'] = this.getDateHistogramAgg(aggDef);\n break;\n }\n case 'histogram': {\n esAgg['histogram'] = this.getHistogramAgg(aggDef);\n break;\n }\n case 'filters': {\n esAgg['filters'] = { filters: this.getFiltersAgg(aggDef) };\n break;\n }\n case 'terms': {\n this.buildTermsAgg(aggDef, esAgg, target);\n break;\n }\n case 'geohash_grid': {\n esAgg['geohash_grid'] = {\n field: aggDef.field,\n precision: aggDef.settings?.precision,\n };\n break;\n }\n }\n\n nestedAggs.aggs = nestedAggs.aggs || {};\n nestedAggs.aggs[aggDef.id] = esAgg;\n nestedAggs = esAgg;\n }\n\n nestedAggs.aggs = {};\n\n for (i = 0; i < target.metrics.length; i++) {\n metric = target.metrics[i];\n if (metric.type === 'count') {\n 
continue;\n }\n\n const aggField: any = {};\n let metricAgg: any = null;\n\n if (isPipelineAggregation(metric)) {\n if (isPipelineAggregationWithMultipleBucketPaths(metric)) {\n if (metric.pipelineVariables) {\n metricAgg = {\n buckets_path: {},\n };\n\n for (j = 0; j < metric.pipelineVariables.length; j++) {\n pv = metric.pipelineVariables[j];\n\n if (pv.name && pv.pipelineAgg && /^\\d*$/.test(pv.pipelineAgg)) {\n const appliedAgg = findMetricById(target.metrics, pv.pipelineAgg);\n if (appliedAgg) {\n if (appliedAgg.type === 'count') {\n metricAgg.buckets_path[pv.name] = '_count';\n } else {\n metricAgg.buckets_path[pv.name] = pv.pipelineAgg;\n }\n }\n }\n }\n } else {\n continue;\n }\n } else {\n if (metric.field && /^\\d*$/.test(metric.field)) {\n const appliedAgg = findMetricById(target.metrics, metric.field);\n if (appliedAgg) {\n if (appliedAgg.type === 'count') {\n metricAgg = { buckets_path: '_count' };\n } else {\n metricAgg = { buckets_path: metric.field };\n }\n }\n } else {\n continue;\n }\n }\n } else if (isMetricAggregationWithField(metric)) {\n metricAgg = { field: metric.field };\n }\n\n metricAgg = {\n ...metricAgg,\n ...(isMetricAggregationWithSettings(metric) && metric.settings),\n };\n\n aggField[metric.type] = metricAgg;\n nestedAggs.aggs[metric.id] = aggField;\n }\n\n return query;\n }\n\n getTermsQuery(queryDef: any) {\n const query: any = {\n size: 0,\n query: {\n bool: {\n filter: [{ range: this.getRangeFilter() }],\n },\n },\n };\n\n if (queryDef.query) {\n query.query.bool.filter.push({\n query_string: {\n analyze_wildcard: true,\n query: queryDef.query,\n },\n });\n }\n\n let size = 500;\n if (queryDef.size) {\n size = queryDef.size;\n }\n\n query.aggs = {\n '1': {\n terms: {\n field: queryDef.field,\n size: size,\n order: {},\n },\n },\n };\n\n // Default behaviour is to order results by { _key: asc }\n // queryDef.order allows selection of asc/desc\n // queryDef.orderBy allows selection of doc_count ordering (defaults desc)\n\n const { 
orderBy = 'key', order = orderBy === 'doc_count' ? 'desc' : 'asc' } = queryDef;\n\n if (['asc', 'desc'].indexOf(order) < 0) {\n throw { message: `Invalid query sort order ${order}` };\n }\n\n switch (orderBy) {\n case 'key':\n case 'term':\n const keyname = this.esVersion >= 60 ? '_key' : '_term';\n query.aggs['1'].terms.order[keyname] = order;\n break;\n case 'doc_count':\n query.aggs['1'].terms.order['_count'] = order;\n break;\n default:\n throw { message: `Invalid query sort type ${orderBy}` };\n }\n\n return query;\n }\n\n getLogsQuery(target: ElasticsearchQuery, adhocFilters?: any, querystring?: string) {\n let query: any = {\n size: 0,\n query: {\n bool: {\n filter: [{ range: this.getRangeFilter() }],\n },\n },\n };\n\n this.addAdhocFilters(query, adhocFilters);\n\n if (target.query) {\n query.query.bool.filter.push({\n query_string: {\n analyze_wildcard: true,\n query: querystring,\n },\n });\n }\n\n query = this.documentQuery(query, 500);\n\n return {\n ...query,\n aggs: this.build(target, null, querystring).aggs,\n };\n }\n\n /* Adds Ad hoc filters for PPL:\n * Check for the value type and parse it accordingly so it can be added onto the query string through 'where' command\n */\n addPPLAdhocFilters(queryString: any, adhocFilters: any) {\n let i, value, adhocquery;\n\n for (i = 0; i < adhocFilters.length; i++) {\n if (dateMath.isValid(adhocFilters[i].value)) {\n const validTime = dateTime(adhocFilters[i].value).format('YYYY-MM-DD HH:mm:ss.SSSSSS');\n value = `timestamp('${validTime}')`;\n } else if (typeof adhocFilters[i].value === 'string') {\n value = `'${adhocFilters[i].value}'`;\n } else {\n value = adhocFilters[i].value;\n }\n adhocquery = `\\`${adhocFilters[i].key}\\` ${adhocFilters[i].operator} ${value}`;\n\n if (i > 0) {\n queryString += ' and ' + adhocquery;\n } else {\n queryString += ' | where ' + adhocquery;\n }\n }\n return queryString;\n }\n\n buildPPLQuery(target: any, adhocFilters?: any, queryString?: string) {\n // make sure query has 
defaults\n target.format = target.format || defaultPPLFormat();\n target.queryType = ElasticsearchQueryType.PPL;\n\n // set isLogsQuery depending on the format\n target.isLogsQuery = target.format === 'logs';\n\n if (adhocFilters) {\n queryString = this.addPPLAdhocFilters(queryString, adhocFilters);\n }\n\n const timeRangeFilter = \" where $timestamp >= timestamp('$timeFrom') and $timestamp <= timestamp('$timeTo')\";\n //time range filter must be placed before other query filters\n if (queryString) {\n const separatorIndex = queryString.indexOf('|');\n if (separatorIndex === -1) {\n queryString = [queryString.trimEnd(), timeRangeFilter].join(' |');\n } else {\n queryString = [\n queryString.slice(0, separatorIndex).trimEnd(),\n timeRangeFilter,\n queryString.slice(separatorIndex + 1),\n ].join(' |');\n }\n }\n\n return { query: queryString };\n }\n}\n","import { BucketsConfiguration } from '../../../types';\nimport { defaultFilter } from './SettingsEditor/FiltersSettingsEditor/utils';\n\nexport const bucketAggregationConfig: BucketsConfiguration = {\n terms: {\n label: 'Terms',\n requiresField: true,\n defaultSettings: {\n min_doc_count: '0',\n size: '10',\n order: 'desc',\n orderBy: '_term',\n },\n },\n filters: {\n label: 'Filters',\n requiresField: false,\n defaultSettings: {\n filters: [defaultFilter()],\n },\n },\n geohash_grid: {\n label: 'Geo Hash Grid',\n requiresField: true,\n defaultSettings: {\n precision: '3',\n },\n },\n date_histogram: {\n label: 'Date Histogram',\n requiresField: true,\n defaultSettings: {\n interval: 'auto',\n min_doc_count: '0',\n trimEdges: '0',\n },\n },\n histogram: {\n label: 'Histogram',\n requiresField: true,\n defaultSettings: {\n interval: '1000',\n min_doc_count: '0',\n },\n },\n};\n\n// TODO: Define better types for the following\nexport const orderOptions = [\n { label: 'Top', value: 'desc' },\n { label: 'Bottom', value: 'asc' },\n];\n\nexport const sizeOptions = [\n { label: 'No limit', value: '0' },\n { label: '1', 
value: '1' },\n { label: '2', value: '2' },\n { label: '3', value: '3' },\n { label: '5', value: '5' },\n { label: '10', value: '10' },\n { label: '15', value: '15' },\n { label: '20', value: '20' },\n];\n\nexport const orderByOptions = [\n { label: 'Term value', value: '_term' },\n { label: 'Doc Count', value: '_count' },\n];\n\nexport const intervalOptions = [\n { label: 'auto', value: 'auto' },\n { label: '10s', value: '10s' },\n { label: '1m', value: '1m' },\n { label: '5m', value: '5m' },\n { label: '10m', value: '10m' },\n { label: '20m', value: '20m' },\n { label: '1h', value: '1h' },\n { label: '1d', value: '1d' },\n];\n","import { Filter } from '../../aggregations';\n\nexport const defaultFilter = (): Filter => ({ label: '', query: '*' });\n","import { bucketAggregationConfig } from './utils';\n\nexport type BucketAggregationType = 'terms' | 'filters' | 'geohash_grid' | 'date_histogram' | 'histogram';\n\ninterface BaseBucketAggregation {\n id: string;\n type: BucketAggregationType;\n settings?: Record;\n}\n\nexport interface BucketAggregationWithField extends BaseBucketAggregation {\n field?: string;\n}\n\nexport interface DateHistogram extends BucketAggregationWithField {\n type: 'date_histogram';\n settings?: {\n interval?: string;\n min_doc_count?: string;\n trimEdges?: string;\n offset?: string;\n };\n}\n\nexport interface Histogram extends BucketAggregationWithField {\n type: 'histogram';\n settings?: {\n interval?: string;\n min_doc_count?: string;\n };\n}\n\ntype TermsOrder = 'desc' | 'asc';\n\nexport interface Terms extends BucketAggregationWithField {\n type: 'terms';\n settings?: {\n order?: TermsOrder;\n size?: string;\n min_doc_count?: string;\n orderBy?: string;\n missing?: string;\n };\n}\n\nexport type Filter = {\n query: string;\n label: string;\n};\nexport interface Filters extends BaseBucketAggregation {\n type: 'filters';\n settings?: {\n filters?: Filter[];\n };\n}\n\ninterface GeoHashGrid extends BucketAggregationWithField {\n type: 
'geohash_grid';\n settings?: {\n precision?: string;\n };\n}\n\nexport type BucketAggregation = DateHistogram | Histogram | Terms | Filters | GeoHashGrid;\n\nexport const isBucketAggregationWithField = (\n bucketAgg: BucketAggregation | BucketAggregationWithField\n): bucketAgg is BucketAggregationWithField => bucketAggregationConfig[bucketAgg.type].requiresField;\n","import _ from 'lodash';\nimport { from, merge, of, Observable } from 'rxjs';\nimport { map } from 'rxjs/operators';\nimport {\n DataSourceApi,\n DataSourceInstanceSettings,\n DataQueryRequest,\n DataQueryResponse,\n DataFrame,\n ScopedVars,\n DataLink,\n PluginMeta,\n DataQuery,\n MetricFindValue,\n dateTime,\n TimeRange,\n LoadingState,\n toUtc,\n} from '@grafana/data';\nimport LanguageProvider from './language_provider';\nimport { ElasticResponse } from './elastic_response';\nimport { IndexPattern, getDefaultTimeRange } from './index_pattern';\nimport { ElasticQueryBuilder } from './query_builder';\nimport { defaultBucketAgg, hasMetricOfType } from './query_def';\nimport { getBackendSrv, getDataSourceSrv, getTemplateSrv } from '@grafana/runtime';\nimport { DataLinkConfig, ElasticsearchOptions, ElasticsearchQuery, ElasticsearchQueryType } from './types';\nimport { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';\nimport {\n isMetricAggregationWithField,\n isPipelineAggregationWithMultipleBucketPaths,\n} from './components/QueryEditor/MetricAggregationsEditor/aggregations';\nimport { bucketAggregationConfig } from './components/QueryEditor/BucketAggregationsEditor/utils';\nimport { isBucketAggregationWithField } from './components/QueryEditor/BucketAggregationsEditor/aggregations';\n\n// Those are metadata fields as defined in https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-fields.html#_identity_metadata_fields.\n// custom fields can start with underscores, therefore is not safe to exclude anything that starts with one.\nconst 
ELASTIC_META_FIELDS = [\n '_index',\n '_type',\n '_id',\n '_source',\n '_size',\n '_field_names',\n '_ignored',\n '_routing',\n '_meta',\n];\n\nexport class ElasticDatasource extends DataSourceApi {\n basicAuth?: string;\n withCredentials?: boolean;\n url: string;\n name: string;\n index: string;\n timeField: string;\n esVersion: number;\n interval: string;\n maxConcurrentShardRequests?: number;\n queryBuilder: ElasticQueryBuilder;\n indexPattern: IndexPattern;\n logMessageField?: string;\n logLevelField?: string;\n dataLinks: DataLinkConfig[];\n languageProvider: LanguageProvider;\n pplEnabled?: boolean;\n\n constructor(instanceSettings: DataSourceInstanceSettings) {\n super(instanceSettings);\n this.basicAuth = instanceSettings.basicAuth;\n this.withCredentials = instanceSettings.withCredentials;\n this.url = instanceSettings.url!;\n this.name = instanceSettings.name;\n const settingsData = instanceSettings.jsonData || ({} as ElasticsearchOptions);\n this.index = settingsData.database ?? '';\n\n this.timeField = settingsData.timeField;\n this.esVersion = settingsData.esVersion;\n this.indexPattern = new IndexPattern(this.index, settingsData.interval);\n this.interval = settingsData.timeInterval;\n this.maxConcurrentShardRequests = settingsData.maxConcurrentShardRequests;\n this.queryBuilder = new ElasticQueryBuilder({\n timeField: this.timeField,\n esVersion: this.esVersion,\n });\n this.logMessageField = settingsData.logMessageField || '';\n this.logLevelField = settingsData.logLevelField || '';\n this.dataLinks = settingsData.dataLinks || [];\n\n if (this.logMessageField === '') {\n this.logMessageField = undefined;\n }\n\n if (this.logLevelField === '') {\n this.logLevelField = undefined;\n }\n this.languageProvider = new LanguageProvider(this);\n this.pplEnabled = settingsData.pplEnabled ?? 
true;\n }\n\n private request(method: string, url: string, data?: undefined) {\n const options: any = {\n url: this.url + '/' + url,\n method: method,\n data: data,\n };\n\n if (this.basicAuth || this.withCredentials) {\n options.withCredentials = true;\n }\n if (this.basicAuth) {\n options.headers = {\n Authorization: this.basicAuth,\n };\n }\n\n return getBackendSrv()\n .datasourceRequest(options)\n .catch((err: any) => {\n if (err.data && err.data.error) {\n throw {\n message: `Elasticsearch error: ${err.data.error.reason}. ${err.data.error.details}`,\n error: err.data.error,\n };\n }\n throw err;\n });\n }\n\n async importQueries(queries: DataQuery[], originMeta: PluginMeta): Promise {\n return this.languageProvider.importQueries(queries, originMeta.id);\n }\n\n /**\n * Sends a GET request to the specified url on the newest matching and available index.\n *\n * When multiple indices span the provided time range, the request is sent starting from the newest index,\n * and then going backwards until an index is found.\n *\n * @param url the url to query the index on, for example `/_mapping`.\n */\n private get(url: string, range = getDefaultTimeRange()) {\n const indexList = this.indexPattern.getIndexList(range.from, range.to);\n if (_.isArray(indexList) && indexList.length) {\n return this.requestAllIndices(indexList, url).then((results: any) => {\n results.data.$$config = results.config;\n return results.data;\n });\n } else {\n return this.request('GET', this.indexPattern.getIndexForToday() + url).then((results: any) => {\n results.data.$$config = results.config;\n return results.data;\n });\n }\n }\n\n private async requestAllIndices(indexList: string[], url: string): Promise {\n const maxTraversals = 7; // do not go beyond one week (for a daily pattern)\n const listLen = indexList.length;\n for (let i = 0; i < Math.min(listLen, maxTraversals); i++) {\n try {\n return await this.request('GET', indexList[listLen - i - 1] + url);\n } catch (err) {\n if 
(err.status !== 404 || i === maxTraversals - 1) {\n throw err;\n }\n }\n }\n }\n\n private post(url: string, data: any) {\n return this.request('POST', url, data).then((results: any) => {\n results.data.$$config = results.config;\n return results.data;\n });\n }\n\n annotationQuery(options: any): Promise {\n const annotation = options.annotation;\n const timeField = annotation.timeField || '@timestamp';\n const timeEndField = annotation.timeEndField || null;\n const queryString = annotation.query || '*';\n const tagsField = annotation.tagsField || 'tags';\n const textField = annotation.textField || null;\n\n const dateRanges = [];\n const rangeStart: any = {};\n rangeStart[timeField] = {\n from: options.range.from.valueOf(),\n to: options.range.to.valueOf(),\n format: 'epoch_millis',\n };\n dateRanges.push({ range: rangeStart });\n\n if (timeEndField) {\n const rangeEnd: any = {};\n rangeEnd[timeEndField] = {\n from: options.range.from.valueOf(),\n to: options.range.to.valueOf(),\n format: 'epoch_millis',\n };\n dateRanges.push({ range: rangeEnd });\n }\n\n const queryInterpolated = getTemplateSrv().replace(queryString, {}, 'lucene');\n const query = {\n bool: {\n filter: [\n {\n bool: {\n should: dateRanges,\n minimum_should_match: 1,\n },\n },\n {\n query_string: {\n query: queryInterpolated,\n },\n },\n ],\n },\n };\n\n const data: any = {\n query,\n size: 10000,\n };\n\n // fields field not supported on ES 5.x\n if (this.esVersion < 5) {\n data['fields'] = [timeField, '_source'];\n }\n\n const header: any = {\n search_type: 'query_then_fetch',\n ignore_unavailable: true,\n };\n\n // old elastic annotations had index specified on them\n if (annotation.index) {\n header.index = annotation.index;\n } else {\n header.index = this.indexPattern.getIndexList(options.range.from, options.range.to);\n }\n\n const payload = JSON.stringify(header) + '\\n' + JSON.stringify(data) + '\\n';\n\n return this.post('_msearch', payload).then((res: any) => {\n const list = [];\n 
const hits = res.responses[0].hits.hits;\n\n const getFieldFromSource = (source: any, fieldName: any) => {\n if (!fieldName) {\n return;\n }\n\n const fieldNames = fieldName.split('.');\n let fieldValue = source;\n\n for (let i = 0; i < fieldNames.length; i++) {\n fieldValue = fieldValue[fieldNames[i]];\n if (!fieldValue) {\n console.log('could not find field in annotation: ', fieldName);\n return '';\n }\n }\n\n return fieldValue;\n };\n\n for (let i = 0; i < hits.length; i++) {\n const source = hits[i]._source;\n let time = getFieldFromSource(source, timeField);\n if (typeof hits[i].fields !== 'undefined') {\n const fields = hits[i].fields;\n if (_.isString(fields[timeField]) || _.isNumber(fields[timeField])) {\n time = fields[timeField];\n }\n }\n\n const event: {\n annotation: any;\n time: number;\n timeEnd?: number;\n text: string;\n tags: string | string[];\n } = {\n annotation: annotation,\n time: toUtc(time).valueOf(),\n text: getFieldFromSource(source, textField),\n tags: getFieldFromSource(source, tagsField),\n };\n\n if (timeEndField) {\n const timeEnd = getFieldFromSource(source, timeEndField);\n if (timeEnd) {\n event.timeEnd = toUtc(timeEnd).valueOf();\n }\n }\n\n // legacy support for title tield\n if (annotation.titleField) {\n const title = getFieldFromSource(source, annotation.titleField);\n if (title) {\n event.text = title + '\\n' + event.text;\n }\n }\n\n if (typeof event.tags === 'string') {\n event.tags = event.tags.split(',');\n }\n\n list.push(event);\n }\n return list;\n });\n }\n\n private interpolateLuceneQuery(queryString: string, scopedVars: ScopedVars) {\n // Elasticsearch Lucene queryString should always be '*' if empty string\n return getTemplateSrv().replace(queryString, scopedVars, 'lucene') || '*';\n }\n\n private interpolatePPLQuery(queryString: string, scopedVars: ScopedVars) {\n return getTemplateSrv().replace(queryString, scopedVars, 'pipe');\n }\n\n interpolateVariablesInQueries(queries: ElasticsearchQuery[], scopedVars: 
ScopedVars): ElasticsearchQuery[] {\n let expandedQueries = queries;\n if (queries && queries.length > 0) {\n expandedQueries = queries.map(query => {\n let interpolatedQuery;\n if (query.queryType === ElasticsearchQueryType.PPL) {\n interpolatedQuery = this.interpolatePPLQuery(query.query || '', scopedVars);\n } else {\n interpolatedQuery = this.interpolateLuceneQuery(query.query || '', scopedVars);\n }\n const expandedQuery = {\n ...query,\n datasource: this.name,\n query: interpolatedQuery,\n };\n\n for (let bucketAgg of query.bucketAggs || []) {\n if (bucketAgg.type === 'filters') {\n for (let filter of bucketAgg.settings?.filters || []) {\n filter.query = this.interpolateLuceneQuery(filter.query, scopedVars);\n }\n }\n }\n return expandedQuery;\n });\n }\n return expandedQueries;\n }\n\n testDatasource() {\n // validate that the index exist and has date field\n return this.getFields('date').then(\n (dateFields: any) => {\n const timeField: any = _.find(dateFields, { text: this.timeField });\n if (!timeField) {\n return {\n status: 'error',\n message: 'No date field named ' + this.timeField + ' found',\n };\n }\n return { status: 'success', message: 'Index OK. Time field name OK.' 
};\n },\n (err: any) => {\n console.error(err);\n if (err.message) {\n return { status: 'error', message: err.message };\n } else {\n return { status: 'error', message: err.status };\n }\n }\n );\n }\n\n getQueryHeader(searchType: any, timeFrom: any, timeTo: any) {\n const queryHeader: any = {\n search_type: searchType,\n ignore_unavailable: true,\n index: this.indexPattern.getIndexList(timeFrom, timeTo),\n };\n\n if (this.esVersion >= 56 && this.esVersion < 70) {\n queryHeader['max_concurrent_shard_requests'] = this.maxConcurrentShardRequests;\n }\n\n return JSON.stringify(queryHeader);\n }\n\n getQueryDisplayText(query: ElasticsearchQuery) {\n // TODO: This might be refactored a bit.\n const metricAggs = query.metrics;\n const bucketAggs = query.bucketAggs;\n let text = '';\n\n if (query.query) {\n text += 'Query: ' + query.query + ', ';\n }\n\n text += 'Metrics: ';\n\n text += metricAggs?.reduce((acc, metric) => {\n const metricConfig = metricAggregationConfig[metric.type];\n\n let text = metricConfig.label + '(';\n\n if (isMetricAggregationWithField(metric)) {\n text += metric.field;\n }\n if (isPipelineAggregationWithMultipleBucketPaths(metric)) {\n text += metric.settings?.script?.replace(new RegExp('params.', 'g'), '');\n }\n text += '), ';\n\n return `${acc} ${text}`;\n }, '');\n\n text += bucketAggs?.reduce((acc, bucketAgg, index) => {\n const bucketConfig = bucketAggregationConfig[bucketAgg.type];\n\n let text = '';\n if (index === 0) {\n text += ' Group by: ';\n }\n\n text += bucketConfig.label + '(';\n if (isBucketAggregationWithField(bucketAgg)) {\n text += bucketAgg.field;\n }\n\n return `${acc} ${text}), `;\n }, '');\n\n if (query.alias) {\n text += 'Alias: ' + query.alias;\n }\n\n return text;\n }\n\n query(options: DataQueryRequest): Observable {\n const targets = this.interpolateVariablesInQueries(_.cloneDeep(options.targets), options.scopedVars);\n\n const luceneTargets: ElasticsearchQuery[] = [];\n const pplTargets: ElasticsearchQuery[] = 
[];\n\n for (const target of targets) {\n if (target.hide) {\n continue;\n }\n\n switch (target.queryType) {\n case ElasticsearchQueryType.PPL:\n pplTargets.push(target);\n break;\n case ElasticsearchQueryType.Lucene:\n default:\n luceneTargets.push(target);\n }\n }\n\n const subQueries: Array> = [];\n\n if (luceneTargets.length) {\n const luceneResponses = this.executeLuceneQueries(luceneTargets, options);\n subQueries.push(luceneResponses);\n }\n if (pplTargets.length) {\n const pplResponses = this.executePPLQueries(pplTargets, options);\n subQueries.push(pplResponses);\n }\n if (subQueries.length === 0) {\n return of({\n data: [],\n state: LoadingState.Done,\n });\n }\n return merge(...subQueries);\n }\n\n /**\n * Execute all Lucene queries. Returns an Observable to be merged.\n */\n private executeLuceneQueries(\n targets: ElasticsearchQuery[],\n options: DataQueryRequest\n ): Observable {\n let payload = '';\n\n for (const target of targets) {\n payload += this.createLuceneQuery(target, options);\n }\n\n // We replace the range here for actual values. We need to replace it together with enclosing \"\" so that we replace\n // it as an integer not as string with digits. 
This is because elastic will convert the string only if the time\n // field is specified as type date (which probably should) but can also be specified as integer (millisecond epoch)\n // and then sending string will error out.\n payload = payload.replace(/\"\\$timeFrom\"/g, options.range.from.valueOf().toString());\n payload = payload.replace(/\"\\$timeTo\"/g, options.range.to.valueOf().toString());\n payload = getTemplateSrv().replace(payload, options.scopedVars);\n\n return from(this.post(this.getMultiSearchUrl(), payload)).pipe(\n map((res: any) => {\n const er = new ElasticResponse(targets, res);\n\n if (targets.some(target => target.isLogsQuery)) {\n const response = er.getLogs(this.logMessageField, this.logLevelField);\n for (const dataFrame of response.data) {\n enhanceDataFrame(dataFrame, this.dataLinks);\n }\n return response;\n }\n\n return er.getTimeSeries();\n })\n );\n }\n\n /**\n * Execute all PPL queries. Returns an Observable to be merged.\n */\n private executePPLQueries(\n targets: ElasticsearchQuery[],\n options: DataQueryRequest\n ): Observable {\n const subQueries: Array> = [];\n\n for (const target of targets) {\n let payload = this.createPPLQuery(target, options);\n\n const rangeFrom = dateTime(options.range.from.valueOf()).format('YYYY-MM-DD HH:mm:ss');\n const rangeTo = dateTime(options.range.to.valueOf()).format('YYYY-MM-DD HH:mm:ss');\n // Replace the range here for actual values.\n payload = payload.replace(/\\$timeTo/g, rangeTo);\n payload = payload.replace(/\\$timeFrom/g, rangeFrom);\n payload = payload.replace(/\\$timestamp/g, `\\`${this.timeField}\\``);\n subQueries.push(\n from(this.post(this.getPPLUrl(), payload)).pipe(\n map((res: any) => {\n const er = new ElasticResponse([target], res, ElasticsearchQueryType.PPL);\n\n if (targets.some(target => target.isLogsQuery)) {\n const response = er.getLogs(this.logMessageField, this.logLevelField);\n for (const dataFrame of response.data) {\n enhanceDataFrame(dataFrame, 
this.dataLinks);\n }\n return response;\n } else if (targets.some(target => target.format === 'table')) {\n return er.getTable();\n }\n return er.getTimeSeries();\n })\n )\n );\n }\n return merge(...subQueries);\n }\n\n /**\n * Creates the payload string for a Lucene query\n */\n private createLuceneQuery(target: ElasticsearchQuery, options: DataQueryRequest): string {\n let queryString = getTemplateSrv().replace(target.query, options.scopedVars, 'lucene');\n // @ts-ignore\n // add global adhoc filters to timeFilter\n const adhocFilters = getTemplateSrv().getAdhocFilters(this.name);\n // Elasticsearch queryString should always be '*' if empty string\n if (!queryString || queryString === '') {\n queryString = '*';\n }\n\n let queryObj;\n if (target.isLogsQuery || hasMetricOfType(target, 'logs')) {\n target.bucketAggs = [defaultBucketAgg()];\n target.metrics = [];\n // Setting this for metrics queries that are typed as logs\n target.isLogsQuery = true;\n queryObj = this.queryBuilder.getLogsQuery(target, adhocFilters, queryString);\n } else {\n if (target.alias) {\n target.alias = getTemplateSrv().replace(target.alias, options.scopedVars, 'lucene');\n }\n queryObj = this.queryBuilder.build(target, adhocFilters, queryString);\n }\n\n const esQuery = JSON.stringify(queryObj);\n const searchType = queryObj.size === 0 && this.esVersion < 5 ? 
'count' : 'query_then_fetch';\n const header = this.getQueryHeader(searchType, options.range.from, options.range.to);\n return header + '\\n' + esQuery + '\\n';\n }\n\n /**\n * Creates the payload string for a PPL query\n */\n private createPPLQuery(target: ElasticsearchQuery, options: DataQueryRequest): string {\n let queryString = getTemplateSrv().replace(target.query, options.scopedVars, 'pipe');\n let queryObj;\n\n // @ts-ignore\n // add global adhoc filters to timeFilter\n const adhocFilters = getTemplateSrv().getAdhocFilters(this.name);\n\n // Elasticsearch PPL queryString should always be 'source=indexName' if empty string\n if (!queryString) {\n queryString = `source=\\`${this.indexPattern.getPPLIndexPattern()}\\``;\n }\n\n queryObj = this.queryBuilder.buildPPLQuery(target, adhocFilters, queryString);\n return JSON.stringify(queryObj);\n }\n\n isMetadataField(fieldName: string) {\n return ELASTIC_META_FIELDS.includes(fieldName);\n }\n\n // TODO: instead of being a string, this could be a custom type representing all the elastic types\n async getFields(type?: string, range?: TimeRange): Promise {\n const configuredEsVersion = this.esVersion;\n return this.get('/_mapping', range).then((result: any) => {\n const typeMap: any = {\n float: 'number',\n double: 'number',\n integer: 'number',\n long: 'number',\n date: 'date',\n date_nanos: 'date',\n string: 'string',\n text: 'string',\n scaled_float: 'number',\n nested: 'nested',\n };\n\n const shouldAddField = (obj: any, key: string) => {\n if (this.isMetadataField(key)) {\n return false;\n }\n\n if (!type) {\n return true;\n }\n\n // equal query type filter, or via typemap translation\n return type === obj.type || type === typeMap[obj.type];\n };\n\n // Store subfield names: [system, process, cpu, total] -> system.process.cpu.total\n const fieldNameParts: any = [];\n const fields: any = {};\n\n function getFieldsRecursively(obj: any) {\n for (const key in obj) {\n const subObj = obj[key];\n\n // Check mapping 
field for nested fields\n if (_.isObject(subObj.properties)) {\n fieldNameParts.push(key);\n getFieldsRecursively(subObj.properties);\n }\n\n if (_.isObject(subObj.fields)) {\n fieldNameParts.push(key);\n getFieldsRecursively(subObj.fields);\n }\n\n if (_.isString(subObj.type)) {\n const fieldName = fieldNameParts.concat(key).join('.');\n\n // Hide meta-fields and check field type\n if (shouldAddField(subObj, key)) {\n fields[fieldName] = {\n text: fieldName,\n type: subObj.type,\n };\n }\n }\n }\n fieldNameParts.pop();\n }\n\n for (const indexName in result) {\n const index = result[indexName];\n if (index && index.mappings) {\n const mappings = index.mappings;\n\n if (configuredEsVersion < 70) {\n for (const typeName in mappings) {\n const properties = mappings[typeName].properties;\n getFieldsRecursively(properties);\n }\n } else {\n const properties = mappings.properties;\n getFieldsRecursively(properties);\n }\n }\n }\n\n // transform to array\n return _.map(fields, value => {\n return value;\n });\n });\n }\n\n getTerms(queryDef: any, range = getDefaultTimeRange()) {\n const searchType = this.esVersion >= 5 ? 
'query_then_fetch' : 'count';\n const header = this.getQueryHeader(searchType, range.from, range.to);\n let esQuery = JSON.stringify(this.queryBuilder.getTermsQuery(queryDef));\n\n esQuery = esQuery.replace(/\\$timeFrom/g, range.from.valueOf().toString());\n esQuery = esQuery.replace(/\\$timeTo/g, range.to.valueOf().toString());\n esQuery = header + '\\n' + esQuery + '\\n';\n\n const url = this.getMultiSearchUrl();\n\n return this.post(url, esQuery).then((res: any) => {\n if (!res.responses[0].aggregations) {\n return [];\n }\n\n const buckets = res.responses[0].aggregations['1'].buckets;\n return _.map(buckets, bucket => {\n return {\n text: bucket.key_as_string || bucket.key,\n value: bucket.key,\n };\n });\n });\n }\n\n getMultiSearchUrl() {\n if (this.esVersion >= 70 && this.maxConcurrentShardRequests) {\n return `_msearch?max_concurrent_shard_requests=${this.maxConcurrentShardRequests}`;\n }\n\n return '_msearch';\n }\n\n getPPLUrl() {\n return '_opendistro/_ppl';\n }\n\n metricFindQuery(query: string, options?: any): Promise {\n const range = options?.range;\n const parsedQuery = JSON.parse(query);\n if (query) {\n if (parsedQuery.find === 'fields') {\n parsedQuery.field = getTemplateSrv().replace(parsedQuery.field, {}, 'lucene');\n return this.getFields(query, range);\n }\n\n if (parsedQuery.find === 'terms') {\n parsedQuery.field = getTemplateSrv().replace(parsedQuery.field, {}, 'lucene');\n parsedQuery.query = getTemplateSrv().replace(parsedQuery.query || '*', {}, 'lucene');\n return this.getTerms(query, range);\n }\n }\n\n return Promise.resolve([]);\n }\n\n getTagKeys() {\n return this.getFields();\n }\n\n getTagValues(options: any) {\n return this.getTerms({ field: options.key, query: '*' });\n }\n\n targetContainsTemplate(target: any) {\n // @ts-ignore\n if (getTemplateSrv().variableExists(target.query) || getTemplateSrv().variableExists(target.alias)) {\n return true;\n }\n\n for (const bucketAgg of target.bucketAggs) {\n // @ts-ignore\n if 
(getTemplateSrv().variableExists(bucketAgg.field) || this.objectContainsTemplate(bucketAgg.settings)) {\n return true;\n }\n }\n\n for (const metric of target.metrics) {\n if (\n // @ts-ignore\n getTemplateSrv().variableExists(metric.field) ||\n this.objectContainsTemplate(metric.settings) ||\n this.objectContainsTemplate(metric.meta)\n ) {\n return true;\n }\n }\n\n return false;\n }\n\n private isPrimitive(obj: any) {\n if (obj === null || obj === undefined) {\n return true;\n }\n if (['string', 'number', 'boolean'].some(type => type === typeof true)) {\n return true;\n }\n\n return false;\n }\n\n private objectContainsTemplate(obj: any) {\n if (!obj) {\n return false;\n }\n\n for (const key of Object.keys(obj)) {\n if (this.isPrimitive(obj[key])) {\n // @ts-ignore\n if (getTemplateSrv().variableExists(obj[key])) {\n return true;\n }\n } else if (Array.isArray(obj[key])) {\n for (const item of obj[key]) {\n if (this.objectContainsTemplate(item)) {\n return true;\n }\n }\n } else {\n if (this.objectContainsTemplate(obj[key])) {\n return true;\n }\n }\n }\n\n return false;\n }\n\n getSupportedQueryTypes(): ElasticsearchQueryType[] {\n return [ElasticsearchQueryType.Lucene, ...(this.pplEnabled ? 
[ElasticsearchQueryType.PPL] : [])];\n }\n}\n\n/**\n * Modifies dataframe and adds dataLinks from the config.\n * Exported for tests.\n */\nexport function enhanceDataFrame(dataFrame: DataFrame, dataLinks: DataLinkConfig[]) {\n const dataSourceSrv = getDataSourceSrv();\n\n if (!dataLinks.length) {\n return;\n }\n\n for (const field of dataFrame.fields) {\n const dataLinkConfig = dataLinks.find(dataLink => field.name && field.name.match(dataLink.field));\n\n if (!dataLinkConfig) {\n continue;\n }\n\n let link: DataLink;\n\n if (dataLinkConfig.datasourceUid) {\n // @ts-ignore\n const dsSettings = dataSourceSrv.getInstanceSettings(dataLinkConfig.datasourceUid);\n\n link = {\n title: '',\n url: '',\n internal: {\n query: { query: dataLinkConfig.url },\n datasourceUid: dataLinkConfig.datasourceUid,\n // @ts-ignore\n datasourceName: dsSettings?.name ?? 'Data source not found',\n },\n };\n } else {\n link = {\n title: '',\n url: dataLinkConfig.url,\n };\n }\n\n field.config = field.config || {};\n field.config.links = [...(field.config.links || []), link];\n }\n}\n","import React from 'react';\nimport { EventsWithValidation, regexValidation, LegacyForms } from '@grafana/ui';\nconst { Select, Input, FormField, Switch } = LegacyForms;\nimport { ElasticsearchOptions } from '../types';\nimport { DataSourceSettings, SelectableValue } from '@grafana/data';\n\nconst indexPatternTypes = [\n { label: 'No pattern', value: 'none' },\n { label: 'Hourly', value: 'Hourly', example: '[logstash-]YYYY.MM.DD.HH' },\n { label: 'Daily', value: 'Daily', example: '[logstash-]YYYY.MM.DD' },\n { label: 'Weekly', value: 'Weekly', example: '[logstash-]GGGG.WW' },\n { label: 'Monthly', value: 'Monthly', example: '[logstash-]YYYY.MM' },\n { label: 'Yearly', value: 'Yearly', example: '[logstash-]YYYY' },\n];\n\nconst esVersions = [\n { label: '2.x', value: 2 },\n { label: '5.x', value: 5 },\n { label: '5.6+', value: 56 },\n { label: '6.0+', value: 60 },\n { label: '7.0+', value: 70 },\n];\n\ntype 
Props = {\n value: DataSourceSettings;\n onChange: (value: DataSourceSettings) => void;\n};\nexport const ElasticDetails = (props: Props) => {\n const { value, onChange } = props;\n\n return (\n <>\n Elasticsearch details
\n\n \n
\n
\n \n
\n\n
\n \n pattern.value === (value.jsonData.interval === undefined ? 'none' : value.jsonData.interval)\n )}\n />\n }\n />\n
\n
\n\n
\n \n
\n\n
\n {\n const maxConcurrentShardRequests = getMaxConcurrenShardRequestOrDefault(\n value.jsonData.maxConcurrentShardRequests,\n option.value!\n );\n onChange({\n ...value,\n jsonData: {\n ...value.jsonData,\n esVersion: option.value!,\n maxConcurrentShardRequests,\n },\n });\n }}\n value={esVersions.find(version => version.value === value.jsonData.esVersion)}\n />\n }\n />\n
\n {value.jsonData.esVersion >= 56 && (\n
\n \n
\n )}\n
\n
\n \n }\n tooltip={\n <>\n A lower limit for the auto group by time interval. Recommended to be set to write frequency, for\n example 1m
if your data is written every minute.\n >\n }\n />\n
\n
\n
\n \n
\n
\n >\n );\n};\n\nconst jsonDataChangeHandler = (key: keyof ElasticsearchOptions, value: Props['value'], onChange: Props['onChange']) => (\n event: React.SyntheticEvent\n) => {\n onChange({\n ...value,\n jsonData: {\n ...value.jsonData,\n [key]: event.currentTarget.value,\n },\n });\n};\n\nconst jsonDataSwitchChangeHandler = (\n key: keyof ElasticsearchOptions,\n value: Props['value'],\n onChange: Props['onChange']\n) => (event: React.SyntheticEvent) => {\n onChange({\n ...value,\n jsonData: {\n ...value.jsonData,\n [key]: event.currentTarget.checked,\n },\n });\n};\n\nconst intervalHandler = (value: Props['value'], onChange: Props['onChange']) => (option: SelectableValue) => {\n const { database } = value;\n // If option value is undefined it will send its label instead so we have to convert made up value to undefined here.\n const newInterval = option.value === 'none' ? undefined : option.value;\n\n if (!database || database.length === 0 || database.startsWith('[logstash-]')) {\n let newDatabase = '';\n\n if (newInterval !== undefined) {\n const pattern = indexPatternTypes.find(pattern => pattern.value === newInterval);\n\n if (pattern) {\n newDatabase = pattern.example ?? '';\n }\n }\n\n onChange({\n ...value,\n database: newDatabase,\n jsonData: {\n ...value.jsonData,\n interval: newInterval,\n },\n });\n } else {\n onChange({\n ...value,\n jsonData: {\n ...value.jsonData,\n interval: newInterval,\n },\n });\n }\n};\n\nfunction getMaxConcurrenShardRequestOrDefault(maxConcurrentShardRequests: number | undefined, version: number): number {\n if (maxConcurrentShardRequests === 5 && version < 70) {\n return 256;\n }\n\n if (maxConcurrentShardRequests === 256 && version >= 70) {\n return 5;\n }\n\n return maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(version);\n}\n\nexport function defaultMaxConcurrentShardRequests(version: number) {\n return version >= 70 ? 
5 : 256;\n}\n","import React from 'react';\nimport { LegacyForms } from '@grafana/ui';\nconst { FormField } = LegacyForms;\nimport { ElasticsearchOptions } from '../types';\n\ntype Props = {\n value: ElasticsearchOptions;\n onChange: (value: ElasticsearchOptions) => void;\n};\nexport const LogsConfig = (props: Props) => {\n const { value, onChange } = props;\n const changeHandler = (key: keyof ElasticsearchOptions) => (\n event: React.SyntheticEvent\n ) => {\n onChange({\n ...value,\n [key]: event.currentTarget.value,\n });\n };\n\n return (\n <>\n Logs
\n\n \n >\n );\n};\n","import { useEffect, useRef } from 'react';\nvar usePrevious = function (state) {\n var ref = useRef();\n useEffect(function () {\n ref.current = state;\n });\n return ref.current;\n};\nexport default usePrevious;\n","import React, { HTMLAttributes } from 'react';\nimport { Badge, BadgeProps } from '@grafana/ui';\nimport { PluginErrorCode, PluginSignatureStatus } from '@grafana/data';\n\ninterface Props extends HTMLAttributes {\n status?: PluginSignatureStatus;\n}\n\nexport const PluginSignatureBadge: React.FC = ({ status, ...otherProps }) => {\n const display = getSignatureDisplayModel(status);\n return (\n \n );\n};\n\nexport function isUnsignedPluginSignature(signature?: PluginSignatureStatus) {\n return signature && signature !== PluginSignatureStatus.valid && signature !== PluginSignatureStatus.internal;\n}\n\nexport function mapPluginErrorCodeToSignatureStatus(code: PluginErrorCode) {\n switch (code) {\n case PluginErrorCode.invalidSignature:\n return PluginSignatureStatus.invalid;\n case PluginErrorCode.missingSignature:\n return PluginSignatureStatus.missing;\n case PluginErrorCode.modifiedSignature:\n return PluginSignatureStatus.modified;\n default:\n return PluginSignatureStatus.missing;\n }\n}\n\nfunction getSignatureDisplayModel(signature?: PluginSignatureStatus): BadgeProps {\n if (!signature) {\n signature = PluginSignatureStatus.invalid;\n }\n\n switch (signature) {\n case PluginSignatureStatus.internal:\n return { text: 'Core', icon: 'cube', color: 'blue', tooltip: 'Core plugin that is bundled with Grafana' };\n case PluginSignatureStatus.valid:\n return { text: 'Signed', icon: 'lock', color: 'green', tooltip: 'Signed and verified plugin' };\n case PluginSignatureStatus.invalid:\n return {\n text: 'Invalid signature',\n icon: 'exclamation-triangle',\n color: 'red',\n tooltip: 'Invalid plugin signature',\n };\n case PluginSignatureStatus.modified:\n return {\n text: 'Modified signature',\n icon: 'exclamation-triangle',\n color: 
'red',\n tooltip: 'Valid signature but content has been modified',\n };\n case PluginSignatureStatus.missing:\n return {\n text: 'Missing signature',\n icon: 'exclamation-triangle',\n color: 'red',\n tooltip: 'Missing plugin signature',\n };\n }\n\n return { text: 'Unsigned', icon: 'exclamation-triangle', color: 'red', tooltip: 'Unsigned external plugin' };\n}\n\nPluginSignatureBadge.displayName = 'PluginSignatureBadge';\n","// Libraries\nimport React, { PureComponent } from 'react';\n\n// Components\nimport { HorizontalGroup, Select } from '@grafana/ui';\nimport { SelectableValue, DataSourceInstanceSettings } from '@grafana/data';\nimport { selectors } from '@grafana/e2e-selectors';\nimport { isUnsignedPluginSignature, PluginSignatureBadge } from './PluginSignatureBadge';\nimport { getDataSourceSrv } from '@grafana/runtime';\n\nexport interface Props {\n onChange: (ds: DataSourceInstanceSettings) => void;\n current: string | null;\n hideTextValue?: boolean;\n onBlur?: () => void;\n autoFocus?: boolean;\n openMenuOnFocus?: boolean;\n placeholder?: string;\n tracing?: boolean;\n mixed?: boolean;\n dashboard?: boolean;\n metrics?: boolean;\n annotations?: boolean;\n variables?: boolean;\n pluginId?: string;\n noDefault?: boolean;\n}\n\nexport interface State {\n error?: string;\n}\n\nexport class DataSourcePicker extends PureComponent {\n dataSourceSrv = getDataSourceSrv();\n\n static defaultProps: Partial = {\n autoFocus: false,\n openMenuOnFocus: false,\n placeholder: 'Select datasource',\n };\n\n state: State = {};\n\n constructor(props: Props) {\n super(props);\n }\n\n componentDidMount() {\n const { current } = this.props;\n // @ts-ignore\n const dsSettings = this.dataSourceSrv.getInstanceSettings(current);\n if (!dsSettings) {\n this.setState({ error: 'Could not find data source ' + current });\n }\n }\n\n onChange = (item: SelectableValue) => {\n // @ts-ignore\n const dsSettings = this.dataSourceSrv.getInstanceSettings(item.value);\n\n if (dsSettings) {\n 
this.props.onChange(dsSettings);\n this.setState({ error: undefined });\n }\n };\n\n private getCurrentValue() {\n const { current, hideTextValue, noDefault } = this.props;\n\n if (!current && noDefault) {\n return null;\n }\n\n // @ts-ignore\n const ds = this.dataSourceSrv.getInstanceSettings(current);\n\n if (ds) {\n return {\n label: ds.name.substr(0, 37),\n value: ds.name,\n imgUrl: ds.meta.info.logos.small,\n hideText: hideTextValue,\n meta: ds.meta,\n };\n }\n\n return {\n label: (current ?? 'no name') + ' - not found',\n value: current,\n imgUrl: '',\n hideText: hideTextValue,\n };\n }\n\n getDataSourceOptions() {\n const { tracing, metrics, mixed, dashboard, variables, annotations, pluginId } = this.props;\n const options = this.dataSourceSrv\n // @ts-ignore\n .getList({\n tracing,\n metrics,\n dashboard,\n mixed,\n variables,\n annotations,\n pluginId,\n })\n .map(ds => ({\n value: ds.name,\n label: `${ds.name}${ds.isDefault ? ' (default)' : ''}`,\n imgUrl: ds.meta.info.logos.small,\n meta: ds.meta,\n }));\n\n return options;\n }\n\n render() {\n const { autoFocus, onBlur, openMenuOnFocus, placeholder } = this.props;\n const { error } = this.state;\n const options = this.getDataSourceOptions();\n const value = this.getCurrentValue();\n\n return (\n \n
\n );\n }\n}\n","import React, { Dispatch, SetStateAction, useEffect, useState } from 'react';\nimport { css } from 'emotion';\nimport { VariableSuggestion } from '@grafana/data';\nimport { Button, LegacyForms, DataLinkInput, stylesFactory } from '@grafana/ui';\nconst { FormField, Switch } = LegacyForms;\nimport { DataLinkConfig } from '../types';\nimport { usePrevious } from 'react-use';\nimport { DataSourcePicker } from '../dependencies/DataSourcePicker';\n\nconst getStyles = stylesFactory(() => ({\n firstRow: css`\n display: flex;\n `,\n nameField: css`\n flex: 2;\n `,\n regexField: css`\n flex: 3;\n `,\n row: css`\n display: flex;\n align-items: baseline;\n `,\n}));\n\ntype Props = {\n value: DataLinkConfig;\n onChange: (value: DataLinkConfig) => void;\n onDelete: () => void;\n suggestions: VariableSuggestion[];\n className?: string;\n};\nexport const DataLink = (props: Props) => {\n const { value, onChange, onDelete, suggestions, className } = props;\n const styles = getStyles();\n const [showInternalLink, setShowInternalLink] = useInternalLink(value.datasourceUid);\n\n const handleChange = (field: keyof typeof value) => (event: React.ChangeEvent) => {\n onChange({\n ...value,\n [field]: event.currentTarget.value,\n });\n };\n\n return (\n \n
\n \n
\n
\n \n onChange({\n ...value,\n url: newValue,\n })\n }\n suggestions={suggestions}\n />\n }\n className={css`\n width: 100%;\n `}\n />\n
\n\n
\n {\n if (showInternalLink) {\n onChange({\n ...value,\n datasourceUid: undefined,\n });\n }\n setShowInternalLink(!showInternalLink);\n }}\n />\n\n {showInternalLink && (\n {\n onChange({\n ...value,\n datasourceUid: ds.uid,\n });\n }}\n current={value.datasourceUid}\n />\n )}\n
\n
\n );\n};\n\nfunction useInternalLink(datasourceUid?: string): [boolean, Dispatch>] {\n const [showInternalLink, setShowInternalLink] = useState(!!datasourceUid);\n const previousUid = usePrevious(datasourceUid);\n\n // Force internal link visibility change if uid changed outside of this component.\n useEffect(() => {\n if (!previousUid && datasourceUid && !showInternalLink) {\n setShowInternalLink(true);\n }\n if (previousUid && !datasourceUid && showInternalLink) {\n setShowInternalLink(false);\n }\n }, [previousUid, datasourceUid, showInternalLink]);\n\n return [showInternalLink, setShowInternalLink];\n}\n","import React from 'react';\nimport { css } from 'emotion';\nimport { Button, stylesFactory, useTheme } from '@grafana/ui';\nimport { GrafanaTheme, VariableOrigin, DataLinkBuiltInVars } from '@grafana/data';\nimport { DataLinkConfig } from '../types';\nimport { DataLink } from './DataLink';\n\nconst getStyles = stylesFactory((theme: GrafanaTheme) => ({\n infoText: css`\n padding-bottom: ${theme.spacing.md};\n color: ${theme.colors.textWeak};\n `,\n dataLink: css`\n margin-bottom: ${theme.spacing.sm};\n `,\n}));\n\ntype Props = {\n value?: DataLinkConfig[];\n onChange: (value: DataLinkConfig[]) => void;\n};\nexport const DataLinks = (props: Props) => {\n const { value, onChange } = props;\n const theme = useTheme();\n const styles = getStyles(theme);\n\n return (\n <>\n Data links
\n\n \n Add links to existing fields. Links will be shown in log row details next to the field value.\n
\n\n {value && value.length > 0 && (\n \n {value.map((field, index) => {\n return (\n {\n const newDataLinks = [...value];\n newDataLinks.splice(index, 1, newField);\n onChange(newDataLinks);\n }}\n onDelete={() => {\n const newDataLinks = [...value];\n newDataLinks.splice(index, 1);\n onChange(newDataLinks);\n }}\n suggestions={[\n {\n value: DataLinkBuiltInVars.valueRaw,\n label: 'Raw value',\n documentation: 'Raw value of the field',\n origin: VariableOrigin.Value,\n },\n ]}\n />\n );\n })}\n
\n )}\n\n \n >\n );\n};\n","import { createContext, useCallback, useContext } from 'react';\n\nexport interface Action {\n type: T;\n}\n\nexport type Reducer = (state: S, action: A) => S;\n\nexport const combineReducers = (reducers: { [P in keyof S]: Reducer }) => (\n state: S,\n action: A\n): Partial => {\n const newState = {} as S;\n for (const key in reducers) {\n newState[key] = reducers[key](state[key], action);\n }\n return newState;\n};\n\nexport const useStatelessReducer = (\n onChange: (value: State) => void,\n state: State,\n reducer: (state: State, action: A) => State\n) => {\n const dispatch = useCallback(\n (action: A) => {\n onChange(reducer(state, action));\n },\n [onChange, state, reducer]\n );\n\n return dispatch;\n};\n\nexport const DispatchContext = createContext<((action: Action) => void) | undefined>(undefined);\n\nexport const useDispatch = (): ((action: T) => void) => {\n const dispatch = useContext(DispatchContext);\n\n if (!dispatch) {\n throw new Error('Use DispatchContext first.');\n }\n\n return dispatch;\n};\n","import { Action } from '../../hooks/useStatelessReducer';\n\nexport const INIT = 'init';\nconst CHANGE_QUERY = 'change_query';\nconst CHANGE_ALIAS_PATTERN = 'change_alias_pattern';\n\nexport interface InitAction extends Action {}\n\ninterface ChangeQueryAction extends Action {\n payload: {\n query: string;\n };\n}\n\ninterface ChangeAliasPatternAction extends Action {\n payload: {\n aliasPattern: string;\n };\n}\n\nexport const initQuery = (): InitAction => ({ type: INIT });\n\nexport const changeQuery = (query: string): ChangeQueryAction => ({\n type: CHANGE_QUERY,\n payload: {\n query,\n },\n});\n\nexport const changeAliasPattern = (aliasPattern: string): ChangeAliasPatternAction => ({\n type: CHANGE_ALIAS_PATTERN,\n payload: {\n aliasPattern,\n },\n});\n\nexport const queryReducer = (prevQuery: string, action: ChangeQueryAction | InitAction) => {\n switch (action.type) {\n case CHANGE_QUERY:\n return action.payload.query;\n\n 
case INIT:\n return '';\n\n default:\n return prevQuery;\n }\n};\n\nexport const aliasPatternReducer = (prevAliasPattern: string, action: ChangeAliasPatternAction | InitAction) => {\n switch (action.type) {\n case CHANGE_ALIAS_PATTERN:\n return action.payload.aliasPattern;\n\n case INIT:\n return '';\n\n default:\n return prevAliasPattern;\n }\n};\n","import { defaultMetricAgg } from '../../../../query_def';\nimport { ElasticsearchQuery } from '../../../../types';\nimport { removeEmpty } from '../../../../utils';\nimport { INIT, InitAction } from '../../state';\nimport { isMetricAggregationWithMeta, isMetricAggregationWithSettings, MetricAggregation } from '../aggregations';\nimport { getChildren, metricAggregationConfig } from '../utils';\nimport {\n ADD_METRIC,\n CHANGE_METRIC_TYPE,\n REMOVE_METRIC,\n TOGGLE_METRIC_VISIBILITY,\n MetricAggregationAction,\n CHANGE_METRIC_FIELD,\n CHANGE_METRIC_SETTING,\n CHANGE_METRIC_META,\n CHANGE_METRIC_ATTRIBUTE,\n} from './types';\n\nexport const reducer = (\n state: MetricAggregation[],\n action: MetricAggregationAction | InitAction\n): ElasticsearchQuery['metrics'] => {\n switch (action.type) {\n case ADD_METRIC:\n return [...state, defaultMetricAgg(action.payload.id)];\n\n case REMOVE_METRIC:\n const metricToRemove = state.find(m => m.id === action.payload.id)!;\n const metricsToRemove = [metricToRemove, ...getChildren(metricToRemove, state)];\n const resultingMetrics = state.filter(metric => !metricsToRemove.some(toRemove => toRemove.id === metric.id));\n if (resultingMetrics.length === 0) {\n return [defaultMetricAgg('1')];\n }\n return resultingMetrics;\n\n case CHANGE_METRIC_TYPE:\n return state\n .filter(metric =>\n // When the new metric type is `isSingleMetric` we remove all other metrics from the query\n // leaving only the current one.\n !!metricAggregationConfig[action.payload.type].isSingleMetric ? 
metric.id === action.payload.id : true\n )\n .map(metric => {\n if (metric.id !== action.payload.id) {\n return metric;\n }\n\n /*\n TODO: The previous version of the query editor was keeping some of the old metric's configurations\n in the new selected one (such as field or some settings).\n It the future would be nice to have the same behavior but it's hard without a proper definition,\n as Elasticsearch will error sometimes if some settings are not compatible.\n */\n return {\n id: metric.id,\n type: action.payload.type,\n ...metricAggregationConfig[action.payload.type].defaults,\n } as MetricAggregation;\n });\n\n case CHANGE_METRIC_FIELD:\n return state.map(metric => {\n if (metric.id !== action.payload.id) {\n return metric;\n }\n\n return {\n ...metric,\n field: action.payload.field,\n };\n });\n\n case TOGGLE_METRIC_VISIBILITY:\n return state.map(metric => {\n if (metric.id !== action.payload.id) {\n return metric;\n }\n\n return {\n ...metric,\n hide: !metric.hide,\n };\n });\n case CHANGE_METRIC_SETTING:\n // @ts-ignore\n return state.map(metric => {\n if (metric.id !== action.payload.metric.id) {\n return metric;\n }\n\n // TODO: Here, instead of this if statement, we should assert that metric is MetricAggregationWithSettings\n if (isMetricAggregationWithSettings(metric)) {\n const newSettings = removeEmpty({\n ...metric.settings,\n [action.payload.settingName]: action.payload.newValue,\n });\n\n return {\n ...metric,\n settings: {\n ...newSettings,\n },\n };\n }\n\n // This should never happen.\n return metric;\n });\n\n case CHANGE_METRIC_META:\n return state.map(metric => {\n if (metric.id !== action.payload.metric.id) {\n return metric;\n }\n\n // TODO: Here, instead of this if statement, we should assert that metric is MetricAggregationWithMeta\n if (isMetricAggregationWithMeta(metric)) {\n return {\n ...metric,\n meta: {\n ...metric.meta,\n [action.payload.meta]: action.payload.newValue,\n },\n };\n }\n\n // This should never happen.\n return 
metric;\n });\n\n case CHANGE_METRIC_ATTRIBUTE:\n return state.map(metric => {\n if (metric.id !== action.payload.metric.id) {\n return metric;\n }\n\n return {\n ...metric,\n [action.payload.attribute]: action.payload.newValue,\n };\n });\n\n case INIT:\n return [defaultMetricAgg()];\n\n default:\n return state;\n }\n};\n","import { Action } from '../../../../hooks/useStatelessReducer';\nimport { SettingKeyOf } from '../../../types';\nimport {\n MetricAggregation,\n MetricAggregationWithMeta,\n MetricAggregationWithSettings,\n MetricAggregationWithField,\n} from '../aggregations';\n\nexport const ADD_METRIC = '@metrics/add';\nexport const REMOVE_METRIC = '@metrics/remove';\nexport const CHANGE_METRIC_TYPE = '@metrics/change_type';\nexport const CHANGE_METRIC_FIELD = '@metrics/change_field';\nexport const CHANGE_METRIC_SETTING = '@metrics/change_setting';\nexport const CHANGE_METRIC_META = '@metrics/change_meta';\nexport const CHANGE_METRIC_ATTRIBUTE = '@metrics/change_attr';\nexport const TOGGLE_METRIC_VISIBILITY = '@metrics/toggle_visibility';\n\nexport interface AddMetricAction extends Action {\n payload: {\n id: MetricAggregation['id'];\n };\n}\n\nexport interface RemoveMetricAction extends Action {\n payload: {\n id: MetricAggregation['id'];\n };\n}\n\nexport interface ChangeMetricTypeAction extends Action {\n payload: {\n id: MetricAggregation['id'];\n type: MetricAggregation['type'];\n };\n}\n\nexport interface ChangeMetricFieldAction extends Action {\n payload: {\n id: MetricAggregation['id'];\n field: MetricAggregationWithField['field'];\n };\n}\nexport interface ToggleMetricVisibilityAction extends Action {\n payload: {\n id: MetricAggregation['id'];\n };\n}\n\nexport interface ChangeMetricSettingAction\n extends Action {\n payload: {\n metric: T;\n settingName: SettingKeyOf;\n newValue: unknown;\n };\n}\n\nexport interface ChangeMetricMetaAction extends Action {\n payload: {\n metric: T;\n meta: Extract['meta'], string>;\n newValue: string | number | 
boolean;\n };\n}\n\nexport interface ChangeMetricAttributeAction<\n T extends MetricAggregation,\n K extends Extract = Extract\n> extends Action {\n payload: {\n metric: T;\n attribute: K;\n newValue: T[K];\n };\n}\n\ntype CommonActions =\n | AddMetricAction\n | RemoveMetricAction\n | ChangeMetricTypeAction\n | ChangeMetricFieldAction\n | ToggleMetricVisibilityAction;\n\nexport type MetricAggregationAction =\n | (T extends MetricAggregationWithSettings ? ChangeMetricSettingAction : never)\n | (T extends MetricAggregationWithMeta ? ChangeMetricMetaAction : never)\n | ChangeMetricAttributeAction\n | CommonActions;\n","import { defaultBucketAgg } from '../../../../query_def';\nimport { ElasticsearchQuery } from '../../../../types';\nimport { ChangeMetricTypeAction, CHANGE_METRIC_TYPE } from '../../MetricAggregationsEditor/state/types';\nimport { metricAggregationConfig } from '../../MetricAggregationsEditor/utils';\nimport { BucketAggregation, Terms } from '../aggregations';\nimport { INIT, InitAction } from '../../state';\nimport {\n ADD_BUCKET_AGG,\n REMOVE_BUCKET_AGG,\n CHANGE_BUCKET_AGG_TYPE,\n CHANGE_BUCKET_AGG_FIELD,\n CHANGE_BUCKET_AGG_SETTING,\n BucketAggregationAction,\n} from './types';\nimport { bucketAggregationConfig } from '../utils';\nimport { removeEmpty } from '../../../../utils';\n\nexport const reducer = (\n state: BucketAggregation[],\n action: BucketAggregationAction | ChangeMetricTypeAction | InitAction\n): ElasticsearchQuery['bucketAggs'] => {\n switch (action.type) {\n case ADD_BUCKET_AGG:\n const newAgg: Terms = {\n id: action.payload.id,\n type: 'terms',\n settings: bucketAggregationConfig['terms'].defaultSettings,\n };\n\n // If the last bucket aggregation is a `date_histogram` we add the new one before it.\n const lastAgg = state[state.length - 1];\n if (lastAgg?.type === 'date_histogram') {\n return [...state.slice(0, state.length - 1), newAgg, lastAgg];\n }\n\n return [...state, newAgg];\n\n case REMOVE_BUCKET_AGG:\n return 
state.filter(bucketAgg => bucketAgg.id !== action.payload.id);\n\n case CHANGE_BUCKET_AGG_TYPE:\n return state.map(bucketAgg => {\n if (bucketAgg.id !== action.payload.id) {\n return bucketAgg;\n }\n\n /*\n TODO: The previous version of the query editor was keeping some of the old bucket aggregation's configurations\n in the new selected one (such as field or some settings).\n It the future would be nice to have the same behavior but it's hard without a proper definition,\n as Elasticsearch will error sometimes if some settings are not compatible.\n */\n return {\n id: bucketAgg.id,\n type: action.payload.newType,\n settings: bucketAggregationConfig[action.payload.newType].defaultSettings,\n } as BucketAggregation;\n });\n\n case CHANGE_BUCKET_AGG_FIELD:\n return state.map(bucketAgg => {\n if (bucketAgg.id !== action.payload.id) {\n return bucketAgg;\n }\n\n return {\n ...bucketAgg,\n field: action.payload.newField,\n };\n });\n\n case CHANGE_METRIC_TYPE:\n // If we are switching to a metric which requires the absence of bucket aggregations\n // we remove all of them.\n if (metricAggregationConfig[action.payload.type].isSingleMetric) {\n return [];\n } else if (state.length === 0) {\n // Else, if there are no bucket aggregations we restore a default one.\n // This happens when switching from a metric that requires the absence of bucket aggregations to\n // one that requires it.\n return [defaultBucketAgg()];\n }\n return state;\n\n case CHANGE_BUCKET_AGG_SETTING:\n // @ts-ignore\n return state.map(bucketAgg => {\n if (bucketAgg.id !== action.payload.bucketAgg.id) {\n return bucketAgg;\n }\n\n const newSettings = removeEmpty({\n ...bucketAgg.settings,\n [action.payload.settingName]: action.payload.newValue,\n });\n\n return {\n ...bucketAgg,\n settings: {\n ...newSettings,\n },\n };\n });\n\n case INIT:\n return [defaultBucketAgg()];\n\n default:\n return state;\n }\n};\n","import { Action } from '../../../../hooks/useStatelessReducer';\nimport { SettingKeyOf } from 
'../../../types';\nimport { BucketAggregation, BucketAggregationWithField } from '../aggregations';\n\nexport const ADD_BUCKET_AGG = '@bucketAggs/add';\nexport const REMOVE_BUCKET_AGG = '@bucketAggs/remove';\nexport const CHANGE_BUCKET_AGG_TYPE = '@bucketAggs/change_type';\nexport const CHANGE_BUCKET_AGG_FIELD = '@bucketAggs/change_field';\nexport const CHANGE_BUCKET_AGG_SETTING = '@bucketAggs/change_setting';\n\nexport interface AddBucketAggregationAction extends Action {\n payload: {\n id: BucketAggregation['id'];\n };\n}\n\nexport interface RemoveBucketAggregationAction extends Action {\n payload: {\n id: BucketAggregation['id'];\n };\n}\n\nexport interface ChangeBucketAggregationTypeAction extends Action {\n payload: {\n id: BucketAggregation['id'];\n newType: BucketAggregation['type'];\n };\n}\n\nexport interface ChangeBucketAggregationFieldAction extends Action {\n payload: {\n id: BucketAggregation['id'];\n newField: BucketAggregationWithField['field'];\n };\n}\n\nexport interface ChangeBucketAggregationSettingAction\n extends Action {\n payload: {\n bucketAgg: T;\n settingName: SettingKeyOf;\n newValue: unknown;\n };\n}\n\nexport type BucketAggregationAction =\n | AddBucketAggregationAction\n | RemoveBucketAggregationAction\n | ChangeBucketAggregationTypeAction\n | ChangeBucketAggregationFieldAction\n | ChangeBucketAggregationSettingAction;\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. 
See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport { Action } from '../../../hooks/useStatelessReducer';\nimport { ElasticsearchQueryType } from '../../../types';\nimport { INIT, InitAction } from '../state';\n\nexport const CHANGE_QUERY_TYPE = 'change_query_type';\n\nexport interface ChangeQueryTypeAction extends Action {\n payload: {\n queryType: ElasticsearchQueryType;\n };\n}\n\nexport const changeQueryType = (queryType: ElasticsearchQueryType): ChangeQueryTypeAction => ({\n type: CHANGE_QUERY_TYPE,\n payload: {\n queryType,\n },\n});\n\nexport const queryTypeReducer = (prevQueryType: ElasticsearchQueryType, action: ChangeQueryTypeAction | InitAction) => {\n switch (action.type) {\n case CHANGE_QUERY_TYPE:\n return action.payload.queryType;\n\n case INIT:\n return ElasticsearchQueryType.Lucene;\n\n default:\n return prevQueryType;\n }\n};\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. 
See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport { Action } from '../../../hooks/useStatelessReducer';\nimport { INIT, InitAction } from '../state';\nimport { PPLFormatType } from './formats';\n\nexport const CHANGE_FORMAT = 'change_format';\n\nexport interface ChangeFormatAction extends Action {\n payload: {\n format: PPLFormatType;\n };\n}\n\nexport const changeFormat = (format: PPLFormatType): ChangeFormatAction => ({\n type: CHANGE_FORMAT,\n payload: {\n format,\n },\n});\n\nexport const formatReducer = (prevFormat: PPLFormatType, action: ChangeFormatAction | InitAction) => {\n switch (action.type) {\n case CHANGE_FORMAT:\n return action.payload.format;\n\n case INIT:\n return 'table';\n\n default:\n return prevFormat;\n }\n};\n","import React, { createContext, FunctionComponent, useContext } from 'react';\nimport { ElasticDatasource } from '../../datasource';\nimport { combineReducers, useStatelessReducer, DispatchContext } from '../../hooks/useStatelessReducer';\nimport { ElasticsearchQuery } from '../../types';\n\nimport { reducer as metricsReducer } from './MetricAggregationsEditor/state/reducer';\nimport { reducer as bucketAggsReducer } from './BucketAggregationsEditor/state/reducer';\nimport { queryTypeReducer } from './QueryTypeEditor/state';\nimport { formatReducer } from './PPLFormatEditor/state';\nimport { aliasPatternReducer, queryReducer, initQuery } from './state';\n\nconst DatasourceContext = createContext(undefined);\nconst QueryContext = createContext(undefined);\n\ninterface Props {\n query: ElasticsearchQuery;\n onChange: (query: ElasticsearchQuery) => void;\n datasource: ElasticDatasource;\n}\n\nexport const ElasticsearchProvider: FunctionComponent = ({ children, onChange, query, datasource }) => {\n const reducer = combineReducers({\n query: queryReducer,\n queryType: queryTypeReducer,\n alias: aliasPatternReducer,\n metrics: metricsReducer,\n bucketAggs: 
bucketAggsReducer,\n format: formatReducer,\n });\n\n const dispatch = useStatelessReducer(\n // timeField is part of the query model, but its value is always set to be the one from datasource settings.\n newState => onChange({ ...query, ...newState, timeField: datasource.timeField }),\n query,\n reducer\n );\n\n // This initializes the query by dispatching an init action to each reducer.\n // useStatelessReducer will then call `onChange` with the newly generated query\n if (!query.metrics && !query.bucketAggs) {\n dispatch(initQuery());\n\n return null;\n }\n\n return (\n \n \n {children}\n \n \n );\n};\n\nexport const useQuery = (): ElasticsearchQuery => {\n const query = useContext(QueryContext);\n\n if (!query) {\n throw new Error('use ElasticsearchProvider first.');\n }\n\n return query;\n};\n\nexport const useDatasource = () => {\n const datasource = useContext(DatasourceContext);\n if (!datasource) {\n throw new Error('use ElasticsearchProvider first.');\n }\n\n return datasource;\n};\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. 
See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport { SelectableValue } from '@grafana/data';\nimport { ElasticsearchQueryType, QueryTypeConfiguration } from '../../../types';\n\nexport const queryTypeConfig: QueryTypeConfiguration = {\n [ElasticsearchQueryType.Lucene]: { label: 'Lucene' },\n [ElasticsearchQueryType.PPL]: { label: 'PPL' },\n};\n\nexport const getQueryTypeOptions = (\n supportedTypes: ElasticsearchQueryType[]\n): Array> => {\n return Object.entries(queryTypeConfig)\n .filter(([queryType, _]) => supportedTypes.includes(queryType as ElasticsearchQueryType))\n .map(([key, { label }]) => ({\n label,\n value: key as ElasticsearchQueryType,\n }));\n};\n","import { css } from 'emotion';\n\nexport const segmentStyles = css`\n min-width: 150px;\n`;\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. 
See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport React, { FunctionComponent } from 'react';\nimport { Segment } from '@grafana/ui';\nimport { useDatasource } from '../ElasticsearchQueryContext';\nimport { useDispatch } from '../../../hooks/useStatelessReducer';\nimport { changeQueryType } from './state';\nimport { queryTypeConfig, getQueryTypeOptions } from './utils';\nimport { segmentStyles } from '../styles';\nimport { ElasticsearchQueryType } from '../../../types';\n\nconst toOption = (queryType: ElasticsearchQueryType) => ({\n label: queryTypeConfig[queryType].label,\n value: queryType,\n});\n\ninterface Props {\n value: ElasticsearchQueryType;\n}\n\nexport const QueryTypeEditor: FunctionComponent = ({ value }) => {\n const datasource = useDatasource();\n const dispatch = useDispatch();\n\n return (\n dispatch(changeQueryType(e.value!))}\n value={toOption(value)}\n />\n );\n};\n","import { GrafanaTheme } from '@grafana/data';\nimport { stylesFactory } from '@grafana/ui';\nimport { css } from 'emotion';\n\nexport const getStyles = stylesFactory((theme: GrafanaTheme, hidden: boolean) => ({\n color:\n hidden &&\n css`\n &,\n &:hover,\n label,\n a {\n color: ${hidden ? 
theme.colors.textFaint : theme.colors.text};\n }\n `,\n}));\n","import { SettingKeyOf } from '../../../types';\nimport { MetricAggregation, MetricAggregationWithMeta, MetricAggregationWithSettings } from '../aggregations';\nimport {\n ADD_METRIC,\n CHANGE_METRIC_FIELD,\n CHANGE_METRIC_TYPE,\n REMOVE_METRIC,\n TOGGLE_METRIC_VISIBILITY,\n CHANGE_METRIC_SETTING,\n CHANGE_METRIC_META,\n CHANGE_METRIC_ATTRIBUTE,\n MetricAggregationAction,\n ChangeMetricAttributeAction,\n ChangeMetricSettingAction,\n ChangeMetricMetaAction,\n} from './types';\n\nexport const addMetric = (id: MetricAggregation['id']): MetricAggregationAction => ({\n type: ADD_METRIC,\n payload: {\n id,\n },\n});\n\nexport const removeMetric = (id: MetricAggregation['id']): MetricAggregationAction => ({\n type: REMOVE_METRIC,\n payload: {\n id,\n },\n});\n\nexport const changeMetricType = (\n id: MetricAggregation['id'],\n type: MetricAggregation['type']\n): MetricAggregationAction => ({\n type: CHANGE_METRIC_TYPE,\n payload: {\n id,\n type,\n },\n});\n\nexport const changeMetricField = (id: MetricAggregation['id'], field: string): MetricAggregationAction => ({\n type: CHANGE_METRIC_FIELD,\n payload: {\n id,\n field,\n },\n});\n\nexport const toggleMetricVisibility = (id: MetricAggregation['id']): MetricAggregationAction => ({\n type: TOGGLE_METRIC_VISIBILITY,\n payload: {\n id,\n },\n});\n\nexport const changeMetricAttribute = >(\n metric: T,\n attribute: K,\n newValue: T[K]\n): ChangeMetricAttributeAction => ({\n type: CHANGE_METRIC_ATTRIBUTE,\n payload: {\n metric,\n attribute,\n newValue,\n },\n});\n\nexport const changeMetricSetting = >(\n metric: T,\n settingName: K,\n // Maybe this could have been NonNullable[K], but it doesn't seem to work really well\n newValue: NonNullable[K]\n): ChangeMetricSettingAction => ({\n type: CHANGE_METRIC_SETTING,\n payload: {\n metric,\n settingName,\n newValue,\n },\n});\n\nexport const changeMetricMeta = (\n metric: T,\n meta: Extract['meta'], string>,\n newValue: 
string | number | boolean\n): ChangeMetricMetaAction => ({\n type: CHANGE_METRIC_META,\n payload: {\n metric,\n meta,\n newValue,\n },\n});\n","import { Icon } from '@grafana/ui';\nimport { cx, css } from 'emotion';\nimport React, { FunctionComponent, ComponentProps, ButtonHTMLAttributes } from 'react';\n\nconst SROnly = css`\n clip: rect(0 0 0 0);\n clip-path: inset(50%);\n height: 1px;\n overflow: hidden;\n position: absolute;\n white-space: nowrap;\n width: 1px;\n`;\n\ninterface Props {\n iconName: ComponentProps['name'];\n onClick: () => void;\n className?: string;\n label: string;\n}\n\nexport const IconButton: FunctionComponent> = ({\n iconName,\n onClick,\n className,\n label,\n ...buttonProps\n}) => (\n \n);\n","import { css } from 'emotion';\nimport React, { FunctionComponent } from 'react';\nimport { IconButton } from './IconButton';\n\ninterface Props {\n index: number;\n elements: any[];\n onAdd: () => void;\n onRemove: () => void;\n}\n\n/**\n * A component used to show add & remove buttons for mutable lists of values. Wether to show or not the add or the remove buttons\n * depends on the `index` and `elements` props. This enforces a consistent experience whenever this pattern is used.\n */\nexport const AddRemove: FunctionComponent = ({ index, onAdd, onRemove, elements }) => {\n return (\n \n {index === 0 && }\n\n {elements.length >= 2 && }\n
\n );\n};\n","import React, { FunctionComponent } from 'react';\nimport { css, cx } from 'emotion';\nimport { SelectableValue } from '@grafana/data';\nimport { Segment } from '@grafana/ui';\nimport { describeMetric } from '../utils';\nimport { MetricAggregation } from './QueryEditor/MetricAggregationsEditor/aggregations';\n\nconst noWrap = css`\n white-space: nowrap;\n`;\n\nconst toOption = (metric: MetricAggregation) => ({\n label: describeMetric(metric),\n value: metric,\n});\n\nconst toOptions = (metrics: MetricAggregation[]): Array> => metrics.map(toOption);\n\ninterface Props {\n options: MetricAggregation[];\n onChange: (e: SelectableValue) => void;\n className?: string;\n value?: string;\n}\n\nexport const MetricPicker: FunctionComponent = ({ options, onChange, className, value }) => (\n option.id === value)!) : null}\n />\n);\n","import { PipelineVariable } from '../../../aggregations';\nimport { defaultPipelineVariable } from '../utils';\nimport {\n PipelineVariablesAction,\n REMOVE_PIPELINE_VARIABLE,\n ADD_PIPELINE_VARIABLE,\n RENAME_PIPELINE_VARIABLE,\n CHANGE_PIPELINE_VARIABLE_METRIC,\n} from './types';\n\nexport const reducer = (state: PipelineVariable[] = [], action: PipelineVariablesAction) => {\n switch (action.type) {\n case ADD_PIPELINE_VARIABLE:\n return [...state, defaultPipelineVariable()];\n\n case REMOVE_PIPELINE_VARIABLE:\n return state.slice(0, action.payload.index).concat(state.slice(action.payload.index + 1));\n\n case RENAME_PIPELINE_VARIABLE:\n return state.map((pipelineVariable, index) => {\n if (index !== action.payload.index) {\n return pipelineVariable;\n }\n\n return {\n ...pipelineVariable,\n name: action.payload.newName,\n };\n });\n\n case CHANGE_PIPELINE_VARIABLE_METRIC:\n return state.map((pipelineVariable, index) => {\n if (index !== action.payload.index) {\n return pipelineVariable;\n }\n\n return {\n ...pipelineVariable,\n pipelineAgg: action.payload.newMetric,\n };\n });\n\n default:\n return state;\n }\n};\n","import { 
Action } from '../../../../../../hooks/useStatelessReducer';\n\nexport const ADD_PIPELINE_VARIABLE = '@pipelineVariables/add';\nexport const REMOVE_PIPELINE_VARIABLE = '@pipelineVariables/remove';\nexport const RENAME_PIPELINE_VARIABLE = '@pipelineVariables/rename';\nexport const CHANGE_PIPELINE_VARIABLE_METRIC = '@pipelineVariables/change_metric';\n\nexport type AddPipelineVariableAction = Action;\n\nexport interface RemovePipelineVariableAction extends Action {\n payload: {\n index: number;\n };\n}\n\nexport interface RenamePipelineVariableAction extends Action {\n payload: {\n index: number;\n newName: string;\n };\n}\n\nexport interface ChangePipelineVariableMetricAction extends Action {\n payload: {\n index: number;\n newMetric: string;\n };\n}\n\nexport type PipelineVariablesAction =\n | AddPipelineVariableAction\n | RemovePipelineVariableAction\n | RenamePipelineVariableAction\n | ChangePipelineVariableMetricAction;\n","import React, { ComponentProps, useState } from 'react';\nimport { InlineField, Input } from '@grafana/ui';\nimport { useDispatch } from '../../../../hooks/useStatelessReducer';\nimport { changeMetricSetting } from '../state/actions';\nimport { ChangeMetricSettingAction } from '../state/types';\nimport { SettingKeyOf } from '../../../types';\nimport { MetricAggregationWithSettings } from '../aggregations';\nimport { uniqueId } from 'lodash';\n\ninterface Props> {\n label: string;\n settingName: K;\n metric: T;\n placeholder?: ComponentProps['placeholder'];\n tooltip?: ComponentProps['tooltip'];\n}\n\nexport function SettingField>({\n label,\n settingName,\n metric,\n placeholder,\n tooltip,\n}: Props) {\n const dispatch = useDispatch>();\n const [id] = useState(uniqueId(`es-field-id-`));\n const settings = metric.settings;\n\n return (\n \n dispatch(changeMetricSetting(metric, settingName, e.target.value as any))}\n defaultValue={settings?.[settingName as keyof typeof settings]}\n />\n \n );\n}\n","import React, { Fragment, FunctionComponent, 
useEffect } from 'react';\nimport { Input, InlineLabel } from '@grafana/ui';\nimport { MetricAggregationAction } from '../../state/types';\nimport { changeMetricAttribute } from '../../state/actions';\nimport { css } from 'emotion';\nimport { AddRemove } from '../../../../AddRemove';\nimport { useStatelessReducer, useDispatch } from '../../../../../hooks/useStatelessReducer';\nimport { MetricPicker } from '../../../../MetricPicker';\nimport { reducer } from './state/reducer';\nimport {\n addPipelineVariable,\n removePipelineVariable,\n renamePipelineVariable,\n changePipelineVariableMetric,\n} from './state/actions';\nimport { SettingField } from '../SettingField';\nimport { BucketScript, MetricAggregation } from '../../aggregations';\n\ninterface Props {\n value: BucketScript;\n previousMetrics: MetricAggregation[];\n}\n\nexport const BucketScriptSettingsEditor: FunctionComponent = ({ value, previousMetrics }) => {\n const upperStateDispatch = useDispatch>();\n\n const dispatch = useStatelessReducer(\n newState => upperStateDispatch(changeMetricAttribute(value, 'pipelineVariables', newState)),\n value.pipelineVariables,\n reducer\n );\n\n // The model might not have pipeline variables (or an empty array of pipeline vars) in it because of the way it was built in previous versions of the datasource.\n // If this is the case we add a default one.\n useEffect(() => {\n if (!value.pipelineVariables?.length) {\n dispatch(addPipelineVariable());\n }\n }, []);\n\n return (\n <>\n \n
Variables\n
\n {value.pipelineVariables!.map((pipelineVar, index) => (\n
\n \n dispatch(renamePipelineVariable(e.target.value, index))}\n />\n dispatch(changePipelineVariableMetric(e.value!.id, index))}\n options={previousMetrics}\n value={pipelineVar.pipelineAgg}\n />\n
\n\n dispatch(addPipelineVariable())}\n onRemove={() => dispatch(removePipelineVariable(index))}\n />\n \n ))}\n
\n
\n\n to reference a variable. Elasticsearch pre-v5.0: Scripting language is per default Groovy if not changed. For Groovy use to reference a variable.\"\n placeholder=\"params.var1 / params.var2\"\n />\n >\n );\n};\n","import {\n ADD_PIPELINE_VARIABLE,\n REMOVE_PIPELINE_VARIABLE,\n PipelineVariablesAction,\n RENAME_PIPELINE_VARIABLE,\n CHANGE_PIPELINE_VARIABLE_METRIC,\n} from './types';\n\nexport const addPipelineVariable = (): PipelineVariablesAction => ({\n type: ADD_PIPELINE_VARIABLE,\n});\n\nexport const removePipelineVariable = (index: number): PipelineVariablesAction => ({\n type: REMOVE_PIPELINE_VARIABLE,\n payload: {\n index,\n },\n});\n\nexport const renamePipelineVariable = (newName: string, index: number): PipelineVariablesAction => ({\n type: RENAME_PIPELINE_VARIABLE,\n payload: {\n index,\n newName,\n },\n});\n\nexport const changePipelineVariableMetric = (newMetric: string, index: number): PipelineVariablesAction => ({\n type: CHANGE_PIPELINE_VARIABLE_METRIC,\n payload: {\n index,\n newMetric,\n },\n});\n","import { GrafanaTheme } from '@grafana/data';\nimport { Icon, stylesFactory, useTheme } from '@grafana/ui';\nimport { css, cx } from 'emotion';\nimport React, { FunctionComponent, useState } from 'react';\nimport { segmentStyles } from './styles';\n\nconst getStyles = stylesFactory((theme: GrafanaTheme, hidden: boolean) => {\n return {\n wrapper: css`\n display: flex;\n flex-direction: column;\n `,\n settingsWrapper: css`\n padding-top: ${theme.spacing.xs};\n `,\n icon: css`\n margin-right: ${theme.spacing.xs};\n `,\n button: css`\n justify-content: start;\n ${hidden &&\n css`\n color: ${theme.colors.textFaint};\n `}\n `,\n };\n});\ninterface Props {\n label: string;\n hidden?: boolean;\n}\n\nexport const SettingsEditorContainer: FunctionComponent = ({ label, children, hidden = false }) => {\n const [open, setOpen] = useState(false);\n\n const styles = getStyles(useTheme(), hidden);\n\n return (\n \n
\n\n {open &&
{children}
}\n
\n );\n};\n","import { Input, InlineField, Select, Switch } from '@grafana/ui';\nimport React, { FunctionComponent } from 'react';\nimport { useDispatch } from '../../../../hooks/useStatelessReducer';\nimport { movingAvgModelOptions } from '../../../../query_def';\nimport { isEWMAMovingAverage, isHoltMovingAverage, isHoltWintersMovingAverage, MovingAverage } from '../aggregations';\nimport { changeMetricSetting } from '../state/actions';\n\ninterface Props {\n metric: MovingAverage;\n}\n\n// The way we handle changes for those settings is not ideal compared to the other components in the editor\nexport const MovingAverageSettingsEditor: FunctionComponent = ({ metric }) => {\n const dispatch = useDispatch();\n\n return (\n <>\n \n \n\n \n dispatch(changeMetricSetting(metric, 'window', parseInt(e.target.value!, 10)))}\n defaultValue={metric.settings?.window}\n />\n \n\n \n dispatch(changeMetricSetting(metric, 'predict', parseInt(e.target.value!, 10)))}\n defaultValue={metric.settings?.predict}\n />\n \n\n {isEWMAMovingAverage(metric) && (\n <>\n \n dispatch(changeMetricSetting(metric, 'alpha', parseInt(e.target.value!, 10)))}\n defaultValue={metric.settings?.alpha}\n />\n \n\n \n ) =>\n dispatch(changeMetricSetting(metric, 'minimize', e.target.checked))\n }\n checked={!!metric.settings?.minimize}\n />\n \n >\n )}\n\n {isHoltMovingAverage(metric) && (\n <>\n \n \n dispatch(\n changeMetricSetting(metric, 'settings', {\n ...metric.settings?.settings,\n alpha: parseInt(e.target.value!, 10),\n })\n )\n }\n defaultValue={metric.settings?.settings?.alpha}\n />\n \n \n \n dispatch(\n changeMetricSetting(metric, 'settings', {\n ...metric.settings?.settings,\n beta: parseInt(e.target.value!, 10),\n })\n )\n }\n defaultValue={metric.settings?.settings?.beta}\n />\n \n\n \n ) =>\n dispatch(changeMetricSetting(metric, 'minimize', e.target.checked))\n }\n checked={!!metric.settings?.minimize}\n />\n \n >\n )}\n\n {isHoltWintersMovingAverage(metric) && (\n <>\n \n \n dispatch(\n 
changeMetricSetting(metric, 'settings', {\n ...metric.settings?.settings,\n alpha: parseInt(e.target.value!, 10),\n })\n )\n }\n defaultValue={metric.settings?.settings?.alpha}\n />\n \n \n \n dispatch(\n changeMetricSetting(metric, 'settings', {\n ...metric.settings?.settings,\n beta: parseInt(e.target.value!, 10),\n })\n )\n }\n defaultValue={metric.settings?.settings?.beta}\n />\n \n \n \n dispatch(\n changeMetricSetting(metric, 'settings', {\n ...metric.settings?.settings,\n gamma: parseInt(e.target.value!, 10),\n })\n )\n }\n defaultValue={metric.settings?.settings?.gamma}\n />\n \n \n \n dispatch(\n changeMetricSetting(metric, 'settings', {\n ...metric.settings?.settings,\n period: parseInt(e.target.value!, 10),\n })\n )\n }\n defaultValue={metric.settings?.settings?.period}\n />\n \n\n \n ) =>\n dispatch(\n changeMetricSetting(metric, 'settings', { ...metric.settings?.settings, pad: e.target.checked })\n )\n }\n checked={!!metric.settings?.settings?.pad}\n />\n \n\n \n ) =>\n dispatch(changeMetricSetting(metric, 'minimize', e.target.checked))\n }\n checked={!!metric.settings?.minimize}\n />\n \n >\n )}\n >\n );\n};\n","import { InlineField, Input, Switch } from '@grafana/ui';\nimport React, { FunctionComponent, ComponentProps, useState } from 'react';\nimport { extendedStats } from '../../../../query_def';\nimport { useDispatch } from '../../../../hooks/useStatelessReducer';\nimport { changeMetricMeta, changeMetricSetting } from '../state/actions';\nimport {\n MetricAggregation,\n isMetricAggregationWithInlineScript,\n isMetricAggregationWithMissingSupport,\n ExtendedStat,\n} from '../aggregations';\nimport { BucketScriptSettingsEditor } from './BucketScriptSettingsEditor';\nimport { SettingField } from './SettingField';\nimport { SettingsEditorContainer } from '../../SettingsEditorContainer';\nimport { useDescription } from './useDescription';\nimport { MovingAverageSettingsEditor } from './MovingAverageSettingsEditor';\nimport { uniqueId } from 
'lodash';\nimport { metricAggregationConfig } from '../utils';\n\n// TODO: Move this somewhere and share it with BucketsAggregation Editor\nconst inlineFieldProps: Partial> = {\n labelWidth: 16,\n};\n\ninterface Props {\n metric: MetricAggregation;\n previousMetrics: MetricAggregation[];\n}\n\nexport const SettingsEditor: FunctionComponent = ({ metric, previousMetrics }) => {\n const dispatch = useDispatch();\n const description = useDescription(metric);\n\n return (\n \n {metric.type === 'derivative' && }\n\n {metric.type === 'cumulative_sum' && }\n\n {metric.type === 'moving_avg' && }\n\n {metric.type === 'moving_fn' && (\n <>\n \n \n \n >\n )}\n\n {metric.type === 'bucket_script' && (\n \n )}\n\n {(metric.type === 'raw_data' || metric.type === 'raw_document') && (\n \n dispatch(changeMetricSetting(metric, 'size', e.target.value))}\n defaultValue={metric.settings?.size ?? metricAggregationConfig['raw_data'].defaults.settings?.size}\n />\n \n )}\n\n {metric.type === 'cardinality' && (\n \n )}\n\n {metric.type === 'extended_stats' && (\n <>\n {extendedStats.map(stat => (\n dispatch(changeMetricMeta(metric, stat.value, checked))}\n value={\n metric.meta?.[stat.value] !== undefined\n ? 
!!metric.meta?.[stat.value]\n : !!metricAggregationConfig['extended_stats'].defaults.meta?.[stat.value]\n }\n />\n ))}\n\n \n >\n )}\n\n {metric.type === 'percentiles' && (\n \n dispatch(changeMetricSetting(metric, 'percents', e.target.value.split(',').filter(Boolean)))}\n defaultValue={\n metric.settings?.percents || metricAggregationConfig['percentiles'].defaults.settings?.percents\n }\n placeholder=\"1,5,25,50,75,95,99\"\n />\n \n )}\n\n {isMetricAggregationWithInlineScript(metric) && (\n \n )}\n\n {isMetricAggregationWithMissingSupport(metric) && (\n \n )}\n \n );\n};\n\ninterface ExtendedStatSettingProps {\n stat: ExtendedStat;\n onChange: (checked: boolean) => void;\n value: boolean;\n}\nconst ExtendedStatSetting: FunctionComponent = ({ stat, onChange, value }) => {\n // this is needed for the htmlFor prop in the label so that clicking the label will toggle the switch state.\n const [id] = useState(uniqueId(`es-field-id-`));\n\n return (\n \n ) => onChange(e.target.checked)} value={value} />\n \n );\n};\n","import { extendedStats } from '../../../../query_def';\nimport { MetricAggregation } from '../aggregations';\n\nconst hasValue = (value: string) => (object: { value: string }) => object.value === value;\n\n// FIXME: All the defaults and validations down here should be defined somewhere else\n// as they are also the defaults that are gonna be applied to the query.\n// In the previous version, the same method was taking care of describing the settings and setting defaults.\nexport const useDescription = (metric: MetricAggregation): string => {\n switch (metric.type) {\n case 'cardinality': {\n const precisionThreshold = metric.settings?.precision_threshold || '';\n return `Precision threshold: ${precisionThreshold}`;\n }\n\n case 'percentiles':\n if (metric.settings?.percents && metric.settings?.percents?.length >= 1) {\n return `Values: ${metric.settings?.percents}`;\n }\n\n return 'Percents: Default';\n\n case 'extended_stats': {\n const selectedStats = 
Object.entries(metric.meta || {})\n .map(([key, value]) => value && extendedStats.find(hasValue(key))?.label)\n .filter(Boolean);\n\n return `Stats: ${selectedStats.length > 0 ? selectedStats.join(', ') : 'None selected'}`;\n }\n\n case 'raw_document':\n case 'raw_data': {\n const size = metric.settings?.size || 500;\n return `Size: ${size}`;\n }\n\n default:\n return 'Options';\n }\n};\n","import { MetricFindValue, SelectableValue } from '@grafana/data';\nimport { Segment, SegmentAsync, useTheme } from '@grafana/ui';\nimport { cx } from 'emotion';\nimport React, { FunctionComponent } from 'react';\nimport { useDatasource, useQuery } from '../ElasticsearchQueryContext';\nimport { useDispatch } from '../../../hooks/useStatelessReducer';\nimport { getStyles } from './styles';\nimport { SettingsEditor } from './SettingsEditor';\nimport { MetricAggregationAction } from './state/types';\nimport { metricAggregationConfig } from './utils';\nimport { changeMetricField, changeMetricType } from './state/actions';\nimport { MetricPicker } from '../../MetricPicker';\nimport { segmentStyles } from '../styles';\nimport {\n isMetricAggregationWithField,\n isMetricAggregationWithSettings,\n isPipelineAggregation,\n isPipelineAggregationWithMultipleBucketPaths,\n MetricAggregation,\n MetricAggregationType,\n} from './aggregations';\n\nconst toOption = (metric: MetricAggregation) => ({\n label: metricAggregationConfig[metric.type].label,\n value: metric.type,\n});\n\n// @ts-ignore\nconst toSelectableValue = ({ value, text }: MetricFindValue): SelectableValue => ({\n label: text,\n value: `${value || text}`,\n});\n\ninterface Props {\n value: MetricAggregation;\n}\n\n// If a metric is a Pipeline Aggregation (https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html)\n// it doesn't make sense to show it in the type picker when there is no non-pipeline-aggregation previously selected\n// as they work on the outputs produced from other 
aggregations rather than from documents or fields.\n// This means we should filter them out from the type picker if there's no other \"basic\" aggregation before the current one.\nconst isBasicAggregation = (metric: MetricAggregation) => !metricAggregationConfig[metric.type].isPipelineAgg;\n\nconst getTypeOptions = (\n previousMetrics: MetricAggregation[],\n esVersion: number\n): Array> => {\n // we'll include Pipeline Aggregations only if at least one previous metric is a \"Basic\" one\n const includePipelineAggregations = previousMetrics.some(isBasicAggregation);\n\n return (\n Object.entries(metricAggregationConfig)\n // Only showing metrics type supported by the configured version of ES\n .filter(([_, { minVersion = 0, maxVersion = esVersion }]) => {\n // TODO: Double check this\n return esVersion >= minVersion && esVersion <= maxVersion;\n })\n // Filtering out Pipeline Aggregations if there's no basic metric selected before\n .filter(([_, config]) => includePipelineAggregations || !config.isPipelineAgg)\n .map(([key, { label }]) => ({\n label,\n value: key as MetricAggregationType,\n }))\n );\n};\n\nexport const MetricEditor: FunctionComponent = ({ value }) => {\n const styles = getStyles(useTheme(), !!value.hide);\n const datasource = useDatasource();\n const query = useQuery();\n const dispatch = useDispatch();\n\n const previousMetrics = query.metrics!.slice(\n 0,\n query.metrics!.findIndex(m => m.id === value.id)\n );\n\n // TODO: This could be common with the one in BucketAggregationEditor\n const getFields = async () => {\n const get = () => {\n if (value.type === 'cardinality') {\n return datasource.getFields();\n }\n return datasource.getFields('number');\n };\n\n return (await get()).map(toSelectableValue);\n };\n\n return (\n <>\n dispatch(changeMetricType(value.id, e.value!))}\n value={toOption(value)}\n />\n\n {isMetricAggregationWithField(value) && !isPipelineAggregation(value) && (\n dispatch(changeMetricField(value.id, e.value!))}\n 
placeholder=\"Select Field\"\n value={value.field}\n />\n )}\n\n {isPipelineAggregation(value) && !isPipelineAggregationWithMultipleBucketPaths(value) && (\n dispatch(changeMetricField(value.id, e.value?.id!))}\n options={previousMetrics}\n value={value.field}\n />\n )}\n\n {isMetricAggregationWithSettings(value) && }\n >\n );\n};\n","import { GrafanaTheme } from '@grafana/data';\nimport { IconButton, InlineFieldRow, InlineLabel, stylesFactory, useTheme, InlineSegmentGroup } from '@grafana/ui';\nimport { css } from 'emotion';\nimport { noop } from 'lodash';\nimport React, { FunctionComponent } from 'react';\n\ninterface Props {\n label: string;\n onRemoveClick?: false | (() => void);\n onHideClick?: false | (() => void);\n hidden?: boolean;\n disableRemove?: boolean;\n}\n\nexport const QueryEditorRow: FunctionComponent = ({\n children,\n label,\n onRemoveClick,\n onHideClick,\n hidden = false,\n disableRemove = false,\n}) => {\n const theme = useTheme();\n const styles = getStyles(theme);\n\n return (\n \n \n \n {label}\n \n {onHideClick && (\n \n )}\n {onRemoveClick && (\n \n )}\n \n \n \n {children}\n \n );\n};\n\nconst getStyles = stylesFactory((theme: GrafanaTheme) => {\n return {\n iconWrapper: css`\n display: flex;\n `,\n icon: css`\n color: ${theme.colors.textWeak};\n margin-left: ${theme.spacing.xxs};\n `,\n };\n});\n","import React, { FunctionComponent } from 'react';\nimport { MetricEditor } from './MetricEditor';\nimport { useDispatch } from '../../../hooks/useStatelessReducer';\nimport { MetricAggregationAction } from './state/types';\nimport { metricAggregationConfig } from './utils';\nimport { addMetric, removeMetric, toggleMetricVisibility } from './state/actions';\nimport { MetricAggregation } from './aggregations';\nimport { useQuery } from '../ElasticsearchQueryContext';\nimport { QueryEditorRow } from '../QueryEditorRow';\nimport { IconButton } from '../../IconButton';\n\ninterface Props {\n nextId: MetricAggregation['id'];\n}\n\nexport const 
MetricAggregationsEditor: FunctionComponent = ({ nextId }) => {\n const dispatch = useDispatch();\n const { metrics } = useQuery();\n const totalMetrics = metrics?.length || 0;\n\n return (\n <>\n {metrics?.map((metric, index) => (\n dispatch(toggleMetricVisibility(metric.id))}\n onRemoveClick={() => dispatch(removeMetric(metric.id))}\n disableRemove={!(totalMetrics > 1)}\n >\n \n\n {!metricAggregationConfig[metric.type].isSingleMetric && index === 0 && (\n dispatch(addMetric(nextId))} label=\"add\" />\n )}\n \n ))}\n >\n );\n};\n","import { SettingKeyOf } from '../../../types';\nimport { BucketAggregation, BucketAggregationWithField } from '../aggregations';\nimport {\n ADD_BUCKET_AGG,\n BucketAggregationAction,\n REMOVE_BUCKET_AGG,\n CHANGE_BUCKET_AGG_TYPE,\n CHANGE_BUCKET_AGG_FIELD,\n CHANGE_BUCKET_AGG_SETTING,\n ChangeBucketAggregationSettingAction,\n} from './types';\n\nexport const addBucketAggregation = (id: string): BucketAggregationAction => ({\n type: ADD_BUCKET_AGG,\n payload: {\n id,\n },\n});\n\nexport const removeBucketAggregation = (id: BucketAggregation['id']): BucketAggregationAction => ({\n type: REMOVE_BUCKET_AGG,\n payload: {\n id,\n },\n});\n\nexport const changeBucketAggregationType = (\n id: BucketAggregation['id'],\n newType: BucketAggregation['type']\n): BucketAggregationAction => ({\n type: CHANGE_BUCKET_AGG_TYPE,\n payload: {\n id,\n newType,\n },\n});\n\nexport const changeBucketAggregationField = (\n id: BucketAggregationWithField['id'],\n newField: BucketAggregationWithField['field']\n): BucketAggregationAction => ({\n type: CHANGE_BUCKET_AGG_FIELD,\n payload: {\n id,\n newField,\n },\n});\n\nexport const changeBucketAggregationSetting = >(\n bucketAgg: T,\n settingName: K,\n // This could be inferred from T, but it's causing some troubles\n newValue: string | string[] | any\n): ChangeBucketAggregationSettingAction => ({\n type: CHANGE_BUCKET_AGG_SETTING,\n payload: {\n bucketAgg,\n settingName,\n newValue,\n },\n});\n","import { 
Filter } from '../../../aggregations';\nimport { FilterAction, ADD_FILTER, REMOVE_FILTER, CHANGE_FILTER } from './types';\n\nexport const addFilter = (): FilterAction => ({\n type: ADD_FILTER,\n});\n\nexport const removeFilter = (index: number): FilterAction => ({\n type: REMOVE_FILTER,\n payload: { index },\n});\n\nexport const changeFilter = (index: number, filter: Filter): FilterAction => ({\n type: CHANGE_FILTER,\n payload: { index, filter },\n});\n","import { Action } from '../../../../../../hooks/useStatelessReducer';\nimport { Filter } from '../../../aggregations';\n\nexport const ADD_FILTER = '@bucketAggregations/filter/add';\nexport const REMOVE_FILTER = '@bucketAggregations/filter/remove';\nexport const CHANGE_FILTER = '@bucketAggregations/filter/change';\n\nexport type AddFilterAction = Action;\n\nexport interface RemoveFilterAction extends Action {\n payload: {\n index: number;\n };\n}\n\nexport interface ChangeFilterAction extends Action {\n payload: {\n index: number;\n filter: Filter;\n };\n}\nexport type FilterAction = AddFilterAction | RemoveFilterAction | ChangeFilterAction;\n","import { Filter } from '../../../aggregations';\nimport { defaultFilter } from '../utils';\nimport { ADD_FILTER, CHANGE_FILTER, FilterAction, REMOVE_FILTER } from './types';\n\nexport const reducer = (state: Filter[] = [], action: FilterAction) => {\n switch (action.type) {\n case ADD_FILTER:\n return [...state, defaultFilter()];\n case REMOVE_FILTER:\n return state.slice(0, action.payload.index).concat(state.slice(action.payload.index + 1));\n\n case CHANGE_FILTER:\n return state.map((filter, index) => {\n if (index !== action.payload.index) {\n return filter;\n }\n\n return action.payload.filter;\n });\n }\n};\n","import { InlineField, Input, QueryField } from '@grafana/ui';\nimport { css } from 'emotion';\nimport React, { FunctionComponent, useEffect } from 'react';\nimport { AddRemove } from '../../../../AddRemove';\nimport { useDispatch, useStatelessReducer } from 
'../../../../../hooks/useStatelessReducer';\nimport { Filters } from '../../aggregations';\nimport { changeBucketAggregationSetting } from '../../state/actions';\nimport { BucketAggregationAction } from '../../state/types';\nimport { addFilter, changeFilter, removeFilter } from './state/actions';\nimport { reducer as filtersReducer } from './state/reducer';\n\ninterface Props {\n value: Filters;\n}\n\nexport const FiltersSettingsEditor: FunctionComponent = ({ value }) => {\n const upperStateDispatch = useDispatch>();\n\n const dispatch = useStatelessReducer(\n newState => upperStateDispatch(changeBucketAggregationSetting(value, 'filters', newState)),\n value.settings?.filters,\n filtersReducer\n );\n\n // The model might not have filters (or an empty array of filters) in it because of the way it was built in previous versions of the datasource.\n // If this is the case we add a default one.\n useEffect(() => {\n if (!value.settings?.filters?.length) {\n dispatch(addFilter());\n }\n }, []);\n\n return (\n <>\n \n {value.settings?.filters!.map((filter, index) => (\n
\n
\n \n {}}\n onChange={query => dispatch(changeFilter(index, { ...filter, query }))}\n query={filter.query}\n />\n \n
\n
\n dispatch(changeFilter(index, { ...filter, label: e.target.value }))}\n defaultValue={filter.label}\n />\n \n
dispatch(addFilter())}\n onRemove={() => dispatch(removeFilter(index))}\n />\n \n ))}\n
\n >\n );\n};\n","import { describeMetric } from '../../../../utils';\nimport { useQuery } from '../../ElasticsearchQueryContext';\nimport { BucketAggregation } from '../aggregations';\nimport { bucketAggregationConfig, orderByOptions, orderOptions } from '../utils';\n\nconst hasValue = (value: string) => (object: { value: string }) => object.value === value;\n\n// FIXME: We should apply the same defaults we have in bucketAggregationsConfig here instead of \"custom\" values\n// as they might get out of sync.\n// The reason we need them is that even though after the refactoring each setting is created with its default value,\n// queries created with the old version might not have them.\nexport const useDescription = (bucketAgg: BucketAggregation): string => {\n const { metrics } = useQuery();\n\n switch (bucketAgg.type) {\n case 'terms': {\n const order = bucketAgg.settings?.order || 'desc';\n const size = bucketAgg.settings?.size || '10';\n const minDocCount = parseInt(bucketAgg.settings?.min_doc_count || '0', 10);\n const orderBy = bucketAgg.settings?.orderBy || '_term';\n let description = '';\n\n if (size !== '0') {\n const orderLabel = orderOptions.find(hasValue(order))?.label!;\n description = `${orderLabel} ${size}, `;\n }\n\n if (minDocCount > 0) {\n description += `Min Doc Count: ${minDocCount}, `;\n }\n\n description += 'Order by: ';\n const orderByOption = orderByOptions.find(hasValue(orderBy));\n if (orderByOption) {\n description += orderByOption.label;\n } else {\n const metric = metrics?.find(m => m.id === orderBy);\n if (metric) {\n description += describeMetric(metric);\n } else {\n description += 'metric not found';\n }\n }\n\n if (size === '0') {\n description += ` (${order})`;\n }\n return description;\n }\n\n case 'histogram': {\n const interval = bucketAgg.settings?.interval || 1000;\n const minDocCount = bucketAgg.settings?.min_doc_count || 1;\n\n return `Interval: ${interval}${minDocCount > 0 ? 
`, Min Doc Count: ${minDocCount}` : ''}`;\n }\n\n case 'filters': {\n const filters = bucketAgg.settings?.filters || bucketAggregationConfig['filters'].defaultSettings?.filters;\n return `Filter Queries (${filters!.length})`;\n }\n\n case 'geohash_grid': {\n const precision = Math.max(Math.min(parseInt(bucketAgg.settings?.precision || '5', 10), 12), 1);\n return `Precision: ${precision}`;\n }\n\n case 'date_histogram': {\n const interval = bucketAgg.settings?.interval || 'auto';\n const minDocCount = bucketAgg.settings?.min_doc_count || 0;\n const trimEdges = bucketAgg.settings?.trimEdges || 0;\n\n let description = `Interval: ${interval}`;\n\n if (minDocCount > 0) {\n description += `, Min Doc Count: ${minDocCount}`;\n }\n\n if (trimEdges > 0) {\n description += `, Trim edges: ${trimEdges}`;\n }\n\n return description;\n }\n\n default:\n return 'Settings';\n }\n};\n","import { InlineField, Input, Select } from '@grafana/ui';\nimport React, { ComponentProps, FunctionComponent } from 'react';\nimport { useDispatch } from '../../../../hooks/useStatelessReducer';\nimport { SettingsEditorContainer } from '../../SettingsEditorContainer';\nimport { changeBucketAggregationSetting } from '../state/actions';\nimport { BucketAggregation } from '../aggregations';\nimport { bucketAggregationConfig, intervalOptions, orderByOptions, orderOptions, sizeOptions } from '../utils';\nimport { FiltersSettingsEditor } from './FiltersSettingsEditor';\nimport { useDescription } from './useDescription';\nimport { useQuery } from '../../ElasticsearchQueryContext';\nimport { describeMetric } from '../../../../utils';\n\nconst inlineFieldProps: Partial> = {\n labelWidth: 16,\n};\n\ninterface Props {\n bucketAgg: BucketAggregation;\n}\n\nexport const SettingsEditor: FunctionComponent = ({ bucketAgg }) => {\n const dispatch = useDispatch();\n const { metrics } = useQuery();\n const settingsDescription = useDescription(bucketAgg);\n\n const orderBy = [...orderByOptions, ...(metrics || []).map(m 
=> ({ label: describeMetric(m), value: m.id }))];\n\n return (\n \n {bucketAgg.type === 'terms' && (\n <>\n \n \n\n \n \n\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'min_doc_count', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.min_doc_count ||\n bucketAggregationConfig[bucketAgg.type].defaultSettings?.min_doc_count\n }\n />\n \n\n \n \n\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'missing', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.missing || bucketAggregationConfig[bucketAgg.type].defaultSettings?.missing\n }\n />\n \n >\n )}\n\n {bucketAgg.type === 'geohash_grid' && (\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'precision', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.precision || bucketAggregationConfig[bucketAgg.type].defaultSettings?.precision\n }\n />\n \n )}\n\n {bucketAgg.type === 'date_histogram' && (\n <>\n \n \n\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'min_doc_count', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.min_doc_count ||\n bucketAggregationConfig[bucketAgg.type].defaultSettings?.min_doc_count\n }\n />\n \n\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'trimEdges', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.trimEdges || bucketAggregationConfig[bucketAgg.type].defaultSettings?.trimEdges\n }\n />\n \n\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'offset', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.offset || bucketAggregationConfig[bucketAgg.type].defaultSettings?.offset\n }\n />\n \n >\n )}\n\n {bucketAgg.type === 'histogram' && (\n <>\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'interval', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.interval || bucketAggregationConfig[bucketAgg.type].defaultSettings?.interval\n }\n />\n \n\n \n dispatch(changeBucketAggregationSetting(bucketAgg, 'min_doc_count', e.target.value!))}\n defaultValue={\n bucketAgg.settings?.min_doc_count 
||\n bucketAggregationConfig[bucketAgg.type].defaultSettings?.min_doc_count\n }\n />\n \n >\n )}\n\n {bucketAgg.type === 'filters' && }\n \n );\n};\n","import { MetricFindValue, SelectableValue } from '@grafana/data';\nimport { Segment, SegmentAsync } from '@grafana/ui';\nimport React, { FunctionComponent } from 'react';\nimport { useDispatch } from '../../../hooks/useStatelessReducer';\nimport { useDatasource } from '../ElasticsearchQueryContext';\nimport { segmentStyles } from '../styles';\nimport { BucketAggregation, BucketAggregationType, isBucketAggregationWithField } from './aggregations';\nimport { SettingsEditor } from './SettingsEditor';\nimport { changeBucketAggregationField, changeBucketAggregationType } from './state/actions';\nimport { BucketAggregationAction } from './state/types';\nimport { bucketAggregationConfig } from './utils';\n\nconst bucketAggOptions: Array> = Object.entries(bucketAggregationConfig).map(\n ([key, { label }]) => ({\n label,\n value: key as BucketAggregationType,\n })\n);\n\n// @ts-ignore\nconst toSelectableValue = ({ value, text }: MetricFindValue): SelectableValue => ({\n label: text,\n value: `${value || text}`,\n});\n\nconst toOption = (bucketAgg: BucketAggregation) => ({\n label: bucketAggregationConfig[bucketAgg.type].label,\n value: bucketAgg.type,\n});\n\ninterface QueryMetricEditorProps {\n value: BucketAggregation;\n}\n\nexport const BucketAggregationEditor: FunctionComponent = ({ value }) => {\n const datasource = useDatasource();\n const dispatch = useDispatch();\n\n // TODO: Move this in a separate hook (and simplify)\n const getFields = async () => {\n const get = () => {\n switch (value.type) {\n case 'date_histogram':\n return datasource.getFields('date');\n case 'geohash_grid':\n return datasource.getFields('geo_point');\n default:\n return datasource.getFields();\n }\n };\n\n return (await get()).map(toSelectableValue);\n };\n\n return (\n <>\n dispatch(changeBucketAggregationType(value.id, e.value!))}\n 
value={toOption(value)}\n />\n\n {isBucketAggregationWithField(value) && (\n dispatch(changeBucketAggregationField(value.id, e.value))}\n placeholder=\"Select Field\"\n value={value.field}\n />\n )}\n\n \n >\n );\n};\n","import React, { FunctionComponent } from 'react';\nimport { BucketAggregationEditor } from './BucketAggregationEditor';\nimport { useDispatch } from '../../../hooks/useStatelessReducer';\nimport { addBucketAggregation, removeBucketAggregation } from './state/actions';\nimport { BucketAggregationAction } from './state/types';\nimport { BucketAggregation } from './aggregations';\nimport { useQuery } from '../ElasticsearchQueryContext';\nimport { QueryEditorRow } from '../QueryEditorRow';\nimport { IconButton } from '../../IconButton';\n\ninterface Props {\n nextId: BucketAggregation['id'];\n}\n\nexport const BucketAggregationsEditor: FunctionComponent = ({ nextId }) => {\n const dispatch = useDispatch();\n const { bucketAggs } = useQuery();\n const totalBucketAggs = bucketAggs?.length || 0;\n\n return (\n <>\n {bucketAggs!.map((bucketAgg, index) => (\n dispatch(removeBucketAggregation(bucketAgg.id))}\n disableRemove={!(totalBucketAggs > 1)}\n >\n \n\n {index === 0 && (\n dispatch(addBucketAggregation(nextId))} label=\"add\" />\n )}\n \n ))}\n >\n );\n};\n","import { useMemo } from 'react';\nimport { useQuery } from '../components/QueryEditor/ElasticsearchQueryContext';\nimport { BucketAggregation } from '../components/QueryEditor/BucketAggregationsEditor/aggregations';\nimport { MetricAggregation } from '../components/QueryEditor/MetricAggregationsEditor/aggregations';\n\nconst toId = (e: T): T['id'] => e.id;\n\nconst toInt = (idString: string) => parseInt(idString, 10);\n\nexport const useNextId = (): MetricAggregation['id'] | BucketAggregation['id'] => {\n const { metrics, bucketAggs } = useQuery();\n\n return useMemo(\n () =>\n (Math.max(...[...(metrics?.map(toId) || ['0']), ...(bucketAggs?.map(toId) || ['0'])].map(toInt)) + 1).toString(),\n 
[metrics, bucketAggs]\n );\n};\n","import React, { FunctionComponent } from 'react';\nimport { ElasticsearchQuery, ElasticsearchQueryType } from '../../types';\nimport { InlineField, InlineFieldRow, Input, QueryField } from '@grafana/ui';\nimport { changeAliasPattern, changeQuery } from './state';\nimport { QueryTypeEditor } from './QueryTypeEditor';\nimport { MetricAggregationsEditor } from './MetricAggregationsEditor';\nimport { BucketAggregationsEditor } from './BucketAggregationsEditor';\nimport { useDispatch } from '../../hooks/useStatelessReducer';\nimport { useNextId } from '../../hooks/useNextId';\n\ninterface Props {\n query: ElasticsearchQuery['query'];\n}\n\nexport const LuceneEditor: FunctionComponent = ({ query }) => {\n const dispatch = useDispatch();\n const nextId = useNextId();\n\n return (\n <>\n \n \n <>\n \n {}}\n onChange={query => dispatch(changeQuery(query))}\n placeholder=\"Lucene Query\"\n portalOrigin=\"elasticsearch\"\n />\n >\n \n \n dispatch(changeAliasPattern(e.currentTarget.value))} />\n \n \n \n \n >\n );\n};\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport { FormatConfiguration } from '../../../types';\n\nexport const formatConfig: FormatConfiguration = {\n table: { label: 'Table' },\n logs: { label: 'Logs' },\n time_series: { label: 'Time series' },\n};\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport React, { FunctionComponent } from 'react';\nimport { SelectableValue } from '@grafana/data';\nimport { Segment } from '@grafana/ui';\nimport { useDispatch } from '../../../hooks/useStatelessReducer';\nimport { changeFormat } from './state';\nimport { formatConfig } from './utils';\nimport { PPLFormatType } from './formats';\nimport { segmentStyles } from '../styles';\n\nconst queryTypeOptions: Array> = Object.entries(formatConfig).map(\n ([key, { label }]) => ({\n label,\n value: key as PPLFormatType,\n })\n);\n\nconst toOption = (format: PPLFormatType) => ({\n label: formatConfig[format].label,\n value: format,\n});\n\ninterface Props {\n value: PPLFormatType;\n}\n\nexport const SettingsEditor: FunctionComponent = ({ value }) => {\n const dispatch = useDispatch();\n\n return (\n dispatch(changeFormat(e.value!))}\n value={toOption(value)}\n />\n );\n};\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. 
See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport { GrafanaTheme } from '@grafana/data';\nimport { Icon, stylesFactory, useTheme } from '@grafana/ui';\nimport { css, cx } from 'emotion';\nimport React, { FunctionComponent } from 'react';\nimport { segmentStyles } from '../styles';\n\nconst getStyles = stylesFactory((theme: GrafanaTheme) => {\n return {\n icon: css`\n margin-right: ${theme.spacing.xs};\n `,\n button: css`\n justify-content: start;\n `,\n };\n});\n\ninterface Props {\n label: string;\n open: boolean;\n onClick: () => void;\n}\n\nexport const OpenCloseButton: FunctionComponent = ({ label, open, onClick }) => {\n const styles = getStyles(useTheme());\n\n return (\n \n );\n};\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport React, { FunctionComponent } from 'react';\n\nexport const HelpMessage: FunctionComponent = () => (\n \n
\n
Table
\n
\n - return any set of columns
\n
\n
\n
Logs
\n
\n - return any set of columns
\n
\n
\n
Time series
\n
\n - return column as date, datetime, or timestamp
\n - return column with numeric datatype as values
\n
\n
\n Example PPL query for time series:\n
\n
source=<index> | eval dateValue=timestamp(timestamp) | stats count(response) by dateValue
\n
\n
\n);\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport React, { FunctionComponent, useState } from 'react';\nimport { defaultPPLFormat } from '../../../query_def';\nimport { useQuery } from '../ElasticsearchQueryContext';\nimport { QueryEditorRow } from '../QueryEditorRow';\nimport { SettingsEditor } from './SettingsEditor';\nimport { OpenCloseButton } from './OpenCloseButton';\nimport { HelpMessage } from './HelpMessage';\n\nexport const PPLFormatEditor: FunctionComponent = () => {\n const { format } = useQuery();\n\n const [displayHelp, setDisplayHelp] = useState(false);\n\n return (\n <>\n \n \n setDisplayHelp(!displayHelp)} />\n \n {displayHelp && }\n >\n );\n};\n","/*\n * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. 
See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\nimport React, { FunctionComponent } from 'react';\nimport { ElasticsearchQuery, ElasticsearchQueryType } from '../../types';\nimport { InlineField, InlineFieldRow, QueryField } from '@grafana/ui';\nimport { QueryTypeEditor } from './QueryTypeEditor';\nimport { PPLFormatEditor } from './PPLFormatEditor';\nimport { changeQuery } from './state';\nimport { useDispatch } from '../../hooks/useStatelessReducer';\n\ninterface Props {\n query: ElasticsearchQuery['query'];\n}\n\nexport const PPLEditor: FunctionComponent = ({ query }) => {\n const dispatch = useDispatch();\n\n return (\n <>\n \n \n <>\n \n {}}\n onChange={query => dispatch(changeQuery(query))}\n placeholder=\"PPL Query\"\n portalOrigin=\"elasticsearch\"\n />\n >\n \n \n \n >\n );\n};\n","import React, { FunctionComponent } from 'react';\nimport { QueryEditorProps } from '@grafana/data';\nimport { ElasticDatasource } from '../../datasource';\nimport { ElasticsearchOptions, ElasticsearchQuery, ElasticsearchQueryType } from '../../types';\nimport { ElasticsearchProvider } from './ElasticsearchQueryContext';\nimport { LuceneEditor } from './LuceneEditor';\nimport { PPLEditor } from './PPLEditor';\n// Fix for https://github.com/grafana/grafana/issues/26512\nimport {} from '@emotion/core';\n\nexport type ElasticQueryEditorProps = QueryEditorProps;\n\nexport const QueryEditor: FunctionComponent = ({ query, onChange, datasource }) => (\n \n \n \n);\n\ninterface Props {\n value: ElasticsearchQuery;\n}\n\nexport const QueryEditorForm: FunctionComponent = ({ value }) => {\n const { queryType } = value;\n\n switch (queryType) {\n case ElasticsearchQueryType.PPL:\n return ;\n default:\n return ;\n }\n};\n","import { DataSourcePlugin } from '@grafana/data';\nimport { ElasticDatasource } from './datasource';\nimport { ConfigEditor } from './configuration/ConfigEditor';\nimport { QueryEditor } from 
'./components/QueryEditor';\n\nclass ElasticAnnotationsQueryCtrl {\n static templateUrl = 'partials/annotations.editor.html';\n}\n\nexport const plugin = new DataSourcePlugin(ElasticDatasource)\n .setQueryEditor(QueryEditor)\n .setConfigEditor(ConfigEditor)\n .setAnnotationQueryCtrl(ElasticAnnotationsQueryCtrl);\n","import React, { useEffect } from 'react';\nimport { DataSourceHttpSettings } from '@grafana/ui';\nimport { DataSourcePluginOptionsEditorProps } from '@grafana/data';\nimport { ElasticsearchOptions } from '../types';\nimport { defaultMaxConcurrentShardRequests, ElasticDetails } from './ElasticDetails';\nimport { LogsConfig } from './LogsConfig';\nimport { DataLinks } from './DataLinks';\nimport { config } from '@grafana/runtime';\n\nexport type Props = DataSourcePluginOptionsEditorProps;\nexport const ConfigEditor = (props: Props) => {\n const { options, onOptionsChange } = props;\n\n // Apply some defaults on initial render\n useEffect(() => {\n const esVersion = options.jsonData.esVersion || 5;\n onOptionsChange({\n ...options,\n jsonData: {\n ...options.jsonData,\n timeField: options.jsonData.timeField || '@timestamp',\n esVersion,\n maxConcurrentShardRequests:\n options.jsonData.maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(esVersion),\n logMessageField: options.jsonData.logMessageField || '',\n logLevelField: options.jsonData.logLevelField || '',\n pplEnabled: options.jsonData.pplEnabled ?? true,\n },\n });\n }, []);\n\n return (\n <>\n \n\n \n\n \n onOptionsChange({\n ...options,\n jsonData: newValue,\n })\n }\n />\n\n {\n onOptionsChange({\n ...options,\n jsonData: {\n ...options.jsonData,\n dataLinks: newValue,\n },\n });\n }}\n />\n >\n );\n};\n"],"sourceRoot":""}
\ No newline at end of file
diff --git a/dist/partials/annotations.editor.html b/dist/partials/annotations.editor.html
new file mode 100644
index 0000000..d51077f
--- /dev/null
+++ b/dist/partials/annotations.editor.html
@@ -0,0 +1,38 @@
+
+
+
diff --git a/dist/plugin.json b/dist/plugin.json
new file mode 100644
index 0000000..60b8f6d
--- /dev/null
+++ b/dist/plugin.json
@@ -0,0 +1,51 @@
+{
+ "type": "datasource",
+ "name": "Open Distro for Elasticsearch",
+ "id": "grafana-es-open-distro-datasource",
+ "category": "logging",
+ "alerting": true,
+ "annotations": true,
+ "metrics": true,
+ "logs": true,
+ "backend": true,
+ "executable": "gpx_es-open-distro-datasource",
+ "info": {
+ "description": "",
+ "author": {
+ "name": "Grafana Labs",
+ "url": "https://grafana.com"
+ },
+ "keywords": [
+ "elasticsearch",
+ "opendistro"
+ ],
+ "logos": {
+ "small": "img/logo.svg",
+ "large": "img/logo.svg"
+ },
+ "links": [
+ {
+ "name": "Learn more",
+ "url": "https://grafana.com/docs/features/datasources/elasticsearch/"
+ }
+ ],
+ "screenshots": [],
+ "version": "1.0.0",
+ "updated": "2021-03-31",
+ "build": {
+ "time": 1617201727978,
+ "repo": "git@github.com:grafana/es-open-distro-datasource.git",
+ "branch": "v1.0.x",
+ "hash": "203ba66595ad2329c9d283fefd6d837fef7736d3",
+ "number": 58
+ }
+ },
+ "dependencies": {
+ "grafanaVersion": "7.5.2",
+ "grafanaDependency": ">=7.5.2",
+ "plugins": []
+ },
+ "queryOptions": {
+ "minInterval": true
+ }
+}
\ No newline at end of file