diff --git a/test/e2e b/test/e2e index d45fb2db..f4115323 160000 --- a/test/e2e +++ b/test/e2e @@ -1 +1 @@ -Subproject commit d45fb2db4bd2360fcafef54cdf1f42fef9b8b9bc +Subproject commit f4115323851f906891afeed694bd353a79f5c91f diff --git a/test/traceql_parser.test.js b/test/traceql_parser.test.js new file mode 100644 index 00000000..7042cc67 --- /dev/null +++ b/test/traceql_parser.test.js @@ -0,0 +1,46 @@ +const parser = require('../traceql/parser') + +it('traceql: one selector', () => { + const res = parser.ParseScript('{.testId="12345"}') + expect(res.rootToken.value).toEqual('{.testId="12345"}') +}) + +it('traceql: multiple selectors', () => { + const res = parser.ParseScript('{.testId="12345" &&.spanN=9}') + expect(res.rootToken.value).toEqual('{.testId="12345" &&.spanN=9}') +}) + +it('traceql: multiple selectors OR Brackets', () => { + const res = parser.ParseScript('{.testId="12345" && (.spanN=9 ||.spanN=8)}') + expect(res.rootToken.value).toEqual('{.testId="12345" && (.spanN=9 ||.spanN=8)}') +}) + +it('traceql: multiple selectors regexp', () => { + const res = parser.ParseScript('{.testId="12345" &&.spanN=~"(9|8)"}') + expect(res.rootToken.value).toEqual('{.testId="12345" &&.spanN=~"(9|8)"}') +}) + +it('traceql: duration', () => { + const res = parser.ParseScript('{.testId="12345" && duration>=9ms}') + expect(res.rootToken.value).toEqual('{.testId="12345" && duration>=9ms}') +}) + +it('traceql: float comparison', () => { + const res = parser.ParseScript('{.testId="12345" &&.spanN>=8.9}') + expect(res.rootToken.value).toEqual('{.testId="12345" &&.spanN>=8.9}') +}) + +it('traceql: count empty result', () => { + const res = parser.ParseScript('{.testId="12345" &&.spanN>=8.9} | count() > 1') + expect(res.rootToken.value).toEqual('{.testId="12345" &&.spanN>=8.9} | count() > 1') +}) + +it('traceql: max duration empty result', () => { + const res = parser.ParseScript('{.testId="12345" &&.spanN>=8.9} | max(duration) > 9ms') + 
expect(res.rootToken.value).toEqual('{.testId="12345" &&.spanN>=8.9} | max(duration) > 9ms') +}) + +it('traceql: max duration', () => { + const res = parser.ParseScript('{.testId="12345" &&.spanN>=8.9} | max(duration) > 8ms') + expect(res.rootToken.value).toEqual('{.testId="12345" &&.spanN>=8.9} | max(duration) > 8ms') +}) diff --git a/traceql/clickhouse_transpiler/aggregator.js b/traceql/clickhouse_transpiler/aggregator.js new file mode 100644 index 00000000..5348b8fd --- /dev/null +++ b/traceql/clickhouse_transpiler/aggregator.js @@ -0,0 +1,103 @@ +const Sql = require('@cloki/clickhouse-sql') +const { getCompareFn, durationToNs } = require('./shared') + +module.exports = class Builder { + constructor () { + this.main = null + this.fn = '' + this.attr = '' + this.compareFn = '' + this.compareVal = '' + } + + /** + * + * @param main {BuiltProcessFn} + * @returns {Builder} + */ + withMain (main) { + this.main = main + return this + } + + /** + * + * @param fn {string} + * @returns {Builder} + */ + withFn (fn) { + this.fn = fn + return this + } + + /** + * + * @param attr {string} + * @returns {Builder} + */ + withAttr (attr) { + this.attr = attr + return this + } + + /** + * + * @param fn {string} + * @returns {Builder} + */ + withCompareFn (fn) { + this.compareFn = fn + return this + } + + /** + * + * @param val {string} + * @returns {Builder} + */ + withCompareVal (val) { + this.compareVal = val + return this + } + + /** + * @returns {ProcessFn} + */ + build () { + const self = this + /** @type {BuiltProcessFn} */ + const res = (ctx) => { + const sel = this.main(ctx) + const fCmpVal = self.cmpVal() + const agg = self.aggregator() + const compareFn = getCompareFn(self.compareFn) + const comparreExp = compareFn(agg, Sql.val(fCmpVal)) + // .having is broken + sel.having_conditions = Sql.And([...sel.having_conditions.args, comparreExp]) + return sel + } + return res + } + + cmpVal () { + if (this.attr === 'duration') { + return durationToNs(this.compareVal) + } + 
return parseFloat(this.compareVal) + } + + aggregator () { + switch (this.fn) { + case 'count': + return new Sql.Raw('toFloat64(count(distinct index_search.span_id))') + case 'avg': + return new Sql.Raw('avgIf(agg_val, isNotNull(agg_val))') + case 'max': + return new Sql.Raw('maxIf(agg_val, isNotNull(agg_val))') + case 'min': + return new Sql.Raw('minIf(agg_val, isNotNull(agg_val))') + case 'sum': + return new Sql.Raw('sumIf(agg_val, isNotNull(agg_val))') + } + } +} diff --git a/traceql/clickhouse_transpiler/attr_condition.js b/traceql/clickhouse_transpiler/attr_condition.js new file mode 100644 index 00000000..33e45535 --- /dev/null +++ b/traceql/clickhouse_transpiler/attr_condition.js @@ -0,0 +1,292 @@ +const { getCompareFn, durationToNs, unquote } = require('./shared') +const Sql = require('@cloki/clickhouse-sql') +module.exports = class Builder { + constructor () { + this.main = null + this.terms = [] + this.conds = null + this.aggregatedAttr = '' + + this.sqlConditions = [] + this.isAliased = false + this.alias = '' + this.where = [] + } + + /** + * + * @param main {BuiltProcessFn} + * @returns {Builder} + */ + withMain (main) { + this.main = main + return this + } + + /** + * @param terms {[]} + * @returns {Builder} + */ + withTerms (terms) { + this.terms = terms + return this + } + + /** + * @param conds + * @returns {Builder} + */ + withConditions (conds) { + this.conds = conds + return this + } + + /** + * + * @param aggregatedAttr {string} + * @returns {Builder} + */ + withAggregatedAttr (aggregatedAttr) { + this.aggregatedAttr = aggregatedAttr + return this + } + + /** + * @returns {ProcessFn} + */ + build () { + const self = this + /** @type {BuiltProcessFn} */ + const res = (ctx) => { + const sel = this.main(ctx) + self.alias = 'bsCond' + for (const term of self.terms) { + const sqlTerm = self.getTerm(term) + self.sqlConditions.push(sqlTerm) + if (!term.Child('label_name').value.match(/^(\.|span\.|resource\.|name)/)) { + continue + } + 
self.where.push(sqlTerm) + } + const having = self.getCond(self.conds) + self.aggregator(sel) + sel.conditions = Sql.And(sel.conditions, Sql.Or(...self.where)) + sel.having(having) + return sel + } + return res + } + + /** + * @typedef {{simpleIdx: number, op: string, complex: [Condition]}} Condition + */ + /** + * @param c {Condition} + */ + getCond (c) { + if (c.simpleIdx === -1) { + const subs = [] + for (const s of c.complex) { + subs.push(this.getCond(s)) + } + switch (c.op) { + case '&&': + return Sql.And(...subs) + case '||': + return Sql.Or(...subs) + } + throw new Error(`unsupported condition operator ${c.op}`) + } + let left = new Sql.Raw(this.alias) + if (!this.isAliased) { + left = groupBitOr(bitSet(this.sqlConditions), this.alias) + } + return Sql.Ne(bitAnd(left, new Sql.Raw((BigInt(1) << BigInt(c.simpleIdx)).toString())), Sql.val(0)) + } + + /** + * @param sel {Select} + */ + aggregator (sel) { + if (!this.aggregatedAttr) { + return + } + + if (this.aggregatedAttr === 'duration') { + sel.select([new Sql.Raw('toFloat64(any(traces_idx.duration))'), 'agg_val']) + return + } + + if (this.aggregatedAttr.match(/^span./)) { + this.aggregatedAttr = this.aggregatedAttr.substr(5) + } + if (this.aggregatedAttr.match(/^resource\./)) { + this.aggregatedAttr = this.aggregatedAttr.substr(9) + } + if (this.aggregatedAttr.match(/^\./)) { + this.aggregatedAttr = this.aggregatedAttr.substr(1) + } + sel.select([sqlAttrValue(this.aggregatedAttr), 'agg_val']) + this.where.push(Sql.Eq(new Sql.Raw('key'), Sql.val(this.aggregatedAttr))) + } + + getTerm (term) { + let key = term.Child('label_name').value + if (key.match(/^span\./)) { + key = key.substr(5) + } else if (key.match(/^resource\./)) { + key = key.substr(9) + } else if (key.match(/^\./)) { + key = key.substr(1) + } else { + switch (key) { + case 'duration': + return this.getDurationCondition(key, term) + case 'name': + key = 'name' + break + default: + throw new Error(`unsupported attribute ${key}`) + } + } + if 
(term.Child('quoted_str')) { + return this.getStrCondition(key, term) + } else if (term.Child('number')) { + return this.getNumberCondition(key, term) + } + throw new Error(`unsupported term statement ${term.value}`) + } + + getDurationCondition (key, term) { + const fVal = durationToNs(term.Child('value').value) + const fn = getCompareFn(term.Child('op').value) + return fn(new Sql.Raw('traces_idx.duration'), Math.floor(fVal)) + } + + getStrCondition (key, term) { + const strVal = this.getString(term) + switch (term.Child('op').value) { + case '=': + return Sql.And( + Sql.Eq(new Sql.Raw('key'), Sql.val(key)), + Sql.Eq(new Sql.Raw('val'), Sql.val(strVal)) + ) + case '!=': + return Sql.And( + Sql.Eq(new Sql.Raw('key'), Sql.val(key)), + Sql.Ne(new Sql.Raw('val'), Sql.val(strVal)) + ) + case '=~': + return Sql.And( + Sql.Eq(new Sql.Raw('key'), Sql.val(key)), + Sql.Eq(new Sql.Raw(`match(val, ${Sql.quoteVal(strVal)})`), 1) + ) + case '!~': + return Sql.And( + Sql.Eq(new Sql.Raw('key'), Sql.val(key)), + Sql.Ne(new Sql.Raw(`match(val, ${Sql.quoteVal(strVal)})`), 1) + ) + } + throw new Error(`unsupported term statement ${term.value}`) + } + + getNumberCondition (key, term) { + const fn = getCompareFn(term.Child('op').value) + if (!term.Child('value').value.match(/^\d+\.?\d*$/)) { + throw new Error(`invalid value in ${term.value}`) + } + const fVal = parseFloat(term.Child('value').value) + return Sql.And( + Sql.Eq('key', Sql.val(key)), + Sql.Eq(new Sql.Raw('isNotNull(toFloat64OrNull(val))'), 1), + fn(new Sql.Raw('toFloat64OrZero(val)'), fVal) + ) + } + + getString (term) { + if (term.Child('quoted_str').value) { + return unquote(term.Child('quoted_str').value) + } + if (term.Child('number').value) { + return term.Child('number').value + } + throw new Error(`unsupported term statement ${term.value}`) + } +} + +/** + * + * @param left + * @param right + * @returns {SQLObject} + */ +function bitAnd (left, right) { + const res = new Sql.Raw('') + res.toString = () => { + const
strLeft = left.toString() + const strRight = right.toString() + return `bitAnd(${strLeft}, ${strRight})` + } + return res +} + +/** + * + * @param left + * @param alias + * @returns {SQLObject} + */ +function groupBitOr (left, alias) { + const res = new Sql.Raw('') + res.toString = () => { + const strLeft = left.toString() + if (alias) { + return `groupBitOr(${strLeft}) as ${alias}` + } + return `groupBitOr(${strLeft})` + } + return res +} + +/** + * + * @param terms + * @returns {SQLObject} + */ +function bitSet (terms) { + const res = new Sql.Raw('') + res.terms = terms + res.toString = () => { + return res.terms.map((t, i) => `bitShiftLeft(toUInt64(${t.toString()}), ${i})`).join('+') + } + return res +} + +/** + * + * @param attr {string} + * @returns {SQLObject} + */ +function sqlAttrValue (attr) { + const res = new Sql.Raw('') + res.toString = () => { + const strAttr = Sql.quoteVal(attr) + return `anyIf(toFloat64OrNull(val), key == ${strAttr})` + } + return res +} + +/** + * type sqlAttrValue struct { + * attr string + * } + * + * func (s *sqlAttrValue) String(ctx *sql.Ctx, options ...int) (string, error) { + * attr, err := sql.NewStringVal(s.attr).String(ctx, options...) 
+ * if err != nil { + * return "", err + * } + * + * return fmt.Sprintf("anyIf(toFloat64OrNull(val), key == %s)", attr), nil + * } + */ \ No newline at end of file diff --git a/traceql/clickhouse_transpiler/group_by.js b/traceql/clickhouse_transpiler/group_by.js new file mode 100644 index 00000000..fab2da06 --- /dev/null +++ b/traceql/clickhouse_transpiler/group_by.js @@ -0,0 +1,16 @@ +const Sql = require('@cloki/clickhouse-sql') +const { standardBuilder } = require('./shared') + +module.exports = standardBuilder((sel, ctx) => { + const withMain = new Sql.With('index_search', sel) + return (new Sql.Select()) + .with(withMain) + .select( + ['trace_id', 'trace_id'], + [new Sql.Raw('groupArray(span_id)'), 'span_id'], + [new Sql.Raw('groupArray(duration)'), 'duration'], + [new Sql.Raw('groupArray(timestamp_ns)'), 'timestamp_ns'] + ).from(new Sql.WithReference(withMain)) + .groupBy('trace_id') + .orderBy([new Sql.Raw('max(index_search.timestamp_ns)'), 'desc']) +}) diff --git a/traceql/clickhouse_transpiler/index.js b/traceql/clickhouse_transpiler/index.js new file mode 100644 index 00000000..e43373cb --- /dev/null +++ b/traceql/clickhouse_transpiler/index.js @@ -0,0 +1,109 @@ +const AttrConditionPlanner = require('./attr_condition') +const InitIndexPlanner = require('./init') +const IndexGroupByPlanner = require('./group_by') +const AggregatorPlanner = require('./aggregator') +const IndexLimitPlanner = require('./limit') +const TracesDataPlanner = require('./traces_data') + +/** + * @param script {Token} + */ +module.exports = (script) => { + return new Planner(script).plan() +} + +class Planner { + /** + * + * @param script {Token} + */ + constructor (script) { + this.script = script + this.termIdx = [] + this.cond = null + this.aggregatedAttr = '' + this.cmpVal = '' + this.terms = {} + this.aggFn = '' + } + + plan () { + this.check() + this.analyze() + let res = (new AttrConditionPlanner()) + .withTerms(this.termIdx) + .withConditions(this.cond) + 
.withAggregatedAttr(this.aggregatedAttr) + .withMain((new InitIndexPlanner()).build()) + .build() + res = (new IndexGroupByPlanner()).withMain(res).build() + if (this.aggFn) { + res = (new AggregatorPlanner()) + .withFn(this.aggFn) + .withAttr(this.aggregatedAttr) + .withCompareFn(this.script.Child('cmp').value) + .withCompareVal(this.script.Child('cmp_val').value) + .withMain(res) + .build() + } + res = (new IndexLimitPlanner()).withMain(res).build() + res = (new TracesDataPlanner()).withMain(res).build() + res = (new IndexLimitPlanner()).withMain(res).build() + + return res + } + + check () { + if (this.script.Children('SYNTAX').length > 1) { + throw new Error('more than one selector is not supported') + } + } + + analyze () { + this.terms = {} + this.cond = this.analyzeCond(this.script.Child('attr_selector_exp')) + this.analyzeAgg() + } + + /** + * + * @param token {Token} + */ + analyzeCond (token) { + let res = {} + const complexHead = token.tokens.find(x => x.name === 'complex_head') + const simpleHead = token.tokens.find(x => x.name === 'attr_selector') + if (complexHead) { + res = this.analyzeCond(complexHead.tokens.find(x => x.name === 'attr_selector_exp')) + } else if (simpleHead) { + const term = simpleHead.value + if (this.terms[term]) { + res = { simpleIdx: this.terms[term] - 1, op: '', complex: [] } + } else { + this.termIdx.push(simpleHead) + this.terms[term] = this.termIdx.length + res = { simpleIdx: this.termIdx.length - 1, op: '', complex: [] } + } + } + const tail = token.tokens.find(x => x.name === 'tail') + if (tail) { + res = { + simpleIdx: -1, + op: token.tokens.find(x => x.name === 'and_or').value, + complex: [res, this.analyzeCond(tail.tokens.find(x => x.name === 'attr_selector_exp'))] + } + } + return res + } + + analyzeAgg () { + const agg = this.script.Child('aggregator') + if (!agg) { + return + } + this.aggFn = agg.Child('fn').value + const labelName = agg.Child('attr').Child('label_name') + this.aggregatedAttr = labelName ? 
labelName.value : '' + this.cmpVal = agg.Child('cmp_val').value + } +} diff --git a/traceql/clickhouse_transpiler/init.js b/traceql/clickhouse_transpiler/init.js new file mode 100644 index 00000000..ff63f4fe --- /dev/null +++ b/traceql/clickhouse_transpiler/init.js @@ -0,0 +1,35 @@ +const Sql = require('@cloki/clickhouse-sql') +const { format } = require('date-fns') +const { standardBuilder } = require('./shared') +/** + * @typedef {{ + * from: Date, + * to: Date, + * tracesAttrsTable: string, + * limit: number, + * isCluster: boolean, + * tracesTable: string, + * tracesDistTable: string + * }} Context + */ +/** + * @typedef {function(Sql.Select, Context): Select} ProcessFn + */ + +/** + * @type {ProcessFn} + */ +module.exports = standardBuilder((sel, ctx) => { + return (new Sql.Select()).select(['trace_id', 'trace_id'], + [new Sql.Raw('lower(hex(span_id))'), 'span_id'], + [new Sql.Raw('any(duration)'), 'duration'], + [new Sql.Raw('any(timestamp_ns)'), 'timestamp_ns']) + .from([ctx.tracesAttrsTable, 'traces_idx']) + .where(Sql.And( + Sql.Gte('date', Sql.val(format(ctx.from, 'yyyy-MM-dd'))), + Sql.Lte('date', Sql.val(format(ctx.to, 'yyyy-MM-dd'))), + Sql.Gte('traces_idx.timestamp_ns', new Sql.Raw(ctx.from.getTime() + '000000')), + Sql.Lt('traces_idx.timestamp_ns', new Sql.Raw(ctx.to.getTime() + '000000')) + )).groupBy('trace_id', 'span_id') + .orderBy(['timestamp_ns', 'desc']) +}) diff --git a/traceql/clickhouse_transpiler/limit.js b/traceql/clickhouse_transpiler/limit.js new file mode 100644 index 00000000..3ec1c224 --- /dev/null +++ b/traceql/clickhouse_transpiler/limit.js @@ -0,0 +1,8 @@ +const { standardBuilder } = require('./shared') + +module.exports = standardBuilder((sel, ctx) => { + if (!ctx.limit) { + return sel + } + return sel.limit(ctx.limit) +}) diff --git a/traceql/clickhouse_transpiler/shared.js b/traceql/clickhouse_transpiler/shared.js new file mode 100644 index 00000000..dacbac44 --- /dev/null +++ b/traceql/clickhouse_transpiler/shared.js @@ -0,0 
+1,84 @@ +const Sql = require('@cloki/clickhouse-sql') +/** + * + * @param op {string} + */ +module.exports.getCompareFn = (op) => { + switch (op) { + case '=': + return Sql.Eq + case '>': + return Sql.Gt + case '<': + return Sql.Lt + case '>=': + return Sql.Gte + case '<=': + return Sql.Lte + case '!=': + return Sql.Ne + } + throw new Error('not supported operator: ' + op) +} + +module.exports.durationToNs = (duration) => { + const measurements = { + ns: 1, + us: 1000, + ms: 1000000, + s: 1000000000, + m: 1000000000 * 60, + h: 1000000000 * 3600, + d: 1000000000 * 3600 * 24 + } + const durationRe = duration.match(/(\d+\.?\d*)(ns|us|ms|s|m|h|d)?/) + if (!durationRe) { + throw new Error('Invalid duration compare value') + } + return parseFloat(durationRe[1]) * measurements[(durationRe[2] || 'ns').toLowerCase()] +} + +module.exports.unquote = (val) => { + if (val[0] === '"') { + return JSON.parse(val) + } + if (val[0] === '`') { + return val.substr(1, val.length - 2) + } + throw new Error('unquote not supported') +} + +/** + * @typedef {function(Context): Select} BuiltProcessFn + */ +/** + * @param fn {ProcessFn} + * @returns {{ + * new(): { + * withMain(BuiltProcessFn): this, + * build(): BuiltProcessFn + * }, + * prototype: { + * withMain(BuiltProcessFn): this, + * build(): BuiltProcessFn + * }}} + */ +module.exports.standardBuilder = (fn) => { + return class { + withMain (main) { + this.main = main + return this + } + + /** + * + * @returns {BuiltProcessFn} + */ + build () { + return (ctx) => { + const sel = this.main ?
this.main(ctx) : null + return fn(sel, ctx) + } + } + } +} diff --git a/traceql/clickhouse_transpiler/traces_data.js b/traceql/clickhouse_transpiler/traces_data.js new file mode 100644 index 00000000..f99928ef --- /dev/null +++ b/traceql/clickhouse_transpiler/traces_data.js @@ -0,0 +1,34 @@ +const Sql = require('@cloki/clickhouse-sql') +const { standardBuilder } = require('./shared') +/** + * @type {ProcessFn} + */ +const processFn = (sel, ctx) => { + const table = !ctx.isCluster ? ctx.tracesTable : ctx.tracesDistTable + const withMain = new Sql.With('index_grouped', sel) + const withTraceIds = new Sql.With('trace_ids', (new Sql.Select()) + .select('trace_id') + .from(new Sql.WithReference(withMain))) + return (new Sql.Select()) + .with(withMain, withTraceIds) + .select( + [new Sql.Raw('lower(hex(traces.trace_id))'), 'trace_id'], + [new Sql.Raw('any(index_grouped.span_id)'), 'span_id'], + [new Sql.Raw('any(index_grouped.duration)'), 'duration'], + [new Sql.Raw('any(index_grouped.timestamp_ns)'), 'timestamp_ns'], + [new Sql.Raw('min(traces.timestamp_ns)'), 'start_time_unix_nano'], + [new Sql.Raw( + 'toFloat64(max(traces.timestamp_ns + traces.duration_ns) - min(traces.timestamp_ns)) / 1000000' + ), 'duration_ms'], + [new Sql.Raw('argMin(traces.name, traces.timestamp_ns)', 'root_service_name'), 'root_service_name'] + ).from([table, 'traces']).join( + new Sql.WithReference(withMain), + 'left any', + Sql.Eq(new Sql.Raw('traces.trace_id'), new Sql.Raw('index_grouped.trace_id')) + ).where(Sql.And( + new Sql.In(new Sql.Raw('traces.trace_id'), 'in', new Sql.WithReference(withTraceIds)) + )).groupBy('traces.trace_id') + .orderBy(['start_time_unix_nano', 'desc']) +} + +module.exports = standardBuilder(processFn) diff --git a/traceql/index.js b/traceql/index.js index d36e6578..c987dce5 100644 --- a/traceql/index.js +++ b/traceql/index.js @@ -1,9 +1,9 @@ -const { TranspileTraceQL } = require('../wasm_parts/main') -const { clusterName } = require('../common') +const parser = 
require('./parser') +const transpiler = require('./clickhouse_transpiler') +const logger = require('../lib/logger') const { DATABASE_NAME } = require('../lib/utils') -const dist = clusterName ? '_dist' : '' +const { clusterName } = require('../common') const { rawRequest } = require('../lib/db/clickhouse') -const logger = require('../lib/logger') /** * @@ -14,30 +14,20 @@ const logger = require('../lib/logger') * @returns {Promise<[]>} */ const search = async (query, limit, from, to) => { - const _dbname = '`' + DATABASE_NAME() + '`' - const request = { - Request: query, - Ctx: { - IsCluster: !!clusterName, - OrgID: '0', - FromS: Math.floor(from.getTime() / 1000) - 600, - ToS: Math.floor(to.getTime() / 1000), - Limit: parseInt(limit), - - TimeSeriesGinTableName: `${_dbname}.time_series_gin`, - SamplesTableName: `${_dbname}.samples_v3${dist}`, - TimeSeriesTableName: `${_dbname}.time_series`, - TimeSeriesDistTableName: `${_dbname}.time_series_dist`, - Metrics15sTableName: `${_dbname}.metrics_15s${dist}`, - - TracesAttrsTable: `${_dbname}.tempo_traces_attrs_gin`, - TracesAttrsDistTable: `${_dbname}.tempo_traces_attrs_gin_dist`, - TracesTable: `${_dbname}.tempo_traces`, - TracesDistTable: `${_dbname}.tempo_traces_dist` - } + const _dbname = DATABASE_NAME() + /** @type {Context} */ + const ctx = { + tracesDistTable: `${_dbname}.tempo_traces_dist`, + tracesTable: `${_dbname}.tempo_traces`, + isCluster: !!clusterName, + tracesAttrsTable: `${_dbname}.tempo_traces_attrs_gin`, + from: from, + to: to, + limit: limit } - logger.debug(JSON.stringify(request)) - const sql = TranspileTraceQL(request) + const scrpit = parser.ParseScript(query) + const planner = transpiler(scrpit.rootToken) + const sql = planner(ctx) const response = await rawRequest(sql + ' FORMAT JSON', null, DATABASE_NAME()) const traces = response.data.data.map(row => ({ traceID: row.trace_id, @@ -49,7 +39,7 @@ const search = async (query, limit, from, to) => { { spans: row.span_id.map((spanId, i) => ({ spanID: 
spanId, - startTimeUnixNano: row.timestamps_ns[i], + startTimeUnixNano: row.timestamp_ns[i], durationNanos: row.duration[i], attributes: [] })), diff --git a/traceql/parser.js b/traceql/parser.js new file mode 100644 index 00000000..42b48730 --- /dev/null +++ b/traceql/parser.js @@ -0,0 +1,9 @@ +const fs = require('fs') +const path = require('path') +const { Compiler } = require('bnf') + +const bnf = fs.readFileSync(path.join(__dirname, 'traceql.bnf')).toString() +const compiler = new Compiler() +compiler.AddLanguage(bnf, 'traceql') + +module.exports = compiler diff --git a/traceql/traceql.bnf b/traceql/traceql.bnf new file mode 100644 index 00000000..d9f21c12 --- /dev/null +++ b/traceql/traceql.bnf @@ -0,0 +1,23 @@ + ::= *( ) + +selector ::= "{" "}" [ ] +attr_selector_exp ::= ( | ) [ ] +complex_head ::= "(" ")" +tail ::= +and_or ::= "&&" | "||" + +aggregator ::= "|" +fn ::= "count"|"sum"|"min"|"max"|"avg" +attr ::= "(" [ ] ")" +cmp ::= "="|"!="|"<="|">="|"<"|">" +cmp_val ::= [] +measurement ::= "ns"|"us"|"ms"|"s"|"m"|"h"|"d" + +label_name ::= ("." | | "_") *("." | | "_" | ) +number ::= ["-"] ["." ] + +attr_selector ::= +op ::= ">="|"<="|"=~"|"="|"!="|"<"|">"|"!~" +value ::= | | +time_val ::= ( ["." ]) +quoted_str ::= () | () | |