fix: switch flux formatter to one that preserves comments (influxdata…
danxmoran authored Aug 12, 2021
1 parent a160a1d commit 07d897d
Showing 44 changed files with 434 additions and 787 deletions.
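For context before the per-file diffs: the substantive change is a switch from ast.Format, which renders a whole *ast.Package and discards comments, to astutil.Format, which renders a single *ast.File and keeps comments attached to the AST (see notification/check/custom.go and notification/check/deadman.go below). A minimal sketch of the new formatting path — the sample script and program scaffolding are illustrative, and it assumes the flux parser/astutil APIs behave as the call sites in this diff imply:

package main

import (
	"fmt"

	"github.com/influxdata/flux/ast/astutil"
	"github.com/influxdata/flux/parser"
)

func main() {
	// A Flux script with a comment that the old formatter (ast.Format) dropped.
	src := `// look at the last hour of data
from(bucket: "telegraf") |> range(start: -1h)`

	// parser.ParseSource returns an *ast.Package; this commit formats its first
	// (and only) file instead of the whole package.
	pkg := parser.ParseSource(src)

	formatted, err := astutil.Format(pkg.Files[0])
	if err != nil {
		panic(err)
	}
	fmt.Println(formatted) // the leading comment survives the round trip
}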
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -78,6 +78,7 @@ This release adds an embedded SQLite database for storing metadata required by t
1. [21950](https://github.com/influxdata/influxdb/pull/21950): Invalid requests to /api/v2 subroutes now return 404 instead of a list of links.
1. [21962](https://github.com/influxdata/influxdb/pull/21962): Flux metaqueries for `_field` take fast path if `_measurement` is the only predicate.
1. [22059](https://github.com/influxdata/influxdb/pull/22059): Copy names from mmapped memory before closing iterator
1. [22186](https://github.com/influxdata/influxdb/pull/22186): Preserve comments in flux queries when saving task definitions

## v2.0.7 [2021-06-04]

6 changes: 3 additions & 3 deletions checks/service_external_test.go
@@ -317,7 +317,7 @@ func CreateCheck(
Organization: "theorg",
OwnerID: MustIDBase16("020f755c3c082001"),
Status: "active",
Flux: "package main\nimport \"influxdata/influxdb/monitor\"\nimport \"experimental\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\")\n\t|> range(start: -1h)\n\t|> filter(fn: (r) =>\n\t\t(r._field == \"usage_user\"))\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {\n\t_check_id: \"020f755c3c082000\",\n\t_check_name: \"name1\",\n\t_type: \"deadman\",\n\ttags: {k1: \"v1\", k2: \"v2\"},\n}\ncrit = (r) =>\n\t(r[\"dead\"])\nmessageFn = (r) =>\n\t(\"msg1\")\n\ndata\n\t|> v1[\"fieldsAsCols\"]()\n\t|> monitor[\"deadman\"](t: experimental[\"subDuration\"](from: now(), d: 21s))\n\t|> monitor[\"check\"](data: check, messageFn: messageFn, crit: crit)",
Flux: "import \"influxdata/influxdb/monitor\"\nimport \"experimental\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\") |> range(start: -1h) |> filter(fn: (r) => r._field == \"usage_user\")\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {_check_id: \"020f755c3c082000\", _check_name: \"name1\", _type: \"deadman\", tags: {k1: \"v1\", k2: \"v2\"}}\ncrit = (r) => r[\"dead\"]\nmessageFn = (r) => \"msg1\"\n\ndata |> v1[\"fieldsAsCols\"]() |> monitor[\"deadman\"](t: experimental[\"subDuration\"](from: now(), d: 21s))\n |> monitor[\"check\"](data: check, messageFn: messageFn, crit: crit)",
Every: "1m",
},
},
@@ -447,7 +447,7 @@ func CreateCheck(
OwnerID: MustIDBase16("020f755c3c082005"),
Status: "active",
Every: "1m",
Flux: "package main\nimport \"influxdata/influxdb/monitor\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\")\n\t|> range(start: -1m)\n\t|> filter(fn: (r) =>\n\t\t(r._field == \"usage_user\"))\n\noption task = {name: \"name2\", every: 1m}\n\ncheck = {\n\t_check_id: \"020f755c3c082001\",\n\t_check_name: \"name2\",\n\t_type: \"threshold\",\n\ttags: {k11: \"v11\"},\n}\nok = (r) =>\n\t(r[\"usage_user\"] < 1000.0)\nwarn = (r) =>\n\t(r[\"usage_user\"] > 2000.0)\ninfo = (r) =>\n\t(r[\"usage_user\"] < 1900.0 and r[\"usage_user\"] > 1500.0)\nmessageFn = (r) =>\n\t(\"msg2\")\n\ndata\n\t|> v1[\"fieldsAsCols\"]()\n\t|> monitor[\"check\"](\n\t\tdata: check,\n\t\tmessageFn: messageFn,\n\t\tok: ok,\n\t\twarn: warn,\n\t\tinfo: info,\n\t)",
Flux: "import \"influxdata/influxdb/monitor\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\") |> range(start: -1m) |> filter(fn: (r) => r._field == \"usage_user\")\n\noption task = {name: \"name2\", every: 1m}\n\ncheck = {_check_id: \"020f755c3c082001\", _check_name: \"name2\", _type: \"threshold\", tags: {k11: \"v11\"}}\nok = (r) => r[\"usage_user\"] < 1000.0\nwarn = (r) => r[\"usage_user\"] > 2000.0\ninfo = (r) => r[\"usage_user\"] < 1900.0 and r[\"usage_user\"] > 1500.0\nmessageFn = (r) => \"msg2\"\n\ndata |> v1[\"fieldsAsCols\"]() |> monitor[\"check\"](\n data: check,\n messageFn: messageFn,\n ok: ok,\n warn: warn,\n info: info,\n)",
},
},
},
@@ -584,7 +584,7 @@ func CreateCheck(
OwnerID: MustIDBase16("020f755c3c082001"),
Status: "active",
Every: "1m",
Flux: "package main\nimport \"influxdata/influxdb/monitor\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\")\n\t|> range(start: -1m)\n\t|> filter(fn: (r) =>\n\t\t(r._field == \"usage_user\"))\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {\n\t_check_id: \"020f755c3c082001\",\n\t_check_name: \"name1\",\n\t_type: \"threshold\",\n\ttags: {k11: \"v11\", k22: \"v22\"},\n}\nmessageFn = (r) =>\n\t(\"msg2\")\n\ndata\n\t|> v1[\"fieldsAsCols\"]()\n\t|> monitor[\"check\"](data: check, messageFn: messageFn)",
Flux: "import \"influxdata/influxdb/monitor\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\") |> range(start: -1m) |> filter(fn: (r) => r._field == \"usage_user\")\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {_check_id: \"020f755c3c082001\", _check_name: \"name1\", _type: \"threshold\", tags: {k11: \"v11\", k22: \"v22\"}}\nmessageFn = (r) => \"msg2\"\n\ndata |> v1[\"fieldsAsCols\"]() |> monitor[\"check\"](data: check, messageFn: messageFn)",
},
},
checks: []influxdb.Check{
78 changes: 36 additions & 42 deletions cmd/influxd/launcher/pkger_test.go
@@ -3832,15 +3832,12 @@ spec:
expectedQuery := expectedParams + `
from(bucket: params.bucket)
|> range(start: params.start, end: params.stop)
|> filter(fn: (r) =>
(r._measurement == "processes"))
|> filter(fn: (r) =>
(r.floater == params.floatVal))
|> filter(fn: (r) =>
(r._value > params.minVal))
|> aggregateWindow(every: v.windowPeriod, fn: max)
|> yield(name: params.name)`
|> range(start: params.start, end: params.stop)
|> filter(fn: (r) => r._measurement == "processes")
|> filter(fn: (r) => r.floater == params.floatVal)
|> filter(fn: (r) => r._value > params.minVal)
|> aggregateWindow(every: v.windowPeriod, fn: max)
|> yield(name: params.name)`

assert.Equal(t, expectedQuery, props.Queries[0].Text)
assert.Equal(t, "advanced", props.Queries[0].EditMode)
@@ -3869,12 +3866,12 @@ from(bucket: params.bucket)
actual := impact.Summary.Dashboards[0]

expectedParams := `option params = {
bucket: "bar",
start: -24h0m0s,
stop: now(),
name: "max",
floatVal: 37.2,
minVal: 10,
bucket: "bar",
start: -24h0m0s,
stop: now(),
name: "max",
floatVal: 37.2,
minVal: 10,
}`
isExpectedQuery(t, actual, expectedParams)

@@ -3955,12 +3952,12 @@ from(bucket: params.bucket)
actual := impact.Summary.Dashboards[0]

expectedParams := `option params = {
bucket: "foobar",
start: -5d,
stop: now(),
name: "min",
floatVal: 33.3,
minVal: 3,
bucket: "foobar",
start: -5d,
stop: now(),
name: "min",
floatVal: 33.3,
minVal: 3,
}`
isExpectedQuery(t, actual, expectedParams)

@@ -4084,15 +4081,12 @@ spec:
expectedQuery := expectedParams + `
from(bucket: params.bucket)
|> range(start: params.start, stop: params.stop)
|> filter(fn: (r) =>
(r._measurement == "processes"))
|> filter(fn: (r) =>
(r.floater == params.floatVal))
|> filter(fn: (r) =>
(r._value > params.minVal))
|> aggregateWindow(every: 1m, fn: max)
|> yield(name: params.name)`
|> range(start: params.start, stop: params.stop)
|> filter(fn: (r) => r._measurement == "processes")
|> filter(fn: (r) => r.floater == params.floatVal)
|> filter(fn: (r) => r._value > params.minVal)
|> aggregateWindow(every: 1m, fn: max)
|> yield(name: params.name)`

assert.Equal(t, expectedQuery, actual.Query)
}
@@ -4120,12 +4114,12 @@ from(bucket: params.bucket)
actual := impact.Summary.Tasks[0]

expectedParams := `option params = {
bucket: "bar",
start: -24h0m0s,
stop: now(),
name: "max",
floatVal: 37.2,
minVal: 10,
bucket: "bar",
start: -24h0m0s,
stop: now(),
name: "max",
floatVal: 37.2,
minVal: 10,
}`
isExpectedQuery(t, actual, expectedParams)

@@ -4206,12 +4200,12 @@ from(bucket: params.bucket)
actual := impact.Summary.Tasks[0]

expectedParams := `option params = {
bucket: "foobar",
start: -5d,
stop: now(),
name: "min",
floatVal: 33.3,
minVal: 3,
bucket: "foobar",
start: -5d,
stop: now(),
name: "min",
floatVal: 33.3,
minVal: 3,
}`
isExpectedQuery(t, actual, expectedParams)

2 changes: 1 addition & 1 deletion http/check_test.go
@@ -418,7 +418,7 @@ func TestService_handleGetCheckQuery(t *testing.T) {
wants: wants{
statusCode: http.StatusOK,
contentType: "application/json; charset=utf-8",
body: "{\"flux\":\"package main\\nimport \\\"influxdata/influxdb/monitor\\\"\\nimport \\\"influxdata/influxdb/v1\\\"\\n\\ndata = from(bucket: \\\"foo\\\")\\n\\t|\\u003e range(start: -1h)\\n\\t|\\u003e filter(fn: (r) =\\u003e\\n\\t\\t(r._field == \\\"usage_idle\\\"))\\n\\t|\\u003e aggregateWindow(every: 1h, fn: mean, createEmpty: false)\\n\\noption task = {name: \\\"hello\\\", every: 1h}\\n\\ncheck = {\\n\\t_check_id: \\\"020f755c3c082000\\\",\\n\\t_check_name: \\\"hello\\\",\\n\\t_type: \\\"threshold\\\",\\n\\ttags: {aaa: \\\"vaaa\\\", bbb: \\\"vbbb\\\"},\\n}\\nok = (r) =\\u003e\\n\\t(r[\\\"usage_idle\\\"] \\u003e 10.0)\\ninfo = (r) =\\u003e\\n\\t(r[\\\"usage_idle\\\"] \\u003c 40.0)\\nwarn = (r) =\\u003e\\n\\t(r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0)\\ncrit = (r) =\\u003e\\n\\t(r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0)\\nmessageFn = (r) =\\u003e\\n\\t(\\\"whoa! {check.yeah}\\\")\\n\\ndata\\n\\t|\\u003e v1[\\\"fieldsAsCols\\\"]()\\n\\t|\\u003e monitor[\\\"check\\\"](\\n\\t\\tdata: check,\\n\\t\\tmessageFn: messageFn,\\n\\t\\tok: ok,\\n\\t\\tinfo: info,\\n\\t\\twarn: warn,\\n\\t\\tcrit: crit,\\n\\t)\"}\n",
body: "{\"flux\":\"import \\\"influxdata/influxdb/monitor\\\"\\nimport \\\"influxdata/influxdb/v1\\\"\\n\\ndata = from(bucket: \\\"foo\\\") |\\u003e range(start: -1h) |\\u003e filter(fn: (r) =\\u003e r._field == \\\"usage_idle\\\")\\n |\\u003e aggregateWindow(every: 1h, fn: mean, createEmpty: false)\\n\\noption task = {name: \\\"hello\\\", every: 1h}\\n\\ncheck = {_check_id: \\\"020f755c3c082000\\\", _check_name: \\\"hello\\\", _type: \\\"threshold\\\", tags: {aaa: \\\"vaaa\\\", bbb: \\\"vbbb\\\"}}\\nok = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003e 10.0\\ninfo = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0\\nwarn = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0\\ncrit = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0\\nmessageFn = (r) =\\u003e \\\"whoa! {check.yeah}\\\"\\n\\ndata |\\u003e v1[\\\"fieldsAsCols\\\"]() |\\u003e monitor[\\\"check\\\"](\\n data: check,\\n messageFn: messageFn,\\n ok: ok,\\n info: info,\\n warn: warn,\\n crit: crit,\\n)\"}\n",
},
},
}
15 changes: 14 additions & 1 deletion notification/check/check_test.go
@@ -2,10 +2,14 @@ package check_test

import (
"encoding/json"
"github.com/influxdata/influxdb/v2/kit/platform/errors"
"testing"
"time"

"github.com/influxdata/flux/ast"
"github.com/influxdata/flux/ast/astutil"
"github.com/influxdata/influxdb/v2/kit/platform/errors"
"github.com/stretchr/testify/require"

"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/influxdata/flux/parser"
@@ -282,3 +286,12 @@ func TestJSON(t *testing.T) {
t.Run(c.name, fn)
}
}

func mustFormatPackage(t *testing.T, pkg *ast.Package) string {
if len(pkg.Files) == 0 {
t.Fatal("package expected to have at least one file")
}
v, err := astutil.Format(pkg.Files[0])
require.NoError(t, err)
return v
}
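The mustFormatPackage helper above replaces direct ast.Format calls in these tests (its call sites appear in custom_test.go below). A minimal usage sketch, assuming it runs in the same check_test package where parser and require are already imported; the test name and query string are illustrative only:

func TestMustFormatPackageSketch(t *testing.T) {
	// Illustrative only: parse a trivial script and format it via the new helper.
	src := `from(bucket: "_tasks") |> range(start: -1m)`
	formatted := mustFormatPackage(t, parser.ParseSource(src))
	require.NotEmpty(t, formatted)
}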
3 changes: 2 additions & 1 deletion notification/check/custom.go
@@ -5,6 +5,7 @@ import (
"time"

"github.com/influxdata/flux/ast"
"github.com/influxdata/flux/ast/astutil"
"github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/kit/platform"
"github.com/influxdata/influxdb/v2/kit/platform/errors"
@@ -98,7 +99,7 @@ func (c Custom) sanitizeFlux(lang fluxlang.FluxLanguageService) (string, error)
}
})

return ast.Format(p), nil
return astutil.Format(p.Files[0])
}

func propertyHasValue(prop *ast.Property, key string, value string) bool {
17 changes: 7 additions & 10 deletions notification/check/custom_test.go
@@ -6,7 +6,6 @@ import (
"testing"

"github.com/andreyvit/diff"
"github.com/influxdata/flux/ast"
"github.com/influxdata/flux/parser"
"github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/notification/check"
@@ -28,10 +27,8 @@ import "influxdata/influxdb/v1"
data = from(bucket: "_tasks")
|> range(start: -1m)
|> filter(fn: (r) =>
(r._measurement == "runs"))
|> filter(fn: (r) =>
(r._field == "finishedAt"))
|> filter(fn: (r) => r._measurement == "runs")
|> filter(fn: (r) => r._field == "finishedAt")
|> aggregateWindow(every: 1m, fn: mean, createEmpty: false)
option task = {name: "moo", every: 1m, offset: 0s}
@@ -110,13 +107,13 @@ data
ID: 10,
Name: "moo",
Query: influxdb.DashboardQuery{
Text: ast.Format(parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000a"))),
Text: mustFormatPackage(t, parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000a"))),
},
},
},
wants: wants{
err: nil,
script: ast.Format(parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000a"))),
script: mustFormatPackage(t, parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000a"))),
},
},
{
@@ -126,13 +123,13 @@ data
ID: 10,
Name: "moo",
Query: influxdb.DashboardQuery{
Text: ast.Format(parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000b"))),
Text: mustFormatPackage(t, parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000b"))),
},
},
},
wants: wants{
err: nil,
script: ast.Format(parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000a"))),
script: mustFormatPackage(t, parser.ParseSource(fmt.Sprintf(validQuery, "000000000000000a"))),
},
},
{
@@ -157,7 +154,7 @@ data
ID: 10,
Name: "moo",
Query: influxdb.DashboardQuery{
Text: ast.Format(parser.ParseSource(fmt.Sprintf(invalidTaskQuery, "000000000000000b"))),
Text: mustFormatPackage(t, parser.ParseSource(fmt.Sprintf(invalidTaskQuery, "000000000000000b"))),
},
},
},
9 changes: 5 additions & 4 deletions notification/check/deadman.go
@@ -6,6 +6,7 @@ import (
"strings"

"github.com/influxdata/flux/ast"
"github.com/influxdata/flux/ast/astutil"
"github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/notification"
"github.com/influxdata/influxdb/v2/notification/flux"
@@ -33,18 +34,18 @@ func (c Deadman) Type() string {

// GenerateFlux returns a flux script for the Deadman provided.
func (c Deadman) GenerateFlux(lang fluxlang.FluxLanguageService) (string, error) {
p, err := c.GenerateFluxAST(lang)
f, err := c.GenerateFluxAST(lang)
if err != nil {
return "", err
}

return ast.Format(p), nil
return astutil.Format(f)
}

// GenerateFluxAST returns a flux AST for the deadman provided. If there
// are any errors in the flux that the user provided the function will return
// an error for each error found when the script is parsed.
func (c Deadman) GenerateFluxAST(lang fluxlang.FluxLanguageService) (*ast.Package, error) {
func (c Deadman) GenerateFluxAST(lang fluxlang.FluxLanguageService) (*ast.File, error) {
p, err := query.Parse(lang, c.Query.Text)
if p == nil {
return nil, err
@@ -69,7 +70,7 @@ func (c Deadman) GenerateFluxAST(lang fluxlang.FluxLanguageService) (*ast.Packag
f.Imports = append(f.Imports, flux.Imports("influxdata/influxdb/monitor", "experimental", "influxdata/influxdb/v1")...)
f.Body = append(f.Body, c.generateFluxASTBody()...)

return p, nil
return f, nil
}

func (c Deadman) generateFluxASTBody() []ast.Statement {