From 49bf7b53a7755ed7ed66ba691f1b6893a2f3351a Mon Sep 17 00:00:00 2001
From: Jaz White
Date: Mon, 10 Jul 2023 15:04:58 -0400
Subject: [PATCH] feat: use coderefs matcher (#35)

Instead of doing a string-contains search, we can use the coderefs
matcher to find flag keys and aliases. This ensures that results from
the commenter are accurate and consistent with what is reported by the
official code refs tool, and it sets us up to enable monorepo support.

Custom delimiters in `.launchdarkly/coderefs.yaml` will be respected.

Related changes:
- test data needed to use `kebabcase` aliases, since we now use
  [delimiters](https://github.com/launchdarkly/ld-find-code-refs/blob/ed386199c5f41c224a9e7076a751f031cc38c644/docs/CONFIGURATION.md#delimiters)
  like code refs
- test cases added to cover delimiters

Dev notes: there is certainly some future refactoring that can be done
in the code refs tool itself to make getting a matcher, etc. more
uniform.

Tested in gonfalon here:
https://github.com/launchdarkly/gonfalon/pull/27962#issuecomment-1629334562
---
 .launchdarkly/coderefs.yml | 1 +
 diff/diff.go | 58 +-
 diff/diff_test.go | 68 +-
 go.mod | 7 +-
 go.sum | 14 +-
 main.go | 19 +-
 search/search.go | 73 +
 testdata/test | 9 +-
 vendor/github.com/go-git/go-git/v5/LICENSE | 201 ++
 .../go-git/go-git/v5/plumbing/error.go | 35 +
 .../go-git/go-git/v5/plumbing/hash.go | 84 +
 .../go-git/go-git/v5/plumbing/hash/hash.go | 60 +
 .../go-git/v5/plumbing/hash/hash_sha1.go | 15 +
 .../go-git/v5/plumbing/hash/hash_sha256.go | 15 +
 .../go-git/go-git/v5/plumbing/memory.go | 72 +
 .../go-git/go-git/v5/plumbing/object.go | 111 +
 .../go-git/go-git/v5/plumbing/reference.go | 225 ++
 .../go-git/go-git/v5/plumbing/revision.go | 11 +
 .../ld-find-code-refs/v2/flags/flags.go | 71 +
 .../ld-find-code-refs/v2/search/files.go | 107 +
 .../ld-find-code-refs/v2/search/matcher.go | 198 ++
 .../ld-find-code-refs/v2/search/search.go | 233 ++
 .../petar-dambovaliev/aho-corasick/.gitignore | 3 +
 .../petar-dambovaliev/aho-corasick/LICENSE | 21 +
 .../petar-dambovaliev/aho-corasick/README.md | 85 +
 .../aho-corasick/ahocorasick.go | 384 +++
 .../aho-corasick/automaton.go | 222 ++
 .../aho-corasick/byte_frequencies.go | 260 ++
 .../petar-dambovaliev/aho-corasick/classes.go | 79 +
 .../petar-dambovaliev/aho-corasick/dfa.go | 731 ++++++
 .../petar-dambovaliev/aho-corasick/nfa.go | 830 ++++++
 .../aho-corasick/prefilter.go | 601 +++++
 vendor/github.com/pjbgf/sha1cd/Dockerfile.arm | 23 +
 .../github.com/pjbgf/sha1cd/Dockerfile.arm64 | 23 +
 vendor/github.com/pjbgf/sha1cd/LICENSE | 201 ++
 vendor/github.com/pjbgf/sha1cd/Makefile | 40 +
 vendor/github.com/pjbgf/sha1cd/README.md | 58 +
 vendor/github.com/pjbgf/sha1cd/detection.go | 11 +
 .../github.com/pjbgf/sha1cd/internal/const.go | 42 +
 vendor/github.com/pjbgf/sha1cd/sha1cd.go | 227 ++
 .../pjbgf/sha1cd/sha1cdblock_amd64.go | 50 +
 .../pjbgf/sha1cd/sha1cdblock_amd64.s | 2274 +++++++++++++++++
 .../pjbgf/sha1cd/sha1cdblock_generic.go | 268 ++
 .../pjbgf/sha1cd/sha1cdblock_noasm.go | 8 +
 vendor/github.com/pjbgf/sha1cd/ubc/check.go | 368 +++
 vendor/github.com/pjbgf/sha1cd/ubc/const.go | 624 +++++
 vendor/github.com/pjbgf/sha1cd/ubc/doc.go | 3 +
 vendor/golang.org/x/tools/LICENSE | 27 +
 vendor/golang.org/x/tools/PATENTS | 22 +
 .../golang.org/x/tools/godoc/util/throttle.go | 85 +
 vendor/golang.org/x/tools/godoc/util/util.go | 90 +
 .../golang.org/x/tools/godoc/vfs/emptyvfs.go | 89 +
 vendor/golang.org/x/tools/godoc/vfs/fs.go | 80 +
 .../golang.org/x/tools/godoc/vfs/namespace.go | 387 +++
 vendor/golang.org/x/tools/godoc/vfs/os.go | 105 +
vendor/golang.org/x/tools/godoc/vfs/vfs.go | 58 + vendor/modules.txt | 22 +- 57 files changed, 10030 insertions(+), 58 deletions(-) create mode 100644 search/search.go create mode 100644 vendor/github.com/go-git/go-git/v5/LICENSE create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/error.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/hash.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/hash/hash.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha1.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha256.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/memory.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/object.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/reference.go create mode 100644 vendor/github.com/go-git/go-git/v5/plumbing/revision.go create mode 100644 vendor/github.com/launchdarkly/ld-find-code-refs/v2/flags/flags.go create mode 100644 vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/files.go create mode 100644 vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/matcher.go create mode 100644 vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/search.go create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/.gitignore create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/LICENSE create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/README.md create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/ahocorasick.go create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/automaton.go create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/byte_frequencies.go create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/classes.go create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/dfa.go create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/nfa.go create mode 100644 vendor/github.com/petar-dambovaliev/aho-corasick/prefilter.go create mode 100644 vendor/github.com/pjbgf/sha1cd/Dockerfile.arm create mode 100644 vendor/github.com/pjbgf/sha1cd/Dockerfile.arm64 create mode 100644 vendor/github.com/pjbgf/sha1cd/LICENSE create mode 100644 vendor/github.com/pjbgf/sha1cd/Makefile create mode 100644 vendor/github.com/pjbgf/sha1cd/README.md create mode 100644 vendor/github.com/pjbgf/sha1cd/detection.go create mode 100644 vendor/github.com/pjbgf/sha1cd/internal/const.go create mode 100644 vendor/github.com/pjbgf/sha1cd/sha1cd.go create mode 100644 vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.go create mode 100644 vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.s create mode 100644 vendor/github.com/pjbgf/sha1cd/sha1cdblock_generic.go create mode 100644 vendor/github.com/pjbgf/sha1cd/sha1cdblock_noasm.go create mode 100644 vendor/github.com/pjbgf/sha1cd/ubc/check.go create mode 100644 vendor/github.com/pjbgf/sha1cd/ubc/const.go create mode 100644 vendor/github.com/pjbgf/sha1cd/ubc/doc.go create mode 100644 vendor/golang.org/x/tools/LICENSE create mode 100644 vendor/golang.org/x/tools/PATENTS create mode 100644 vendor/golang.org/x/tools/godoc/util/throttle.go create mode 100644 vendor/golang.org/x/tools/godoc/util/util.go create mode 100644 vendor/golang.org/x/tools/godoc/vfs/emptyvfs.go create mode 100644 vendor/golang.org/x/tools/godoc/vfs/fs.go create mode 100644 vendor/golang.org/x/tools/godoc/vfs/namespace.go create mode 100644 vendor/golang.org/x/tools/godoc/vfs/os.go create mode 100644 
vendor/golang.org/x/tools/godoc/vfs/vfs.go diff --git a/.launchdarkly/coderefs.yml b/.launchdarkly/coderefs.yml index 430249a6..b7b4dd2a 100644 --- a/.launchdarkly/coderefs.yml +++ b/.launchdarkly/coderefs.yml @@ -1,3 +1,4 @@ aliases: - type: camelcase - type: snakecase + - type: kebabcase diff --git a/diff/diff.go b/diff/diff.go index b0c9497b..e995413f 100644 --- a/diff/diff.go +++ b/diff/diff.go @@ -5,9 +5,9 @@ import ( "os" "strings" - ldapi "github.com/launchdarkly/api-client-go/v7" lflags "github.com/launchdarkly/cr-flags/flags" "github.com/launchdarkly/cr-flags/ignore" + lsearch "github.com/launchdarkly/ld-find-code-refs/v2/search" "github.com/sourcegraph/go-diff/diff" ) @@ -50,44 +50,40 @@ func CheckDiff(parsedDiff *diff.FileDiff, workspace string) *DiffPaths { return &diffPaths } -func ProcessDiffs(hunk *diff.Hunk, flagsRef lflags.FlagsRef, flags ldapi.FeatureFlags, aliases map[string][]string, maxFlags int) { - diffRows := strings.Split(string(hunk.Body), "\n") - for _, row := range diffRows { +func ProcessDiffs(matcher lsearch.Matcher, hunk *diff.Hunk, flagsRef lflags.FlagsRef, maxFlags int) { + flagMap := map[Operation]lflags.FlagAliasMap{ + Add: flagsRef.FlagsAdded, + Delete: flagsRef.FlagsRemoved, + } + diffLines := strings.Split(string(hunk.Body), "\n") + for _, line := range diffLines { if flagsRef.Count() >= maxFlags { break } - op := operation(row) - for _, flag := range flags.Items { - if strings.Contains(row, flag.Key) { - if op == Add { - if _, ok := flagsRef.FlagsAdded[flag.Key]; !ok { - flagsRef.FlagsAdded[flag.Key] = lflags.AliasSet{} - } - } else if op == Delete { - if _, ok := flagsRef.FlagsRemoved[flag.Key]; !ok { - flagsRef.FlagsRemoved[flag.Key] = lflags.AliasSet{} - } - } + + op := operation(line) + if op == Equal { + continue + } + + // only one for now + elementMatcher := matcher.Elements[0] + for _, flagKey := range elementMatcher.FindMatches(line) { + if _, ok := flagMap[op][flagKey]; !ok { + flagMap[op][flagKey] = make(lflags.AliasSet) } - if len(aliases[flag.Key]) > 0 { - for _, alias := range aliases[flag.Key] { - if strings.Contains(row, alias) { - if op == Add { - if _, ok := flagsRef.FlagsAdded[flag.Key]; !ok { - flagsRef.FlagsAdded[flag.Key] = lflags.AliasSet{} - } - flagsRef.FlagsAdded[flag.Key][alias] = true - } else if op == Delete { - if _, ok := flagsRef.FlagsRemoved[flag.Key]; !ok { - flagsRef.FlagsRemoved[flag.Key] = lflags.AliasSet{} - } - flagsRef.FlagsRemoved[flag.Key][alias] = true - } - } + if aliasMatches := matcher.FindAliases(line, flagKey); len(aliasMatches) > 0 { + if _, ok := flagMap[op][flagKey]; !ok { + flagMap[op][flagKey] = make(lflags.AliasSet) + } + for _, alias := range aliasMatches { + flagMap[op][flagKey][alias] = true } } } } + flagsRef.FlagsAdded = flagMap[Add] + flagsRef.FlagsRemoved = flagMap[Delete] } // Operation defines the operation of a diff item. 
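To illustrate the flow that the new `ProcessDiffs` signature relies on, here is a minimal, self-contained sketch of building a `Matcher` and querying a single diff line with it. The flag keys, aliases, project key ("default"), and the sample line are made up for illustration; the only API assumed is the exported `search` package vendored in this PR (`NewElementMatcher`, `Matcher`, `ElementMatcher.FindMatches`, `Matcher.FindAliases`).

```go
package main

import (
	"fmt"

	lsearch "github.com/launchdarkly/ld-find-code-refs/v2/search"
)

func main() {
	// In the action these come from the LaunchDarkly API and the aliases
	// package; the literals here are illustrative only.
	flagKeys := []string{"example-flag", "sample-flag"}
	aliases := map[string][]string{"example-flag": {"exampleFlag"}}

	// Default code refs delimiters: double quote, single quote, backtick.
	delimiters := "\"'`"

	matcher := lsearch.Matcher{
		Elements: []lsearch.ElementMatcher{
			lsearch.NewElementMatcher("default", "", delimiters, flagKeys, aliases),
		},
	}

	// A hypothetical added line from a diff hunk.
	line := `+enabled := client.BoolVariation("example-flag", user, false)`

	// FindMatches only fires on delimited keys (or aliases), so substrings
	// embedded in longer identifiers are ignored.
	for _, key := range matcher.Elements[0].FindMatches(line) {
		fmt.Println("flag:", key, "aliases:", matcher.FindAliases(line, key))
	}
}
```

Because patterns are built as delimiter-wrapped keys, a bare `example-flag` buried inside a longer identifier no longer matches, which is what keeps the commenter consistent with the code refs tool (and what the new "require delimiters" test cases below exercise).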
diff --git a/diff/diff_test.go b/diff/diff_test.go index 95664fa4..1f4b0ffc 100644 --- a/diff/diff_test.go +++ b/diff/diff_test.go @@ -6,6 +6,7 @@ import ( ldapi "github.com/launchdarkly/api-client-go/v7" "github.com/launchdarkly/cr-flags/config" lflags "github.com/launchdarkly/cr-flags/flags" + lsearch "github.com/launchdarkly/ld-find-code-refs/v2/search" "github.com/sourcegraph/go-diff/diff" "github.com/stretchr/testify/assert" ) @@ -45,10 +46,20 @@ type testProcessor struct { Config config.Config } +func (t testProcessor) flagKeys() []string { + keys := make([]string, 0, len(t.Flags.Items)) + for _, f := range t.Flags.Items { + keys = append(keys, f.Key) + } + return keys +} + func newProcessFlagAccEnv() *testProcessor { flag := createFlag("example-flag") + flag2 := createFlag("sample-flag") flags := ldapi.FeatureFlags{} flags.Items = append(flags.Items, flag) + flags.Items = append(flags.Items, flag2) flagsAdded := make(lflags.FlagAliasMap) flagsRemoved := make(lflags.FlagAliasMap) flagsRef := lflags.FlagsRef{ @@ -117,6 +128,7 @@ func TestProcessDiffs(t *testing.T) { sampleBody string expected lflags.FlagsRef aliases map[string][]string + delimiters string }{ { name: "add flag", @@ -147,6 +159,23 @@ func TestProcessDiffs(t *testing.T) { -here is a flag -example-flag - + this is no changes + in the hunk`, + }, + { + name: "add and remove flag", + expected: lflags.FlagsRef{ + FlagsAdded: lflags.FlagAliasMap{"sample-flag": lflags.AliasSet{}}, + FlagsRemoved: lflags.FlagAliasMap{"example-flag": lflags.AliasSet{}}, + }, + aliases: map[string][]string{}, + sampleBody: ` + -Testing data +-this is for testing +-here is a flag +-example-flag +- ++ sample-flag this is no changes in the hunk`, }, @@ -174,13 +203,43 @@ func TestProcessDiffs(t *testing.T) { FlagsAdded: lflags.FlagAliasMap{"example-flag": lflags.AliasSet{"exampleFlag": true}}, FlagsRemoved: lflags.FlagAliasMap{}, }, - aliases: map[string][]string{"example-flag": []string{"exampleFlag"}}, + aliases: map[string][]string{"example-flag": {"exampleFlag"}}, sampleBody: ` +Testing data +this is for testing +here is a flag +exampleFlag +exampleFlag ++`, + }, + { + name: "require delimiters - no matches", + expected: lflags.FlagsRef{ + FlagsAdded: lflags.FlagAliasMap{}, + FlagsRemoved: lflags.FlagAliasMap{}, + }, + delimiters: "'\"", + aliases: map[string][]string{}, + sampleBody: ` + +Testing data ++this is for testing ++here is a flag ++example-flag ++`, + }, + { + name: "require delimiters - match", + expected: lflags.FlagsRef{ + FlagsAdded: lflags.FlagAliasMap{"example-flag": lflags.AliasSet{}}, + FlagsRemoved: lflags.FlagAliasMap{}, + }, + delimiters: "'\"", + aliases: map[string][]string{}, + sampleBody: ` + +Testing data ++this is for testing ++here is a flag ++"example-flag" +`, }, } @@ -196,7 +255,12 @@ func TestProcessDiffs(t *testing.T) { StartPosition: 1, Body: []byte(tc.sampleBody), } - ProcessDiffs(hunk, processor.FlagsRef, processor.Flags, tc.aliases, 5) + elements := []lsearch.ElementMatcher{} + elements = append(elements, lsearch.NewElementMatcher("default", "", tc.delimiters, processor.flagKeys(), tc.aliases)) + matcher := lsearch.Matcher{ + Elements: elements, + } + ProcessDiffs(matcher, hunk, processor.FlagsRef, 5) assert.Equal(t, tc.expected, processor.FlagsRef) }) } diff --git a/go.mod b/go.mod index 4c4e7e4f..83798a93 100644 --- a/go.mod +++ b/go.mod @@ -17,7 +17,7 @@ require ( github.com/jstemmer/go-junit-report/v2 v2.0.0 github.com/kyoh86/richgo v0.3.12 github.com/launchdarkly/api-client-go/v7 v7.1.1 - 
github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230627211718-c0eec7327a20 + github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230628134336-ed386199c5f4 ) require ( @@ -26,6 +26,7 @@ require ( github.com/bmatcuk/doublestar/v4 v4.6.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/go-git/go-git/v5 v5.7.0 // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/uuid v1.3.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect @@ -45,6 +46,8 @@ require ( github.com/morikuni/aec v1.0.0 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/pelletier/go-toml/v2 v2.0.8 // indirect + github.com/petar-dambovaliev/aho-corasick v0.0.0-20211021192214-5ab2d9280aa9 // indirect + github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/shopspring/decimal v1.2.0 // indirect github.com/spf13/afero v1.9.5 // indirect @@ -57,9 +60,9 @@ require ( golang.org/x/net v0.11.0 // indirect golang.org/x/sys v0.9.0 // indirect golang.org/x/text v0.10.0 // indirect + golang.org/x/tools v0.10.0 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/protobuf v1.30.0 // indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 8b96f5be..57f01643 100644 --- a/go.sum +++ b/go.sum @@ -67,6 +67,8 @@ github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/go-git/go-git/v5 v5.7.0 h1:t9AudWVLmqzlo+4bqdf7GY+46SUuRsx59SboFxkq2aE= +github.com/go-git/go-git/v5 v5.7.0/go.mod h1:coJHKEOk5kUClpsNlXrUvPrDxY3w3gjHvhcZd8Fodw8= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -163,7 +165,6 @@ github.com/jstemmer/go-junit-report/v2 v2.0.0/go.mod h1:mgHVr7VUo5Tn8OLVr1cKnLuE github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= @@ -176,8 +177,8 @@ github.com/launchdarkly/api-client-go/v7 v7.1.1 h1:3VBkFt9xHljMw5KDlVFDUogxfH78Y github.com/launchdarkly/api-client-go/v7 v7.1.1/go.mod h1:GVl1inKsWoKX3yLgdqrjxWw8k4ih0HlSmdnrhi5NNDs= github.com/launchdarkly/json-patch v0.0.0-20180720210516-dd68d883319f h1:jfiPiz2hE/7mHv2NOS4cm07sSJCsKlbxmR7pzPhhvpU= github.com/launchdarkly/json-patch v0.0.0-20180720210516-dd68d883319f/go.mod h1:CHbYdMs8UjvNnS2fatlQvi4UYnBTRYGxRHc/0kQupSQ= 
-github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230627211718-c0eec7327a20 h1:U5d40xUQQlnD/dTxd7av9k+G1ghC5Ny2Nw7qtShcaMQ= -github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230627211718-c0eec7327a20/go.mod h1:nNi9KzXnlIlE2vD9+ZMbWM/pES6VCMciSsK9duta918= +github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230628134336-ed386199c5f4 h1:mupbGsSmix7Ct0o+BzGOVetwGY7dRENzJdwDu9YroVM= +github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230628134336-ed386199c5f4/go.mod h1:nNi9KzXnlIlE2vD9+ZMbWM/pES6VCMciSsK9duta918= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= @@ -199,6 +200,10 @@ github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= +github.com/petar-dambovaliev/aho-corasick v0.0.0-20211021192214-5ab2d9280aa9 h1:lL+y4Xv20pVlCGyLzNHRC0I0rIHhIL1lTvHizoS/dU8= +github.com/petar-dambovaliev/aho-corasick v0.0.0-20211021192214-5ab2d9280aa9/go.mod h1:EHPiTAKtiFmrMldLUNswFwfZ2eJIYBHktdaUTZxYWRw= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -448,6 +453,8 @@ golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.10.0 h1:tvDr/iQoUqNdohiYm0LmmKcBk+q86lb9EprIUFhHHGg= +golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -548,7 +555,6 @@ google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqw gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= diff --git a/main.go b/main.go index 
cd4ada64..656ecb31 100644 --- a/main.go +++ b/main.go @@ -18,7 +18,7 @@ import ( e "github.com/launchdarkly/cr-flags/errors" lflags "github.com/launchdarkly/cr-flags/flags" gha "github.com/launchdarkly/cr-flags/internal/github_actions" - "github.com/launchdarkly/ld-find-code-refs/v2/aliases" + "github.com/launchdarkly/cr-flags/search" "github.com/launchdarkly/ld-find-code-refs/v2/options" "github.com/sourcegraph/go-diff/diff" "github.com/spf13/viper" @@ -45,7 +45,10 @@ func main() { os.Exit(0) } - aliases, err := getAliases(config, flagKeys) + opts, err := getOptions(config) + failExit(err) + + matcher, err := search.GetMatcher(config, opts, flagKeys) failExit(err) multiFiles, err := getDiffs(ctx, config, *event.PullRequest.Number) @@ -62,7 +65,7 @@ func main() { continue } for _, hunk := range parsedDiff.Hunks { - ldiff.ProcessDiffs(hunk, flagsRef, flags, aliases, config.MaxFlags) + ldiff.ProcessDiffs(matcher, hunk, flagsRef, config.MaxFlags) } } @@ -188,7 +191,7 @@ func getDiffs(ctx context.Context, config *lcr.Config, prNumber int) ([]*diff.Fi return diff.ParseMultiFileDiff([]byte(raw)) } -func getAliases(config *lcr.Config, flagKeys []string) (map[string][]string, error) { +func getOptions(config *lcr.Config) (options.Options, error) { // Needed for ld-find-code-refs to work as a library viper.Set("dir", config.Workspace) viper.Set("accessToken", config.ApiToken) @@ -197,13 +200,7 @@ func getAliases(config *lcr.Config, flagKeys []string) (map[string][]string, err if err != nil { log.Println(err) } - opts, err := options.GetOptions() - if err != nil { - log.Println(err) - } - - return aliases.GenerateAliases(flagKeys, opts.Aliases, config.Workspace) - + return options.GetOptions() } func setOutputs(flagsRef lflags.FlagsRef) { diff --git a/search/search.go b/search/search.go new file mode 100644 index 00000000..7ad5b35e --- /dev/null +++ b/search/search.go @@ -0,0 +1,73 @@ +package search + +import ( + "log" + "strings" + + "github.com/launchdarkly/ld-find-code-refs/v2/aliases" + "github.com/launchdarkly/ld-find-code-refs/v2/options" + lsearch "github.com/launchdarkly/ld-find-code-refs/v2/search" + + lcr "github.com/launchdarkly/cr-flags/config" + "github.com/spf13/viper" +) + +func GetMatcher(config *lcr.Config, opts options.Options, flagKeys []string) (matcher lsearch.Matcher, err error) { + elements := []lsearch.ElementMatcher{} + + aliasesByFlagKey, err := aliases.GenerateAliases(flagKeys, opts.Aliases, config.Workspace) + if err != nil { + return lsearch.Matcher{}, err + } + + delimiters := strings.Join(Dedupe(getDelimiters(opts)), "") + elements = append(elements, lsearch.NewElementMatcher(config.LdProject, "", delimiters, flagKeys, aliasesByFlagKey)) + matcher = lsearch.Matcher{ + Elements: elements, + } + + return matcher, nil +} + +func getAliases(config *lcr.Config, flagKeys []string) (map[string][]string, error) { + // Needed for ld-find-code-refs to work as a library + viper.Set("dir", config.Workspace) + viper.Set("accessToken", config.ApiToken) + + err := options.InitYAML() + if err != nil { + log.Println(err) + } + opts, err := options.GetOptions() + if err != nil { + log.Println(err) + } + + return aliases.GenerateAliases(flagKeys, opts.Aliases, config.Workspace) +} + +func getDelimiters(opts options.Options) []string { + delims := []string{`"`, `'`, "`"} + if opts.Delimiters.DisableDefaults { + delims = []string{} + } + + delims = append(delims, opts.Delimiters.Additional...) 
+ + return delims +} + +func Dedupe(s []string) []string { + if len(s) <= 1 { + return s + } + keys := make(map[string]struct{}, len(s)) + ret := make([]string, 0, len(s)) + for _, entry := range s { + if _, value := keys[entry]; !value { + keys[entry] = struct{}{} + ret = append(ret, entry) + } + } + return ret +} diff --git a/testdata/test b/testdata/test index a243c20d..1c2d7a7d 100644 --- a/testdata/test +++ b/testdata/test @@ -1,8 +1,7 @@ - +show-widgets mobile-app-promo-ios - +showWidgets betaUi - +show_widgets beta_ui -saver-goal -mobile-app-promo-ios +show-widgets diff --git a/vendor/github.com/go-git/go-git/v5/LICENSE b/vendor/github.com/go-git/go-git/v5/LICENSE new file mode 100644 index 00000000..8aa3d854 --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018 Sourced Technologies, S.L. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/error.go b/vendor/github.com/go-git/go-git/v5/plumbing/error.go new file mode 100644 index 00000000..a3ebed3f --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/error.go @@ -0,0 +1,35 @@ +package plumbing + +import "fmt" + +type PermanentError struct { + Err error +} + +func NewPermanentError(err error) *PermanentError { + if err == nil { + return nil + } + + return &PermanentError{Err: err} +} + +func (e *PermanentError) Error() string { + return fmt.Sprintf("permanent client error: %s", e.Err.Error()) +} + +type UnexpectedError struct { + Err error +} + +func NewUnexpectedError(err error) *UnexpectedError { + if err == nil { + return nil + } + + return &UnexpectedError{Err: err} +} + +func (e *UnexpectedError) Error() string { + return fmt.Sprintf("unexpected client error: %s", e.Err.Error()) +} diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/hash.go b/vendor/github.com/go-git/go-git/v5/plumbing/hash.go new file mode 100644 index 00000000..39bb73fb --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/hash.go @@ -0,0 +1,84 @@ +package plumbing + +import ( + "bytes" + "encoding/hex" + "sort" + "strconv" + + "github.com/go-git/go-git/v5/plumbing/hash" +) + +// Hash SHA1 hashed content +type Hash [hash.Size]byte + +// ZeroHash is Hash with value zero +var ZeroHash Hash + +// ComputeHash compute the hash for a given ObjectType and content +func ComputeHash(t ObjectType, content []byte) Hash { + h := NewHasher(t, int64(len(content))) + h.Write(content) + return h.Sum() +} + +// NewHash return a new Hash from a hexadecimal hash representation +func NewHash(s string) Hash { + b, _ := hex.DecodeString(s) + + var h Hash + copy(h[:], b) + + return h +} + +func (h Hash) IsZero() bool { + var empty Hash + return h == empty +} + +func (h Hash) String() string { + return hex.EncodeToString(h[:]) +} + +type Hasher struct { + hash.Hash +} + +func NewHasher(t ObjectType, size int64) Hasher { + h := Hasher{hash.New(hash.CryptoType)} + h.Write(t.Bytes()) + h.Write([]byte(" ")) + h.Write([]byte(strconv.FormatInt(size, 10))) + h.Write([]byte{0}) + return h +} + +func (h Hasher) Sum() (hash Hash) { + copy(hash[:], h.Hash.Sum(nil)) + return +} + +// HashesSort sorts a slice of Hashes in increasing order. 
+func HashesSort(a []Hash) { + sort.Sort(HashSlice(a)) +} + +// HashSlice attaches the methods of sort.Interface to []Hash, sorting in +// increasing order. +type HashSlice []Hash + +func (p HashSlice) Len() int { return len(p) } +func (p HashSlice) Less(i, j int) bool { return bytes.Compare(p[i][:], p[j][:]) < 0 } +func (p HashSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +// IsHash returns true if the given string is a valid hash. +func IsHash(s string) bool { + switch len(s) { + case hash.HexSize: + _, err := hex.DecodeString(s) + return err == nil + default: + return false + } +} diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash.go b/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash.go new file mode 100644 index 00000000..82d18561 --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash.go @@ -0,0 +1,60 @@ +// package hash provides a way for managing the +// underlying hash implementations used across go-git. +package hash + +import ( + "crypto" + "fmt" + "hash" + + "github.com/pjbgf/sha1cd" +) + +// algos is a map of hash algorithms. +var algos = map[crypto.Hash]func() hash.Hash{} + +func init() { + reset() +} + +// reset resets the default algos value. Can be used after running tests +// that registers new algorithms to avoid side effects. +func reset() { + algos[crypto.SHA1] = sha1cd.New + algos[crypto.SHA256] = crypto.SHA256.New +} + +// RegisterHash allows for the hash algorithm used to be overriden. +// This ensures the hash selection for go-git must be explicit, when +// overriding the default value. +func RegisterHash(h crypto.Hash, f func() hash.Hash) error { + if f == nil { + return fmt.Errorf("cannot register hash: f is nil") + } + + switch h { + case crypto.SHA1: + algos[h] = f + case crypto.SHA256: + algos[h] = f + default: + return fmt.Errorf("unsupported hash function: %v", h) + } + return nil +} + +// Hash is the same as hash.Hash. This allows consumers +// to not having to import this package alongside "hash". +type Hash interface { + hash.Hash +} + +// New returns a new Hash for the given hash function. +// It panics if the hash function is not registered. +func New(h crypto.Hash) Hash { + hh, ok := algos[h] + if !ok { + panic(fmt.Sprintf("hash algorithm not registered: %v", h)) + } + return hh() +} diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha1.go b/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha1.go new file mode 100644 index 00000000..e3cb60fe --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha1.go @@ -0,0 +1,15 @@ +//go:build !sha256 +// +build !sha256 + +package hash + +import "crypto" + +const ( + // CryptoType defines what hash algorithm is being used. + CryptoType = crypto.SHA1 + // Size defines the amount of bytes the hash yields. + Size = 20 + // HexSize defines the strings size of the hash when represented in hexadecimal. + HexSize = 40 +) diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha256.go b/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha256.go new file mode 100644 index 00000000..1c52b897 --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/hash/hash_sha256.go @@ -0,0 +1,15 @@ +//go:build sha256 +// +build sha256 + +package hash + +import "crypto" + +const ( + // CryptoType defines what hash algorithm is being used. + CryptoType = crypto.SHA256 + // Size defines the amount of bytes the hash yields. + Size = 32 + // HexSize defines the strings size of the hash when represented in hexadecimal. 
+ HexSize = 64 +) diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/memory.go b/vendor/github.com/go-git/go-git/v5/plumbing/memory.go new file mode 100644 index 00000000..6d11271d --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/memory.go @@ -0,0 +1,72 @@ +package plumbing + +import ( + "bytes" + "io" +) + +// MemoryObject on memory Object implementation +type MemoryObject struct { + t ObjectType + h Hash + cont []byte + sz int64 +} + +// Hash returns the object Hash, the hash is calculated on-the-fly the first +// time it's called, in all subsequent calls the same Hash is returned even +// if the type or the content have changed. The Hash is only generated if the +// size of the content is exactly the object size. +func (o *MemoryObject) Hash() Hash { + if o.h == ZeroHash && int64(len(o.cont)) == o.sz { + o.h = ComputeHash(o.t, o.cont) + } + + return o.h +} + +// Type returns the ObjectType +func (o *MemoryObject) Type() ObjectType { return o.t } + +// SetType sets the ObjectType +func (o *MemoryObject) SetType(t ObjectType) { o.t = t } + +// Size returns the size of the object +func (o *MemoryObject) Size() int64 { return o.sz } + +// SetSize set the object size, a content of the given size should be written +// afterwards +func (o *MemoryObject) SetSize(s int64) { o.sz = s } + +// Reader returns an io.ReadCloser used to read the object's content. +// +// For a MemoryObject, this reader is seekable. +func (o *MemoryObject) Reader() (io.ReadCloser, error) { + return nopCloser{bytes.NewReader(o.cont)}, nil +} + +// Writer returns a ObjectWriter used to write the object's content. +func (o *MemoryObject) Writer() (io.WriteCloser, error) { + return o, nil +} + +func (o *MemoryObject) Write(p []byte) (n int, err error) { + o.cont = append(o.cont, p...) + o.sz = int64(len(o.cont)) + + return len(p), nil +} + +// Close releases any resources consumed by the object when it is acting as a +// ObjectWriter. +func (o *MemoryObject) Close() error { return nil } + +// nopCloser exposes the extra methods of bytes.Reader while nopping Close(). +// +// This allows clients to attempt seeking in a cached Blob's Reader. +type nopCloser struct { + *bytes.Reader +} + +// Close does nothing. +func (nc nopCloser) Close() error { return nil } diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/object.go b/vendor/github.com/go-git/go-git/v5/plumbing/object.go new file mode 100644 index 00000000..2655dee4 --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/object.go @@ -0,0 +1,111 @@ +// package plumbing implement the core interfaces and structs used by go-git +package plumbing + +import ( + "errors" + "io" +) + +var ( + ErrObjectNotFound = errors.New("object not found") + // ErrInvalidType is returned when an invalid object type is provided. + ErrInvalidType = errors.New("invalid object type") +) + +// Object is a generic representation of any git object +type EncodedObject interface { + Hash() Hash + Type() ObjectType + SetType(ObjectType) + Size() int64 + SetSize(int64) + Reader() (io.ReadCloser, error) + Writer() (io.WriteCloser, error) +} + +// DeltaObject is an EncodedObject representing a delta. +type DeltaObject interface { + EncodedObject + // BaseHash returns the hash of the object used as base for this delta. + BaseHash() Hash + // ActualHash returns the hash of the object after applying the delta. + ActualHash() Hash + // Size returns the size of the object after applying the delta. 
+ ActualSize() int64 +} + +// ObjectType internal object type +// Integer values from 0 to 7 map to those exposed by git. +// AnyObject is used to represent any from 0 to 7. +type ObjectType int8 + +const ( + InvalidObject ObjectType = 0 + CommitObject ObjectType = 1 + TreeObject ObjectType = 2 + BlobObject ObjectType = 3 + TagObject ObjectType = 4 + // 5 reserved for future expansion + OFSDeltaObject ObjectType = 6 + REFDeltaObject ObjectType = 7 + + AnyObject ObjectType = -127 +) + +func (t ObjectType) String() string { + switch t { + case CommitObject: + return "commit" + case TreeObject: + return "tree" + case BlobObject: + return "blob" + case TagObject: + return "tag" + case OFSDeltaObject: + return "ofs-delta" + case REFDeltaObject: + return "ref-delta" + case AnyObject: + return "any" + default: + return "unknown" + } +} + +func (t ObjectType) Bytes() []byte { + return []byte(t.String()) +} + +// Valid returns true if t is a valid ObjectType. +func (t ObjectType) Valid() bool { + return t >= CommitObject && t <= REFDeltaObject +} + +// IsDelta returns true for any ObjectTyoe that represents a delta (i.e. +// REFDeltaObject or OFSDeltaObject). +func (t ObjectType) IsDelta() bool { + return t == REFDeltaObject || t == OFSDeltaObject +} + +// ParseObjectType parses a string representation of ObjectType. It returns an +// error on parse failure. +func ParseObjectType(value string) (typ ObjectType, err error) { + switch value { + case "commit": + typ = CommitObject + case "tree": + typ = TreeObject + case "blob": + typ = BlobObject + case "tag": + typ = TagObject + case "ofs-delta": + typ = OFSDeltaObject + case "ref-delta": + typ = REFDeltaObject + default: + err = ErrInvalidType + } + return +} diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/reference.go b/vendor/github.com/go-git/go-git/v5/plumbing/reference.go new file mode 100644 index 00000000..aeb4227b --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/reference.go @@ -0,0 +1,225 @@ +package plumbing + +import ( + "errors" + "fmt" + "strings" +) + +const ( + refPrefix = "refs/" + refHeadPrefix = refPrefix + "heads/" + refTagPrefix = refPrefix + "tags/" + refRemotePrefix = refPrefix + "remotes/" + refNotePrefix = refPrefix + "notes/" + symrefPrefix = "ref: " +) + +// RefRevParseRules are a set of rules to parse references into short names. +// These are the same rules as used by git in shorten_unambiguous_ref. +// See: https://github.com/git/git/blob/e0aaa1b6532cfce93d87af9bc813fb2e7a7ce9d7/refs.c#L417 +var RefRevParseRules = []string{ + "refs/%s", + "refs/tags/%s", + "refs/heads/%s", + "refs/remotes/%s", + "refs/remotes/%s/HEAD", +} + +var ( + ErrReferenceNotFound = errors.New("reference not found") +) + +// ReferenceType reference type's +type ReferenceType int8 + +const ( + InvalidReference ReferenceType = 0 + HashReference ReferenceType = 1 + SymbolicReference ReferenceType = 2 +) + +func (r ReferenceType) String() string { + switch r { + case InvalidReference: + return "invalid-reference" + case HashReference: + return "hash-reference" + case SymbolicReference: + return "symbolic-reference" + } + + return "" +} + +// ReferenceName reference name's +type ReferenceName string + +// NewBranchReferenceName returns a reference name describing a branch based on +// his short name. +func NewBranchReferenceName(name string) ReferenceName { + return ReferenceName(refHeadPrefix + name) +} + +// NewNoteReferenceName returns a reference name describing a note based on his +// short name. 
+func NewNoteReferenceName(name string) ReferenceName { + return ReferenceName(refNotePrefix + name) +} + +// NewRemoteReferenceName returns a reference name describing a remote branch +// based on his short name and the remote name. +func NewRemoteReferenceName(remote, name string) ReferenceName { + return ReferenceName(refRemotePrefix + fmt.Sprintf("%s/%s", remote, name)) +} + +// NewRemoteHEADReferenceName returns a reference name describing a the HEAD +// branch of a remote. +func NewRemoteHEADReferenceName(remote string) ReferenceName { + return ReferenceName(refRemotePrefix + fmt.Sprintf("%s/%s", remote, HEAD)) +} + +// NewTagReferenceName returns a reference name describing a tag based on short +// his name. +func NewTagReferenceName(name string) ReferenceName { + return ReferenceName(refTagPrefix + name) +} + +// IsBranch check if a reference is a branch +func (r ReferenceName) IsBranch() bool { + return strings.HasPrefix(string(r), refHeadPrefix) +} + +// IsNote check if a reference is a note +func (r ReferenceName) IsNote() bool { + return strings.HasPrefix(string(r), refNotePrefix) +} + +// IsRemote check if a reference is a remote +func (r ReferenceName) IsRemote() bool { + return strings.HasPrefix(string(r), refRemotePrefix) +} + +// IsTag check if a reference is a tag +func (r ReferenceName) IsTag() bool { + return strings.HasPrefix(string(r), refTagPrefix) +} + +func (r ReferenceName) String() string { + return string(r) +} + +// Short returns the short name of a ReferenceName +func (r ReferenceName) Short() string { + s := string(r) + res := s + for _, format := range RefRevParseRules { + _, err := fmt.Sscanf(s, format, &res) + if err == nil { + continue + } + } + + return res +} + +const ( + HEAD ReferenceName = "HEAD" + Master ReferenceName = "refs/heads/master" + Main ReferenceName = "refs/heads/main" +) + +// Reference is a representation of git reference +type Reference struct { + t ReferenceType + n ReferenceName + h Hash + target ReferenceName +} + +// NewReferenceFromStrings creates a reference from name and target as string, +// the resulting reference can be a SymbolicReference or a HashReference base +// on the target provided +func NewReferenceFromStrings(name, target string) *Reference { + n := ReferenceName(name) + + if strings.HasPrefix(target, symrefPrefix) { + target := ReferenceName(target[len(symrefPrefix):]) + return NewSymbolicReference(n, target) + } + + return NewHashReference(n, NewHash(target)) +} + +// NewSymbolicReference creates a new SymbolicReference reference +func NewSymbolicReference(n, target ReferenceName) *Reference { + return &Reference{ + t: SymbolicReference, + n: n, + target: target, + } +} + +// NewHashReference creates a new HashReference reference +func NewHashReference(n ReferenceName, h Hash) *Reference { + return &Reference{ + t: HashReference, + n: n, + h: h, + } +} + +// Type returns the type of a reference +func (r *Reference) Type() ReferenceType { + return r.t +} + +// Name returns the name of a reference +func (r *Reference) Name() ReferenceName { + return r.n +} + +// Hash returns the hash of a hash reference +func (r *Reference) Hash() Hash { + return r.h +} + +// Target returns the target of a symbolic reference +func (r *Reference) Target() ReferenceName { + return r.target +} + +// Strings dump a reference as a [2]string +func (r *Reference) Strings() [2]string { + var o [2]string + o[0] = r.Name().String() + + switch r.Type() { + case HashReference: + o[1] = r.Hash().String() + case SymbolicReference: + o[1] = 
symrefPrefix + r.Target().String() + } + + return o +} + +func (r *Reference) String() string { + ref := "" + switch r.Type() { + case HashReference: + ref = r.Hash().String() + case SymbolicReference: + ref = symrefPrefix + r.Target().String() + default: + return "" + } + + name := r.Name().String() + var v strings.Builder + v.Grow(len(ref) + len(name) + 1) + v.WriteString(ref) + v.WriteString(" ") + v.WriteString(name) + return v.String() +} diff --git a/vendor/github.com/go-git/go-git/v5/plumbing/revision.go b/vendor/github.com/go-git/go-git/v5/plumbing/revision.go new file mode 100644 index 00000000..5f053b20 --- /dev/null +++ b/vendor/github.com/go-git/go-git/v5/plumbing/revision.go @@ -0,0 +1,11 @@ +package plumbing + +// Revision represents a git revision +// to get more details about git revisions +// please check git manual page : +// https://www.kernel.org/pub/software/scm/git/docs/gitrevisions.html +type Revision string + +func (r Revision) String() string { + return string(r) +} diff --git a/vendor/github.com/launchdarkly/ld-find-code-refs/v2/flags/flags.go b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/flags/flags.go new file mode 100644 index 00000000..52e44263 --- /dev/null +++ b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/flags/flags.go @@ -0,0 +1,71 @@ +package flags + +import ( + "fmt" + "os" + + "github.com/launchdarkly/ld-find-code-refs/v2/internal/helpers" + "github.com/launchdarkly/ld-find-code-refs/v2/internal/ld" + "github.com/launchdarkly/ld-find-code-refs/v2/internal/log" + "github.com/launchdarkly/ld-find-code-refs/v2/options" +) + +const ( + minFlagKeyLen = 3 // Minimum flag key length helps reduce the number of false positives +) + +func GetFlagKeys(opts options.Options, repoParams ld.RepoParams) map[string][]string { + isDryRun := opts.DryRun + ldApi := ld.InitApiClient(ld.ApiOptions{ApiKey: opts.AccessToken, BaseUri: opts.BaseUri, UserAgent: helpers.GetUserAgent(opts.UserAgent)}) + ignoreServiceErrors := opts.IgnoreServiceErrors + + if !isDryRun { + err := ldApi.MaybeUpsertCodeReferenceRepository(repoParams) + if err != nil { + helpers.FatalServiceError(err, ignoreServiceErrors) + } + } + + flagKeys := make(map[string][]string) + for _, proj := range opts.Projects { + flags, err := getFlags(ldApi, proj.Key) + if err != nil { + helpers.FatalServiceError(fmt.Errorf("could not retrieve flag keys from LaunchDarkly for project `%s`: %w", proj.Key, err), ignoreServiceErrors) + } + + filteredFlags, omittedFlags := filterShortFlagKeys(flags) + if len(filteredFlags) == 0 { + log.Info.Printf("no flag keys longer than the minimum flag key length (%v) were found for project: %s, exiting early", + minFlagKeyLen, proj.Key) + os.Exit(0) + } else if len(omittedFlags) > 0 { + log.Warning.Printf("omitting %d flags with keys less than minimum (%d) for project: %s", len(omittedFlags), minFlagKeyLen, proj.Key) + } + flagKeys[proj.Key] = filteredFlags + } + + return flagKeys +} + +// Very short flag keys lead to many false positives when searching in code, +// so we filter them out. 
+func filterShortFlagKeys(flags []string) (filtered []string, omitted []string) { + filteredFlags := []string{} + omittedFlags := []string{} + for _, flag := range flags { + if len(flag) >= minFlagKeyLen { + filteredFlags = append(filteredFlags, flag) + } else { + omittedFlags = append(omittedFlags, flag) + } + } + return filteredFlags, omittedFlags +} + +func getFlags(ldApi ld.ApiClient, projKey string) ([]string, error) { + flags, err := ldApi.GetFlagKeyList(projKey) + if err != nil { + return nil, err + } + return flags, nil +} diff --git a/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/files.go b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/files.go new file mode 100644 index 00000000..994de626 --- /dev/null +++ b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/files.go @@ -0,0 +1,107 @@ +package search + +import ( + "bufio" + "context" + "errors" + "os" + "path/filepath" + "strings" + + "github.com/monochromegane/go-gitignore" + "golang.org/x/tools/godoc/util" + + "github.com/launchdarkly/ld-find-code-refs/v2/internal/validation" +) + +type ignore struct { + path string + ignores []gitignore.IgnoreMatcher +} + +func newIgnore(path string, ignoreFiles []string) ignore { + ignores := make([]gitignore.IgnoreMatcher, 0, len(ignoreFiles)) + for _, ignoreFile := range ignoreFiles { + i, err := gitignore.NewGitIgnore(filepath.Join(path, ignoreFile)) + if err != nil { + continue + } + ignores = append(ignores, i) + } + return ignore{path: path, ignores: ignores} +} + +func (m ignore) Match(path string, isDir bool) bool { + for _, i := range m.ignores { + if i.Match(path, isDir) { + return true + } + } + + return false +} + +func readFileLines(path string) ([]string, error) { + if !validation.FileExists(path) { + return nil, errors.New("file does not exist") + } + + /* #nosec */ + file, err := os.Open(path) + if err != nil { + return nil, err + } + defer file.Close() + + scanner := bufio.NewScanner(file) + scanner.Split(bufio.ScanLines) + var lines []string + + for scanner.Scan() { + lines = append(lines, scanner.Text()) + } + + return lines, nil +} + +func readFiles(ctx context.Context, files chan<- file, workspace string) error { + defer close(files) + ignoreFiles := []string{".gitignore", ".ignore", ".ldignore"} + allIgnores := newIgnore(workspace, ignoreFiles) + workspace = filepath.ToSlash(workspace) + + readFile := func(path string, info os.FileInfo, err error) error { + if err != nil || ctx.Err() != nil { + // global context cancelled, don't read any more files + return nil + } + + isDir := info.IsDir() + path = filepath.ToSlash(path) + + // Skip directories, hidden files, and ignored files + if strings.HasPrefix(info.Name(), ".") || allIgnores.Match(path, isDir) { + if isDir { + return filepath.SkipDir + } + return nil + } else if !info.Mode().IsRegular() { + return nil + } + + lines, err := readFileLines(path) + if err != nil { + return err + } + + // only read text files + if !util.IsText([]byte(strings.Join(lines, "\n"))) { + return nil + } + + files <- file{path: strings.TrimPrefix(path, workspace+"/"), lines: lines} + return nil + } + + return filepath.Walk(workspace, readFile) +} diff --git a/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/matcher.go b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/matcher.go new file mode 100644 index 00000000..6478f0ca --- /dev/null +++ b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/matcher.go @@ -0,0 +1,198 @@ +package search + +import ( + "strings" + + ahocorasick 
"github.com/petar-dambovaliev/aho-corasick" + + "github.com/launchdarkly/ld-find-code-refs/v2/aliases" + "github.com/launchdarkly/ld-find-code-refs/v2/flags" + "github.com/launchdarkly/ld-find-code-refs/v2/internal/helpers" + "github.com/launchdarkly/ld-find-code-refs/v2/internal/ld" + "github.com/launchdarkly/ld-find-code-refs/v2/internal/log" + "github.com/launchdarkly/ld-find-code-refs/v2/options" +) + +type ElementMatcher struct { + ProjKey string + Elements []string + Dir string + allElementAndAliasesMatcher ahocorasick.AhoCorasick + matcherByElement map[string]ahocorasick.AhoCorasick + aliasMatcherByElement map[string]ahocorasick.AhoCorasick + + elementsByPatternIndex [][]string +} + +type Matcher struct { + Elements []ElementMatcher + ctxLines int +} + +// Scan checks the configured directory for flags base on the options configured for Code References. +func Scan(opts options.Options, repoParams ld.RepoParams, dir string) (Matcher, []ld.ReferenceHunksRep) { + flagKeys := flags.GetFlagKeys(opts, repoParams) + elements := []ElementMatcher{} + + for _, project := range opts.Projects { + projectFlags := flagKeys[project.Key] + projectAliases := opts.Aliases + projectAliases = append(projectAliases, project.Aliases...) + aliasesByFlagKey, err := aliases.GenerateAliases(projectFlags, projectAliases, dir) + if err != nil { + log.Error.Fatalf("failed to generate aliases: %s for project: %s", err, project.Key) + } + + delimiters := strings.Join(helpers.Dedupe(getDelimiters(opts)), "") + elements = append(elements, NewElementMatcher(project.Key, project.Dir, delimiters, projectFlags, aliasesByFlagKey)) + } + matcher := Matcher{ + ctxLines: opts.ContextLines, + Elements: elements, + } + + refs, err := SearchForRefs(dir, matcher) + if err != nil { + log.Error.Fatalf("error searching for flag key references: %s", err) + } + + return matcher, refs +} + +func NewElementMatcher(projKey, dir, delimiters string, elements []string, aliasesByElement map[string][]string) ElementMatcher { + matcherBuilder := ahocorasick.NewAhoCorasickBuilder(ahocorasick.Opts{DFA: true, MatchKind: ahocorasick.StandardMatch}) + + allFlagPatternsAndAliases := make([]string, 0) + elementsByPatternIndex := make([][]string, 0) + patternIndex := make(map[string]int) + + recordPatternsForElement := func(element string, patterns []string) { + for _, p := range patterns { + index, exists := patternIndex[p] + if !exists { + allFlagPatternsAndAliases = append(allFlagPatternsAndAliases, p) + index = len(elementsByPatternIndex) + elementsByPatternIndex = append(elementsByPatternIndex, []string{}) + } + patternIndex[p] = index + elementsByPatternIndex[index] = append(elementsByPatternIndex[index], element) + } + } + + patternsByElement := buildElementPatterns(elements, delimiters) + flagMatcherByKey := make(map[string]ahocorasick.AhoCorasick, len(patternsByElement)) + for element, patterns := range patternsByElement { + flagMatcherByKey[element] = matcherBuilder.Build(patterns) + recordPatternsForElement(element, patterns) + } + + aliasMatcherByElement := make(map[string]ahocorasick.AhoCorasick, len(aliasesByElement)) + for element, elementAliases := range aliasesByElement { + aliasMatcherByElement[element] = matcherBuilder.Build(elementAliases) + recordPatternsForElement(element, elementAliases) + } + + return ElementMatcher{ + Elements: elements, + ProjKey: projKey, + Dir: dir, + matcherByElement: flagMatcherByKey, + aliasMatcherByElement: aliasMatcherByElement, + allElementAndAliasesMatcher: 
matcherBuilder.Build(allFlagPatternsAndAliases), + + elementsByPatternIndex: elementsByPatternIndex, + } +} + +func getDelimiters(opts options.Options) []string { + delims := []string{`"`, `'`, "`"} + if opts.Delimiters.DisableDefaults { + delims = []string{} + } + + delims = append(delims, opts.Delimiters.Additional...) + + return delims +} + +func (m Matcher) MatchElement(line, element string) bool { + for _, em := range m.Elements { + if e, exists := em.matcherByElement[element]; exists { + if e.Iter(line).Next() != nil { + return true + } + } + } + + return false +} + +func (m Matcher) GetProjectElementMatcher(projectKey string) *ElementMatcher { + var elementMatcher ElementMatcher + for _, element := range m.Elements { + if element.ProjKey == projectKey { + elementMatcher = element + break + } + } + return &elementMatcher +} + +func (m Matcher) FindAliases(line, element string) []string { + matches := make([]string, 0) + for _, em := range m.Elements { + matches = append(matches, em.FindAliases(line, element)...) + } + return helpers.Dedupe(matches) +} + +func (m ElementMatcher) FindMatches(line string) []string { + elements := make([]string, 0) + iter := m.allElementAndAliasesMatcher.IterOverlapping(line) + for match := iter.Next(); match != nil; match = iter.Next() { + elements = append(elements, m.elementsByPatternIndex[match.Pattern()]...) + } + return helpers.Dedupe(elements) +} + +func (m ElementMatcher) FindAliases(line, element string) []string { + aliasMatches := make([]string, 0) + if aliasMatcher, exists := m.aliasMatcherByElement[element]; exists { + iter := aliasMatcher.IterOverlapping(line) + for match := iter.Next(); match != nil; match = iter.Next() { + aliasMatches = append(aliasMatches, line[match.Start():match.End()]) + } + } + return aliasMatches +} + +func buildElementPatterns(flags []string, delimiters string) map[string][]string { + patternsByFlag := make(map[string][]string, len(flags)) + for _, flag := range flags { + var patterns []string + if delimiters != "" { + patterns = make([]string, 0, len(delimiters)*len(delimiters)) + for _, left := range delimiters { + for _, right := range delimiters { + var sb strings.Builder + sb.Grow(len(flag) + 2) + sb.WriteRune(left) + sb.WriteString(flag) + sb.WriteRune(right) + patterns = append(patterns, sb.String()) + } + } + } else { + patterns = []string{flag} + } + patternsByFlag[flag] = patterns + } + return patternsByFlag +} + +func (m Matcher) GetElements() (elements [][]string) { + for _, element := range m.Elements { + elements = append(elements, element.Elements) + } + return elements +} diff --git a/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/search.go b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/search.go new file mode 100644 index 00000000..c63ec279 --- /dev/null +++ b/vendor/github.com/launchdarkly/ld-find-code-refs/v2/search/search.go @@ -0,0 +1,233 @@ +package search + +import ( + "context" + "sort" + "strings" + "sync" + "unicode/utf8" + + "github.com/go-git/go-git/v5/plumbing" + "github.com/launchdarkly/ld-find-code-refs/v2/internal/helpers" + "github.com/launchdarkly/ld-find-code-refs/v2/internal/ld" +) + +const ( + // These are defensive limits intended to prevent corner cases stemming from + // large repos, false positives, etc. The goal is a) to prevent the program + // from taking a very long time to run and b) to prevent the program from + // PUTing a massive json payload. These limits will likely be tweaked over + // time. 
The LaunchDarkly backend will also apply limits. + maxFileCount = 10000 // Maximum number of files containing code references + maxHunkCount = 25000 // Maximum number of total code references + maxLineCharCount = 500 // Maximum number of characters per line +) + +// Truncate lines to prevent sending over massive hunks, e.g. a minified file. +// NOTE: We may end up truncating a valid flag key reference. We accept this risk +// and will handle hunks missing flag key references on the frontend. +func truncateLine(line string, maxCharCount int) string { + if utf8.RuneCountInString(line) <= maxCharCount { + return line + } + // convert to rune slice so that we don't truncate multibyte unicode characters + runes := []rune(line) + return string(runes[0:maxCharCount]) + "…" +} + +type file struct { + path string + lines []string +} + +// hunkForLine returns a matching code reference for a given flag key on a line +func (f file) hunkForLine(projKey, flagKey string, lineNum int, matcher Matcher) *ld.HunkRep { + line := f.lines[lineNum] + ctxLines := matcher.ctxLines + + aliasMatches := matcher.FindAliases(line, flagKey) + if len(aliasMatches) == 0 && !matcher.MatchElement(line, flagKey) { + return nil + } + + startingLineNum := lineNum + var hunkLines []string + if ctxLines >= 0 { + startingLineNum -= ctxLines + if startingLineNum < 0 { + startingLineNum = 0 + } + endingLineNum := lineNum + ctxLines + 1 + if endingLineNum >= len(f.lines) { + hunkLines = f.lines[startingLineNum:] + } else { + hunkLines = f.lines[startingLineNum:endingLineNum] + } + } + + for i, line := range hunkLines { + hunkLines[i] = truncateLine(line, maxLineCharCount) + } + + lines := strings.Join(hunkLines, "\n") + contentHash := getContentHash(lines) + + ret := ld.HunkRep{ + ProjKey: projKey, + FlagKey: flagKey, + StartingLineNumber: startingLineNum + 1, + Lines: lines, + Aliases: aliasMatches, + ContentHash: contentHash, + } + return &ret +} + +// aggregateHunksForFlag finds all references in a file, and combines matches if their context lines overlap +func (f file) aggregateHunksForFlag(projKey, flagKey string, matcher Matcher, lineNumbers []int) []ld.HunkRep { + var hunksForFlag []ld.HunkRep + for _, lineNumber := range lineNumbers { + match := f.hunkForLine(projKey, flagKey, lineNumber, matcher) + if match != nil { + lastHunkIdx := len(hunksForFlag) - 1 + // If the previous hunk overlaps or is adjacent to the current hunk, merge them together + if lastHunkIdx >= 0 && hunksForFlag[lastHunkIdx].Overlap(*match) >= 0 { + hunksForFlag = append(hunksForFlag[:lastHunkIdx], mergeHunks(hunksForFlag[lastHunkIdx], *match)...) + } else { + hunksForFlag = append(hunksForFlag, *match) + } + } + } + return hunksForFlag +} + +func (f file) toHunks(matcher Matcher) *ld.ReferenceHunksRep { + hunks := make([]ld.HunkRep, 0) + filteredMatchers := make([]ElementMatcher, 0) + for _, elementSearch := range matcher.Elements { + if elementSearch.Dir != "" { + matchDir := strings.HasPrefix(f.path, elementSearch.Dir) + if !matchDir { + continue + } + } + filteredMatchers = append(filteredMatchers, elementSearch) + } + for _, elementSearch := range filteredMatchers { + lineNumbersByElement := f.findMatchingLineNumbersByElement(elementSearch) + for element, lineNumbers := range lineNumbersByElement { + hunks = append(hunks, f.aggregateHunksForFlag(elementSearch.ProjKey, element, matcher, lineNumbers)...) 
+ } + } + if len(hunks) == 0 { + return nil + } + return &ld.ReferenceHunksRep{Path: f.path, Hunks: hunks} +} + +func (f file) findMatchingLineNumbersByElement(matcher ElementMatcher) map[string][]int { + lineNumbersByElement := make(map[string][]int) + for lineNum, line := range f.lines { + for _, element := range matcher.FindMatches(line) { + lineNumbersByElement[element] = append(lineNumbersByElement[element], lineNum) + } + } + return lineNumbersByElement +} + +// mergeHunks combines the lines and aliases of two hunks together for a given file +// if the hunks do not overlap, returns each hunk separately +// assumes the startingLineNumber of a is less than b and there is some overlap between the two +func mergeHunks(a, b ld.HunkRep) []ld.HunkRep { + if a.StartingLineNumber > b.StartingLineNumber { + a, b = b, a + } + + aLines := strings.Split(a.Lines, "\n") + bLines := strings.Split(b.Lines, "\n") + + overlap := a.Overlap(b) + // no overlap + if overlap < 0 || len(a.Lines) == 0 && len(b.Lines) == 0 { + return []ld.HunkRep{a, b} + } else if overlap >= len(bLines) { + // subset hunk + return []ld.HunkRep{a} + } + + combinedLines := append(aLines, bLines[overlap:]...) + lines := strings.Join(combinedLines, "\n") + contentHash := getContentHash(lines) + + return []ld.HunkRep{ + { + StartingLineNumber: a.StartingLineNumber, + Lines: lines, + ProjKey: a.ProjKey, + FlagKey: a.FlagKey, + Aliases: helpers.Dedupe(append(a.Aliases, b.Aliases...)), + ContentHash: contentHash, + }, + } +} + +// processFiles starts goroutines to process files individually. When all files have completed processing, the references channel is closed to signal completion. +func processFiles(ctx context.Context, files <-chan file, references chan<- ld.ReferenceHunksRep, matcher Matcher) { + defer close(references) + w := sync.WaitGroup{} + for f := range files { + if ctx.Err() != nil { + // context cancelled, stop processing files, but let the waitgroup finish organically + continue + } + w.Add(1) + go func(f file) { + reference := f.toHunks(matcher) + if reference != nil { + references <- *reference + } + w.Done() + }(f) + } + w.Wait() +} + +func SearchForRefs(directory string, matcher Matcher) ([]ld.ReferenceHunksRep, error) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + files := make(chan file) + references := make(chan ld.ReferenceHunksRep) + // Start workers to process files asynchronously as they are written to the files channel + go processFiles(ctx, files, references, matcher) + + err := readFiles(ctx, files, directory) + if err != nil { + return nil, err + } + + ret := make([]ld.ReferenceHunksRep, 0, len(references)) + + defer sort.SliceStable(ret, func(i, j int) bool { + return ret[i].Path < ret[j].Path + }) + + totalHunks := 0 + for reference := range references { + ret = append(ret, reference) + + // Reached maximum number of files with code references + if len(ret) >= maxFileCount { + return ret, nil + } + totalHunks += len(reference.Hunks) + // Reached maximum number of hunks across all files + if totalHunks > maxHunkCount { + return ret, nil + } + } + return ret, nil +} + +func getContentHash(lines string) string { + return plumbing.ComputeHash(plumbing.BlobObject, []byte(lines)).String() +} diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/.gitignore b/vendor/github.com/petar-dambovaliev/aho-corasick/.gitignore new file mode 100644 index 00000000..204755af --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/.gitignore @@ -0,0 +1,3 @@ +.idea 
+.DS_Store +.vscode \ No newline at end of file diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/LICENSE b/vendor/github.com/petar-dambovaliev/aho-corasick/LICENSE new file mode 100644 index 00000000..168fd618 --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Petar Dambovaliev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/README.md b/vendor/github.com/petar-dambovaliev/aho-corasick/README.md new file mode 100644 index 00000000..5ef7dd32 --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/README.md @@ -0,0 +1,85 @@ +# aho-corasick +Efficient string matching in Golang via the aho-corasick algorithm. + +x20 faster than https://github.com/cloudflare/ahocorasick and x3 faster than https://github.com/anknown/ahocorasick + +Memory consuption is a eigth of https://github.com/cloudflare/ahocorasick and half of https://github.com/anknown/ahocorasick + +This library is heavily inspired by https://github.com/BurntSushi/aho-corasick + +## Usage + +```bash +go get -u github.com/petar-dambovaliev/aho-corasick +``` + +```go +import ( + ahocorasick "github.com/petar-dambovaliev/aho-corasick" +) +builder := ahocorasick.NewAhoCorasickBuilder(Opts{ + AsciiCaseInsensitive: true, + MatchOnlyWholeWords: true, + MatchKind: LeftMostLongestMatch, + DFA: true, +}) + +ac := builder.Build([]string{"bear", "masha"}) +haystack := "The Bear and Masha" +matches := ac.FindAll(haystack) + +for _, match := range matches { + println(haystack[match.Start():match.End()]) +} +``` + +Matching can be done via `NFA` or `DFA`. +`NFA` has runtime complexity O(N + M) in relation to the haystack and number of matches. +`DFA` has runtime complexity O(N), but it uses more memory. + +Replacing of matches in the haystack. + +`replaceWith` needs to be the same length as the `patterns` +```go +r := ahocorasick.NewReplacer(ac) +replaced := r.ReplaceAll(haystack, replaceWith) +``` + +`ReplaceAllFunc` is useful, for example, if you want to use the original text cassing but you are matching +case insensitively. You can replace partially by return false and from that point, the original string will be preserved. 
+```go +replaced := r.ReplaceAllFunc(haystack, func(match Match) (string, bool) { + return `` + haystack[match.Start():match.End()] + `<\a>`, true +}) +``` + +Search for matches one at a time via the iterator + +```go +iter := ac.Iter(haystack) + +for next := iter.Next(); next != nil; next = iter.Next() { + ... +} +``` + +It's plenty fast but if you want to use it in parallel, that is also possible. + +Memory consumption won't increase because the read-only automaton is not actually copied, only the counters are. + +The magic line is `ac := ac` + +```go +var w sync.WaitGroup + +w.Add(50) +for i := 0; i < 50; i++ { + go func() { + ac := ac + matches := ac.FindAll(haystack) + println(len(matches)) + w.Done() + }() +} +w.Wait() +``` diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/ahocorasick.go b/vendor/github.com/petar-dambovaliev/aho-corasick/ahocorasick.go new file mode 100644 index 00000000..445a7f96 --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/ahocorasick.go @@ -0,0 +1,384 @@ +package aho_corasick + +import ( + "strings" + "sync" + "unicode" +) + +type findIter struct { + fsm imp + prestate *prefilterState + haystack []byte + pos int + matchOnlyWholeWords bool +} + +// Iter is an iterator over matches found on the current haystack +// it gives the user more granular control. You can chose how many and what kind of matches you need. +type Iter interface { + Next() *Match +} + +// Next gives a pointer to the next match yielded by the iterator or nil, if there is none +func (f *findIter) Next() *Match { + if f.pos > len(f.haystack) { + return nil + } + + result := f.fsm.FindAtNoState(f.prestate, f.haystack, f.pos) + + if result == nil { + return nil + } + + if result.end == f.pos { + f.pos += 1 + } else { + f.pos = result.end + } + + if f.matchOnlyWholeWords { + if result.Start()-1 >= 0 && (unicode.IsLetter(rune(f.haystack[result.Start()-1])) || unicode.IsDigit(rune(f.haystack[result.Start()-1]))) { + return f.Next() + } + if result.end < len(f.haystack) && (unicode.IsLetter(rune(f.haystack[result.end])) || unicode.IsDigit(rune(f.haystack[result.end]))) { + return f.Next() + } + } + + return result +} + +type overlappingIter struct { + fsm imp + prestate *prefilterState + haystack []byte + pos int + stateID stateID + matchIndex int + matchOnlyWholeWords bool +} + +func (f *overlappingIter) Next() *Match { + if f.pos > len(f.haystack) { + return nil + } + + result := f.fsm.OverlappingFindAt(f.prestate, f.haystack, f.pos, &f.stateID, &f.matchIndex) + + if result == nil { + return nil + } + + f.pos = result.End() + + if f.matchOnlyWholeWords { + if result.Start()-1 >= 0 && (unicode.IsLetter(rune(f.haystack[result.Start()-1])) || unicode.IsDigit(rune(f.haystack[result.Start()-1]))) { + return f.Next() + } + if result.end < len(f.haystack) && (unicode.IsLetter(rune(f.haystack[result.end])) || unicode.IsDigit(rune(f.haystack[result.end]))) { + return f.Next() + } + } + + return result +} + +func newOverlappingIter(ac AhoCorasick, haystack []byte) overlappingIter { + prestate := prefilterState{ + skips: 0, + skipped: 0, + maxMatchLen: ac.i.MaxPatternLen(), + inert: false, + lastScanAt: 0, + } + return overlappingIter{ + fsm: ac.i, + prestate: &prestate, + haystack: haystack, + pos: 0, + stateID: ac.i.StartState(), + matchIndex: 0, + matchOnlyWholeWords: ac.matchOnlyWholeWords, + } +} + +// make sure the AhoCorasick data structure implements the Finder interface +var _ Finder = (*AhoCorasick)(nil) + +// AhoCorasick is the main data structure that does most of 
the work +type AhoCorasick struct { + i imp + matchKind matchKind + matchOnlyWholeWords bool +} + +func (ac AhoCorasick) PatternCount() int { + return ac.i.PatternCount() +} + +// Iter gives an iterator over the built patterns +func (ac AhoCorasick) Iter(haystack string) Iter { + return ac.IterByte([]byte(haystack)) +} + +// IterByte gives an iterator over the built patterns +func (ac AhoCorasick) IterByte(haystack []byte) Iter { + prestate := &prefilterState{ + skips: 0, + skipped: 0, + maxMatchLen: ac.i.MaxPatternLen(), + inert: false, + lastScanAt: 0, + } + + return &findIter{ + fsm: ac.i, + prestate: prestate, + haystack: haystack, + pos: 0, + matchOnlyWholeWords: ac.matchOnlyWholeWords, + } +} + +// Iter gives an iterator over the built patterns with overlapping matches +func (ac AhoCorasick) IterOverlapping(haystack string) Iter { + return ac.IterOverlappingByte([]byte(haystack)) +} + +// IterOverlappingByte gives an iterator over the built patterns with overlapping matches +func (ac AhoCorasick) IterOverlappingByte(haystack []byte) Iter { + if ac.matchKind != StandardMatch { + panic("only StandardMatch allowed for overlapping matches") + } + i := newOverlappingIter(ac, haystack) + return &i +} + +var pool = sync.Pool{ + New: func() interface{} { + return strings.Builder{} + }, +} + +type Replacer struct { + finder Finder +} + +func NewReplacer(finder Finder) Replacer { + return Replacer{finder: finder} +} + +// ReplaceAllFunc replaces the matches found in the haystack according to the user provided function +// it gives fine grained control over what is replaced. +// A user can chose to stop the replacing process early by returning false in the lambda +// In that case, everything from that point will be kept as the original haystack +func (r Replacer) ReplaceAllFunc(haystack string, f func(match Match) (string, bool)) string { + matches := r.finder.FindAll(haystack) + + if len(matches) == 0 { + return haystack + } + + replaceWith := make([]string, 0) + + for _, match := range matches { + rw, ok := f(match) + if !ok { + break + } + replaceWith = append(replaceWith, rw) + } + + str := pool.Get().(strings.Builder) + + defer func() { + str.Reset() + pool.Put(str) + }() + + start := 0 + + for i, match := range matches { + if i >= len(replaceWith) { + str.WriteString(haystack[start:]) + return str.String() + } + str.WriteString(haystack[start:match.Start()]) + str.WriteString(replaceWith[i]) + start = match.Start() + match.len + } + + if start-1 < len(haystack) { + str.WriteString(haystack[start:]) + } + + return str.String() +} + +// ReplaceAll replaces the matches found in the haystack according to the user provided slice `replaceWith` +// It panics, if `replaceWith` has length different from the patterns that it was built with +func (r Replacer) ReplaceAll(haystack string, replaceWith []string) string { + if len(replaceWith) != r.finder.PatternCount() { + panic("replaceWith needs to have the same length as the pattern count") + } + + return r.ReplaceAllFunc(haystack, func(match Match) (string, bool) { + return replaceWith[match.pattern], true + }) +} + +type Finder interface { + FindAll(haystack string) []Match + PatternCount() int +} + +// FindAll returns the matches found in the haystack +func (ac AhoCorasick) FindAll(haystack string) []Match { + iter := ac.Iter(haystack) + matches := make([]Match, 0) + + for { + next := iter.Next() + if next == nil { + break + } + + matches = append(matches, *next) + } + + return matches +} + +// AhoCorasickBuilder defines a set of options applied 
before the patterns are built +type AhoCorasickBuilder struct { + dfaBuilder *iDFABuilder + nfaBuilder *iNFABuilder + dfa bool + matchOnlyWholeWords bool +} + +// Opts defines a set of options applied before the patterns are built +type Opts struct { + AsciiCaseInsensitive bool + MatchOnlyWholeWords bool + MatchKind matchKind + DFA bool +} + +// NewAhoCorasickBuilder creates a new AhoCorasickBuilder based on Opts +func NewAhoCorasickBuilder(o Opts) AhoCorasickBuilder { + return AhoCorasickBuilder{ + dfaBuilder: newDFABuilder(), + nfaBuilder: newNFABuilder(o.MatchKind, o.AsciiCaseInsensitive), + dfa: o.DFA, + matchOnlyWholeWords: o.MatchOnlyWholeWords, + } +} + +// Build builds a (non)deterministic finite automata from the user provided patterns +func (a *AhoCorasickBuilder) Build(patterns []string) AhoCorasick { + bytePatterns := make([][]byte, len(patterns)) + for pati, pat := range patterns { + bytePatterns[pati] = []byte(pat) + } + + return a.BuildByte(bytePatterns) +} + +// BuildByte builds a (non)deterministic finite automata from the user provided patterns +func (a *AhoCorasickBuilder) BuildByte(patterns [][]byte) AhoCorasick { + nfa := a.nfaBuilder.build(patterns) + match_kind := nfa.matchKind + + if a.dfa { + dfa := a.dfaBuilder.build(nfa) + return AhoCorasick{dfa, match_kind, a.matchOnlyWholeWords} + } + + return AhoCorasick{nfa, match_kind, a.matchOnlyWholeWords} +} + +type imp interface { + MatchKind() *matchKind + StartState() stateID + MaxPatternLen() int + PatternCount() int + Prefilter() prefilter + UsePrefilter() bool + OverlappingFindAt(prestate *prefilterState, haystack []byte, at int, state_id *stateID, match_index *int) *Match + EarliestFindAt(prestate *prefilterState, haystack []byte, at int, state_id *stateID) *Match + FindAtNoState(prestate *prefilterState, haystack []byte, at int) *Match +} + +type matchKind int + +const ( + // Use standard match semantics, which support overlapping matches. When + // used with non-overlapping matches, matches are reported as they are seen. + StandardMatch matchKind = iota + // Use leftmost-first match semantics, which reports leftmost matches. + // When there are multiple possible leftmost matches, the match + // corresponding to the pattern that appeared earlier when constructing + // the automaton is reported. + // This does **not** support overlapping matches or stream searching + LeftMostFirstMatch + // Use leftmost-longest match semantics, which reports leftmost matches. + // When there are multiple possible leftmost matches, the longest match is chosen. + LeftMostLongestMatch +) + +func (m matchKind) supportsOverlapping() bool { + return m.isStandard() +} + +func (m matchKind) supportsStream() bool { + return m.isStandard() +} + +func (m matchKind) isStandard() bool { + return m == StandardMatch +} + +func (m matchKind) isLeftmost() bool { + return m == LeftMostFirstMatch || m == LeftMostLongestMatch +} + +func (m matchKind) isLeftmostFirst() bool { + return m == LeftMostFirstMatch +} + +// A representation of a match reported by an Aho-Corasick automaton. +// +// A match has two essential pieces of information: the identifier of the +// pattern that matched, along with the start and end offsets of the match +// in the haystack. 
+type Match struct { + pattern int + len int + end int +} + +// Pattern returns the index of the pattern in the slice of the patterns provided by the user that +// was matched +func (m *Match) Pattern() int { + return m.pattern +} + +// End gives the index of the last character of this match inside the haystack +func (m *Match) End() int { + return m.end +} + +// Start gives the index of the first character of this match inside the haystack +func (m *Match) Start() int { + return m.end - m.len +} + +type stateID uint + +const ( + failedStateID stateID = 0 + deadStateID stateID = 1 +) diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/automaton.go b/vendor/github.com/petar-dambovaliev/aho-corasick/automaton.go new file mode 100644 index 00000000..2404cccb --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/automaton.go @@ -0,0 +1,222 @@ +package aho_corasick + +type automaton interface { + Repr() *iRepr + MatchKind() *matchKind + Anchored() bool + Prefilter() prefilter + StartState() stateID + IsValid(stateID) bool + IsMatchState(stateID) bool + IsMatchOrDeadState(stateID) bool + GetMatch(stateID, int, int) *Match + MatchCount(stateID) int + NextState(stateID, byte) stateID + NextStateNoFail(stateID, byte) stateID + StandardFindAt(*prefilterState, []byte, int, *stateID) *Match + StandardFindAtImp(*prefilterState, prefilter, []byte, int, *stateID) *Match + LeftmostFindAt(*prefilterState, []byte, int, *stateID) *Match + LeftmostFindAtImp(*prefilterState, prefilter, []byte, int, *stateID) *Match + LeftmostFindAtNoState(*prefilterState, []byte, int) *Match + LeftmostFindAtNoStateImp(*prefilterState, prefilter, []byte, int) *Match + OverlappingFindAt(*prefilterState, []byte, int, *stateID, *int) *Match + EarliestFindAt(*prefilterState, []byte, int, *stateID) *Match + FindAt(*prefilterState, []byte, int, *stateID) *Match + FindAtNoState(*prefilterState, []byte, int) *Match +} + +func isMatchOrDeadState(a automaton, si stateID) bool { + return si == deadStateID || a.IsMatchState(si) +} + +func standardFindAt(a automaton, prestate *prefilterState, haystack []byte, at int, sID *stateID) *Match { + pre := a.Prefilter() + return a.StandardFindAtImp(prestate, pre, haystack, at, sID) +} + +func standardFindAtImp(a automaton, prestate *prefilterState, prefilter prefilter, haystack []byte, at int, sID *stateID) *Match { + for at < len(haystack) { + if prefilter != nil { + startState := a.StartState() + if prestate.IsEffective(at) && sID == &startState { + c, ttype := nextPrefilter(prestate, prefilter, haystack, at) + switch ttype { + case noneCandidate: + return nil + case possibleStartOfMatchCandidate: + i := c.(int) + at = i + } + } + } + *sID = a.NextStateNoFail(*sID, haystack[at]) + at += 1 + + if a.IsMatchOrDeadState(*sID) { + if *sID == deadStateID { + return nil + } else { + return a.GetMatch(*sID, 0, at) + } + } + } + return nil +} + +func leftmostFindAt(a automaton, prestate *prefilterState, haystack []byte, at int, sID *stateID) *Match { + prefilter := a.Prefilter() + return a.LeftmostFindAtImp(prestate, prefilter, haystack, at, sID) +} + +func leftmostFindAtImp(a automaton, prestate *prefilterState, prefilter prefilter, haystack []byte, at int, sID *stateID) *Match { + if a.Anchored() && at > 0 && *sID == a.StartState() { + return nil + } + lastMatch := a.GetMatch(*sID, 0, at) + + for at < len(haystack) { + if prefilter != nil { + startState := a.StartState() + if prestate.IsEffective(at) && sID == &startState { + c, ttype := nextPrefilter(prestate, prefilter, 
haystack, at) + switch ttype { + case noneCandidate: + return nil + case possibleStartOfMatchCandidate: + i := c.(int) + at = i + } + } + } + + *sID = a.NextStateNoFail(*sID, haystack[at]) + at += 1 + + if a.IsMatchOrDeadState(*sID) { + if *sID == deadStateID { + return lastMatch + } else { + a.GetMatch(*sID, 0, at) + } + } + } + + return lastMatch +} + +func leftmostFindAtNoState(a automaton, prestate *prefilterState, haystack []byte, at int) *Match { + return leftmostFindAtNoStateImp(a, prestate, a.Prefilter(), haystack, at) +} + +func leftmostFindAtNoStateImp(a automaton, prestate *prefilterState, prefilter prefilter, haystack []byte, at int) *Match { + if a.Anchored() && at > 0 { + return nil + } + if prefilter != nil && !prefilter.ReportsFalsePositives() { + c, ttype := prefilter.NextCandidate(prestate, haystack, at) + switch ttype { + case noneCandidate: + return nil + case matchCandidate: + m := c.(*Match) + return m + } + } + + stateID := a.StartState() + lastMatch := a.GetMatch(stateID, 0, at) + + for at < len(haystack) { + if prefilter != nil && prestate.IsEffective(at) && stateID == a.StartState() { + c, ttype := prefilter.NextCandidate(prestate, haystack, at) + switch ttype { + case noneCandidate: + return nil + case matchCandidate: + m := c.(*Match) + return m + case possibleStartOfMatchCandidate: + i := c.(int) + at = i + } + } + + stateID = a.NextStateNoFail(stateID, haystack[at]) + at += 1 + + if a.IsMatchOrDeadState(stateID) { + if stateID == deadStateID { + return lastMatch + } + lastMatch = a.GetMatch(stateID, 0, at) + } + } + + return lastMatch +} + +func overlappingFindAt(a automaton, prestate *prefilterState, haystack []byte, at int, id *stateID, matchIndex *int) *Match { + if a.Anchored() && at > 0 && *id == a.StartState() { + return nil + } + + matchCount := a.MatchCount(*id) + + if *matchIndex < matchCount { + result := a.GetMatch(*id, *matchIndex, at) + *matchIndex += 1 + return result + } + + *matchIndex = 0 + match := a.StandardFindAt(prestate, haystack, at, id) + + if match == nil { + return nil + } + + *matchIndex = 1 + return match +} + +func earliestFindAt(a automaton, prestate *prefilterState, haystack []byte, at int, id *stateID) *Match { + if *id == a.StartState() { + if a.Anchored() && at > 0 { + return nil + } + match := a.GetMatch(*id, 0, at) + if match != nil { + return match + } + } + return a.StandardFindAt(prestate, haystack, at, id) +} + +func findAt(a automaton, prestate *prefilterState, haystack []byte, at int, id *stateID) *Match { + kind := a.MatchKind() + if kind == nil { + return nil + } + switch *kind { + case StandardMatch: + return a.EarliestFindAt(prestate, haystack, at, id) + case LeftMostFirstMatch, LeftMostLongestMatch: + return a.LeftmostFindAt(prestate, haystack, at, id) + } + return nil +} + +func findAtNoState(a automaton, prestate *prefilterState, haystack []byte, at int) *Match { + kind := a.MatchKind() + if kind == nil { + return nil + } + switch *kind { + case StandardMatch: + state := a.StartState() + return a.EarliestFindAt(prestate, haystack, at, &state) + case LeftMostFirstMatch, LeftMostLongestMatch: + return a.LeftmostFindAtNoState(prestate, haystack, at) + } + return nil +} diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/byte_frequencies.go b/vendor/github.com/petar-dambovaliev/aho-corasick/byte_frequencies.go new file mode 100644 index 00000000..8a45609c --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/byte_frequencies.go @@ -0,0 +1,260 @@ +package aho_corasick + +var byteFrequencies = 
[256]byte{ + 55, // '\x00' + 52, // '\x01' + 51, // '\x02' + 50, // '\x03' + 49, // '\x04' + 48, // '\x05' + 47, // '\x06' + 46, // '\x07' + 45, // '\x08' + 103, // '\t' + 242, // '\n' + 66, // '\x0b' + 67, // '\x0c' + 229, // '\r' + 44, // '\x0e' + 43, // '\x0f' + 42, // '\x10' + 41, // '\x11' + 40, // '\x12' + 39, // '\x13' + 38, // '\x14' + 37, // '\x15' + 36, // '\x16' + 35, // '\x17' + 34, // '\x18' + 33, // '\x19' + 56, // '\x1a' + 32, // '\x1b' + 31, // '\x1c' + 30, // '\x1d' + 29, // '\x1e' + 28, // '\x1f' + 255, // ' ' + 148, // '!' + 164, // '"' + 149, // '#' + 136, // '$' + 160, // '%' + 155, // '&' + 173, // "'" + 221, // '(' + 222, // ')' + 134, // '*' + 122, // '+' + 232, // ',' + 202, // '-' + 215, // '.' + 224, // '/' + 208, // '0' + 220, // '1' + 204, // '2' + 187, // '3' + 183, // '4' + 179, // '5' + 177, // '6' + 168, // '7' + 178, // '8' + 200, // '9' + 226, // ':' + 195, // ';' + 154, // '<' + 184, // '=' + 174, // '>' + 126, // '?' + 120, // '@' + 191, // 'A' + 157, // 'B' + 194, // 'C' + 170, // 'D' + 189, // 'E' + 162, // 'F' + 161, // 'G' + 150, // 'H' + 193, // 'I' + 142, // 'J' + 137, // 'K' + 171, // 'L' + 176, // 'M' + 185, // 'N' + 167, // 'O' + 186, // 'P' + 112, // 'Q' + 175, // 'R' + 192, // 'S' + 188, // 'T' + 156, // 'U' + 140, // 'V' + 143, // 'W' + 123, // 'X' + 133, // 'Y' + 128, // 'Z' + 147, // '[' + 138, // '\\' + 146, // ']' + 114, // '^' + 223, // '_' + 151, // '`' + 249, // 'a' + 216, // 'b' + 238, // 'c' + 236, // 'd' + 253, // 'e' + 227, // 'f' + 218, // 'g' + 230, // 'h' + 247, // 'i' + 135, // 'j' + 180, // 'k' + 241, // 'l' + 233, // 'm' + 246, // 'n' + 244, // 'o' + 231, // 'p' + 139, // 'q' + 245, // 'r' + 243, // 's' + 251, // 't' + 235, // 'u' + 201, // 'v' + 196, // 'w' + 240, // 'x' + 214, // 'y' + 152, // 'z' + 182, // '{' + 205, // '|' + 181, // '}' + 127, // '~' + 27, // '\x7f' + 212, // '\x80' + 211, // '\x81' + 210, // '\x82' + 213, // '\x83' + 228, // '\x84' + 197, // '\x85' + 169, // '\x86' + 159, // '\x87' + 131, // '\x88' + 172, // '\x89' + 105, // '\x8a' + 80, // '\x8b' + 98, // '\x8c' + 96, // '\x8d' + 97, // '\x8e' + 81, // '\x8f' + 207, // '\x90' + 145, // '\x91' + 116, // '\x92' + 115, // '\x93' + 144, // '\x94' + 130, // '\x95' + 153, // '\x96' + 121, // '\x97' + 107, // '\x98' + 132, // '\x99' + 109, // '\x9a' + 110, // '\x9b' + 124, // '\x9c' + 111, // '\x9d' + 82, // '\x9e' + 108, // '\x9f' + 118, // '\xa0' + 141, // '¡' + 113, // '¢' + 129, // '£' + 119, // '¤' + 125, // '¥' + 165, // '¦' + 117, // '§' + 92, // '¨' + 106, // '©' + 83, // 'ª' + 72, // '«' + 99, // '¬' + 93, // '\xad' + 65, // '®' + 79, // '¯' + 166, // '°' + 237, // '±' + 163, // '²' + 199, // '³' + 190, // '´' + 225, // 'µ' + 209, // '¶' + 203, // '·' + 198, // '¸' + 217, // '¹' + 219, // 'º' + 206, // '»' + 234, // '¼' + 248, // '½' + 158, // '¾' + 239, // '¿' + 255, // 'À' + 255, // 'Á' + 255, // 'Â' + 255, // 'Ã' + 255, // 'Ä' + 255, // 'Å' + 255, // 'Æ' + 255, // 'Ç' + 255, // 'È' + 255, // 'É' + 255, // 'Ê' + 255, // 'Ë' + 255, // 'Ì' + 255, // 'Í' + 255, // 'Î' + 255, // 'Ï' + 255, // 'Ð' + 255, // 'Ñ' + 255, // 'Ò' + 255, // 'Ó' + 255, // 'Ô' + 255, // 'Õ' + 255, // 'Ö' + 255, // '×' + 255, // 'Ø' + 255, // 'Ù' + 255, // 'Ú' + 255, // 'Û' + 255, // 'Ü' + 255, // 'Ý' + 255, // 'Þ' + 255, // 'ß' + 255, // 'à' + 255, // 'á' + 255, // 'â' + 255, // 'ã' + 255, // 'ä' + 255, // 'å' + 255, // 'æ' + 255, // 'ç' + 255, // 'è' + 255, // 'é' + 255, // 'ê' + 255, // 'ë' + 255, // 'ì' + 255, // 'í' + 255, // 'î' + 255, // 'ï' + 255, // 'ð' + 255, // 'ñ' + 
255, // 'ò' + 255, // 'ó' + 255, // 'ô' + 255, // 'õ' + 255, // 'ö' + 255, // '÷' + 255, // 'ø' + 255, // 'ù' + 255, // 'ú' + 255, // 'û' + 255, // 'ü' + 255, // 'ý' + 255, // 'þ' + 255, // 'ÿ' +} diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/classes.go b/vendor/github.com/petar-dambovaliev/aho-corasick/classes.go new file mode 100644 index 00000000..c942c023 --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/classes.go @@ -0,0 +1,79 @@ +package aho_corasick + +import ( + "math" +) + +type byteClassRepresentatives struct { + classes *byteClasses + bbyte int + lastClass *byte +} + +func (b *byteClassRepresentatives) next() *byte { + for b.bbyte < 256 { + bbyte := byte(b.bbyte) + class := b.classes.bytes[bbyte] + b.bbyte += 1 + + if b.lastClass == nil || *b.lastClass != class { + c := class + b.lastClass = &c + return &bbyte + } + } + return nil +} + +type byteClassBuilder []bool + +func (b byteClassBuilder) setRange(start, end byte) { + if start > 0 { + b[int(start)-1] = true + } + b[int(end)] = true +} + +func (b byteClassBuilder) build() byteClasses { + var classes byteClasses + var class byte + i := 0 + for { + classes.bytes[byte(i)] = class + if i >= 255 { + break + } + if b[i] { + if class+1 > math.MaxUint8 { + panic("shit happens") + } + class += 1 + } + i += 1 + } + return classes +} + +func newByteClassBuilder() byteClassBuilder { + return make([]bool, 256) +} + +type byteClasses struct { + bytes [256]byte +} + +func singletons() byteClasses { + var bc byteClasses + for i := range bc.bytes { + bc.bytes[i] = byte(i) + } + return bc +} + +func (b byteClasses) alphabetLen() int { + return int(b.bytes[255]) + 1 +} + +func (b byteClasses) isSingleton() bool { + return b.alphabetLen() == 256 +} diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/dfa.go b/vendor/github.com/petar-dambovaliev/aho-corasick/dfa.go new file mode 100644 index 00000000..c4242359 --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/dfa.go @@ -0,0 +1,731 @@ +package aho_corasick + +import ( + "unsafe" +) + +type iDFA struct { + atom automaton +} + +func (d iDFA) MatchKind() *matchKind { + return d.atom.MatchKind() +} + +func (d iDFA) StartState() stateID { + return d.atom.StartState() +} + +func (d iDFA) MaxPatternLen() int { + return d.atom.Repr().max_pattern_len +} + +func (d iDFA) PatternCount() int { + return d.atom.Repr().pattern_count +} + +func (d iDFA) Prefilter() prefilter { + return d.atom.Prefilter() +} + +func (d iDFA) UsePrefilter() bool { + p := d.Prefilter() + if p == nil { + return false + } + return !p.LooksForNonStartOfMatch() +} + +func (d iDFA) OverlappingFindAt(prestate *prefilterState, haystack []byte, at int, state_id *stateID, match_index *int) *Match { + return overlappingFindAt(d.atom, prestate, haystack, at, state_id, match_index) +} + +func (d iDFA) EarliestFindAt(prestate *prefilterState, haystack []byte, at int, state_id *stateID) *Match { + return earliestFindAt(d.atom, prestate, haystack, at, state_id) +} + +func (d iDFA) FindAtNoState(prestate *prefilterState, haystack []byte, at int) *Match { + return findAtNoState(d.atom, prestate, haystack, at) +} + +func (n iDFA) LeftmostFindAtNoState(prestate *prefilterState, haystack []byte, at int) *Match { + return leftmostFindAtNoState(n.atom, prestate, haystack, at) +} + +type iDFABuilder struct { + premultiply bool + byte_classes bool +} + +func (d *iDFABuilder) build(nfa *iNFA) iDFA { + var byteClasses byteClasses + if d.byte_classes { + byteClasses = nfa.byteClasses + } else { 
+ byteClasses = singletons() + } + + alphabet_len := byteClasses.alphabetLen() + trans := make([]stateID, alphabet_len*len(nfa.states)) + for i := range trans { + trans[i] = failedStateID + } + + matches := make([][]pattern, len(nfa.states)) + var p prefilter + + if nfa.prefil != nil { + p = nfa.prefil.clone() + } + + rep := iRepr{ + match_kind: nfa.matchKind, + anchored: nfa.anchored, + premultiplied: false, + start_id: nfa.startID, + max_pattern_len: nfa.maxPatternLen, + pattern_count: nfa.patternCount, + state_count: len(nfa.states), + max_match: failedStateID, + heap_bytes: 0, + prefilter: p, + byte_classes: byteClasses, + trans: trans, + matches: matches, + } + + for id := 0; id < len(nfa.states); id += 1 { + rep.matches[id] = append(rep.matches[id], nfa.states[id].matches...) + fail := nfa.states[id].fail + + nfa.iterAllTransitions(&byteClasses, stateID(id), func(tr *next) { + if tr.id == failedStateID { + tr.id = nfaNextStateMemoized(nfa, &rep, stateID(id), fail, tr.key) + } + rep.setNextState(stateID(id), tr.key, tr.id) + }) + + } + + rep.shuffleMatchStates() + rep.calculateSize() + + if d.premultiply { + rep.premultiply() + if byteClasses.isSingleton() { + return iDFA{&iPremultiplied{rep}} + } else { + return iDFA{&iPremultipliedByteClass{&rep}} + } + } + if byteClasses.isSingleton() { + return iDFA{&iStandard{rep}} + } + return iDFA{&iByteClass{&rep}} +} + +type iByteClass struct { + repr *iRepr +} + +func (p iByteClass) FindAtNoState(prefilterState *prefilterState, bytes []byte, i int) *Match { + return findAtNoState(p, prefilterState, bytes, i) +} + +func (p iByteClass) Repr() *iRepr { + return p.repr +} + +func (p iByteClass) MatchKind() *matchKind { + return &p.repr.match_kind +} + +func (p iByteClass) Anchored() bool { + return p.repr.anchored +} + +func (p iByteClass) Prefilter() prefilter { + return p.repr.prefilter +} + +func (p iByteClass) StartState() stateID { + return p.repr.start_id +} + +func (b iByteClass) IsValid(id stateID) bool { + return int(id) < b.repr.state_count +} + +func (b iByteClass) IsMatchState(id stateID) bool { + return b.repr.isMatchState(id) +} + +func (b iByteClass) IsMatchOrDeadState(id stateID) bool { + return b.repr.isMatchStateOrDeadState(id) +} + +func (b iByteClass) GetMatch(id stateID, i int, i2 int) *Match { + return b.repr.GetMatch(id, i, i2) +} + +func (b iByteClass) MatchCount(id stateID) int { + return b.repr.MatchCount(id) +} + +func (b iByteClass) NextState(id stateID, b2 byte) stateID { + alphabet_len := b.repr.byte_classes.alphabetLen() + input := b.repr.byte_classes.bytes[b2] + o := int(id)*alphabet_len + int(input) + return b.repr.trans[o] +} + +func (p iByteClass) NextStateNoFail(id stateID, b byte) stateID { + next := p.NextState(id, b) + if next == failedStateID { + panic("automaton should never return fail_id for next state") + } + return next +} + +func (p iByteClass) StandardFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return standardFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iByteClass) StandardFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return standardFindAtImp(&p, prefilterState, prefilter, bytes, i, id) +} + +func (p iByteClass) LeftmostFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iByteClass) LeftmostFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) 
*Match { + return leftmostFindAtImp(&p, prefilterState, prefilter, bytes, i, id) +} + +func (p iByteClass) LeftmostFindAtNoState(prefilterState *prefilterState, bytes []byte, i int) *Match { + return leftmostFindAtNoState(&p, prefilterState, bytes, i) +} + +func (p iByteClass) LeftmostFindAtNoStateImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int) *Match { + return leftmostFindAtNoStateImp(&p, prefilterState, prefilter, bytes, i) +} + +func (p iByteClass) OverlappingFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID, i2 *int) *Match { + return overlappingFindAt(&p, prefilterState, bytes, i, id, i2) +} + +func (p iByteClass) EarliestFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return earliestFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iByteClass) FindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return findAt(&p, prefilterState, bytes, i, id) +} + +type iPremultipliedByteClass struct { + repr *iRepr +} + +func (p iPremultipliedByteClass) FindAtNoState(prefilterState *prefilterState, bytes []byte, i int) *Match { + return findAtNoState(p, prefilterState, bytes, i) +} + +func (p iPremultipliedByteClass) Repr() *iRepr { + return p.repr +} + +func (p iPremultipliedByteClass) MatchKind() *matchKind { + return &p.repr.match_kind +} + +func (p iPremultipliedByteClass) Anchored() bool { + return p.repr.anchored +} + +func (p iPremultipliedByteClass) Prefilter() prefilter { + return p.repr.prefilter +} + +func (p iPremultipliedByteClass) StartState() stateID { + return p.repr.start_id +} + +func (p iPremultipliedByteClass) IsValid(id stateID) bool { + return (int(id) / p.repr.alphabetLen()) < p.repr.state_count +} + +func (p iPremultipliedByteClass) IsMatchState(id stateID) bool { + return p.repr.isMatchState(id) +} + +func (p iPremultipliedByteClass) IsMatchOrDeadState(id stateID) bool { + return p.repr.isMatchStateOrDeadState(id) +} + +func (p iPremultipliedByteClass) GetMatch(id stateID, match_index int, end int) *Match { + if id > p.repr.max_match { + return nil + } + + m := p.repr.matches[int(id)/p.repr.alphabetLen()][match_index] + return &Match{ + pattern: m.PatternID, + len: m.PatternLength, + end: end, + } +} + +func (p iPremultipliedByteClass) MatchCount(id stateID) int { + o := int(id) / p.repr.alphabetLen() + return len(p.repr.matches[o]) +} + +func (p iPremultipliedByteClass) NextState(id stateID, b byte) stateID { + input := p.repr.byte_classes.bytes[b] + o := int(id) + int(input) + return p.repr.trans[o] +} + +//todo this leaks garbage +func (p iPremultipliedByteClass) NextStateNoFail(id stateID, b byte) stateID { + next := p.NextState(id, b) + if next == failedStateID { + panic("automaton should never return fail_id for next state") + } + return next +} + +func (p iPremultipliedByteClass) StandardFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return standardFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iPremultipliedByteClass) StandardFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return standardFindAtImp(&p, prefilterState, prefilter, bytes, i, id) +} + +func (p iPremultipliedByteClass) LeftmostFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iPremultipliedByteClass) LeftmostFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes 
[]byte, i int, id *stateID) *Match { + return leftmostFindAtImp(&p, prefilterState, prefilter, bytes, i, id) +} + +func (p iPremultipliedByteClass) LeftmostFindAtNoState(prefilterState *prefilterState, bytes []byte, i int) *Match { + return leftmostFindAtNoState(&p, prefilterState, bytes, i) +} + +func (p iPremultipliedByteClass) LeftmostFindAtNoStateImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int) *Match { + return leftmostFindAtNoStateImp(&p, prefilterState, prefilter, bytes, i) +} + +func (p iPremultipliedByteClass) OverlappingFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID, i2 *int) *Match { + return overlappingFindAt(&p, prefilterState, bytes, i, id, i2) +} + +func (p iPremultipliedByteClass) EarliestFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return earliestFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iPremultipliedByteClass) FindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return findAt(&p, prefilterState, bytes, i, id) +} + +type iPremultiplied struct { + repr iRepr +} + +func (p iPremultiplied) FindAtNoState(prefilterState *prefilterState, bytes []byte, i int) *Match { + return findAtNoState(p, prefilterState, bytes, i) +} + +func (p iPremultiplied) Repr() *iRepr { + return &p.repr +} + +func (p iPremultiplied) MatchKind() *matchKind { + return &p.repr.match_kind +} + +func (p iPremultiplied) Anchored() bool { + return p.repr.anchored +} + +func (p iPremultiplied) Prefilter() prefilter { + return p.repr.prefilter +} + +func (p iPremultiplied) StartState() stateID { + return p.repr.start_id +} + +func (p iPremultiplied) IsValid(id stateID) bool { + return int(id)/256 < p.repr.state_count +} + +func (p iPremultiplied) IsMatchState(id stateID) bool { + return p.repr.isMatchState(id) +} + +func (p iPremultiplied) IsMatchOrDeadState(id stateID) bool { + return p.repr.isMatchStateOrDeadState(id) +} + +func (p iPremultiplied) GetMatch(id stateID, match_index int, end int) *Match { + if id > p.repr.max_match { + return nil + } + m := p.repr.matches[int(id)/256][match_index] + return &Match{ + pattern: m.PatternID, + len: m.PatternLength, + end: end, + } +} + +func (p iPremultiplied) MatchCount(id stateID) int { + return len(p.repr.matches[int(id)/256]) +} + +func (p iPremultiplied) NextState(id stateID, b byte) stateID { + o := int(id) + int(b) + return p.repr.trans[o] +} + +func (p iPremultiplied) NextStateNoFail(id stateID, b byte) stateID { + next := p.NextState(id, b) + if next == failedStateID { + panic("automaton should never return fail_id for next state") + } + return next +} + +func (p iPremultiplied) StandardFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return standardFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iPremultiplied) StandardFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return standardFindAtImp(&p, prefilterState, prefilter, bytes, i, id) +} + +func (p iPremultiplied) LeftmostFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iPremultiplied) LeftmostFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAtImp(&p, prefilterState, prefilter, bytes, i, id) +} + +func (p iPremultiplied) LeftmostFindAtNoState(prefilterState *prefilterState, bytes []byte, i 
int) *Match { + return leftmostFindAtNoState(&p, prefilterState, bytes, i) +} + +func (p iPremultiplied) LeftmostFindAtNoStateImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int) *Match { + return leftmostFindAtNoStateImp(&p, prefilterState, prefilter, bytes, i) +} + +func (p iPremultiplied) OverlappingFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID, i2 *int) *Match { + return overlappingFindAt(&p, prefilterState, bytes, i, id, i2) +} + +func (p iPremultiplied) EarliestFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return earliestFindAt(&p, prefilterState, bytes, i, id) +} + +func (p iPremultiplied) FindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return findAt(&p, prefilterState, bytes, i, id) +} + +func nfaNextStateMemoized(nfa *iNFA, dfa *iRepr, populating stateID, current stateID, input byte) stateID { + for { + if current < populating { + return dfa.nextState(current, input) + } + + next := nfa.states[current].nextState(input) + + if next != failedStateID { + return next + } + current = nfa.states[current].fail + } +} + +func newDFABuilder() *iDFABuilder { + return &iDFABuilder{ + premultiply: true, + byte_classes: true, + } +} + +type iStandard struct { + repr iRepr +} + +func (p iStandard) FindAtNoState(prefilterState *prefilterState, bytes []byte, i int) *Match { + return findAtNoState(&p, prefilterState, bytes, i) +} + +func (p iStandard) Repr() *iRepr { + return &p.repr +} + +func (s *iStandard) MatchKind() *matchKind { + return &s.repr.match_kind +} + +func (s *iStandard) Anchored() bool { + return s.repr.anchored +} + +func (s *iStandard) Prefilter() prefilter { + return s.repr.prefilter +} + +func (s *iStandard) StartState() stateID { + return s.repr.start_id +} + +func (s *iStandard) IsValid(id stateID) bool { + return int(id) < s.repr.state_count +} + +func (s *iStandard) IsMatchState(id stateID) bool { + return s.repr.isMatchState(id) +} + +func (s *iStandard) IsMatchOrDeadState(id stateID) bool { + return s.repr.isMatchStateOrDeadState(id) +} + +func (s *iStandard) GetMatch(id stateID, match_index int, end int) *Match { + return s.repr.GetMatch(id, match_index, end) +} + +func (s *iStandard) MatchCount(id stateID) int { + return s.repr.MatchCount(id) +} + +func (s *iStandard) NextState(current stateID, input byte) stateID { + o := int(current)*256 + int(input) + return s.repr.trans[o] +} + +func (s *iStandard) NextStateNoFail(id stateID, b byte) stateID { + next := s.NextState(id, b) + if next == failedStateID { + panic("automaton should never return fail_id for next state") + } + return next +} + +func (s *iStandard) StandardFindAt(state *prefilterState, bytes []byte, i int, id *stateID) *Match { + return standardFindAt(s, state, bytes, i, id) +} + +func (s *iStandard) StandardFindAtImp(state *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return standardFindAtImp(s, state, prefilter, bytes, i, id) +} + +func (s *iStandard) LeftmostFindAt(state *prefilterState, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAt(s, state, bytes, i, id) +} + +func (s *iStandard) LeftmostFindAtImp(state *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAtImp(s, state, prefilter, bytes, i, id) +} + +func (s *iStandard) LeftmostFindAtNoState(state *prefilterState, bytes []byte, i int) *Match { + return leftmostFindAtNoState(s, state, bytes, i) +} + +func (s *iStandard) 
LeftmostFindAtNoStateImp(state *prefilterState, prefilter prefilter, bytes []byte, i int) *Match { + return leftmostFindAtNoStateImp(s, state, prefilter, bytes, i) +} + +func (s *iStandard) OverlappingFindAt(state *prefilterState, bytes []byte, i int, id *stateID, i2 *int) *Match { + return overlappingFindAt(s, state, bytes, i, id, i2) +} + +func (s *iStandard) EarliestFindAt(state *prefilterState, bytes []byte, i int, id *stateID) *Match { + return earliestFindAt(s, state, bytes, i, id) +} + +func (s *iStandard) FindAt(state *prefilterState, bytes []byte, i int, id *stateID) *Match { + return findAt(s, state, bytes, i, id) +} + +type iRepr struct { + match_kind matchKind + anchored bool + premultiplied bool + start_id stateID + max_pattern_len int + pattern_count int + state_count int + max_match stateID + heap_bytes int + prefilter prefilter + byte_classes byteClasses + trans []stateID + matches [][]pattern +} + +func (r *iRepr) premultiply() { + if r.premultiplied || r.state_count <= 1 { + return + } + alpha_len := r.alphabetLen() + + for id := 2; id < r.state_count; id++ { + offset := id * alpha_len + slice := r.trans[offset : offset+alpha_len] + for i := range slice { + if slice[i] == deadStateID { + continue + } + slice[i] = stateID(int(slice[i]) * alpha_len) + } + } + r.premultiplied = true + r.start_id = stateID(int(r.start_id) * alpha_len) + r.max_match = stateID(int(r.max_match) * alpha_len) +} + +func (r *iRepr) setNextState(from stateID, b byte, to stateID) { + alphabet_len := r.alphabetLen() + b = r.byte_classes.bytes[b] + r.trans[int(from)*alphabet_len+int(b)] = to +} + +func (r *iRepr) alphabetLen() int { + return r.byte_classes.alphabetLen() +} + +func (r *iRepr) nextState(from stateID, b byte) stateID { + alphabet_len := r.alphabetLen() + b = r.byte_classes.bytes[b] + return r.trans[int(from)*alphabet_len+int(b)] +} + +func (r *iRepr) isMatchState(id stateID) bool { + return id <= r.max_match && id > deadStateID +} + +func (r *iRepr) isMatchStateOrDeadState(id stateID) bool { + return id <= r.max_match +} + +func (r *iRepr) GetMatch(id stateID, match_index int, end int) *Match { + i := int(id) + if id > r.max_match { + return nil + } + if i > len(r.matches) { + return nil + } + matches := r.matches[int(id)] + if match_index > len(matches) { + return nil + } + pattern := matches[match_index] + + return &Match{ + pattern: pattern.PatternID, + len: pattern.PatternLength, + end: end, + } +} + +func (r *iRepr) MatchCount(id stateID) int { + return len(r.matches[id]) +} + +func (r *iRepr) swapStates(id1 stateID, id2 stateID) { + if r.premultiplied { + panic("cannot shuffle match states of premultiplied iDFA") + } + + o1 := int(id1) * r.alphabetLen() + o2 := int(id2) * r.alphabetLen() + + for b := 0; b < r.alphabetLen(); b++ { + r.trans[o1+b], r.trans[o2+b] = r.trans[o2+b], r.trans[o1+b] + } + r.matches[int(id1)], r.matches[int(id2)] = r.matches[int(id2)], r.matches[int(id1)] +} + +func (r *iRepr) calculateSize() { + intSize := int(unsafe.Sizeof(stateID(1))) + size := (len(r.trans) * intSize) + (len(r.matches) * (intSize * 3)) + + for _, state_matches := range r.matches { + size += len(state_matches) * (intSize * 2) + } + var hb int + if r.prefilter != nil { + hb = r.prefilter.HeapBytes() + } + size += hb + r.heap_bytes = size +} + +func (r *iRepr) shuffleMatchStates() { + if r.premultiplied { + panic("cannot shuffle match states of premultiplied iDFA") + } + + if r.state_count <= 1 { + return + } + + first_non_match := int(r.start_id) + for first_non_match < r.state_count && 
len(r.matches[first_non_match]) > 0 { + first_non_match += 1 + } + swaps := make([]stateID, r.state_count) + + for i := range swaps { + swaps[i] = failedStateID + } + + cur := r.state_count - 1 + + for cur > first_non_match { + if len(r.matches[cur]) > 0 { + r.swapStates(stateID(cur), stateID(first_non_match)) + swaps[cur] = stateID(first_non_match) + swaps[first_non_match] = stateID(cur) + + first_non_match += 1 + for first_non_match < cur && len(r.matches[first_non_match]) > 0 { + first_non_match += 1 + } + } + cur -= 1 + } + + for id := 0; id < r.state_count; id++ { + alphabet_len := r.alphabetLen() + offset := id * alphabet_len + + slice := r.trans[offset : offset+alphabet_len] + + for i := range slice { + if swaps[slice[i]] != failedStateID { + slice[i] = swaps[slice[i]] + } + } + } + + if swaps[r.start_id] != failedStateID { + r.start_id = swaps[r.start_id] + } + r.max_match = stateID(first_non_match - 1) +} + +type pattern struct { + PatternID int + PatternLength int +} diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/nfa.go b/vendor/github.com/petar-dambovaliev/aho-corasick/nfa.go new file mode 100644 index 00000000..c111ade0 --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/nfa.go @@ -0,0 +1,830 @@ +package aho_corasick + +import ( + "sort" + "unsafe" +) + +type iNFA struct { + matchKind matchKind + startID stateID + maxPatternLen int + patternCount int + heapBytes int + prefil prefilter + anchored bool + byteClasses byteClasses + states []state +} + +func (n *iNFA) FindAtNoState(prefilterState *prefilterState, bytes []byte, i int) *Match { + return findAtNoState(n, prefilterState, bytes, i) +} + +func (n *iNFA) Repr() *iRepr { + return nil +} + +func (n *iNFA) MatchKind() *matchKind { + return &n.matchKind +} + +func (n *iNFA) Anchored() bool { + return n.anchored +} + +func (n *iNFA) Prefilter() prefilter { + return n.prefil +} + +func (n *iNFA) StartState() stateID { + return n.startID +} + +func (n *iNFA) IsValid(id stateID) bool { + return int(id) < len(n.states) +} + +func (n *iNFA) IsMatchState(id stateID) bool { + return n.state(id).isMatch() +} + +func (n *iNFA) IsMatchOrDeadState(id stateID) bool { + return isMatchOrDeadState(n, id) +} + +func (n *iNFA) MatchCount(id stateID) int { + return len(n.states[id].matches) +} + +func (n *iNFA) NextState(id stateID, b byte) stateID { + for { + state := n.states[id] + next := state.nextState(b) + if next != failedStateID { + return next + } + id = state.fail + } +} + +func (n *iNFA) NextStateNoFail(id stateID, b byte) stateID { + next := n.NextState(id, b) + if next == failedStateID { + panic("automaton should never return fail_id for next state") + } + return next +} + +func (n *iNFA) StandardFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return standardFindAt(n, prefilterState, bytes, i, id) +} + +func (n *iNFA) StandardFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return standardFindAtImp(n, prefilterState, prefilter, bytes, i, id) +} + +func (n *iNFA) LeftmostFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAt(n, prefilterState, bytes, i, id) +} + +func (n *iNFA) LeftmostFindAtImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int, id *stateID) *Match { + return leftmostFindAtImp(n, prefilterState, prefilter, bytes, i, id) +} + +func (n *iNFA) LeftmostFindAtNoState(prefilterState *prefilterState, bytes []byte, i int) 
*Match { + return leftmostFindAtNoState(n, prefilterState, bytes, i) +} + +func (n *iNFA) LeftmostFindAtNoStateImp(prefilterState *prefilterState, prefilter prefilter, bytes []byte, i int) *Match { + return leftmostFindAtNoStateImp(n, prefilterState, prefilter, bytes, i) +} + +func (n *iNFA) OverlappingFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID, i2 *int) *Match { + return overlappingFindAt(n, prefilterState, bytes, i, id, i2) +} + +func (n *iNFA) EarliestFindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return earliestFindAt(n, prefilterState, bytes, i, id) +} + +func (n *iNFA) FindAt(prefilterState *prefilterState, bytes []byte, i int, id *stateID) *Match { + return findAt(n, prefilterState, bytes, i, id) +} + +func (n *iNFA) MaxPatternLen() int { + return n.maxPatternLen +} + +func (n *iNFA) PatternCount() int { + return n.patternCount +} + +func (n *iNFA) UsePrefilter() bool { + p := n.Prefilter() + if p == nil { + return false + } + return !p.LooksForNonStartOfMatch() +} + +func (n *iNFA) GetMatch(id stateID, matchIndex int, end int) *Match { + if int(id) >= len(n.states) { + return nil + } + state := n.states[id] + if matchIndex >= len(state.matches) { + return nil + } + pat := state.matches[matchIndex] + return &Match{ + pattern: pat.PatternID, + len: pat.PatternLength, + end: end, + } +} + +func (n *iNFA) addDenseState(depth int) stateID { + d := newDense() + trans := transitions{dense: &d} + id := stateID(len(n.states)) + + fail := n.startID + + if n.anchored { + fail = deadStateID + } + + n.states = append(n.states, state{ + trans: trans, + fail: fail, + matches: nil, + depth: depth, + }) + return id +} + +func (n *iNFA) addSparseState(depth int) stateID { + trans := transitions{sparse: &sparse{inner: nil}} + id := stateID(len(n.states)) + + fail := n.startID + + if n.anchored { + fail = deadStateID + } + + n.states = append(n.states, state{ + trans: trans, + fail: fail, + matches: nil, + depth: depth, + }) + return id +} + +func (n *iNFA) state(id stateID) *state { + return &n.states[int(id)] +} + +type compiler struct { + builder iNFABuilder + prefilter prefilterBuilder + nfa iNFA + byteclassBuilder byteClassBuilder +} + +func (c *compiler) compile(patterns [][]byte) *iNFA { + c.addState(0) + c.addState(0) + c.addState(0) + + c.buildTrie(patterns) + + c.addStartStateLoop() + c.addDeadStateLoop() + + if !c.builder.anchored { + if c.builder.matchKind.isLeftmost() { + c.fillFailureTransitionsLeftmost() + } else { + c.fillFailureTransitionsStandard() + } + } + c.closeStartStateLoop() + + c.nfa.byteClasses = c.byteclassBuilder.build() + if !c.builder.anchored { + c.nfa.prefil = c.prefilter.build() + } + c.calculateSize() + + return &c.nfa +} + +func (c *compiler) calculateSize() { + var size int + for _, state := range c.nfa.states { + size += state.heapBytes() + } + + c.nfa.heapBytes = size +} + +func (c *compiler) closeStartStateLoop() { + if c.builder.anchored || (c.builder.matchKind.isLeftmost() && c.nfa.state(c.nfa.startID).isMatch()) { + startId := c.nfa.startID + start := c.nfa.state(startId) + + for b := 0; b < 256; b++ { + if start.nextState(byte(b)) == startId { + start.setNextState(byte(b), deadStateID) + } + } + } +} + +type queuedState struct { + id stateID + matchAtDepth *int +} + +func startQueuedState(nfa *iNFA) queuedState { + var matchAtDepth *int + if nfa.states[nfa.startID].isMatch() { + r := 0 + matchAtDepth = &r + } + return queuedState{id: nfa.startID, matchAtDepth: matchAtDepth} +} + +func (q 
*queuedState) nextQueuedState(nfa *iNFA, id stateID) queuedState { + nextMatchAtDepth := q.nextMatchAtDepth(nfa, id) + return queuedState{id, nextMatchAtDepth} +} + +func (q *queuedState) nextMatchAtDepth( + nfa *iNFA, + next stateID, +) *int { + switch q.matchAtDepth { + case nil: + if !nfa.state(next).isMatch() { + return nil + } + default: + return q.matchAtDepth + } + + depth := nfa.state(next).depth - *nfa.state(next).getLongestMatch() + 1 + return &depth +} + +func (c *compiler) fillFailureTransitionsStandard() { + queue := make([]stateID, 0) + seen := c.queuedSet() + + for b := 0; b < 256; b++ { + next := c.nfa.state(c.nfa.startID).nextState(byte(b)) + if next != c.nfa.startID { + if !seen.contains(next) { + queue = append(queue, next) + seen.insert(next) + } + } + } + + for len(queue) > 0 { + id := queue[0] + queue = queue[1:] + it := newIterTransitions(&c.nfa, id) + + for next := it.next(); next != nil; next = it.next() { + if seen.contains(next.id) { + continue + } + queue = append(queue, next.id) + seen.insert(next.id) + + fail := it.nfa.state(id).fail + for it.nfa.state(fail).nextState(next.key) == failedStateID { + fail = it.nfa.state(fail).fail + } + fail = it.nfa.state(fail).nextState(next.key) + it.nfa.state(next.id).fail = fail + it.nfa.copyMatches(fail, next.id) + } + it.nfa.copyEmptyMatches(id) + } +} + +func (c *compiler) fillFailureTransitionsLeftmost() { + queue := make([]queuedState, 0) + seen := c.queuedSet() + start := startQueuedState(&c.nfa) + + for b := 0; b < 256; b++ { + nextId := c.nfa.state(c.nfa.startID).nextState(byte(b)) + if nextId != start.id { + next := start.nextQueuedState(&c.nfa, nextId) + if !seen.contains(next.id) { + queue = append(queue, next) + seen.insert(next.id) + } + if c.nfa.state(nextId).isMatch() { + c.nfa.state(nextId).fail = deadStateID + } + } + } + + for len(queue) > 0 { + item := queue[0] + queue = queue[1:] + anyTrans := false + it := newIterTransitions(&c.nfa, item.id) + tr := it.next() + for tr != nil { + anyTrans = true + next := item.nextQueuedState(it.nfa, tr.id) + if seen.contains(next.id) { + tr = it.next() + continue + } + queue = append(queue, next) + seen.insert(next.id) + + fail := it.nfa.state(item.id).fail + for it.nfa.state(fail).nextState(tr.key) == failedStateID { + fail = it.nfa.state(fail).fail + } + fail = it.nfa.state(fail).nextState(tr.key) + + if next.matchAtDepth != nil { + failDepth := it.nfa.state(fail).depth + nextDepth := it.nfa.state(next.id).depth + if nextDepth-*next.matchAtDepth+1 > failDepth { + it.nfa.state(next.id).fail = deadStateID + tr = it.next() + continue + } + + if start.id == it.nfa.state(next.id).fail { + panic("states that are match states or follow match states should never have a failure transition back to the start state in leftmost searching") + } + } + it.nfa.state(next.id).fail = fail + it.nfa.copyMatches(fail, next.id) + tr = it.next() + } + if !anyTrans && it.nfa.state(item.id).isMatch() { + it.nfa.state(item.id).fail = deadStateID + } + } +} + +func (n *iNFA) copyEmptyMatches(dst stateID) { + n.copyMatches(n.startID, dst) +} + +func (n *iNFA) copyMatches(src stateID, dst stateID) { + srcState, dstState := n.getTwo(src, dst) + dstState.matches = append(dstState.matches, srcState.matches...) 
+} + +func (n *iNFA) getTwo(i stateID, j stateID) (*state, *state) { + if i == j { + panic("src and dst should not be equal") + } + + if i < j { + before, after := n.states[0:j], n.states[j:] + return &before[i], &after[0] + } + + before, after := n.states[0:i], n.states[i:] + return &after[0], &before[j] +} + +func (n *iNFA) iterAllTransitions(byteClasses *byteClasses, id stateID, f func(tr *next)) { + n.states[id].trans.iterAll(byteClasses, f) +} + +func newIterTransitions(nfa *iNFA, stateId stateID) iterTransitions { + return iterTransitions{ + nfa: nfa, + stateId: stateId, + cur: 0, + } +} + +type iterTransitions struct { + nfa *iNFA + stateId stateID + cur int +} + +type next struct { + key byte + id stateID +} + +func (i *iterTransitions) next() *next { + sparse := i.nfa.states[int(i.stateId)].trans.sparse + if sparse != nil { + if i.cur >= len(sparse.inner) { + return nil + } + ii := i.cur + i.cur += 1 + return &next{ + key: sparse.inner[ii].b, + id: sparse.inner[ii].s, + } + } + + dense := i.nfa.states[int(i.stateId)].trans.dense + for i.cur < len(dense.inner) { + if i.cur >= 256 { + panic("There are always exactly 255 transitions in dense repr") + } + + b := byte(i.cur) + id := dense.inner[b] + i.cur += 1 + if id != failedStateID { + return &next{ + key: b, + id: id, + } + } + } + return nil +} + +type queuedSet struct { + set map[stateID]struct{} + ind int +} + +func newInertQueuedSet() queuedSet { + return queuedSet{ + set: make(map[stateID]struct{}), + ind: 0, + } +} + +func (q *queuedSet) contains(s stateID) bool { + _, ok := q.set[s] + return ok +} + +func (q *queuedSet) insert(s stateID) { + q.set[s] = struct{}{} +} + +func newActiveQueuedSet() queuedSet { + return queuedSet{ + set: make(map[stateID]struct{}, 0), + ind: 0, + } +} + +func (c *compiler) queuedSet() queuedSet { + if c.builder.asciiCaseInsensitive { + return newActiveQueuedSet() + } + return newInertQueuedSet() +} + +func (c *compiler) addStartStateLoop() { + startId := c.nfa.startID + start := c.nfa.state(startId) + for b := 0; b < 256; b++ { + if start.nextState(byte(b)) == failedStateID { + start.setNextState(byte(b), startId) + } + } +} + +func (c *compiler) addDeadStateLoop() { + dead := c.nfa.state(deadStateID) + for b := 0; b < 256; b++ { + dead.setNextState(byte(b), deadStateID) + } +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func (c *compiler) buildTrie(patterns [][]byte) { + +Patterns: + for pati, pat := range patterns { + c.nfa.maxPatternLen = max(c.nfa.maxPatternLen, len(pat)) + c.nfa.patternCount += 1 + + prev := c.nfa.startID + sawMatch := false + + for depth, b := range pat { + sawMatch = sawMatch || c.nfa.state(prev).isMatch() + if c.builder.matchKind.isLeftmostFirst() && sawMatch { + continue Patterns + } + + c.byteclassBuilder.setRange(b, b) + + if c.builder.asciiCaseInsensitive { + b := oppositeAsciiCase(b) + c.byteclassBuilder.setRange(b, b) + } + + next := c.nfa.state(prev).nextState(b) + + if next != failedStateID { + prev = next + } else { + next := c.addState(depth + 1) + c.nfa.state(prev).setNextState(b, next) + if c.builder.asciiCaseInsensitive { + b := oppositeAsciiCase(b) + c.nfa.state(prev).setNextState(b, next) + } + prev = next + } + } + c.nfa.state(prev).addMatch(pati, len(pat)) + + if c.builder.prefilter { + c.prefilter.add(pat) + } + } +} + +const asciiCaseMask byte = 0b0010_0000 + +func toAsciiLowercase(b byte) byte { + return b | (1 * asciiCaseMask) +} + +func toAsciiUpper(b byte) byte { + b &= ^(1 * asciiCaseMask) + return b +} + +func 
oppositeAsciiCase(b byte) byte { + if 'A' <= b && b <= 'Z' { + return toAsciiLowercase(b) + } else if 'a' <= b && b <= 'z' { + return toAsciiUpper(b) + } + return b +} + +func (c *compiler) addState(depth int) stateID { + if depth < c.builder.denseDepth { + return c.nfa.addDenseState(depth) + + } + return c.nfa.addSparseState(depth) +} + +func newCompiler(builder iNFABuilder) compiler { + p := newPrefilterBuilder(builder.asciiCaseInsensitive) + + return compiler{ + builder: builder, + prefilter: p, + nfa: iNFA{ + matchKind: builder.matchKind, + startID: 2, + maxPatternLen: 0, + patternCount: 0, + heapBytes: 0, + prefil: nil, + anchored: builder.anchored, + byteClasses: singletons(), + states: nil, + }, + byteclassBuilder: newByteClassBuilder(), + } +} + +type iNFABuilder struct { + denseDepth int + matchKind matchKind + prefilter bool + anchored bool + asciiCaseInsensitive bool +} + +func newNFABuilder(kind matchKind, asciiCaseInsensitive bool) *iNFABuilder { + return &iNFABuilder{ + denseDepth: 2, + matchKind: kind, + prefilter: true, + anchored: false, + asciiCaseInsensitive: asciiCaseInsensitive, + } +} + +func (b *iNFABuilder) build(patterns [][]byte) *iNFA { + c := newCompiler(*b) + return c.compile(patterns) +} + +type state struct { + trans transitions + fail stateID + matches []pattern + depth int +} + +func (s *state) heapBytes() int { + var i int + intSize := int(unsafe.Sizeof(i)) + return s.trans.heapBytes() + (len(s.matches) * (intSize * 2)) +} + +func (s *state) addMatch(patternID, patternLength int) { + s.matches = append(s.matches, pattern{ + PatternID: patternID, + PatternLength: patternLength, + }) +} + +func (s *state) isMatch() bool { + return len(s.matches) > 0 +} + +func (s *state) getLongestMatch() *int { + if len(s.matches) == 0 { + return nil + } + longest := s.matches[0].PatternLength + return &longest +} + +func (s *state) nextState(input byte) stateID { + return s.trans.nextState(input) +} + +func (s *state) setNextState(input byte, next stateID) { + s.trans.setNextState(input, next) +} + +type transitions struct { + sparse *sparse + dense *dense +} + +func sparseIter(trans []innerSparse, f func(*next)) { + var byte16 uint16 + + for _, tr := range trans { + for byte16 < uint16(tr.b) { + f(&next{ + key: byte(byte16), + id: failedStateID, + }) + byte16 += 1 + } + f(&next{ + key: tr.b, + id: tr.s, + }) + byte16 += 1 + } + + for b := byte16; b < 256; b++ { + f(&next{ + key: byte(b), + id: failedStateID, + }) + } +} + +func (t *transitions) iterAll(byteClasses *byteClasses, f func(tr *next)) { + if byteClasses.isSingleton() { + if t.sparse != nil { + sparseIter(t.sparse.inner, f) + } + + if t.dense != nil { + for b := 0; b < 256; b++ { + f(&next{ + key: byte(b), + id: t.dense.inner[b], + }) + } + } + } else { + if t.sparse != nil { + var lastClass *byte + + sparseIter(t.sparse.inner, func(n *next) { + class := byteClasses.bytes[n.key] + + if lastClass == nil || *lastClass != class { + cc := class + lastClass = &cc + f(n) + } + }) + } + + if t.dense != nil { + bcr := byteClassRepresentatives{ + classes: byteClasses, + bbyte: 0, + lastClass: nil, + } + + for n := bcr.next(); n != nil; n = bcr.next() { + f(&next{ + key: *n, + id: t.dense.inner[*n], + }) + } + } + } + +} + +func (t *transitions) heapBytes() int { + var i int + intSize := int(unsafe.Sizeof(i)) + if t.sparse != nil { + return len(t.sparse.inner) * (2 * intSize) + } + return len(t.dense.inner) * intSize +} + +func (t *transitions) nextState(input byte) stateID { + if t.sparse != nil { + for _, sp := range 
t.sparse.inner { + if sp.b == input { + return sp.s + } + } + return failedStateID + } + return t.dense.inner[input] +} + +func (t *transitions) setNextState(input byte, next stateID) { + if t.sparse != nil { + idx := sort.Search(len(t.sparse.inner), func(i int) bool { + return t.sparse.inner[i].b >= input + }) + + if idx < len(t.sparse.inner) && t.sparse.inner[idx].b == input { + t.sparse.inner[idx].s = next + } else { + if len(t.sparse.inner) > 0 { + is := innerSparse{ + b: input, + s: next, + } + if idx == len(t.sparse.inner) { + t.sparse.inner = append(t.sparse.inner, is) + } else { + t.sparse.inner = append( + t.sparse.inner[:idx+1], + t.sparse.inner[idx:]...) + t.sparse.inner[idx] = is + } + } else { + t.sparse.inner = []innerSparse{ + { + b: input, + s: next, + }, + } + } + } + return + } + t.dense.inner[int(input)] = next +} + +func newDense() dense { + return dense{inner: make([]stateID, 256)} +} + +type dense struct { + inner []stateID +} + +type innerSparse struct { + b byte + s stateID +} + +type sparse struct { + inner []innerSparse +} diff --git a/vendor/github.com/petar-dambovaliev/aho-corasick/prefilter.go b/vendor/github.com/petar-dambovaliev/aho-corasick/prefilter.go new file mode 100644 index 00000000..54fea322 --- /dev/null +++ b/vendor/github.com/petar-dambovaliev/aho-corasick/prefilter.go @@ -0,0 +1,601 @@ +package aho_corasick + +import ( + "math" +) + +type startBytesThree struct { + byte1 byte + byte2 byte + byte3 byte +} + +func (s startBytesThree) NextCandidate(_ *prefilterState, haystack []byte, at int) (interface{}, candidateType) { + for i, b := range haystack[at:] { + if s.byte1 == b || s.byte2 == b || s.byte3 == b { + return at + i, possibleStartOfMatchCandidate + } + } + return nil, noneCandidate +} + +func (s startBytesThree) HeapBytes() int { + return 0 +} + +func (s startBytesThree) ReportsFalsePositives() bool { + return true +} + +func (s startBytesThree) LooksForNonStartOfMatch() bool { + return false +} + +func (s *startBytesThree) clone() prefilter { + if s == nil { + return nil + } + u := *s + return &u +} + +type startBytesTwo struct { + byte1 byte + byte2 byte +} + +func (s startBytesTwo) NextCandidate(_ *prefilterState, haystack []byte, at int) (interface{}, candidateType) { + for i, b := range haystack[at:] { + if s.byte1 == b || s.byte2 == b { + return at + i, possibleStartOfMatchCandidate + } + } + return nil, noneCandidate +} + +func (s startBytesTwo) HeapBytes() int { + return 0 +} + +func (s startBytesTwo) ReportsFalsePositives() bool { + return true +} + +func (s startBytesTwo) LooksForNonStartOfMatch() bool { + return false +} + +func (s *startBytesTwo) clone() prefilter { + if s == nil { + return nil + } + u := *s + return &u +} + +type startBytesOne struct { + byte1 byte +} + +func (s startBytesOne) NextCandidate(_ *prefilterState, haystack []byte, at int) (interface{}, candidateType) { + for i, b := range haystack[at:] { + if s.byte1 == b { + return at + i, possibleStartOfMatchCandidate + } + } + return nil, noneCandidate +} + +func (s startBytesOne) HeapBytes() int { + return 0 +} + +func (s startBytesOne) ReportsFalsePositives() bool { + return true +} + +func (s startBytesOne) LooksForNonStartOfMatch() bool { + return false +} + +func (s *startBytesOne) clone() prefilter { + if s == nil { + return nil + } + u := *s + return &u +} + +type byteSet [256]bool + +func (b *byteSet) contains(bb byte) bool { + return b[int(bb)] +} + +func (b *byteSet) insert(bb byte) bool { + n := !b.contains(bb) + b[int(bb)] = true + return n +} + +type 
rareByteOffset struct { + max byte +} + +type rareByteOffsets struct { + rbo [256]rareByteOffset +} + +func (r *rareByteOffsets) set(b byte, off rareByteOffset) { + m := byte(max(int(r.rbo[int(b)].max), int(off.max))) + r.rbo[int(b)].max = m +} + +type prefilterBuilder struct { + count int + asciiCaseInsensitive bool + startBytes startBytesBuilder + rareBytes rareBytesBuilder +} + +func (p *prefilterBuilder) build() prefilter { + startBytes := p.startBytes.build() + rareBytes := p.rareBytes.build() + + switch true { + case startBytes != nil && rareBytes != nil: + hasFewerBytes := p.startBytes.count < p.rareBytes.count + + hasRarerBytes := p.startBytes.rankSum <= p.rareBytes.rankSum+50 + if hasFewerBytes || hasRarerBytes { + return startBytes + } else { + return rareBytes + } + case startBytes != nil: + return startBytes + case rareBytes != nil: + return rareBytes + case p.asciiCaseInsensitive: + return nil + default: + return nil + } +} + +func (p *prefilterBuilder) add(bytes []byte) { + p.count += 1 + p.startBytes.add(bytes) + p.rareBytes.add(bytes) +} + +func newPrefilterBuilder(asciiCaseInsensitive bool) prefilterBuilder { + return prefilterBuilder{ + count: 0, + asciiCaseInsensitive: asciiCaseInsensitive, + startBytes: newStartBytesBuilder(asciiCaseInsensitive), + rareBytes: newRareBytesBuilder(asciiCaseInsensitive), + } +} + +type rareBytesBuilder struct { + asciiCaseInsensitive bool + rareSet byteSet + byteOffsets rareByteOffsets + available bool + count int + rankSum uint16 +} + +type rareBytesOne struct { + byte1 byte + offset rareByteOffset +} + +func (r rareBytesOne) NextCandidate(state *prefilterState, haystack []byte, at int) (interface{}, candidateType) { + for i, b := range haystack[at:] { + if r.byte1 == b { + pos := at + i + state.lastScanAt = pos + r := pos - int(r.offset.max) + if r < 0 { + r = 0 + } + + if at > r { + r = at + } + return r, possibleStartOfMatchCandidate + } + } + return nil, noneCandidate +} + +func (r rareBytesOne) HeapBytes() int { + return 0 +} + +func (r rareBytesOne) ReportsFalsePositives() bool { + return true +} + +func (r rareBytesOne) LooksForNonStartOfMatch() bool { + return true +} + +func (r *rareBytesOne) clone() prefilter { + if r == nil { + return nil + } + u := *r + return &u +} + +type rareBytesTwo struct { + offsets rareByteOffsets + byte1 byte + byte2 byte +} + +func (r rareBytesTwo) NextCandidate(state *prefilterState, haystack []byte, at int) (interface{}, candidateType) { + for i, b := range haystack[at:] { + if r.byte1 == b || r.byte2 == b { + pos := at + i + state.updateAt(pos) + r := pos - int(r.offsets.rbo[haystack[pos]].max) + if r < 0 { + r = 0 + } + + if at > r { + r = at + } + return r, possibleStartOfMatchCandidate + } + } + return nil, noneCandidate +} + +func (r rareBytesTwo) HeapBytes() int { + return 0 +} + +func (r rareBytesTwo) ReportsFalsePositives() bool { + return true +} + +func (r rareBytesTwo) LooksForNonStartOfMatch() bool { + return true +} + +func (r *rareBytesTwo) clone() prefilter { + if r == nil { + return nil + } + u := *r + return &u +} + +type rareBytesThree struct { + offsets rareByteOffsets + byte1 byte + byte2 byte + byte3 byte +} + +func (r rareBytesThree) NextCandidate(state *prefilterState, haystack []byte, at int) (interface{}, candidateType) { + for i, b := range haystack[at:] { + if r.byte1 == b || r.byte2 == b || r.byte3 == b { + pos := at + i + state.updateAt(pos) + r := pos - int(r.offsets.rbo[haystack[pos]].max) + if r < 0 { + r = 0 + } + + if at > r { + r = at + } + return r, 
possibleStartOfMatchCandidate + } + } + return nil, noneCandidate +} + +func (r rareBytesThree) HeapBytes() int { + return 0 +} + +func (r rareBytesThree) ReportsFalsePositives() bool { + return true +} + +func (r rareBytesThree) LooksForNonStartOfMatch() bool { + return true +} + +func (r *rareBytesThree) clone() prefilter { + if r == nil { + return nil + } + u := *r + return &u +} + +func (r *rareBytesBuilder) build() prefilter { + if !r.available || r.count > 3 { + return nil + } + var length int + bytes := [3]byte{} + + for b := 0; b <= 255; b++ { + if r.rareSet.contains(byte(b)) { + bytes[length] = byte(b) + length += 1 + } + } + + switch length { + case 0: + return nil + case 1: + return &rareBytesOne{ + byte1: bytes[0], + offset: r.byteOffsets.rbo[bytes[0]], + } + case 2: + return &rareBytesTwo{ + offsets: r.byteOffsets, + byte1: bytes[0], + byte2: bytes[1], + } + case 3: + return &rareBytesThree{ + offsets: r.byteOffsets, + byte1: bytes[0], + byte2: bytes[1], + byte3: bytes[2], + } + default: + return nil + } +} + +func (r *rareBytesBuilder) add(bytes []byte) { + if !r.available { + return + } + + if r.count > 3 { + r.available = false + return + } + + if len(bytes) >= 256 { + r.available = false + return + } + + if len(bytes) == 0 { + return + } + + rarest1, rarest2 := bytes[0], freqRank(bytes[0]) + found := false + + for pos, b := range bytes { + r.setOffset(pos, b) + if found { + continue + } + if r.rareSet.contains(b) { + found = true + } + rank := freqRank(b) + if rank < rarest2 { + rarest1 = b + rarest2 = rank + } + + if !found { + r.addRareByte(rarest1) + } + } +} + +func (r *rareBytesBuilder) addRareByte(b byte) { + r.addOneRareByte(b) + if r.asciiCaseInsensitive { + r.addOneRareByte(oppositeAsciiCase(b)) + } +} + +func (r *rareBytesBuilder) addOneRareByte(b byte) { + if r.rareSet.insert(b) { + r.count += 1 + r.rankSum += uint16(freqRank(b)) + } +} + +func newRareByteOffset(i int) rareByteOffset { + if i > math.MaxUint8 { + return rareByteOffset{max: 0} + } + b := byte(i) + return rareByteOffset{max: b} +} + +func (r *rareBytesBuilder) setOffset(pos int, b byte) { + offset := newRareByteOffset(pos) + r.byteOffsets.set(b, offset) + + if r.asciiCaseInsensitive { + r.byteOffsets.set(oppositeAsciiCase(b), offset) + } +} + +func newRareBytesBuilder(asciiCaseInsensitive bool) rareBytesBuilder { + return rareBytesBuilder{ + asciiCaseInsensitive: asciiCaseInsensitive, + rareSet: byteSet{}, + byteOffsets: rareByteOffsets{}, + available: true, + count: 0, + rankSum: 0, + } +} + +type startBytesBuilder struct { + asciiCaseInsensitive bool + byteset []bool + count int + rankSum uint16 +} + +func (s *startBytesBuilder) build() prefilter { + if s.count > 3 { + return nil + } + var length int + bytes := [3]byte{} + + for b := 0; b < 256; b++ { + //todo case insensitive is not set in byteset + if !s.byteset[b] { + continue + } + if b > 0x7F { + return nil + } + bytes[length] = byte(b) + length += 1 + } + + switch length { + case 0: + return nil + case 1: + return &startBytesOne{byte1: bytes[0]} + case 2: + return &startBytesTwo{ + byte1: bytes[0], + byte2: bytes[1], + } + case 3: + return &startBytesThree{ + byte1: bytes[0], + byte2: bytes[1], + byte3: bytes[2], + } + default: + return nil + } +} + +func (s *startBytesBuilder) add(bytes []byte) { + if s.count > 3 || len(bytes) == 0 { + return + } + + b := bytes[0] + + s.addOneByte(b) + if s.asciiCaseInsensitive { + s.addOneByte(oppositeAsciiCase(b)) + } +} + +func (s *startBytesBuilder) addOneByte(b byte) { + if !s.byteset[int(b)] { + 
s.byteset[int(b)] = true + s.count += 1 + s.rankSum += uint16(freqRank(b)) + } +} + +func freqRank(b byte) byte { + return byteFrequencies[int(b)] +} + +func newStartBytesBuilder(asciiCaseInsensitive bool) startBytesBuilder { + return startBytesBuilder{ + asciiCaseInsensitive: asciiCaseInsensitive, + byteset: make([]bool, 256), + count: 0, + rankSum: 0, + } +} + +const minSkips int = 40 +const minAvgFactor int = 2 + +type prefilterState struct { + skips int + skipped int + maxMatchLen int + inert bool + lastScanAt int +} + +func (p *prefilterState) updateAt(at int) { + if at > p.lastScanAt { + p.lastScanAt = at + } +} + +func (p *prefilterState) IsEffective(at int) bool { + if p.inert || at < p.lastScanAt { + return false + } + + if p.skips < minSkips { + return true + } + + minAvg := minAvgFactor * p.maxMatchLen + + if p.skipped >= minAvg*p.skips { + return true + } + + p.inert = true + return false +} + +func (p *prefilterState) updateSkippedBytes(skipped int) { + p.skips += 1 + p.skipped += skipped +} + +type candidateType uint + +const ( + noneCandidate candidateType = iota + matchCandidate + possibleStartOfMatchCandidate +) + +type prefilter interface { + NextCandidate(state *prefilterState, haystack []byte, at int) (interface{}, candidateType) + HeapBytes() int + ReportsFalsePositives() bool + LooksForNonStartOfMatch() bool + clone() prefilter +} + +func nextPrefilter(state *prefilterState, prefilter prefilter, haystack []byte, at int) (interface{}, candidateType) { + cand, ttype := prefilter.NextCandidate(state, haystack, at) + + switch ttype { + case noneCandidate: + state.updateSkippedBytes(len(haystack) - at) + case matchCandidate: + m := cand.(*Match) + state.updateSkippedBytes(m.Start() - at) + case possibleStartOfMatchCandidate: + i := cand.(int) + state.updateSkippedBytes(i - at) + } + return cand, ttype +} diff --git a/vendor/github.com/pjbgf/sha1cd/Dockerfile.arm b/vendor/github.com/pjbgf/sha1cd/Dockerfile.arm new file mode 100644 index 00000000..99761296 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/Dockerfile.arm @@ -0,0 +1,23 @@ +FROM golang:1.20@sha256:2edf6aab2d57644f3fe7407132a0d1770846867465a39c2083770cf62734b05d + +ENV GOOS=linux +ENV GOARCH=arm +ENV CGO_ENABLED=1 +ENV CC=arm-linux-gnueabihf-gcc +ENV PATH="/go/bin/${GOOS}_${GOARCH}:${PATH}" +ENV PKG_CONFIG_PATH=/usr/lib/arm-linux-gnueabihf/pkgconfig + +RUN dpkg --add-architecture armhf \ + && apt update \ + && apt install -y --no-install-recommends \ + upx \ + gcc-arm-linux-gnueabihf \ + libc6-dev-armhf-cross \ + pkg-config \ + && rm -rf /var/lib/apt/lists/* + +COPY . /src/workdir + +WORKDIR /src/workdir + +RUN go build ./... diff --git a/vendor/github.com/pjbgf/sha1cd/Dockerfile.arm64 b/vendor/github.com/pjbgf/sha1cd/Dockerfile.arm64 new file mode 100644 index 00000000..66bd0947 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/Dockerfile.arm64 @@ -0,0 +1,23 @@ +FROM golang:1.20@sha256:2edf6aab2d57644f3fe7407132a0d1770846867465a39c2083770cf62734b05d + +ENV GOOS=linux +ENV GOARCH=arm64 +ENV CGO_ENABLED=1 +ENV CC=aarch64-linux-gnu-gcc +ENV PATH="/go/bin/${GOOS}_${GOARCH}:${PATH}" +ENV PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig + +# install build & runtime dependencies +RUN dpkg --add-architecture arm64 \ + && apt update \ + && apt install -y --no-install-recommends \ + gcc-aarch64-linux-gnu \ + libc6-dev-arm64-cross \ + pkg-config \ + && rm -rf /var/lib/apt/lists/* + +COPY . /src/workdir + +WORKDIR /src/workdir + +RUN go build ./... 
diff --git a/vendor/github.com/pjbgf/sha1cd/LICENSE b/vendor/github.com/pjbgf/sha1cd/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/pjbgf/sha1cd/Makefile b/vendor/github.com/pjbgf/sha1cd/Makefile new file mode 100644 index 00000000..b24f2cba --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/Makefile @@ -0,0 +1,40 @@ +FUZZ_TIME ?= 1m + +export CGO_ENABLED := 1 + +.PHONY: test +test: + go test ./... + +.PHONY: bench +bench: + go test -benchmem -run=^$$ -bench ^Benchmark ./... + +.PHONY: fuzz +fuzz: + go test -tags gofuzz -fuzz=. -fuzztime=$(FUZZ_TIME) ./test/ + +# Cross build project in arm/v7. +build-arm: + docker build -t sha1cd-arm -f Dockerfile.arm . + docker run --rm sha1cd-arm + +# Cross build project in arm64. +build-arm64: + docker build -t sha1cd-arm64 -f Dockerfile.arm64 . + docker run --rm sha1cd-arm64 + +# Build with cgo disabled. +build-nocgo: + CGO_ENABLED=0 go build ./cgo + +# Run cross-compilation to assure supported architectures. +cross-build: build-arm build-arm64 build-nocgo + +generate: + go run sha1cdblock_amd64_asm.go -out sha1cdblock_amd64.s + sed -i 's;&\samd64;&\n// +build !noasm,gc,amd64;g' sha1cdblock_amd64.s + +verify: generate + git diff --exit-code + go vet ./... diff --git a/vendor/github.com/pjbgf/sha1cd/README.md b/vendor/github.com/pjbgf/sha1cd/README.md new file mode 100644 index 00000000..378cf78c --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/README.md @@ -0,0 +1,58 @@ +# sha1cd + +A Go implementation of SHA1 with counter-cryptanalysis, which detects +collision attacks. + +The `cgo/lib` code is a carbon copy of the [original code], based on +the award winning [white paper] by Marc Stevens. + +The Go implementation is largely based off Go's generic sha1. +At present no SIMD optimisations have been implemented. + +## Usage + +`sha1cd` can be used as a drop-in replacement for `crypto/sha1`: + +```golang +import "github.com/pjbgf/sha1cd" + +func test(){ + data := []byte("data to be sha1 hashed") + h := sha1cd.Sum(data) + fmt.Printf("hash: %q\n", hex.EncodeToString(h)) +} +``` + +To obtain information as to whether a collision was found, use the +func `CollisionResistantSum`. + +```golang +import "github.com/pjbgf/sha1cd" + +func test(){ + data := []byte("data to be sha1 hashed") + h, col := sha1cd.CollisionResistantSum(data) + if col { + fmt.Println("collision found!") + } + fmt.Printf("hash: %q", hex.EncodeToString(h)) +} +``` + +Note that the algorithm will automatically avoid collision, by +extending the SHA1 to 240-steps, instead of 80 when a collision +attempt is detected. Therefore, inputs that contains the unavoidable +bit conditions will yield a different hash from `sha1cd`, when compared +with results using `crypto/sha1`. Valid inputs will have matching the outputs. 
+ +## References +- https://shattered.io/ +- https://github.com/cr-marcstevens/sha1collisiondetection +- https://csrc.nist.gov/Projects/Cryptographic-Algorithm-Validation-Program/Secure-Hashing#shavs + +## Use of the Original Implementation +- https://github.com/git/git/commit/28dc98e343ca4eb370a29ceec4c19beac9b5c01e +- https://github.com/libgit2/libgit2/pull/4136 + +[original code]: https://github.com/cr-marcstevens/sha1collisiondetection +[white paper]: https://marc-stevens.nl/research/papers/C13-S.pdf diff --git a/vendor/github.com/pjbgf/sha1cd/detection.go b/vendor/github.com/pjbgf/sha1cd/detection.go new file mode 100644 index 00000000..a1458748 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/detection.go @@ -0,0 +1,11 @@ +package sha1cd + +import "hash" + +type CollisionResistantHash interface { + // CollisionResistantSum extends on Sum by returning an additional boolean + // which indicates whether a collision was found during the hashing process. + CollisionResistantSum(b []byte) ([]byte, bool) + + hash.Hash +} diff --git a/vendor/github.com/pjbgf/sha1cd/internal/const.go b/vendor/github.com/pjbgf/sha1cd/internal/const.go new file mode 100644 index 00000000..944a131d --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/internal/const.go @@ -0,0 +1,42 @@ +package shared + +const ( + // Constants for the SHA-1 hash function. + K0 = 0x5A827999 + K1 = 0x6ED9EBA1 + K2 = 0x8F1BBCDC + K3 = 0xCA62C1D6 + + // Initial values for the buffer variables: h0, h1, h2, h3, h4. + Init0 = 0x67452301 + Init1 = 0xEFCDAB89 + Init2 = 0x98BADCFE + Init3 = 0x10325476 + Init4 = 0xC3D2E1F0 + + // Initial values for the temporary variables (ihvtmp0, ihvtmp1, ihvtmp2, ihvtmp3, ihvtmp4) during the SHA recompression step. + InitTmp0 = 0xD5 + InitTmp1 = 0x394 + InitTmp2 = 0x8152A8 + InitTmp3 = 0x0 + InitTmp4 = 0xA7ECE0 + + // SHA1 contains 2 buffers, each based off 5 32-bit words. + WordBuffers = 5 + + // The output of SHA1 is 20 bytes (160 bits). + Size = 20 + + // Rounds represents the number of steps required to process each chunk. + Rounds = 80 + + // SHA1 processes the input data in chunks. Each chunk contains 64 bytes. + Chunk = 64 + + // The number of pre-step compression state to store. + // Currently there are 3 pre-step compression states required: 0, 58, 65. + PreStepState = 3 + + Magic = "shacd\x01" + MarshaledSize = len(Magic) + 5*4 + Chunk + 8 +) diff --git a/vendor/github.com/pjbgf/sha1cd/sha1cd.go b/vendor/github.com/pjbgf/sha1cd/sha1cd.go new file mode 100644 index 00000000..a69e480e --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/sha1cd.go @@ -0,0 +1,227 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package sha1cd implements collision detection based on the whitepaper +// Counter-cryptanalysis from Marc Stevens. The original ubc implementation +// was done by Marc Stevens and Dan Shumow, and can be found at: +// https://github.com/cr-marcstevens/sha1collisiondetection +package sha1cd + +// This SHA1 implementation is based on Go's generic SHA1. +// Original: https://github.com/golang/go/blob/master/src/crypto/sha1/sha1.go + +import ( + "crypto" + "encoding/binary" + "errors" + "hash" + + shared "github.com/pjbgf/sha1cd/internal" +) + +func init() { + crypto.RegisterHash(crypto.SHA1, New) +} + +// The size of a SHA-1 checksum in bytes. +const Size = shared.Size + +// The blocksize of SHA-1 in bytes. 
+const BlockSize = shared.Chunk + +// digest represents the partial evaluation of a checksum. +type digest struct { + h [shared.WordBuffers]uint32 + x [shared.Chunk]byte + nx int + len uint64 + + // col defines whether a collision has been found. + col bool + blockFunc func(dig *digest, p []byte) +} + +func (d *digest) MarshalBinary() ([]byte, error) { + b := make([]byte, 0, shared.MarshaledSize) + b = append(b, shared.Magic...) + b = appendUint32(b, d.h[0]) + b = appendUint32(b, d.h[1]) + b = appendUint32(b, d.h[2]) + b = appendUint32(b, d.h[3]) + b = appendUint32(b, d.h[4]) + b = append(b, d.x[:d.nx]...) + b = b[:len(b)+len(d.x)-d.nx] // already zero + b = appendUint64(b, d.len) + return b, nil +} + +func appendUint32(b []byte, v uint32) []byte { + return append(b, + byte(v>>24), + byte(v>>16), + byte(v>>8), + byte(v), + ) +} + +func appendUint64(b []byte, v uint64) []byte { + return append(b, + byte(v>>56), + byte(v>>48), + byte(v>>40), + byte(v>>32), + byte(v>>24), + byte(v>>16), + byte(v>>8), + byte(v), + ) +} + +func (d *digest) UnmarshalBinary(b []byte) error { + if len(b) < len(shared.Magic) || string(b[:len(shared.Magic)]) != shared.Magic { + return errors.New("crypto/sha1: invalid hash state identifier") + } + if len(b) != shared.MarshaledSize { + return errors.New("crypto/sha1: invalid hash state size") + } + b = b[len(shared.Magic):] + b, d.h[0] = consumeUint32(b) + b, d.h[1] = consumeUint32(b) + b, d.h[2] = consumeUint32(b) + b, d.h[3] = consumeUint32(b) + b, d.h[4] = consumeUint32(b) + b = b[copy(d.x[:], b):] + b, d.len = consumeUint64(b) + d.nx = int(d.len % shared.Chunk) + return nil +} + +func consumeUint64(b []byte) ([]byte, uint64) { + _ = b[7] + x := uint64(b[7]) | uint64(b[6])<<8 | uint64(b[shared.WordBuffers])<<16 | uint64(b[4])<<24 | + uint64(b[3])<<32 | uint64(b[2])<<40 | uint64(b[1])<<48 | uint64(b[0])<<56 + return b[8:], x +} + +func consumeUint32(b []byte) ([]byte, uint32) { + _ = b[3] + x := uint32(b[3]) | uint32(b[2])<<8 | uint32(b[1])<<16 | uint32(b[0])<<24 + return b[4:], x +} + +func (d *digest) Reset() { + d.h[0] = shared.Init0 + d.h[1] = shared.Init1 + d.h[2] = shared.Init2 + d.h[3] = shared.Init3 + d.h[4] = shared.Init4 + d.nx = 0 + d.len = 0 + + d.col = false +} + +// New returns a new hash.Hash computing the SHA1 checksum. The Hash also +// implements encoding.BinaryMarshaler and encoding.BinaryUnmarshaler to +// marshal and unmarshal the internal state of the hash. +func New() hash.Hash { + d := new(digest) + + d.blockFunc = block + d.Reset() + return d +} + +// NewGeneric is equivalent to New but uses the Go generic implementation, +// avoiding any processor-specific optimizations. +func NewGeneric() hash.Hash { + d := new(digest) + + d.blockFunc = blockGeneric + d.Reset() + return d +} + +func (d *digest) Size() int { return Size } + +func (d *digest) BlockSize() int { return BlockSize } + +func (d *digest) Write(p []byte) (nn int, err error) { + if len(p) == 0 { + return + } + + nn = len(p) + d.len += uint64(nn) + if d.nx > 0 { + n := copy(d.x[d.nx:], p) + d.nx += n + if d.nx == shared.Chunk { + d.blockFunc(d, d.x[:]) + d.nx = 0 + } + p = p[n:] + } + if len(p) >= shared.Chunk { + n := len(p) &^ (shared.Chunk - 1) + d.blockFunc(d, p[:n]) + p = p[n:] + } + if len(p) > 0 { + d.nx = copy(d.x[:], p) + } + return +} + +func (d *digest) Sum(in []byte) []byte { + // Make a copy of d so that caller can keep writing and summing. + d0 := *d + hash := d0.checkSum() + return append(in, hash[:]...) 
+} + +func (d *digest) checkSum() [Size]byte { + len := d.len + // Padding. Add a 1 bit and 0 bits until 56 bytes mod 64. + var tmp [64]byte + tmp[0] = 0x80 + if len%64 < 56 { + d.Write(tmp[0 : 56-len%64]) + } else { + d.Write(tmp[0 : 64+56-len%64]) + } + + // Length in bits. + len <<= 3 + binary.BigEndian.PutUint64(tmp[:], len) + d.Write(tmp[0:8]) + + if d.nx != 0 { + panic("d.nx != 0") + } + + var digest [Size]byte + + binary.BigEndian.PutUint32(digest[0:], d.h[0]) + binary.BigEndian.PutUint32(digest[4:], d.h[1]) + binary.BigEndian.PutUint32(digest[8:], d.h[2]) + binary.BigEndian.PutUint32(digest[12:], d.h[3]) + binary.BigEndian.PutUint32(digest[16:], d.h[4]) + + return digest +} + +// Sum returns the SHA-1 checksum of the data. +func Sum(data []byte) ([Size]byte, bool) { + d := New().(*digest) + d.Write(data) + return d.checkSum(), d.col +} + +func (d *digest) CollisionResistantSum(in []byte) ([]byte, bool) { + // Make a copy of d so that caller can keep writing and summing. + d0 := *d + hash := d0.checkSum() + return append(in, hash[:]...), d0.col +} diff --git a/vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.go b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.go new file mode 100644 index 00000000..95e08308 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.go @@ -0,0 +1,50 @@ +//go:build !noasm && gc && amd64 +// +build !noasm,gc,amd64 + +package sha1cd + +import ( + "math" + "unsafe" + + shared "github.com/pjbgf/sha1cd/internal" +) + +type sliceHeader struct { + base uintptr + len int + cap int +} + +// blockAMD64 hashes the message p into the current state in dig. +// Both m1 and cs are used to store intermediate results which are used by the collision detection logic. +// +//go:noescape +func blockAMD64(dig *digest, p sliceHeader, m1 []uint32, cs [][5]uint32) + +func block(dig *digest, p []byte) { + m1 := [shared.Rounds]uint32{} + cs := [shared.PreStepState][shared.WordBuffers]uint32{} + + for len(p) >= shared.Chunk { + // Only send a block to be processed, as the collission detection + // works on a block by block basis. + ips := sliceHeader{ + base: uintptr(unsafe.Pointer(&p[0])), + len: int(math.Min(float64(len(p)), float64(shared.Chunk))), + cap: shared.Chunk, + } + + blockAMD64(dig, ips, m1[:], cs[:]) + + col := checkCollision(m1, cs, dig.h) + if col { + dig.col = true + + blockAMD64(dig, ips, m1[:], cs[:]) + blockAMD64(dig, ips, m1[:], cs[:]) + } + + p = p[shared.Chunk:] + } +} diff --git a/vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.s b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.s new file mode 100644 index 00000000..86f9821c --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_amd64.s @@ -0,0 +1,2274 @@ +// Code generated by command: go run sha1cdblock_amd64_asm.go -out sha1cdblock_amd64.s. DO NOT EDIT. + +//go:build !noasm && gc && amd64 +// +build !noasm,gc,amd64 + +#include "textflag.h" + +// func blockAMD64(dig *digest, p []byte, m1 []uint32, cs [][5]uint32) +TEXT ·blockAMD64(SB), NOSPLIT, $64-80 + MOVQ dig+0(FP), R8 + MOVQ p_base+8(FP), DI + MOVQ p_len+16(FP), DX + SHRQ $+6, DX + SHLQ $+6, DX + LEAQ (DI)(DX*1), SI + + // Load h0, h1, h2, h3, h4. + MOVL (R8), AX + MOVL 4(R8), BX + MOVL 8(R8), CX + MOVL 12(R8), DX + MOVL 16(R8), BP + + // len(p) >= chunk + CMPQ DI, SI + JEQ end + +loop: + // Initialize registers a, b, c, d, e. 
+ MOVL AX, R10 + MOVL BX, R11 + MOVL CX, R12 + MOVL DX, R13 + MOVL BP, R14 + + // ROUND1 (steps 0-15) + // Load cs + MOVQ cs_base+56(FP), R8 + MOVL R10, (R8) + MOVL R11, 4(R8) + MOVL R12, 8(R8) + MOVL R13, 12(R8) + MOVL R14, 16(R8) + + // ROUND1(0) + // LOAD + MOVL (DI), R9 + BSWAPL R9 + MOVL R9, (SP) + + // FUNC1 + MOVL R13, R15 + XORL R12, R15 + ANDL R11, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1518500249(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL (SP), R9 + MOVL R9, (R8) + + // ROUND1(1) + // LOAD + MOVL 4(DI), R9 + BSWAPL R9 + MOVL R9, 4(SP) + + // FUNC1 + MOVL R12, R15 + XORL R11, R15 + ANDL R10, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1518500249(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 4(SP), R9 + MOVL R9, 4(R8) + + // ROUND1(2) + // LOAD + MOVL 8(DI), R9 + BSWAPL R9 + MOVL R9, 8(SP) + + // FUNC1 + MOVL R11, R15 + XORL R10, R15 + ANDL R14, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1518500249(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 8(SP), R9 + MOVL R9, 8(R8) + + // ROUND1(3) + // LOAD + MOVL 12(DI), R9 + BSWAPL R9 + MOVL R9, 12(SP) + + // FUNC1 + MOVL R10, R15 + XORL R14, R15 + ANDL R13, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 1518500249(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 12(SP), R9 + MOVL R9, 12(R8) + + // ROUND1(4) + // LOAD + MOVL 16(DI), R9 + BSWAPL R9 + MOVL R9, 16(SP) + + // FUNC1 + MOVL R14, R15 + XORL R13, R15 + ANDL R12, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1518500249(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 16(SP), R9 + MOVL R9, 16(R8) + + // ROUND1(5) + // LOAD + MOVL 20(DI), R9 + BSWAPL R9 + MOVL R9, 20(SP) + + // FUNC1 + MOVL R13, R15 + XORL R12, R15 + ANDL R11, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1518500249(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 20(SP), R9 + MOVL R9, 20(R8) + + // ROUND1(6) + // LOAD + MOVL 24(DI), R9 + BSWAPL R9 + MOVL R9, 24(SP) + + // FUNC1 + MOVL R12, R15 + XORL R11, R15 + ANDL R10, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1518500249(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 24(SP), R9 + MOVL R9, 24(R8) + + // ROUND1(7) + // LOAD + MOVL 28(DI), R9 + BSWAPL R9 + MOVL R9, 28(SP) + + // FUNC1 + MOVL R11, R15 + XORL R10, R15 + ANDL R14, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1518500249(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 28(SP), R9 + MOVL R9, 28(R8) + + // ROUND1(8) + // LOAD + MOVL 32(DI), R9 + BSWAPL R9 + MOVL R9, 32(SP) + + // FUNC1 + MOVL R10, R15 + XORL R14, R15 + ANDL R13, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 1518500249(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 32(SP), R9 + MOVL R9, 32(R8) + + // ROUND1(9) + // LOAD + MOVL 36(DI), R9 + BSWAPL R9 + MOVL R9, 36(SP) + + // FUNC1 + MOVL R14, R15 + XORL R13, R15 + ANDL R12, R15 + XORL R14, R15 + + // MIX + ROLL 
$+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1518500249(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 36(SP), R9 + MOVL R9, 36(R8) + + // ROUND1(10) + // LOAD + MOVL 40(DI), R9 + BSWAPL R9 + MOVL R9, 40(SP) + + // FUNC1 + MOVL R13, R15 + XORL R12, R15 + ANDL R11, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1518500249(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 40(SP), R9 + MOVL R9, 40(R8) + + // ROUND1(11) + // LOAD + MOVL 44(DI), R9 + BSWAPL R9 + MOVL R9, 44(SP) + + // FUNC1 + MOVL R12, R15 + XORL R11, R15 + ANDL R10, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1518500249(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 44(SP), R9 + MOVL R9, 44(R8) + + // ROUND1(12) + // LOAD + MOVL 48(DI), R9 + BSWAPL R9 + MOVL R9, 48(SP) + + // FUNC1 + MOVL R11, R15 + XORL R10, R15 + ANDL R14, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1518500249(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 48(SP), R9 + MOVL R9, 48(R8) + + // ROUND1(13) + // LOAD + MOVL 52(DI), R9 + BSWAPL R9 + MOVL R9, 52(SP) + + // FUNC1 + MOVL R10, R15 + XORL R14, R15 + ANDL R13, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 1518500249(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 52(SP), R9 + MOVL R9, 52(R8) + + // ROUND1(14) + // LOAD + MOVL 56(DI), R9 + BSWAPL R9 + MOVL R9, 56(SP) + + // FUNC1 + MOVL R14, R15 + XORL R13, R15 + ANDL R12, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1518500249(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 56(SP), R9 + MOVL R9, 56(R8) + + // ROUND1(15) + // LOAD + MOVL 60(DI), R9 + BSWAPL R9 + MOVL R9, 60(SP) + + // FUNC1 + MOVL R13, R15 + XORL R12, R15 + ANDL R11, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1518500249(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 60(SP), R9 + MOVL R9, 60(R8) + + // ROUND1x (steps 16-19) - same as ROUND1 but with no data load. 
+ // ROUND1x(16) + // SHUFFLE + MOVL (SP), R9 + XORL 52(SP), R9 + XORL 32(SP), R9 + XORL 8(SP), R9 + ROLL $+1, R9 + MOVL R9, (SP) + + // FUNC1 + MOVL R12, R15 + XORL R11, R15 + ANDL R10, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1518500249(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL (SP), R9 + MOVL R9, 64(R8) + + // ROUND1x(17) + // SHUFFLE + MOVL 4(SP), R9 + XORL 56(SP), R9 + XORL 36(SP), R9 + XORL 12(SP), R9 + ROLL $+1, R9 + MOVL R9, 4(SP) + + // FUNC1 + MOVL R11, R15 + XORL R10, R15 + ANDL R14, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1518500249(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 4(SP), R9 + MOVL R9, 68(R8) + + // ROUND1x(18) + // SHUFFLE + MOVL 8(SP), R9 + XORL 60(SP), R9 + XORL 40(SP), R9 + XORL 16(SP), R9 + ROLL $+1, R9 + MOVL R9, 8(SP) + + // FUNC1 + MOVL R10, R15 + XORL R14, R15 + ANDL R13, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 1518500249(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 8(SP), R9 + MOVL R9, 72(R8) + + // ROUND1x(19) + // SHUFFLE + MOVL 12(SP), R9 + XORL (SP), R9 + XORL 44(SP), R9 + XORL 20(SP), R9 + ROLL $+1, R9 + MOVL R9, 12(SP) + + // FUNC1 + MOVL R14, R15 + XORL R13, R15 + ANDL R12, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1518500249(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 12(SP), R9 + MOVL R9, 76(R8) + + // ROUND2 (steps 20-39) + // ROUND2(20) + // SHUFFLE + MOVL 16(SP), R9 + XORL 4(SP), R9 + XORL 48(SP), R9 + XORL 24(SP), R9 + ROLL $+1, R9 + MOVL R9, 16(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1859775393(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 16(SP), R9 + MOVL R9, 80(R8) + + // ROUND2(21) + // SHUFFLE + MOVL 20(SP), R9 + XORL 8(SP), R9 + XORL 52(SP), R9 + XORL 28(SP), R9 + ROLL $+1, R9 + MOVL R9, 20(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1859775393(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 20(SP), R9 + MOVL R9, 84(R8) + + // ROUND2(22) + // SHUFFLE + MOVL 24(SP), R9 + XORL 12(SP), R9 + XORL 56(SP), R9 + XORL 32(SP), R9 + ROLL $+1, R9 + MOVL R9, 24(SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1859775393(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 24(SP), R9 + MOVL R9, 88(R8) + + // ROUND2(23) + // SHUFFLE + MOVL 28(SP), R9 + XORL 16(SP), R9 + XORL 60(SP), R9 + XORL 36(SP), R9 + ROLL $+1, R9 + MOVL R9, 28(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 1859775393(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 28(SP), R9 + MOVL R9, 92(R8) + + // ROUND2(24) + // SHUFFLE + MOVL 32(SP), R9 + XORL 20(SP), R9 + XORL (SP), R9 + XORL 40(SP), R9 + ROLL $+1, R9 + MOVL R9, 32(SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1859775393(R10)(R9*1), R10 
+ ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 32(SP), R9 + MOVL R9, 96(R8) + + // ROUND2(25) + // SHUFFLE + MOVL 36(SP), R9 + XORL 24(SP), R9 + XORL 4(SP), R9 + XORL 44(SP), R9 + ROLL $+1, R9 + MOVL R9, 36(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1859775393(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 36(SP), R9 + MOVL R9, 100(R8) + + // ROUND2(26) + // SHUFFLE + MOVL 40(SP), R9 + XORL 28(SP), R9 + XORL 8(SP), R9 + XORL 48(SP), R9 + ROLL $+1, R9 + MOVL R9, 40(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1859775393(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 40(SP), R9 + MOVL R9, 104(R8) + + // ROUND2(27) + // SHUFFLE + MOVL 44(SP), R9 + XORL 32(SP), R9 + XORL 12(SP), R9 + XORL 52(SP), R9 + ROLL $+1, R9 + MOVL R9, 44(SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1859775393(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 44(SP), R9 + MOVL R9, 108(R8) + + // ROUND2(28) + // SHUFFLE + MOVL 48(SP), R9 + XORL 36(SP), R9 + XORL 16(SP), R9 + XORL 56(SP), R9 + ROLL $+1, R9 + MOVL R9, 48(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 1859775393(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 48(SP), R9 + MOVL R9, 112(R8) + + // ROUND2(29) + // SHUFFLE + MOVL 52(SP), R9 + XORL 40(SP), R9 + XORL 20(SP), R9 + XORL 60(SP), R9 + ROLL $+1, R9 + MOVL R9, 52(SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1859775393(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 52(SP), R9 + MOVL R9, 116(R8) + + // ROUND2(30) + // SHUFFLE + MOVL 56(SP), R9 + XORL 44(SP), R9 + XORL 24(SP), R9 + XORL (SP), R9 + ROLL $+1, R9 + MOVL R9, 56(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1859775393(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 56(SP), R9 + MOVL R9, 120(R8) + + // ROUND2(31) + // SHUFFLE + MOVL 60(SP), R9 + XORL 48(SP), R9 + XORL 28(SP), R9 + XORL 4(SP), R9 + ROLL $+1, R9 + MOVL R9, 60(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1859775393(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 60(SP), R9 + MOVL R9, 124(R8) + + // ROUND2(32) + // SHUFFLE + MOVL (SP), R9 + XORL 52(SP), R9 + XORL 32(SP), R9 + XORL 8(SP), R9 + ROLL $+1, R9 + MOVL R9, (SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1859775393(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL (SP), R9 + MOVL R9, 128(R8) + + // ROUND2(33) + // SHUFFLE + MOVL 4(SP), R9 + XORL 56(SP), R9 + XORL 36(SP), R9 + XORL 12(SP), R9 + ROLL $+1, R9 + MOVL R9, 4(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 
1859775393(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 4(SP), R9 + MOVL R9, 132(R8) + + // ROUND2(34) + // SHUFFLE + MOVL 8(SP), R9 + XORL 60(SP), R9 + XORL 40(SP), R9 + XORL 16(SP), R9 + ROLL $+1, R9 + MOVL R9, 8(SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1859775393(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 8(SP), R9 + MOVL R9, 136(R8) + + // ROUND2(35) + // SHUFFLE + MOVL 12(SP), R9 + XORL (SP), R9 + XORL 44(SP), R9 + XORL 20(SP), R9 + ROLL $+1, R9 + MOVL R9, 12(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 1859775393(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 12(SP), R9 + MOVL R9, 140(R8) + + // ROUND2(36) + // SHUFFLE + MOVL 16(SP), R9 + XORL 4(SP), R9 + XORL 48(SP), R9 + XORL 24(SP), R9 + ROLL $+1, R9 + MOVL R9, 16(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 1859775393(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 16(SP), R9 + MOVL R9, 144(R8) + + // ROUND2(37) + // SHUFFLE + MOVL 20(SP), R9 + XORL 8(SP), R9 + XORL 52(SP), R9 + XORL 28(SP), R9 + ROLL $+1, R9 + MOVL R9, 20(SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 1859775393(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 20(SP), R9 + MOVL R9, 148(R8) + + // ROUND2(38) + // SHUFFLE + MOVL 24(SP), R9 + XORL 12(SP), R9 + XORL 56(SP), R9 + XORL 32(SP), R9 + ROLL $+1, R9 + MOVL R9, 24(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 1859775393(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 24(SP), R9 + MOVL R9, 152(R8) + + // ROUND2(39) + // SHUFFLE + MOVL 28(SP), R9 + XORL 16(SP), R9 + XORL 60(SP), R9 + XORL 36(SP), R9 + ROLL $+1, R9 + MOVL R9, 28(SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 1859775393(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 28(SP), R9 + MOVL R9, 156(R8) + + // ROUND3 (steps 40-59) + // ROUND3(40) + // SHUFFLE + MOVL 32(SP), R9 + XORL 20(SP), R9 + XORL (SP), R9 + XORL 40(SP), R9 + ROLL $+1, R9 + MOVL R9, 32(SP) + + // FUNC3 + MOVL R11, R8 + ORL R12, R8 + ANDL R13, R8 + MOVL R11, R15 + ANDL R12, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 2400959708(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 32(SP), R9 + MOVL R9, 160(R8) + + // ROUND3(41) + // SHUFFLE + MOVL 36(SP), R9 + XORL 24(SP), R9 + XORL 4(SP), R9 + XORL 44(SP), R9 + ROLL $+1, R9 + MOVL R9, 36(SP) + + // FUNC3 + MOVL R10, R8 + ORL R11, R8 + ANDL R12, R8 + MOVL R10, R15 + ANDL R11, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 2400959708(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 36(SP), R9 + MOVL R9, 164(R8) + + // ROUND3(42) + // SHUFFLE + MOVL 40(SP), R9 + XORL 28(SP), R9 + XORL 8(SP), R9 + XORL 48(SP), R9 + ROLL $+1, R9 + MOVL R9, 40(SP) + + // FUNC3 + 
MOVL R14, R8 + ORL R10, R8 + ANDL R11, R8 + MOVL R14, R15 + ANDL R10, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 2400959708(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 40(SP), R9 + MOVL R9, 168(R8) + + // ROUND3(43) + // SHUFFLE + MOVL 44(SP), R9 + XORL 32(SP), R9 + XORL 12(SP), R9 + XORL 52(SP), R9 + ROLL $+1, R9 + MOVL R9, 44(SP) + + // FUNC3 + MOVL R13, R8 + ORL R14, R8 + ANDL R10, R8 + MOVL R13, R15 + ANDL R14, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 2400959708(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 44(SP), R9 + MOVL R9, 172(R8) + + // ROUND3(44) + // SHUFFLE + MOVL 48(SP), R9 + XORL 36(SP), R9 + XORL 16(SP), R9 + XORL 56(SP), R9 + ROLL $+1, R9 + MOVL R9, 48(SP) + + // FUNC3 + MOVL R12, R8 + ORL R13, R8 + ANDL R14, R8 + MOVL R12, R15 + ANDL R13, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 2400959708(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 48(SP), R9 + MOVL R9, 176(R8) + + // ROUND3(45) + // SHUFFLE + MOVL 52(SP), R9 + XORL 40(SP), R9 + XORL 20(SP), R9 + XORL 60(SP), R9 + ROLL $+1, R9 + MOVL R9, 52(SP) + + // FUNC3 + MOVL R11, R8 + ORL R12, R8 + ANDL R13, R8 + MOVL R11, R15 + ANDL R12, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 2400959708(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 52(SP), R9 + MOVL R9, 180(R8) + + // ROUND3(46) + // SHUFFLE + MOVL 56(SP), R9 + XORL 44(SP), R9 + XORL 24(SP), R9 + XORL (SP), R9 + ROLL $+1, R9 + MOVL R9, 56(SP) + + // FUNC3 + MOVL R10, R8 + ORL R11, R8 + ANDL R12, R8 + MOVL R10, R15 + ANDL R11, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 2400959708(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 56(SP), R9 + MOVL R9, 184(R8) + + // ROUND3(47) + // SHUFFLE + MOVL 60(SP), R9 + XORL 48(SP), R9 + XORL 28(SP), R9 + XORL 4(SP), R9 + ROLL $+1, R9 + MOVL R9, 60(SP) + + // FUNC3 + MOVL R14, R8 + ORL R10, R8 + ANDL R11, R8 + MOVL R14, R15 + ANDL R10, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 2400959708(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 60(SP), R9 + MOVL R9, 188(R8) + + // ROUND3(48) + // SHUFFLE + MOVL (SP), R9 + XORL 52(SP), R9 + XORL 32(SP), R9 + XORL 8(SP), R9 + ROLL $+1, R9 + MOVL R9, (SP) + + // FUNC3 + MOVL R13, R8 + ORL R14, R8 + ANDL R10, R8 + MOVL R13, R15 + ANDL R14, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 2400959708(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL (SP), R9 + MOVL R9, 192(R8) + + // ROUND3(49) + // SHUFFLE + MOVL 4(SP), R9 + XORL 56(SP), R9 + XORL 36(SP), R9 + XORL 12(SP), R9 + ROLL $+1, R9 + MOVL R9, 4(SP) + + // FUNC3 + MOVL R12, R8 + ORL R13, R8 + ANDL R14, R8 + MOVL R12, R15 + ANDL R13, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 2400959708(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 4(SP), R9 + MOVL R9, 196(R8) + + // ROUND3(50) + // SHUFFLE + MOVL 8(SP), R9 + XORL 60(SP), R9 + XORL 40(SP), R9 + XORL 16(SP), R9 + ROLL $+1, R9 + MOVL R9, 8(SP) + + // FUNC3 + MOVL R11, R8 + ORL R12, R8 + ANDL R13, R8 + MOVL 
R11, R15 + ANDL R12, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 2400959708(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 8(SP), R9 + MOVL R9, 200(R8) + + // ROUND3(51) + // SHUFFLE + MOVL 12(SP), R9 + XORL (SP), R9 + XORL 44(SP), R9 + XORL 20(SP), R9 + ROLL $+1, R9 + MOVL R9, 12(SP) + + // FUNC3 + MOVL R10, R8 + ORL R11, R8 + ANDL R12, R8 + MOVL R10, R15 + ANDL R11, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 2400959708(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 12(SP), R9 + MOVL R9, 204(R8) + + // ROUND3(52) + // SHUFFLE + MOVL 16(SP), R9 + XORL 4(SP), R9 + XORL 48(SP), R9 + XORL 24(SP), R9 + ROLL $+1, R9 + MOVL R9, 16(SP) + + // FUNC3 + MOVL R14, R8 + ORL R10, R8 + ANDL R11, R8 + MOVL R14, R15 + ANDL R10, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 2400959708(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 16(SP), R9 + MOVL R9, 208(R8) + + // ROUND3(53) + // SHUFFLE + MOVL 20(SP), R9 + XORL 8(SP), R9 + XORL 52(SP), R9 + XORL 28(SP), R9 + ROLL $+1, R9 + MOVL R9, 20(SP) + + // FUNC3 + MOVL R13, R8 + ORL R14, R8 + ANDL R10, R8 + MOVL R13, R15 + ANDL R14, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 2400959708(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 20(SP), R9 + MOVL R9, 212(R8) + + // ROUND3(54) + // SHUFFLE + MOVL 24(SP), R9 + XORL 12(SP), R9 + XORL 56(SP), R9 + XORL 32(SP), R9 + ROLL $+1, R9 + MOVL R9, 24(SP) + + // FUNC3 + MOVL R12, R8 + ORL R13, R8 + ANDL R14, R8 + MOVL R12, R15 + ANDL R13, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 2400959708(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 24(SP), R9 + MOVL R9, 216(R8) + + // ROUND3(55) + // SHUFFLE + MOVL 28(SP), R9 + XORL 16(SP), R9 + XORL 60(SP), R9 + XORL 36(SP), R9 + ROLL $+1, R9 + MOVL R9, 28(SP) + + // FUNC3 + MOVL R11, R8 + ORL R12, R8 + ANDL R13, R8 + MOVL R11, R15 + ANDL R12, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 2400959708(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 28(SP), R9 + MOVL R9, 220(R8) + + // ROUND3(56) + // SHUFFLE + MOVL 32(SP), R9 + XORL 20(SP), R9 + XORL (SP), R9 + XORL 40(SP), R9 + ROLL $+1, R9 + MOVL R9, 32(SP) + + // FUNC3 + MOVL R10, R8 + ORL R11, R8 + ANDL R12, R8 + MOVL R10, R15 + ANDL R11, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 2400959708(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 32(SP), R9 + MOVL R9, 224(R8) + + // ROUND3(57) + // SHUFFLE + MOVL 36(SP), R9 + XORL 24(SP), R9 + XORL 4(SP), R9 + XORL 44(SP), R9 + ROLL $+1, R9 + MOVL R9, 36(SP) + + // FUNC3 + MOVL R14, R8 + ORL R10, R8 + ANDL R11, R8 + MOVL R14, R15 + ANDL R10, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 2400959708(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 36(SP), R9 + MOVL R9, 228(R8) + + // Load cs + MOVQ cs_base+56(FP), R8 + MOVL R12, 20(R8) + MOVL R13, 24(R8) + MOVL R14, 28(R8) + MOVL R10, 32(R8) + MOVL R11, 36(R8) + + // ROUND3(58) + // SHUFFLE + MOVL 40(SP), R9 + XORL 28(SP), R9 + XORL 8(SP), R9 + XORL 48(SP), R9 + 
ROLL $+1, R9 + MOVL R9, 40(SP) + + // FUNC3 + MOVL R13, R8 + ORL R14, R8 + ANDL R10, R8 + MOVL R13, R15 + ANDL R14, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 2400959708(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 40(SP), R9 + MOVL R9, 232(R8) + + // ROUND3(59) + // SHUFFLE + MOVL 44(SP), R9 + XORL 32(SP), R9 + XORL 12(SP), R9 + XORL 52(SP), R9 + ROLL $+1, R9 + MOVL R9, 44(SP) + + // FUNC3 + MOVL R12, R8 + ORL R13, R8 + ANDL R14, R8 + MOVL R12, R15 + ANDL R13, R15 + ORL R8, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 2400959708(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 44(SP), R9 + MOVL R9, 236(R8) + + // ROUND4 (steps 60-79) + // ROUND4(60) + // SHUFFLE + MOVL 48(SP), R9 + XORL 36(SP), R9 + XORL 16(SP), R9 + XORL 56(SP), R9 + ROLL $+1, R9 + MOVL R9, 48(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 3395469782(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 48(SP), R9 + MOVL R9, 240(R8) + + // ROUND4(61) + // SHUFFLE + MOVL 52(SP), R9 + XORL 40(SP), R9 + XORL 20(SP), R9 + XORL 60(SP), R9 + ROLL $+1, R9 + MOVL R9, 52(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 3395469782(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 52(SP), R9 + MOVL R9, 244(R8) + + // ROUND4(62) + // SHUFFLE + MOVL 56(SP), R9 + XORL 44(SP), R9 + XORL 24(SP), R9 + XORL (SP), R9 + ROLL $+1, R9 + MOVL R9, 56(SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 3395469782(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 56(SP), R9 + MOVL R9, 248(R8) + + // ROUND4(63) + // SHUFFLE + MOVL 60(SP), R9 + XORL 48(SP), R9 + XORL 28(SP), R9 + XORL 4(SP), R9 + ROLL $+1, R9 + MOVL R9, 60(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 3395469782(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 60(SP), R9 + MOVL R9, 252(R8) + + // ROUND4(64) + // SHUFFLE + MOVL (SP), R9 + XORL 52(SP), R9 + XORL 32(SP), R9 + XORL 8(SP), R9 + ROLL $+1, R9 + MOVL R9, (SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 3395469782(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL (SP), R9 + MOVL R9, 256(R8) + + // Load cs + MOVQ cs_base+56(FP), R8 + MOVL R10, 40(R8) + MOVL R11, 44(R8) + MOVL R12, 48(R8) + MOVL R13, 52(R8) + MOVL R14, 56(R8) + + // ROUND4(65) + // SHUFFLE + MOVL 4(SP), R9 + XORL 56(SP), R9 + XORL 36(SP), R9 + XORL 12(SP), R9 + ROLL $+1, R9 + MOVL R9, 4(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 3395469782(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 4(SP), R9 + MOVL R9, 260(R8) + + // ROUND4(66) + // SHUFFLE + MOVL 8(SP), R9 + XORL 60(SP), R9 + XORL 40(SP), R9 + XORL 16(SP), R9 + ROLL $+1, R9 + MOVL R9, 8(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + 
MOVL R14, R8 + ROLL $+5, R8 + LEAL 3395469782(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 8(SP), R9 + MOVL R9, 264(R8) + + // ROUND4(67) + // SHUFFLE + MOVL 12(SP), R9 + XORL (SP), R9 + XORL 44(SP), R9 + XORL 20(SP), R9 + ROLL $+1, R9 + MOVL R9, 12(SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 3395469782(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 12(SP), R9 + MOVL R9, 268(R8) + + // ROUND4(68) + // SHUFFLE + MOVL 16(SP), R9 + XORL 4(SP), R9 + XORL 48(SP), R9 + XORL 24(SP), R9 + ROLL $+1, R9 + MOVL R9, 16(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 3395469782(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 16(SP), R9 + MOVL R9, 272(R8) + + // ROUND4(69) + // SHUFFLE + MOVL 20(SP), R9 + XORL 8(SP), R9 + XORL 52(SP), R9 + XORL 28(SP), R9 + ROLL $+1, R9 + MOVL R9, 20(SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 3395469782(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 20(SP), R9 + MOVL R9, 276(R8) + + // ROUND4(70) + // SHUFFLE + MOVL 24(SP), R9 + XORL 12(SP), R9 + XORL 56(SP), R9 + XORL 32(SP), R9 + ROLL $+1, R9 + MOVL R9, 24(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 + ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 3395469782(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 24(SP), R9 + MOVL R9, 280(R8) + + // ROUND4(71) + // SHUFFLE + MOVL 28(SP), R9 + XORL 16(SP), R9 + XORL 60(SP), R9 + XORL 36(SP), R9 + ROLL $+1, R9 + MOVL R9, 28(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 3395469782(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 28(SP), R9 + MOVL R9, 284(R8) + + // ROUND4(72) + // SHUFFLE + MOVL 32(SP), R9 + XORL 20(SP), R9 + XORL (SP), R9 + XORL 40(SP), R9 + ROLL $+1, R9 + MOVL R9, 32(SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 3395469782(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 32(SP), R9 + MOVL R9, 288(R8) + + // ROUND4(73) + // SHUFFLE + MOVL 36(SP), R9 + XORL 24(SP), R9 + XORL 4(SP), R9 + XORL 44(SP), R9 + ROLL $+1, R9 + MOVL R9, 36(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 3395469782(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 36(SP), R9 + MOVL R9, 292(R8) + + // ROUND4(74) + // SHUFFLE + MOVL 40(SP), R9 + XORL 28(SP), R9 + XORL 8(SP), R9 + XORL 48(SP), R9 + ROLL $+1, R9 + MOVL R9, 40(SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 3395469782(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 40(SP), R9 + MOVL R9, 296(R8) + + // ROUND4(75) + // SHUFFLE + MOVL 44(SP), R9 + XORL 32(SP), R9 + XORL 12(SP), R9 + XORL 52(SP), R9 + ROLL $+1, R9 + MOVL R9, 44(SP) + + // FUNC2 + MOVL R11, R15 + XORL R12, R15 + XORL R13, R15 + + // MIX + ROLL $+30, R11 
+ ADDL R15, R14 + MOVL R10, R8 + ROLL $+5, R8 + LEAL 3395469782(R14)(R9*1), R14 + ADDL R8, R14 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 44(SP), R9 + MOVL R9, 300(R8) + + // ROUND4(76) + // SHUFFLE + MOVL 48(SP), R9 + XORL 36(SP), R9 + XORL 16(SP), R9 + XORL 56(SP), R9 + ROLL $+1, R9 + MOVL R9, 48(SP) + + // FUNC2 + MOVL R10, R15 + XORL R11, R15 + XORL R12, R15 + + // MIX + ROLL $+30, R10 + ADDL R15, R13 + MOVL R14, R8 + ROLL $+5, R8 + LEAL 3395469782(R13)(R9*1), R13 + ADDL R8, R13 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 48(SP), R9 + MOVL R9, 304(R8) + + // ROUND4(77) + // SHUFFLE + MOVL 52(SP), R9 + XORL 40(SP), R9 + XORL 20(SP), R9 + XORL 60(SP), R9 + ROLL $+1, R9 + MOVL R9, 52(SP) + + // FUNC2 + MOVL R14, R15 + XORL R10, R15 + XORL R11, R15 + + // MIX + ROLL $+30, R14 + ADDL R15, R12 + MOVL R13, R8 + ROLL $+5, R8 + LEAL 3395469782(R12)(R9*1), R12 + ADDL R8, R12 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 52(SP), R9 + MOVL R9, 308(R8) + + // ROUND4(78) + // SHUFFLE + MOVL 56(SP), R9 + XORL 44(SP), R9 + XORL 24(SP), R9 + XORL (SP), R9 + ROLL $+1, R9 + MOVL R9, 56(SP) + + // FUNC2 + MOVL R13, R15 + XORL R14, R15 + XORL R10, R15 + + // MIX + ROLL $+30, R13 + ADDL R15, R11 + MOVL R12, R8 + ROLL $+5, R8 + LEAL 3395469782(R11)(R9*1), R11 + ADDL R8, R11 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 56(SP), R9 + MOVL R9, 312(R8) + + // ROUND4(79) + // SHUFFLE + MOVL 60(SP), R9 + XORL 48(SP), R9 + XORL 28(SP), R9 + XORL 4(SP), R9 + ROLL $+1, R9 + MOVL R9, 60(SP) + + // FUNC2 + MOVL R12, R15 + XORL R13, R15 + XORL R14, R15 + + // MIX + ROLL $+30, R12 + ADDL R15, R10 + MOVL R11, R8 + ROLL $+5, R8 + LEAL 3395469782(R10)(R9*1), R10 + ADDL R8, R10 + + // Load m1 + MOVQ m1_base+32(FP), R8 + MOVL 60(SP), R9 + MOVL R9, 316(R8) + + // Add registers to temp hash. + ADDL R10, AX + ADDL R11, BX + ADDL R12, CX + ADDL R13, DX + ADDL R14, BP + ADDQ $+64, DI + CMPQ DI, SI + JB loop + +end: + MOVQ dig+0(FP), SI + MOVL AX, (SI) + MOVL BX, 4(SI) + MOVL CX, 8(SI) + MOVL DX, 12(SI) + MOVL BP, 16(SI) + RET diff --git a/vendor/github.com/pjbgf/sha1cd/sha1cdblock_generic.go b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_generic.go new file mode 100644 index 00000000..ba8b96e8 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_generic.go @@ -0,0 +1,268 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Originally from: https://github.com/go/blob/master/src/crypto/sha1/sha1block.go +// It has been modified to support collision detection. + +package sha1cd + +import ( + "fmt" + "math/bits" + + shared "github.com/pjbgf/sha1cd/internal" + "github.com/pjbgf/sha1cd/ubc" +) + +// blockGeneric is a portable, pure Go version of the SHA-1 block step. +// It's used by sha1block_generic.go and tests. +func blockGeneric(dig *digest, p []byte) { + var w [16]uint32 + + // cs stores the pre-step compression state for only the steps required for the + // collision detection, which are 0, 58 and 65. + // Refer to ubc/const.go for more details. + cs := [shared.PreStepState][shared.WordBuffers]uint32{} + + h0, h1, h2, h3, h4 := dig.h[0], dig.h[1], dig.h[2], dig.h[3], dig.h[4] + for len(p) >= shared.Chunk { + m1 := [shared.Rounds]uint32{} + hi := 1 + + // Collision attacks are thwarted by hashing a detected near-collision block 3 times. 
+ // Think of it as extending SHA-1 from 80-steps to 240-steps for such blocks: + // The best collision attacks against SHA-1 have complexity about 2^60, + // thus for 240-steps an immediate lower-bound for the best cryptanalytic attacks would be 2^180. + // An attacker would be better off using a generic birthday search of complexity 2^80. + rehash: + a, b, c, d, e := h0, h1, h2, h3, h4 + + // Each of the four 20-iteration rounds + // differs only in the computation of f and + // the choice of K (K0, K1, etc). + i := 0 + + // Store pre-step compression state for the collision detection. + cs[0] = [shared.WordBuffers]uint32{a, b, c, d, e} + + for ; i < 16; i++ { + // load step + j := i * 4 + w[i] = uint32(p[j])<<24 | uint32(p[j+1])<<16 | uint32(p[j+2])<<8 | uint32(p[j+3]) + + f := b&c | (^b)&d + t := bits.RotateLeft32(a, 5) + f + e + w[i&0xf] + shared.K0 + a, b, c, d, e = t, a, bits.RotateLeft32(b, 30), c, d + + // Store compression state for the collision detection. + m1[i] = w[i&0xf] + } + for ; i < 20; i++ { + tmp := w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf] + w[i&0xf] = tmp<<1 | tmp>>(32-1) + + f := b&c | (^b)&d + t := bits.RotateLeft32(a, 5) + f + e + w[i&0xf] + shared.K0 + a, b, c, d, e = t, a, bits.RotateLeft32(b, 30), c, d + + // Store compression state for the collision detection. + m1[i] = w[i&0xf] + } + for ; i < 40; i++ { + tmp := w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf] + w[i&0xf] = tmp<<1 | tmp>>(32-1) + + f := b ^ c ^ d + t := bits.RotateLeft32(a, 5) + f + e + w[i&0xf] + shared.K1 + a, b, c, d, e = t, a, bits.RotateLeft32(b, 30), c, d + + // Store compression state for the collision detection. + m1[i] = w[i&0xf] + } + for ; i < 60; i++ { + if i == 58 { + // Store pre-step compression state for the collision detection. + cs[1] = [shared.WordBuffers]uint32{a, b, c, d, e} + } + + tmp := w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf] + w[i&0xf] = tmp<<1 | tmp>>(32-1) + + f := ((b | c) & d) | (b & c) + t := bits.RotateLeft32(a, 5) + f + e + w[i&0xf] + shared.K2 + a, b, c, d, e = t, a, bits.RotateLeft32(b, 30), c, d + + // Store compression state for the collision detection. + m1[i] = w[i&0xf] + } + for ; i < 80; i++ { + if i == 65 { + // Store pre-step compression state for the collision detection. + cs[2] = [shared.WordBuffers]uint32{a, b, c, d, e} + } + + tmp := w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf] + w[i&0xf] = tmp<<1 | tmp>>(32-1) + + f := b ^ c ^ d + t := bits.RotateLeft32(a, 5) + f + e + w[i&0xf] + shared.K3 + a, b, c, d, e = t, a, bits.RotateLeft32(b, 30), c, d + + // Store compression state for the collision detection. 
+ m1[i] = w[i&0xf] + } + + h0 += a + h1 += b + h2 += c + h3 += d + h4 += e + + if hi == 2 { + hi++ + goto rehash + } + + if hi == 1 { + col := checkCollision(m1, cs, [shared.WordBuffers]uint32{h0, h1, h2, h3, h4}) + if col { + dig.col = true + hi++ + goto rehash + } + } + + p = p[shared.Chunk:] + } + + dig.h[0], dig.h[1], dig.h[2], dig.h[3], dig.h[4] = h0, h1, h2, h3, h4 +} + +func checkCollision( + m1 [shared.Rounds]uint32, + cs [shared.PreStepState][shared.WordBuffers]uint32, + state [shared.WordBuffers]uint32) bool { + + if mask := ubc.CalculateDvMask(m1); mask != 0 { + dvs := ubc.SHA1_dvs() + + for i := 0; dvs[i].DvType != 0; i++ { + if (mask & ((uint32)(1) << uint32(dvs[i].MaskB))) != 0 { + var csState [shared.WordBuffers]uint32 + switch dvs[i].TestT { + case 58: + csState = cs[1] + case 65: + csState = cs[2] + case 0: + csState = cs[0] + default: + panic(fmt.Sprintf("dvs data is trying to use a testT that isn't available: %d", dvs[i].TestT)) + } + + col := hasCollided( + dvs[i].TestT, // testT is the step number + // m2 is a secondary message created XORing with + // ubc's DM prior to the SHA recompression step. + m1, dvs[i].Dm, + csState, + state) + + if col { + return true + } + } + } + } + return false +} + +func hasCollided(step uint32, m1, dm [shared.Rounds]uint32, + state [shared.WordBuffers]uint32, h [shared.WordBuffers]uint32) bool { + // Intermediary Hash Value. + ihv := [shared.WordBuffers]uint32{} + + a, b, c, d, e := state[0], state[1], state[2], state[3], state[4] + + // Walk backwards from current step to undo previous compression. + // The existing collision detection does not have dvs higher than 65, + // start value of i accordingly. + for i := uint32(64); i >= 60; i-- { + a, b, c, d, e = b, c, d, e, a + if step > i { + b = bits.RotateLeft32(b, -30) + f := b ^ c ^ d + e -= bits.RotateLeft32(a, 5) + f + shared.K3 + (m1[i] ^ dm[i]) // m2 = m1 ^ dm. + } + } + for i := uint32(59); i >= 40; i-- { + a, b, c, d, e = b, c, d, e, a + if step > i { + b = bits.RotateLeft32(b, -30) + f := ((b | c) & d) | (b & c) + e -= bits.RotateLeft32(a, 5) + f + shared.K2 + (m1[i] ^ dm[i]) + } + } + for i := uint32(39); i >= 20; i-- { + a, b, c, d, e = b, c, d, e, a + if step > i { + b = bits.RotateLeft32(b, -30) + f := b ^ c ^ d + e -= bits.RotateLeft32(a, 5) + f + shared.K1 + (m1[i] ^ dm[i]) + } + } + for i := uint32(20); i > 0; i-- { + j := i - 1 + a, b, c, d, e = b, c, d, e, a + if step > j { + b = bits.RotateLeft32(b, -30) // undo the rotate left + f := b&c | (^b)&d + // subtract from e + e -= bits.RotateLeft32(a, 5) + f + shared.K0 + (m1[j] ^ dm[j]) + } + } + + ihv[0] = a + ihv[1] = b + ihv[2] = c + ihv[3] = d + ihv[4] = e + a = state[0] + b = state[1] + c = state[2] + d = state[3] + e = state[4] + + // Recompress blocks based on the current step. + // The existing collision detection does not have dvs below 58, so they have been removed + // from the source code. If new dvs are added which target rounds below 40, that logic + // will need to be readded here. 
+ for i := uint32(40); i < 60; i++ { + if step <= i { + f := ((b | c) & d) | (b & c) + t := bits.RotateLeft32(a, 5) + f + e + shared.K2 + (m1[i] ^ dm[i]) + a, b, c, d, e = t, a, bits.RotateLeft32(b, 30), c, d + } + } + for i := uint32(60); i < 80; i++ { + if step <= i { + f := b ^ c ^ d + t := bits.RotateLeft32(a, 5) + f + e + shared.K3 + (m1[i] ^ dm[i]) + a, b, c, d, e = t, a, bits.RotateLeft32(b, 30), c, d + } + } + + ihv[0] += a + ihv[1] += b + ihv[2] += c + ihv[3] += d + ihv[4] += e + + if ((ihv[0] ^ h[0]) | (ihv[1] ^ h[1]) | + (ihv[2] ^ h[2]) | (ihv[3] ^ h[3]) | (ihv[4] ^ h[4])) == 0 { + return true + } + + return false +} diff --git a/vendor/github.com/pjbgf/sha1cd/sha1cdblock_noasm.go b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_noasm.go new file mode 100644 index 00000000..15bae5a7 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/sha1cdblock_noasm.go @@ -0,0 +1,8 @@ +//go:build !amd64 || noasm || !gc +// +build !amd64 noasm !gc + +package sha1cd + +func block(dig *digest, p []byte) { + blockGeneric(dig, p) +} diff --git a/vendor/github.com/pjbgf/sha1cd/ubc/check.go b/vendor/github.com/pjbgf/sha1cd/ubc/check.go new file mode 100644 index 00000000..167a5558 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/ubc/check.go @@ -0,0 +1,368 @@ +// Based on the C implementation from Marc Stevens and Dan Shumow. +// https://github.com/cr-marcstevens/sha1collisiondetection + +package ubc + +type DvInfo struct { + // DvType, DvK and DvB define the DV: I(K,B) or II(K,B) (see the paper). + // https://marc-stevens.nl/research/papers/C13-S.pdf + DvType uint32 + DvK uint32 + DvB uint32 + + // TestT is the step to do the recompression from for collision detection. + TestT uint32 + + // MaskI and MaskB define the bit to check for each DV in the dvmask returned by ubc_check. + MaskI uint32 + MaskB uint32 + + // Dm is the expanded message block XOR-difference defined by the DV. + Dm [80]uint32 +} + +// CalculateDvMask takes as input an expanded message block and verifies the unavoidable bitconditions +// for all listed DVs. It returns a dvmask where each bit belonging to a DV is set if all +// unavoidable bitconditions for that DV have been met. +// Thus, one needs to do the recompression check for each DV that has its bit set. 
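The doc comment above describes the two-phase check: compute the DV mask once per block, then run the expensive recompression only for DVs whose bit is set. A minimal caller-side sketch of that pattern, mirroring the loop in checkCollision from sha1cdblock_generic.go earlier in this patch (illustrative only, not part of the vendored ubc package; it assumes the expanded message block is already held in a local `w [80]uint32` and that sha1_dvs ends with a zero sentinel entry, as the DvType != 0 loop condition in checkCollision implies):

	if mask := CalculateDvMask(w); mask != 0 {
		dvs := SHA1_dvs()
		for i := 0; dvs[i].DvType != 0; i++ {
			if mask&(uint32(1)<<dvs[i].MaskB) != 0 {
				// Candidate DV: recompress from step dvs[i].TestT using the
				// second message w ^ dvs[i].Dm and compare intermediate
				// hash values (this is what hasCollided does above).
			}
		}
	}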
+func CalculateDvMask(W [80]uint32) uint32 { + mask := uint32(0xFFFFFFFF) + mask &= (((((W[44] ^ W[45]) >> 29) & 1) - 1) | ^(DV_I_48_0_bit | DV_I_51_0_bit | DV_I_52_0_bit | DV_II_45_0_bit | DV_II_46_0_bit | DV_II_50_0_bit | DV_II_51_0_bit)) + mask &= (((((W[49] ^ W[50]) >> 29) & 1) - 1) | ^(DV_I_46_0_bit | DV_II_45_0_bit | DV_II_50_0_bit | DV_II_51_0_bit | DV_II_55_0_bit | DV_II_56_0_bit)) + mask &= (((((W[48] ^ W[49]) >> 29) & 1) - 1) | ^(DV_I_45_0_bit | DV_I_52_0_bit | DV_II_49_0_bit | DV_II_50_0_bit | DV_II_54_0_bit | DV_II_55_0_bit)) + mask &= ((((W[47] ^ (W[50] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_47_0_bit | DV_I_49_0_bit | DV_I_51_0_bit | DV_II_45_0_bit | DV_II_51_0_bit | DV_II_56_0_bit)) + mask &= (((((W[47] ^ W[48]) >> 29) & 1) - 1) | ^(DV_I_44_0_bit | DV_I_51_0_bit | DV_II_48_0_bit | DV_II_49_0_bit | DV_II_53_0_bit | DV_II_54_0_bit)) + mask &= (((((W[46] >> 4) ^ (W[49] >> 29)) & 1) - 1) | ^(DV_I_46_0_bit | DV_I_48_0_bit | DV_I_50_0_bit | DV_I_52_0_bit | DV_II_50_0_bit | DV_II_55_0_bit)) + mask &= (((((W[46] ^ W[47]) >> 29) & 1) - 1) | ^(DV_I_43_0_bit | DV_I_50_0_bit | DV_II_47_0_bit | DV_II_48_0_bit | DV_II_52_0_bit | DV_II_53_0_bit)) + mask &= (((((W[45] >> 4) ^ (W[48] >> 29)) & 1) - 1) | ^(DV_I_45_0_bit | DV_I_47_0_bit | DV_I_49_0_bit | DV_I_51_0_bit | DV_II_49_0_bit | DV_II_54_0_bit)) + mask &= (((((W[45] ^ W[46]) >> 29) & 1) - 1) | ^(DV_I_49_0_bit | DV_I_52_0_bit | DV_II_46_0_bit | DV_II_47_0_bit | DV_II_51_0_bit | DV_II_52_0_bit)) + mask &= (((((W[44] >> 4) ^ (W[47] >> 29)) & 1) - 1) | ^(DV_I_44_0_bit | DV_I_46_0_bit | DV_I_48_0_bit | DV_I_50_0_bit | DV_II_48_0_bit | DV_II_53_0_bit)) + mask &= (((((W[43] >> 4) ^ (W[46] >> 29)) & 1) - 1) | ^(DV_I_43_0_bit | DV_I_45_0_bit | DV_I_47_0_bit | DV_I_49_0_bit | DV_II_47_0_bit | DV_II_52_0_bit)) + mask &= (((((W[43] ^ W[44]) >> 29) & 1) - 1) | ^(DV_I_47_0_bit | DV_I_50_0_bit | DV_I_51_0_bit | DV_II_45_0_bit | DV_II_49_0_bit | DV_II_50_0_bit)) + mask &= (((((W[42] >> 4) ^ (W[45] >> 29)) & 1) - 1) | ^(DV_I_44_0_bit | DV_I_46_0_bit | DV_I_48_0_bit | DV_I_52_0_bit | DV_II_46_0_bit | DV_II_51_0_bit)) + mask &= (((((W[41] >> 4) ^ (W[44] >> 29)) & 1) - 1) | ^(DV_I_43_0_bit | DV_I_45_0_bit | DV_I_47_0_bit | DV_I_51_0_bit | DV_II_45_0_bit | DV_II_50_0_bit)) + mask &= (((((W[40] ^ W[41]) >> 29) & 1) - 1) | ^(DV_I_44_0_bit | DV_I_47_0_bit | DV_I_48_0_bit | DV_II_46_0_bit | DV_II_47_0_bit | DV_II_56_0_bit)) + mask &= (((((W[54] ^ W[55]) >> 29) & 1) - 1) | ^(DV_I_51_0_bit | DV_II_47_0_bit | DV_II_50_0_bit | DV_II_55_0_bit | DV_II_56_0_bit)) + mask &= (((((W[53] ^ W[54]) >> 29) & 1) - 1) | ^(DV_I_50_0_bit | DV_II_46_0_bit | DV_II_49_0_bit | DV_II_54_0_bit | DV_II_55_0_bit)) + mask &= (((((W[52] ^ W[53]) >> 29) & 1) - 1) | ^(DV_I_49_0_bit | DV_II_45_0_bit | DV_II_48_0_bit | DV_II_53_0_bit | DV_II_54_0_bit)) + mask &= ((((W[50] ^ (W[53] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_50_0_bit | DV_I_52_0_bit | DV_II_46_0_bit | DV_II_48_0_bit | DV_II_54_0_bit)) + mask &= (((((W[50] ^ W[51]) >> 29) & 1) - 1) | ^(DV_I_47_0_bit | DV_II_46_0_bit | DV_II_51_0_bit | DV_II_52_0_bit | DV_II_56_0_bit)) + mask &= ((((W[49] ^ (W[52] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_49_0_bit | DV_I_51_0_bit | DV_II_45_0_bit | DV_II_47_0_bit | DV_II_53_0_bit)) + mask &= ((((W[48] ^ (W[51] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_48_0_bit | DV_I_50_0_bit | DV_I_52_0_bit | DV_II_46_0_bit | DV_II_52_0_bit)) + mask &= (((((W[42] ^ W[43]) >> 29) & 1) - 1) | ^(DV_I_46_0_bit | DV_I_49_0_bit | DV_I_50_0_bit | DV_II_48_0_bit | DV_II_49_0_bit)) + mask &= (((((W[41] ^ W[42]) >> 29) & 1) 
- 1) | ^(DV_I_45_0_bit | DV_I_48_0_bit | DV_I_49_0_bit | DV_II_47_0_bit | DV_II_48_0_bit)) + mask &= (((((W[40] >> 4) ^ (W[43] >> 29)) & 1) - 1) | ^(DV_I_44_0_bit | DV_I_46_0_bit | DV_I_50_0_bit | DV_II_49_0_bit | DV_II_56_0_bit)) + mask &= (((((W[39] >> 4) ^ (W[42] >> 29)) & 1) - 1) | ^(DV_I_43_0_bit | DV_I_45_0_bit | DV_I_49_0_bit | DV_II_48_0_bit | DV_II_55_0_bit)) + + if (mask & (DV_I_44_0_bit | DV_I_48_0_bit | DV_II_47_0_bit | DV_II_54_0_bit | DV_II_56_0_bit)) != 0 { + mask &= (((((W[38] >> 4) ^ (W[41] >> 29)) & 1) - 1) | ^(DV_I_44_0_bit | DV_I_48_0_bit | DV_II_47_0_bit | DV_II_54_0_bit | DV_II_56_0_bit)) + } + mask &= (((((W[37] >> 4) ^ (W[40] >> 29)) & 1) - 1) | ^(DV_I_43_0_bit | DV_I_47_0_bit | DV_II_46_0_bit | DV_II_53_0_bit | DV_II_55_0_bit)) + if (mask & (DV_I_52_0_bit | DV_II_48_0_bit | DV_II_51_0_bit | DV_II_56_0_bit)) != 0 { + mask &= (((((W[55] ^ W[56]) >> 29) & 1) - 1) | ^(DV_I_52_0_bit | DV_II_48_0_bit | DV_II_51_0_bit | DV_II_56_0_bit)) + } + if (mask & (DV_I_52_0_bit | DV_II_48_0_bit | DV_II_50_0_bit | DV_II_56_0_bit)) != 0 { + mask &= ((((W[52] ^ (W[55] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_52_0_bit | DV_II_48_0_bit | DV_II_50_0_bit | DV_II_56_0_bit)) + } + if (mask & (DV_I_51_0_bit | DV_II_47_0_bit | DV_II_49_0_bit | DV_II_55_0_bit)) != 0 { + mask &= ((((W[51] ^ (W[54] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_51_0_bit | DV_II_47_0_bit | DV_II_49_0_bit | DV_II_55_0_bit)) + } + if (mask & (DV_I_48_0_bit | DV_II_47_0_bit | DV_II_52_0_bit | DV_II_53_0_bit)) != 0 { + mask &= (((((W[51] ^ W[52]) >> 29) & 1) - 1) | ^(DV_I_48_0_bit | DV_II_47_0_bit | DV_II_52_0_bit | DV_II_53_0_bit)) + } + if (mask & (DV_I_46_0_bit | DV_I_49_0_bit | DV_II_45_0_bit | DV_II_48_0_bit)) != 0 { + mask &= (((((W[36] >> 4) ^ (W[40] >> 29)) & 1) - 1) | ^(DV_I_46_0_bit | DV_I_49_0_bit | DV_II_45_0_bit | DV_II_48_0_bit)) + } + if (mask & (DV_I_52_0_bit | DV_II_48_0_bit | DV_II_49_0_bit)) != 0 { + mask &= ((0 - (((W[53] ^ W[56]) >> 29) & 1)) | ^(DV_I_52_0_bit | DV_II_48_0_bit | DV_II_49_0_bit)) + } + if (mask & (DV_I_50_0_bit | DV_II_46_0_bit | DV_II_47_0_bit)) != 0 { + mask &= ((0 - (((W[51] ^ W[54]) >> 29) & 1)) | ^(DV_I_50_0_bit | DV_II_46_0_bit | DV_II_47_0_bit)) + } + if (mask & (DV_I_49_0_bit | DV_I_51_0_bit | DV_II_45_0_bit)) != 0 { + mask &= ((0 - (((W[50] ^ W[52]) >> 29) & 1)) | ^(DV_I_49_0_bit | DV_I_51_0_bit | DV_II_45_0_bit)) + } + if (mask & (DV_I_48_0_bit | DV_I_50_0_bit | DV_I_52_0_bit)) != 0 { + mask &= ((0 - (((W[49] ^ W[51]) >> 29) & 1)) | ^(DV_I_48_0_bit | DV_I_50_0_bit | DV_I_52_0_bit)) + } + if (mask & (DV_I_47_0_bit | DV_I_49_0_bit | DV_I_51_0_bit)) != 0 { + mask &= ((0 - (((W[48] ^ W[50]) >> 29) & 1)) | ^(DV_I_47_0_bit | DV_I_49_0_bit | DV_I_51_0_bit)) + } + if (mask & (DV_I_46_0_bit | DV_I_48_0_bit | DV_I_50_0_bit)) != 0 { + mask &= ((0 - (((W[47] ^ W[49]) >> 29) & 1)) | ^(DV_I_46_0_bit | DV_I_48_0_bit | DV_I_50_0_bit)) + } + if (mask & (DV_I_45_0_bit | DV_I_47_0_bit | DV_I_49_0_bit)) != 0 { + mask &= ((0 - (((W[46] ^ W[48]) >> 29) & 1)) | ^(DV_I_45_0_bit | DV_I_47_0_bit | DV_I_49_0_bit)) + } + mask &= ((((W[45] ^ W[47]) & (1 << 6)) - (1 << 6)) | ^(DV_I_47_2_bit | DV_I_49_2_bit | DV_I_51_2_bit)) + if (mask & (DV_I_44_0_bit | DV_I_46_0_bit | DV_I_48_0_bit)) != 0 { + mask &= ((0 - (((W[45] ^ W[47]) >> 29) & 1)) | ^(DV_I_44_0_bit | DV_I_46_0_bit | DV_I_48_0_bit)) + } + mask &= (((((W[44] ^ W[46]) >> 6) & 1) - 1) | ^(DV_I_46_2_bit | DV_I_48_2_bit | DV_I_50_2_bit)) + if (mask & (DV_I_43_0_bit | DV_I_45_0_bit | DV_I_47_0_bit)) != 0 { + mask &= ((0 - (((W[44] ^ W[46]) >> 29) & 1)) | 
^(DV_I_43_0_bit | DV_I_45_0_bit | DV_I_47_0_bit)) + } + mask &= ((0 - ((W[41] ^ (W[42] >> 5)) & (1 << 1))) | ^(DV_I_48_2_bit | DV_II_46_2_bit | DV_II_51_2_bit)) + mask &= ((0 - ((W[40] ^ (W[41] >> 5)) & (1 << 1))) | ^(DV_I_47_2_bit | DV_I_51_2_bit | DV_II_50_2_bit)) + if (mask & (DV_I_44_0_bit | DV_I_46_0_bit | DV_II_56_0_bit)) != 0 { + mask &= ((0 - (((W[40] ^ W[42]) >> 4) & 1)) | ^(DV_I_44_0_bit | DV_I_46_0_bit | DV_II_56_0_bit)) + } + mask &= ((0 - ((W[39] ^ (W[40] >> 5)) & (1 << 1))) | ^(DV_I_46_2_bit | DV_I_50_2_bit | DV_II_49_2_bit)) + if (mask & (DV_I_43_0_bit | DV_I_45_0_bit | DV_II_55_0_bit)) != 0 { + mask &= ((0 - (((W[39] ^ W[41]) >> 4) & 1)) | ^(DV_I_43_0_bit | DV_I_45_0_bit | DV_II_55_0_bit)) + } + if (mask & (DV_I_44_0_bit | DV_II_54_0_bit | DV_II_56_0_bit)) != 0 { + mask &= ((0 - (((W[38] ^ W[40]) >> 4) & 1)) | ^(DV_I_44_0_bit | DV_II_54_0_bit | DV_II_56_0_bit)) + } + if (mask & (DV_I_43_0_bit | DV_II_53_0_bit | DV_II_55_0_bit)) != 0 { + mask &= ((0 - (((W[37] ^ W[39]) >> 4) & 1)) | ^(DV_I_43_0_bit | DV_II_53_0_bit | DV_II_55_0_bit)) + } + mask &= ((0 - ((W[36] ^ (W[37] >> 5)) & (1 << 1))) | ^(DV_I_47_2_bit | DV_I_50_2_bit | DV_II_46_2_bit)) + if (mask & (DV_I_45_0_bit | DV_I_48_0_bit | DV_II_47_0_bit)) != 0 { + mask &= (((((W[35] >> 4) ^ (W[39] >> 29)) & 1) - 1) | ^(DV_I_45_0_bit | DV_I_48_0_bit | DV_II_47_0_bit)) + } + if (mask & (DV_I_48_0_bit | DV_II_48_0_bit)) != 0 { + mask &= ((0 - ((W[63] ^ (W[64] >> 5)) & (1 << 0))) | ^(DV_I_48_0_bit | DV_II_48_0_bit)) + } + if (mask & (DV_I_45_0_bit | DV_II_45_0_bit)) != 0 { + mask &= ((0 - ((W[63] ^ (W[64] >> 5)) & (1 << 1))) | ^(DV_I_45_0_bit | DV_II_45_0_bit)) + } + if (mask & (DV_I_47_0_bit | DV_II_47_0_bit)) != 0 { + mask &= ((0 - ((W[62] ^ (W[63] >> 5)) & (1 << 0))) | ^(DV_I_47_0_bit | DV_II_47_0_bit)) + } + if (mask & (DV_I_46_0_bit | DV_II_46_0_bit)) != 0 { + mask &= ((0 - ((W[61] ^ (W[62] >> 5)) & (1 << 0))) | ^(DV_I_46_0_bit | DV_II_46_0_bit)) + } + mask &= ((0 - ((W[61] ^ (W[62] >> 5)) & (1 << 2))) | ^(DV_I_46_2_bit | DV_II_46_2_bit)) + if (mask & (DV_I_45_0_bit | DV_II_45_0_bit)) != 0 { + mask &= ((0 - ((W[60] ^ (W[61] >> 5)) & (1 << 0))) | ^(DV_I_45_0_bit | DV_II_45_0_bit)) + } + if (mask & (DV_II_51_0_bit | DV_II_54_0_bit)) != 0 { + mask &= (((((W[58] ^ W[59]) >> 29) & 1) - 1) | ^(DV_II_51_0_bit | DV_II_54_0_bit)) + } + if (mask & (DV_II_50_0_bit | DV_II_53_0_bit)) != 0 { + mask &= (((((W[57] ^ W[58]) >> 29) & 1) - 1) | ^(DV_II_50_0_bit | DV_II_53_0_bit)) + } + if (mask & (DV_II_52_0_bit | DV_II_54_0_bit)) != 0 { + mask &= ((((W[56] ^ (W[59] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_II_52_0_bit | DV_II_54_0_bit)) + } + if (mask & (DV_II_51_0_bit | DV_II_52_0_bit)) != 0 { + mask &= ((0 - (((W[56] ^ W[59]) >> 29) & 1)) | ^(DV_II_51_0_bit | DV_II_52_0_bit)) + } + if (mask & (DV_II_49_0_bit | DV_II_52_0_bit)) != 0 { + mask &= (((((W[56] ^ W[57]) >> 29) & 1) - 1) | ^(DV_II_49_0_bit | DV_II_52_0_bit)) + } + if (mask & (DV_II_51_0_bit | DV_II_53_0_bit)) != 0 { + mask &= ((((W[55] ^ (W[58] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_II_51_0_bit | DV_II_53_0_bit)) + } + if (mask & (DV_II_50_0_bit | DV_II_52_0_bit)) != 0 { + mask &= ((((W[54] ^ (W[57] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_II_50_0_bit | DV_II_52_0_bit)) + } + if (mask & (DV_II_49_0_bit | DV_II_51_0_bit)) != 0 { + mask &= ((((W[53] ^ (W[56] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_II_49_0_bit | DV_II_51_0_bit)) + } + mask &= ((((W[51] ^ (W[50] >> 5)) & (1 << 1)) - (1 << 1)) | ^(DV_I_50_2_bit | DV_II_46_2_bit)) + mask &= ((((W[48] ^ W[50]) & (1 << 6)) - (1 << 6)) | 
^(DV_I_50_2_bit | DV_II_46_2_bit)) + if (mask & (DV_I_51_0_bit | DV_I_52_0_bit)) != 0 { + mask &= ((0 - (((W[48] ^ W[55]) >> 29) & 1)) | ^(DV_I_51_0_bit | DV_I_52_0_bit)) + } + mask &= ((((W[47] ^ W[49]) & (1 << 6)) - (1 << 6)) | ^(DV_I_49_2_bit | DV_I_51_2_bit)) + mask &= ((((W[48] ^ (W[47] >> 5)) & (1 << 1)) - (1 << 1)) | ^(DV_I_47_2_bit | DV_II_51_2_bit)) + mask &= ((((W[46] ^ W[48]) & (1 << 6)) - (1 << 6)) | ^(DV_I_48_2_bit | DV_I_50_2_bit)) + mask &= ((((W[47] ^ (W[46] >> 5)) & (1 << 1)) - (1 << 1)) | ^(DV_I_46_2_bit | DV_II_50_2_bit)) + mask &= ((0 - ((W[44] ^ (W[45] >> 5)) & (1 << 1))) | ^(DV_I_51_2_bit | DV_II_49_2_bit)) + mask &= ((((W[43] ^ W[45]) & (1 << 6)) - (1 << 6)) | ^(DV_I_47_2_bit | DV_I_49_2_bit)) + mask &= (((((W[42] ^ W[44]) >> 6) & 1) - 1) | ^(DV_I_46_2_bit | DV_I_48_2_bit)) + mask &= ((((W[43] ^ (W[42] >> 5)) & (1 << 1)) - (1 << 1)) | ^(DV_II_46_2_bit | DV_II_51_2_bit)) + mask &= ((((W[42] ^ (W[41] >> 5)) & (1 << 1)) - (1 << 1)) | ^(DV_I_51_2_bit | DV_II_50_2_bit)) + mask &= ((((W[41] ^ (W[40] >> 5)) & (1 << 1)) - (1 << 1)) | ^(DV_I_50_2_bit | DV_II_49_2_bit)) + if (mask & (DV_I_52_0_bit | DV_II_51_0_bit)) != 0 { + mask &= ((((W[39] ^ (W[43] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_52_0_bit | DV_II_51_0_bit)) + } + if (mask & (DV_I_51_0_bit | DV_II_50_0_bit)) != 0 { + mask &= ((((W[38] ^ (W[42] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_51_0_bit | DV_II_50_0_bit)) + } + if (mask & (DV_I_48_2_bit | DV_I_51_2_bit)) != 0 { + mask &= ((0 - ((W[37] ^ (W[38] >> 5)) & (1 << 1))) | ^(DV_I_48_2_bit | DV_I_51_2_bit)) + } + if (mask & (DV_I_50_0_bit | DV_II_49_0_bit)) != 0 { + mask &= ((((W[37] ^ (W[41] >> 25)) & (1 << 4)) - (1 << 4)) | ^(DV_I_50_0_bit | DV_II_49_0_bit)) + } + if (mask & (DV_II_52_0_bit | DV_II_54_0_bit)) != 0 { + mask &= ((0 - ((W[36] ^ W[38]) & (1 << 4))) | ^(DV_II_52_0_bit | DV_II_54_0_bit)) + } + mask &= ((0 - ((W[35] ^ (W[36] >> 5)) & (1 << 1))) | ^(DV_I_46_2_bit | DV_I_49_2_bit)) + if (mask & (DV_I_51_0_bit | DV_II_47_0_bit)) != 0 { + mask &= ((((W[35] ^ (W[39] >> 25)) & (1 << 3)) - (1 << 3)) | ^(DV_I_51_0_bit | DV_II_47_0_bit)) + } + + if mask != 0 { + if (mask & DV_I_43_0_bit) != 0 { + if not((W[61]^(W[62]>>5))&(1<<1)) != 0 || + not(not((W[59]^(W[63]>>25))&(1<<5))) != 0 || + not((W[58]^(W[63]>>30))&(1<<0)) != 0 { + mask &= ^DV_I_43_0_bit + } + } + if (mask & DV_I_44_0_bit) != 0 { + if not((W[62]^(W[63]>>5))&(1<<1)) != 0 || + not(not((W[60]^(W[64]>>25))&(1<<5))) != 0 || + not((W[59]^(W[64]>>30))&(1<<0)) != 0 { + mask &= ^DV_I_44_0_bit + } + } + if (mask & DV_I_46_2_bit) != 0 { + mask &= ((^((W[40] ^ W[42]) >> 2)) | ^DV_I_46_2_bit) + } + if (mask & DV_I_47_2_bit) != 0 { + if not((W[62]^(W[63]>>5))&(1<<2)) != 0 || + not(not((W[41]^W[43])&(1<<6))) != 0 { + mask &= ^DV_I_47_2_bit + } + } + if (mask & DV_I_48_2_bit) != 0 { + if not((W[63]^(W[64]>>5))&(1<<2)) != 0 || + not(not((W[48]^(W[49]<<5))&(1<<6))) != 0 { + mask &= ^DV_I_48_2_bit + } + } + if (mask & DV_I_49_2_bit) != 0 { + if not(not((W[49]^(W[50]<<5))&(1<<6))) != 0 || + not((W[42]^W[50])&(1<<1)) != 0 || + not(not((W[39]^(W[40]<<5))&(1<<6))) != 0 || + not((W[38]^W[40])&(1<<1)) != 0 { + mask &= ^DV_I_49_2_bit + } + } + if (mask & DV_I_50_0_bit) != 0 { + mask &= (((W[36] ^ W[37]) << 7) | ^DV_I_50_0_bit) + } + if (mask & DV_I_50_2_bit) != 0 { + mask &= (((W[43] ^ W[51]) << 11) | ^DV_I_50_2_bit) + } + if (mask & DV_I_51_0_bit) != 0 { + mask &= (((W[37] ^ W[38]) << 9) | ^DV_I_51_0_bit) + } + if (mask & DV_I_51_2_bit) != 0 { + if not(not((W[51]^(W[52]<<5))&(1<<6))) != 0 || + not(not((W[49]^W[51])&(1<<6))) != 0 
|| + not(not((W[37]^(W[37]>>5))&(1<<1))) != 0 || + not(not((W[35]^(W[39]>>25))&(1<<5))) != 0 { + mask &= ^DV_I_51_2_bit + } + } + if (mask & DV_I_52_0_bit) != 0 { + mask &= (((W[38] ^ W[39]) << 11) | ^DV_I_52_0_bit) + } + if (mask & DV_II_46_2_bit) != 0 { + mask &= (((W[47] ^ W[51]) << 17) | ^DV_II_46_2_bit) + } + if (mask & DV_II_48_0_bit) != 0 { + if not(not((W[36]^(W[40]>>25))&(1<<3))) != 0 || + not((W[35]^(W[40]<<2))&(1<<30)) != 0 { + mask &= ^DV_II_48_0_bit + } + } + if (mask & DV_II_49_0_bit) != 0 { + if not(not((W[37]^(W[41]>>25))&(1<<3))) != 0 || + not((W[36]^(W[41]<<2))&(1<<30)) != 0 { + mask &= ^DV_II_49_0_bit + } + } + if (mask & DV_II_49_2_bit) != 0 { + if not(not((W[53]^(W[54]<<5))&(1<<6))) != 0 || + not(not((W[51]^W[53])&(1<<6))) != 0 || + not((W[50]^W[54])&(1<<1)) != 0 || + not(not((W[45]^(W[46]<<5))&(1<<6))) != 0 || + not(not((W[37]^(W[41]>>25))&(1<<5))) != 0 || + not((W[36]^(W[41]>>30))&(1<<0)) != 0 { + mask &= ^DV_II_49_2_bit + } + } + if (mask & DV_II_50_0_bit) != 0 { + if not((W[55]^W[58])&(1<<29)) != 0 || + not(not((W[38]^(W[42]>>25))&(1<<3))) != 0 || + not((W[37]^(W[42]<<2))&(1<<30)) != 0 { + mask &= ^DV_II_50_0_bit + } + } + if (mask & DV_II_50_2_bit) != 0 { + if not(not((W[54]^(W[55]<<5))&(1<<6))) != 0 || + not(not((W[52]^W[54])&(1<<6))) != 0 || + not((W[51]^W[55])&(1<<1)) != 0 || + not((W[45]^W[47])&(1<<1)) != 0 || + not(not((W[38]^(W[42]>>25))&(1<<5))) != 0 || + not((W[37]^(W[42]>>30))&(1<<0)) != 0 { + mask &= ^DV_II_50_2_bit + } + } + if (mask & DV_II_51_0_bit) != 0 { + if not(not((W[39]^(W[43]>>25))&(1<<3))) != 0 || + not((W[38]^(W[43]<<2))&(1<<30)) != 0 { + mask &= ^DV_II_51_0_bit + } + } + if (mask & DV_II_51_2_bit) != 0 { + if not(not((W[55]^(W[56]<<5))&(1<<6))) != 0 || + not(not((W[53]^W[55])&(1<<6))) != 0 || + not((W[52]^W[56])&(1<<1)) != 0 || + not((W[46]^W[48])&(1<<1)) != 0 || + not(not((W[39]^(W[43]>>25))&(1<<5))) != 0 || + not((W[38]^(W[43]>>30))&(1<<0)) != 0 { + mask &= ^DV_II_51_2_bit + } + } + if (mask & DV_II_52_0_bit) != 0 { + if not(not((W[59]^W[60])&(1<<29))) != 0 || + not(not((W[40]^(W[44]>>25))&(1<<3))) != 0 || + not(not((W[40]^(W[44]>>25))&(1<<4))) != 0 || + not((W[39]^(W[44]<<2))&(1<<30)) != 0 { + mask &= ^DV_II_52_0_bit + } + } + if (mask & DV_II_53_0_bit) != 0 { + if not((W[58]^W[61])&(1<<29)) != 0 || + not(not((W[57]^(W[61]>>25))&(1<<4))) != 0 || + not(not((W[41]^(W[45]>>25))&(1<<3))) != 0 || + not(not((W[41]^(W[45]>>25))&(1<<4))) != 0 { + mask &= ^DV_II_53_0_bit + } + } + if (mask & DV_II_54_0_bit) != 0 { + if not(not((W[58]^(W[62]>>25))&(1<<4))) != 0 || + not(not((W[42]^(W[46]>>25))&(1<<3))) != 0 || + not(not((W[42]^(W[46]>>25))&(1<<4))) != 0 { + mask &= ^DV_II_54_0_bit + } + } + if (mask & DV_II_55_0_bit) != 0 { + if not(not((W[59]^(W[63]>>25))&(1<<4))) != 0 || + not(not((W[57]^(W[59]>>25))&(1<<4))) != 0 || + not(not((W[43]^(W[47]>>25))&(1<<3))) != 0 || + not(not((W[43]^(W[47]>>25))&(1<<4))) != 0 { + mask &= ^DV_II_55_0_bit + } + } + if (mask & DV_II_56_0_bit) != 0 { + if not(not((W[60]^(W[64]>>25))&(1<<4))) != 0 || + not(not((W[44]^(W[48]>>25))&(1<<3))) != 0 || + not(not((W[44]^(W[48]>>25))&(1<<4))) != 0 { + mask &= ^DV_II_56_0_bit + } + } + } + + return mask +} + +func not(x uint32) uint32 { + if x == 0 { + return 1 + } + + return 0 +} + +func SHA1_dvs() []DvInfo { + return sha1_dvs +} diff --git a/vendor/github.com/pjbgf/sha1cd/ubc/const.go b/vendor/github.com/pjbgf/sha1cd/ubc/const.go new file mode 100644 index 00000000..eac14f46 --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/ubc/const.go @@ -0,0 +1,624 @@ +// Based on the C 
implementation from Marc Stevens and Dan Shumow. +// https://github.com/cr-marcstevens/sha1collisiondetection + +package ubc + +const ( + CheckSize = 80 + + DV_I_43_0_bit = (uint32)(1 << 0) + DV_I_44_0_bit = (uint32)(1 << 1) + DV_I_45_0_bit = (uint32)(1 << 2) + DV_I_46_0_bit = (uint32)(1 << 3) + DV_I_46_2_bit = (uint32)(1 << 4) + DV_I_47_0_bit = (uint32)(1 << 5) + DV_I_47_2_bit = (uint32)(1 << 6) + DV_I_48_0_bit = (uint32)(1 << 7) + DV_I_48_2_bit = (uint32)(1 << 8) + DV_I_49_0_bit = (uint32)(1 << 9) + DV_I_49_2_bit = (uint32)(1 << 10) + DV_I_50_0_bit = (uint32)(1 << 11) + DV_I_50_2_bit = (uint32)(1 << 12) + DV_I_51_0_bit = (uint32)(1 << 13) + DV_I_51_2_bit = (uint32)(1 << 14) + DV_I_52_0_bit = (uint32)(1 << 15) + DV_II_45_0_bit = (uint32)(1 << 16) + DV_II_46_0_bit = (uint32)(1 << 17) + DV_II_46_2_bit = (uint32)(1 << 18) + DV_II_47_0_bit = (uint32)(1 << 19) + DV_II_48_0_bit = (uint32)(1 << 20) + DV_II_49_0_bit = (uint32)(1 << 21) + DV_II_49_2_bit = (uint32)(1 << 22) + DV_II_50_0_bit = (uint32)(1 << 23) + DV_II_50_2_bit = (uint32)(1 << 24) + DV_II_51_0_bit = (uint32)(1 << 25) + DV_II_51_2_bit = (uint32)(1 << 26) + DV_II_52_0_bit = (uint32)(1 << 27) + DV_II_53_0_bit = (uint32)(1 << 28) + DV_II_54_0_bit = (uint32)(1 << 29) + DV_II_55_0_bit = (uint32)(1 << 30) + DV_II_56_0_bit = (uint32)(1 << 31) +) + +// sha1_dvs contains a list of SHA-1 Disturbance Vectors (DV) which defines the +// unavoidable bit conditions when a collision attack is in progress. +var sha1_dvs = []DvInfo{ + { + DvType: 1, DvK: 43, DvB: 0, TestT: 58, MaskI: 0, MaskB: 0, + Dm: [CheckSize]uint32{ + 0x08000000, 0x9800000c, 0xd8000010, 0x08000010, 0xb8000010, 0x98000000, 0x60000000, + 0x00000008, 0xc0000000, 0x90000014, 0x10000010, 0xb8000014, 0x28000000, 0x20000010, + 0x48000000, 0x08000018, 0x60000000, 0x90000010, 0xf0000010, 0x90000008, 0xc0000000, + 0x90000010, 0xf0000010, 0xb0000008, 0x40000000, 0x90000000, 0xf0000010, 0x90000018, + 0x60000000, 0x90000010, 0x90000010, 0x90000000, 0x80000000, 0x00000010, 0xa0000000, + 0x20000000, 0xa0000000, 0x20000010, 0x00000000, 0x20000010, 0x20000000, 0x00000010, + 0x20000000, 0x00000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, 0x40000040, + 0x40000002, 0x80000004, 0x80000080, 0x80000006, 0x00000049, 0x00000103, 0x80000009, + 0x80000012, 0x80000202, 0x00000018, 0x00000164, 0x00000408, 0x800000e6, 0x8000004c, + 0x00000803, 0x80000161, 0x80000599}, + }, { + DvType: 1, DvK: 44, DvB: 0, TestT: 58, MaskI: 0, MaskB: 1, + Dm: [CheckSize]uint32{ + 0xb4000008, 0x08000000, 0x9800000c, 0xd8000010, 0x08000010, 0xb8000010, 0x98000000, + 0x60000000, 0x00000008, 0xc0000000, 0x90000014, 0x10000010, 0xb8000014, 0x28000000, + 0x20000010, 0x48000000, 0x08000018, 0x60000000, 0x90000010, 0xf0000010, 0x90000008, + 0xc0000000, 0x90000010, 0xf0000010, 0xb0000008, 0x40000000, 0x90000000, 0xf0000010, + 0x90000018, 0x60000000, 0x90000010, 0x90000010, 0x90000000, 0x80000000, 0x00000010, + 0xa0000000, 0x20000000, 0xa0000000, 0x20000010, 0x00000000, 0x20000010, 0x20000000, + 0x00000010, 0x20000000, 0x00000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, + 0x40000040, 0x40000002, 0x80000004, 0x80000080, 0x80000006, 0x00000049, 0x00000103, + 0x80000009, 0x80000012, 
0x80000202, 0x00000018, 0x00000164, 0x00000408, 0x800000e6, + 0x8000004c, 0x00000803, 0x80000161}, + }, + { + DvType: 1, DvK: 45, DvB: 0, TestT: 58, MaskI: 0, MaskB: 2, + Dm: [CheckSize]uint32{ + 0xf4000014, 0xb4000008, 0x08000000, 0x9800000c, 0xd8000010, 0x08000010, 0xb8000010, + 0x98000000, 0x60000000, 0x00000008, 0xc0000000, 0x90000014, 0x10000010, 0xb8000014, + 0x28000000, 0x20000010, 0x48000000, 0x08000018, 0x60000000, 0x90000010, 0xf0000010, + 0x90000008, 0xc0000000, 0x90000010, 0xf0000010, 0xb0000008, 0x40000000, 0x90000000, + 0xf0000010, 0x90000018, 0x60000000, 0x90000010, 0x90000010, 0x90000000, 0x80000000, + 0x00000010, 0xa0000000, 0x20000000, 0xa0000000, 0x20000010, 0x00000000, 0x20000010, + 0x20000000, 0x00000010, 0x20000000, 0x00000010, 0xa0000000, 0x00000000, 0x20000000, + 0x20000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, + 0x40000002, 0x40000040, 0x40000002, 0x80000004, 0x80000080, 0x80000006, 0x00000049, + 0x00000103, 0x80000009, 0x80000012, 0x80000202, 0x00000018, 0x00000164, 0x00000408, + 0x800000e6, 0x8000004c, 0x00000803}, + }, + { + DvType: 1, DvK: 46, DvB: 0, TestT: 58, MaskI: 0, MaskB: 3, + Dm: [CheckSize]uint32{ + 0x2c000010, 0xf4000014, 0xb4000008, 0x08000000, 0x9800000c, 0xd8000010, 0x08000010, + 0xb8000010, 0x98000000, 0x60000000, 0x00000008, 0xc0000000, 0x90000014, 0x10000010, + 0xb8000014, 0x28000000, 0x20000010, 0x48000000, 0x08000018, 0x60000000, 0x90000010, + 0xf0000010, 0x90000008, 0xc0000000, 0x90000010, 0xf0000010, 0xb0000008, 0x40000000, + 0x90000000, 0xf0000010, 0x90000018, 0x60000000, 0x90000010, 0x90000010, 0x90000000, + 0x80000000, 0x00000010, 0xa0000000, 0x20000000, 0xa0000000, 0x20000010, 0x00000000, + 0x20000010, 0x20000000, 0x00000010, 0x20000000, 0x00000010, 0xa0000000, 0x00000000, + 0x20000000, 0x20000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, + 0x00000001, 0x40000002, 0x40000040, 0x40000002, 0x80000004, 0x80000080, 0x80000006, + 0x00000049, 0x00000103, 0x80000009, 0x80000012, 0x80000202, 0x00000018, 0x00000164, + 0x00000408, 0x800000e6, 0x8000004c}, + }, + { + DvType: 1, DvK: 46, DvB: 2, TestT: 58, MaskI: 0, MaskB: 4, + Dm: [CheckSize]uint32{ + 0xb0000040, 0xd0000053, 0xd0000022, 0x20000000, 0x60000032, 0x60000043, + 0x20000040, 0xe0000042, 0x60000002, 0x80000001, 0x00000020, 0x00000003, + 0x40000052, 0x40000040, 0xe0000052, 0xa0000000, 0x80000040, 0x20000001, + 0x20000060, 0x80000001, 0x40000042, 0xc0000043, 0x40000022, 0x00000003, + 0x40000042, 0xc0000043, 0xc0000022, 0x00000001, 0x40000002, 0xc0000043, + 0x40000062, 0x80000001, 0x40000042, 0x40000042, 0x40000002, 0x00000002, + 0x00000040, 0x80000002, 0x80000000, 0x80000002, 0x80000040, 0x00000000, + 0x80000040, 0x80000000, 0x00000040, 0x80000000, 0x00000040, 0x80000002, + 0x00000000, 0x80000000, 0x80000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000004, 0x00000080, 0x00000004, 0x00000009, 0x00000101, + 0x00000009, 0x00000012, 0x00000202, 0x0000001a, 0x00000124, 0x0000040c, + 0x00000026, 0x0000004a, 0x0000080a, 0x00000060, 0x00000590, 0x00001020, + 0x0000039a, 0x00000132}, + }, + { + DvType: 1, DvK: 47, DvB: 0, TestT: 58, MaskI: 0, MaskB: 5, + Dm: [CheckSize]uint32{ + 0xc8000010, 0x2c000010, 0xf4000014, 0xb4000008, 0x08000000, 0x9800000c, + 0xd8000010, 0x08000010, 0xb8000010, 
0x98000000, 0x60000000, 0x00000008, + 0xc0000000, 0x90000014, 0x10000010, 0xb8000014, 0x28000000, 0x20000010, + 0x48000000, 0x08000018, 0x60000000, 0x90000010, 0xf0000010, 0x90000008, + 0xc0000000, 0x90000010, 0xf0000010, 0xb0000008, 0x40000000, 0x90000000, + 0xf0000010, 0x90000018, 0x60000000, 0x90000010, 0x90000010, 0x90000000, + 0x80000000, 0x00000010, 0xa0000000, 0x20000000, 0xa0000000, 0x20000010, + 0x00000000, 0x20000010, 0x20000000, 0x00000010, 0x20000000, 0x00000010, + 0xa0000000, 0x00000000, 0x20000000, 0x20000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, + 0x40000040, 0x40000002, 0x80000004, 0x80000080, 0x80000006, 0x00000049, + 0x00000103, 0x80000009, 0x80000012, 0x80000202, 0x00000018, 0x00000164, + 0x00000408, 0x800000e6}, + }, + { + DvType: 1, DvK: 47, DvB: 2, TestT: 58, MaskI: 0, MaskB: 6, + Dm: [CheckSize]uint32{ + 0x20000043, 0xb0000040, 0xd0000053, 0xd0000022, 0x20000000, 0x60000032, + 0x60000043, 0x20000040, 0xe0000042, 0x60000002, 0x80000001, 0x00000020, + 0x00000003, 0x40000052, 0x40000040, 0xe0000052, 0xa0000000, 0x80000040, + 0x20000001, 0x20000060, 0x80000001, 0x40000042, 0xc0000043, 0x40000022, + 0x00000003, 0x40000042, 0xc0000043, 0xc0000022, 0x00000001, 0x40000002, + 0xc0000043, 0x40000062, 0x80000001, 0x40000042, 0x40000042, 0x40000002, + 0x00000002, 0x00000040, 0x80000002, 0x80000000, 0x80000002, 0x80000040, + 0x00000000, 0x80000040, 0x80000000, 0x00000040, 0x80000000, 0x00000040, + 0x80000002, 0x00000000, 0x80000000, 0x80000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000004, 0x00000080, 0x00000004, 0x00000009, + 0x00000101, 0x00000009, 0x00000012, 0x00000202, 0x0000001a, 0x00000124, + 0x0000040c, 0x00000026, 0x0000004a, 0x0000080a, 0x00000060, 0x00000590, + 0x00001020, 0x0000039a, + }, + }, + { + DvType: 1, DvK: 48, DvB: 0, TestT: 58, MaskI: 0, MaskB: 7, + Dm: [CheckSize]uint32{ + 0xb800000a, 0xc8000010, 0x2c000010, 0xf4000014, 0xb4000008, 0x08000000, + 0x9800000c, 0xd8000010, 0x08000010, 0xb8000010, 0x98000000, 0x60000000, + 0x00000008, 0xc0000000, 0x90000014, 0x10000010, 0xb8000014, 0x28000000, + 0x20000010, 0x48000000, 0x08000018, 0x60000000, 0x90000010, 0xf0000010, + 0x90000008, 0xc0000000, 0x90000010, 0xf0000010, 0xb0000008, 0x40000000, + 0x90000000, 0xf0000010, 0x90000018, 0x60000000, 0x90000010, 0x90000010, + 0x90000000, 0x80000000, 0x00000010, 0xa0000000, 0x20000000, 0xa0000000, + 0x20000010, 0x00000000, 0x20000010, 0x20000000, 0x00000010, 0x20000000, + 0x00000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, + 0x40000002, 0x40000040, 0x40000002, 0x80000004, 0x80000080, 0x80000006, + 0x00000049, 0x00000103, 0x80000009, 0x80000012, 0x80000202, 0x00000018, + 0x00000164, 0x00000408, + }, + }, + { + DvType: 1, DvK: 48, DvB: 2, TestT: 58, MaskI: 0, MaskB: 8, + Dm: [CheckSize]uint32{ + 0xe000002a, 0x20000043, 0xb0000040, 0xd0000053, 0xd0000022, 0x20000000, + 0x60000032, 0x60000043, 0x20000040, 0xe0000042, 0x60000002, 0x80000001, + 0x00000020, 0x00000003, 0x40000052, 0x40000040, 0xe0000052, 0xa0000000, + 0x80000040, 0x20000001, 0x20000060, 0x80000001, 0x40000042, 0xc0000043, + 0x40000022, 0x00000003, 0x40000042, 0xc0000043, 0xc0000022, 0x00000001, + 0x40000002, 0xc0000043, 
0x40000062, 0x80000001, 0x40000042, 0x40000042, + 0x40000002, 0x00000002, 0x00000040, 0x80000002, 0x80000000, 0x80000002, + 0x80000040, 0x00000000, 0x80000040, 0x80000000, 0x00000040, 0x80000000, + 0x00000040, 0x80000002, 0x00000000, 0x80000000, 0x80000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000004, 0x00000080, 0x00000004, + 0x00000009, 0x00000101, 0x00000009, 0x00000012, 0x00000202, 0x0000001a, + 0x00000124, 0x0000040c, 0x00000026, 0x0000004a, 0x0000080a, 0x00000060, + 0x00000590, 0x00001020}, + }, + { + DvType: 1, DvK: 49, DvB: 0, TestT: 58, MaskI: 0, MaskB: 9, + Dm: [CheckSize]uint32{ + 0x18000000, 0xb800000a, 0xc8000010, 0x2c000010, 0xf4000014, 0xb4000008, + 0x08000000, 0x9800000c, 0xd8000010, 0x08000010, 0xb8000010, 0x98000000, + 0x60000000, 0x00000008, 0xc0000000, 0x90000014, 0x10000010, 0xb8000014, + 0x28000000, 0x20000010, 0x48000000, 0x08000018, 0x60000000, 0x90000010, + 0xf0000010, 0x90000008, 0xc0000000, 0x90000010, 0xf0000010, 0xb0000008, + 0x40000000, 0x90000000, 0xf0000010, 0x90000018, 0x60000000, 0x90000010, + 0x90000010, 0x90000000, 0x80000000, 0x00000010, 0xa0000000, 0x20000000, + 0xa0000000, 0x20000010, 0x00000000, 0x20000010, 0x20000000, 0x00000010, + 0x20000000, 0x00000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, + 0x00000001, 0x40000002, 0x40000040, 0x40000002, 0x80000004, 0x80000080, + 0x80000006, 0x00000049, 0x00000103, 0x80000009, 0x80000012, 0x80000202, + 0x00000018, 0x00000164}, + }, + { + DvType: 1, DvK: 49, DvB: 2, TestT: 58, MaskI: 0, MaskB: 10, + Dm: [CheckSize]uint32{ + 0x60000000, 0xe000002a, 0x20000043, 0xb0000040, 0xd0000053, 0xd0000022, + 0x20000000, 0x60000032, 0x60000043, 0x20000040, 0xe0000042, 0x60000002, + 0x80000001, 0x00000020, 0x00000003, 0x40000052, 0x40000040, 0xe0000052, + 0xa0000000, 0x80000040, 0x20000001, 0x20000060, 0x80000001, 0x40000042, + 0xc0000043, 0x40000022, 0x00000003, 0x40000042, 0xc0000043, 0xc0000022, + 0x00000001, 0x40000002, 0xc0000043, 0x40000062, 0x80000001, 0x40000042, + 0x40000042, 0x40000002, 0x00000002, 0x00000040, 0x80000002, 0x80000000, + 0x80000002, 0x80000040, 0x00000000, 0x80000040, 0x80000000, 0x00000040, + 0x80000000, 0x00000040, 0x80000002, 0x00000000, 0x80000000, 0x80000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000004, 0x00000080, + 0x00000004, 0x00000009, 0x00000101, 0x00000009, 0x00000012, 0x00000202, + 0x0000001a, 0x00000124, 0x0000040c, 0x00000026, 0x0000004a, 0x0000080a, + 0x00000060, 0x00000590}, + }, + { + DvType: 1, DvK: 50, DvB: 0, TestT: 65, MaskI: 0, MaskB: 11, + Dm: [CheckSize]uint32{ + 0x0800000c, 0x18000000, 0xb800000a, 0xc8000010, 0x2c000010, 0xf4000014, + 0xb4000008, 0x08000000, 0x9800000c, 0xd8000010, 0x08000010, 0xb8000010, + 0x98000000, 0x60000000, 0x00000008, 0xc0000000, 0x90000014, 0x10000010, + 0xb8000014, 0x28000000, 0x20000010, 0x48000000, 0x08000018, 0x60000000, + 0x90000010, 0xf0000010, 0x90000008, 0xc0000000, 0x90000010, 0xf0000010, + 0xb0000008, 0x40000000, 0x90000000, 0xf0000010, 0x90000018, 0x60000000, + 0x90000010, 0x90000010, 0x90000000, 0x80000000, 0x00000010, 0xa0000000, + 0x20000000, 0xa0000000, 0x20000010, 0x00000000, 0x20000010, 0x20000000, + 0x00000010, 0x20000000, 0x00000010, 0xa0000000, 0x00000000, 0x20000000, + 0x20000000, 0x00000000, 
0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, + 0x00000020, 0x00000001, 0x40000002, 0x40000040, 0x40000002, 0x80000004, + 0x80000080, 0x80000006, 0x00000049, 0x00000103, 0x80000009, 0x80000012, + 0x80000202, 0x00000018, + }, + }, + { + DvType: 1, DvK: 50, DvB: 2, TestT: 65, MaskI: 0, MaskB: 12, + Dm: [CheckSize]uint32{ + 0x20000030, 0x60000000, 0xe000002a, 0x20000043, 0xb0000040, 0xd0000053, + 0xd0000022, 0x20000000, 0x60000032, 0x60000043, 0x20000040, 0xe0000042, + 0x60000002, 0x80000001, 0x00000020, 0x00000003, 0x40000052, 0x40000040, + 0xe0000052, 0xa0000000, 0x80000040, 0x20000001, 0x20000060, 0x80000001, + 0x40000042, 0xc0000043, 0x40000022, 0x00000003, 0x40000042, 0xc0000043, + 0xc0000022, 0x00000001, 0x40000002, 0xc0000043, 0x40000062, 0x80000001, + 0x40000042, 0x40000042, 0x40000002, 0x00000002, 0x00000040, 0x80000002, + 0x80000000, 0x80000002, 0x80000040, 0x00000000, 0x80000040, 0x80000000, + 0x00000040, 0x80000000, 0x00000040, 0x80000002, 0x00000000, 0x80000000, + 0x80000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000004, + 0x00000080, 0x00000004, 0x00000009, 0x00000101, 0x00000009, 0x00000012, + 0x00000202, 0x0000001a, 0x00000124, 0x0000040c, 0x00000026, 0x0000004a, + 0x0000080a, 0x00000060}, + }, + { + DvType: 1, DvK: 51, DvB: 0, TestT: 65, MaskI: 0, MaskB: 13, + Dm: [CheckSize]uint32{ + 0xe8000000, 0x0800000c, 0x18000000, 0xb800000a, 0xc8000010, 0x2c000010, + 0xf4000014, 0xb4000008, 0x08000000, 0x9800000c, 0xd8000010, 0x08000010, + 0xb8000010, 0x98000000, 0x60000000, 0x00000008, 0xc0000000, 0x90000014, + 0x10000010, 0xb8000014, 0x28000000, 0x20000010, 0x48000000, 0x08000018, + 0x60000000, 0x90000010, 0xf0000010, 0x90000008, 0xc0000000, 0x90000010, + 0xf0000010, 0xb0000008, 0x40000000, 0x90000000, 0xf0000010, 0x90000018, + 0x60000000, 0x90000010, 0x90000010, 0x90000000, 0x80000000, 0x00000010, + 0xa0000000, 0x20000000, 0xa0000000, 0x20000010, 0x00000000, 0x20000010, + 0x20000000, 0x00000010, 0x20000000, 0x00000010, 0xa0000000, 0x00000000, + 0x20000000, 0x20000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000001, 0x00000020, 0x00000001, 0x40000002, 0x40000040, 0x40000002, + 0x80000004, 0x80000080, 0x80000006, 0x00000049, 0x00000103, 0x80000009, + 0x80000012, 0x80000202}, + }, + { + DvType: 1, DvK: 51, DvB: 2, TestT: 65, MaskI: 0, MaskB: 14, + Dm: [CheckSize]uint32{ + 0xa0000003, 0x20000030, 0x60000000, 0xe000002a, 0x20000043, 0xb0000040, + 0xd0000053, 0xd0000022, 0x20000000, 0x60000032, 0x60000043, 0x20000040, + 0xe0000042, 0x60000002, 0x80000001, 0x00000020, 0x00000003, 0x40000052, + 0x40000040, 0xe0000052, 0xa0000000, 0x80000040, 0x20000001, 0x20000060, + 0x80000001, 0x40000042, 0xc0000043, 0x40000022, 0x00000003, 0x40000042, + 0xc0000043, 0xc0000022, 0x00000001, 0x40000002, 0xc0000043, 0x40000062, + 0x80000001, 0x40000042, 0x40000042, 0x40000002, 0x00000002, 0x00000040, + 0x80000002, 0x80000000, 0x80000002, 0x80000040, 0x00000000, 0x80000040, + 0x80000000, 0x00000040, 0x80000000, 0x00000040, 0x80000002, 0x00000000, + 0x80000000, 0x80000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000004, 0x00000080, 0x00000004, 0x00000009, 0x00000101, 0x00000009, + 0x00000012, 0x00000202, 0x0000001a, 0x00000124, 0x0000040c, 0x00000026, + 0x0000004a, 
0x0000080a}, + }, + { + DvType: 1, DvK: 52, DvB: 0, TestT: 65, MaskI: 0, MaskB: 15, + Dm: [CheckSize]uint32{ + 0x04000010, 0xe8000000, 0x0800000c, 0x18000000, 0xb800000a, 0xc8000010, + 0x2c000010, 0xf4000014, 0xb4000008, 0x08000000, 0x9800000c, 0xd8000010, + 0x08000010, 0xb8000010, 0x98000000, 0x60000000, 0x00000008, 0xc0000000, + 0x90000014, 0x10000010, 0xb8000014, 0x28000000, 0x20000010, 0x48000000, + 0x08000018, 0x60000000, 0x90000010, 0xf0000010, 0x90000008, 0xc0000000, + 0x90000010, 0xf0000010, 0xb0000008, 0x40000000, 0x90000000, 0xf0000010, + 0x90000018, 0x60000000, 0x90000010, 0x90000010, 0x90000000, 0x80000000, + 0x00000010, 0xa0000000, 0x20000000, 0xa0000000, 0x20000010, 0x00000000, + 0x20000010, 0x20000000, 0x00000010, 0x20000000, 0x00000010, 0xa0000000, + 0x00000000, 0x20000000, 0x20000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, 0x40000040, + 0x40000002, 0x80000004, 0x80000080, 0x80000006, 0x00000049, 0x00000103, + 0x80000009, 0x80000012}, + }, + { + DvType: 2, DvK: 45, DvB: 0, TestT: 58, MaskI: 0, MaskB: 16, + Dm: [CheckSize]uint32{ + 0xec000014, 0x0c000002, 0xc0000010, 0xb400001c, 0x2c000004, 0xbc000018, + 0xb0000010, 0x0000000c, 0xb8000010, 0x08000018, 0x78000010, 0x08000014, + 0x70000010, 0xb800001c, 0xe8000000, 0xb0000004, 0x58000010, 0xb000000c, + 0x48000000, 0xb0000000, 0xb8000010, 0x98000010, 0xa0000000, 0x00000000, + 0x00000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, + 0x20000000, 0x00000010, 0x60000000, 0x00000018, 0xe0000000, 0x90000000, + 0x30000010, 0xb0000000, 0x20000000, 0x20000000, 0xa0000000, 0x00000010, + 0x80000000, 0x20000000, 0x20000000, 0x20000000, 0x80000000, 0x00000010, + 0x00000000, 0x20000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000001, 0x00000020, 0x00000001, 0x40000002, 0x40000041, 0x40000022, + 0x80000005, 0xc0000082, 0xc0000046, 0x4000004b, 0x80000107, 0x00000089, + 0x00000014, 0x8000024b, 0x0000011b, 0x8000016d, 0x8000041a, 0x000002e4, + 0x80000054, 0x00000967}, + }, + { + DvType: 2, DvK: 46, DvB: 0, TestT: 58, MaskI: 0, MaskB: 17, + Dm: [CheckSize]uint32{ + 0x2400001c, 0xec000014, 0x0c000002, 0xc0000010, 0xb400001c, 0x2c000004, + 0xbc000018, 0xb0000010, 0x0000000c, 0xb8000010, 0x08000018, 0x78000010, + 0x08000014, 0x70000010, 0xb800001c, 0xe8000000, 0xb0000004, 0x58000010, + 0xb000000c, 0x48000000, 0xb0000000, 0xb8000010, 0x98000010, 0xa0000000, + 0x00000000, 0x00000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, + 0x20000010, 0x20000000, 0x00000010, 0x60000000, 0x00000018, 0xe0000000, + 0x90000000, 0x30000010, 0xb0000000, 0x20000000, 0x20000000, 0xa0000000, + 0x00000010, 0x80000000, 0x20000000, 0x20000000, 0x20000000, 0x80000000, + 0x00000010, 0x00000000, 0x20000010, 0xa0000000, 0x00000000, 0x20000000, + 0x20000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, 0x40000041, + 0x40000022, 0x80000005, 0xc0000082, 0xc0000046, 0x4000004b, 0x80000107, + 0x00000089, 0x00000014, 0x8000024b, 0x0000011b, 0x8000016d, 0x8000041a, + 0x000002e4, 0x80000054}, + }, + { + DvType: 2, DvK: 46, DvB: 2, TestT: 58, MaskI: 0, MaskB: 18, + Dm: [CheckSize]uint32{ + 0x90000070, 0xb0000053, 0x30000008, 0x00000043, 0xd0000072, 0xb0000010, + 0xf0000062, 0xc0000042, 0x00000030, 0xe0000042, 0x20000060, 0xe0000041, + 0x20000050, 0xc0000041, 0xe0000072, 
0xa0000003, 0xc0000012, 0x60000041, + 0xc0000032, 0x20000001, 0xc0000002, 0xe0000042, 0x60000042, 0x80000002, + 0x00000000, 0x00000000, 0x80000000, 0x00000002, 0x00000040, 0x00000000, + 0x80000040, 0x80000000, 0x00000040, 0x80000001, 0x00000060, 0x80000003, + 0x40000002, 0xc0000040, 0xc0000002, 0x80000000, 0x80000000, 0x80000002, + 0x00000040, 0x00000002, 0x80000000, 0x80000000, 0x80000000, 0x00000002, + 0x00000040, 0x00000000, 0x80000040, 0x80000002, 0x00000000, 0x80000000, + 0x80000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000004, 0x00000080, 0x00000004, 0x00000009, 0x00000105, + 0x00000089, 0x00000016, 0x0000020b, 0x0000011b, 0x0000012d, 0x0000041e, + 0x00000224, 0x00000050, 0x0000092e, 0x0000046c, 0x000005b6, 0x0000106a, + 0x00000b90, 0x00000152}, + }, + { + DvType: 2, DvK: 47, DvB: 0, TestT: 58, MaskI: 0, MaskB: 19, + Dm: [CheckSize]uint32{ + 0x20000010, 0x2400001c, 0xec000014, 0x0c000002, 0xc0000010, 0xb400001c, + 0x2c000004, 0xbc000018, 0xb0000010, 0x0000000c, 0xb8000010, 0x08000018, + 0x78000010, 0x08000014, 0x70000010, 0xb800001c, 0xe8000000, 0xb0000004, + 0x58000010, 0xb000000c, 0x48000000, 0xb0000000, 0xb8000010, 0x98000010, + 0xa0000000, 0x00000000, 0x00000000, 0x20000000, 0x80000000, 0x00000010, + 0x00000000, 0x20000010, 0x20000000, 0x00000010, 0x60000000, 0x00000018, + 0xe0000000, 0x90000000, 0x30000010, 0xb0000000, 0x20000000, 0x20000000, + 0xa0000000, 0x00000010, 0x80000000, 0x20000000, 0x20000000, 0x20000000, + 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0xa0000000, 0x00000000, + 0x20000000, 0x20000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, + 0x40000041, 0x40000022, 0x80000005, 0xc0000082, 0xc0000046, 0x4000004b, + 0x80000107, 0x00000089, 0x00000014, 0x8000024b, 0x0000011b, 0x8000016d, + 0x8000041a, 0x000002e4}, + }, + { + DvType: 2, DvK: 48, DvB: 0, TestT: 58, MaskI: 0, MaskB: 20, + Dm: [CheckSize]uint32{ + 0xbc00001a, 0x20000010, 0x2400001c, 0xec000014, 0x0c000002, 0xc0000010, + 0xb400001c, 0x2c000004, 0xbc000018, 0xb0000010, 0x0000000c, 0xb8000010, + 0x08000018, 0x78000010, 0x08000014, 0x70000010, 0xb800001c, 0xe8000000, + 0xb0000004, 0x58000010, 0xb000000c, 0x48000000, 0xb0000000, 0xb8000010, + 0x98000010, 0xa0000000, 0x00000000, 0x00000000, 0x20000000, 0x80000000, + 0x00000010, 0x00000000, 0x20000010, 0x20000000, 0x00000010, 0x60000000, + 0x00000018, 0xe0000000, 0x90000000, 0x30000010, 0xb0000000, 0x20000000, + 0x20000000, 0xa0000000, 0x00000010, 0x80000000, 0x20000000, 0x20000000, + 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0xa0000000, + 0x00000000, 0x20000000, 0x20000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, + 0x40000002, 0x40000041, 0x40000022, 0x80000005, 0xc0000082, 0xc0000046, + 0x4000004b, 0x80000107, 0x00000089, 0x00000014, 0x8000024b, 0x0000011b, + 0x8000016d, 0x8000041a}, + }, + { + DvType: 2, DvK: 49, DvB: 0, TestT: 58, MaskI: 0, MaskB: 21, + Dm: [CheckSize]uint32{ + 0x3c000004, 0xbc00001a, 0x20000010, 0x2400001c, 0xec000014, 0x0c000002, + 0xc0000010, 0xb400001c, 0x2c000004, 0xbc000018, 0xb0000010, 0x0000000c, + 0xb8000010, 0x08000018, 0x78000010, 0x08000014, 0x70000010, 0xb800001c, + 0xe8000000, 0xb0000004, 0x58000010, 0xb000000c, 0x48000000, 0xb0000000, + 0xb8000010, 0x98000010, 0xa0000000, 0x00000000, 0x00000000, 0x20000000, + 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0x20000000, 0x00000010, + 0x60000000, 0x00000018, 0xe0000000, 
0x90000000, 0x30000010, 0xb0000000, + 0x20000000, 0x20000000, 0xa0000000, 0x00000010, 0x80000000, 0x20000000, + 0x20000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, + 0xa0000000, 0x00000000, 0x20000000, 0x20000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, + 0x00000001, 0x40000002, 0x40000041, 0x40000022, 0x80000005, 0xc0000082, + 0xc0000046, 0x4000004b, 0x80000107, 0x00000089, 0x00000014, 0x8000024b, + 0x0000011b, 0x8000016d}, + }, + { + DvType: 2, DvK: 49, DvB: 2, TestT: 58, MaskI: 0, MaskB: 22, + Dm: [CheckSize]uint32{ + 0xf0000010, 0xf000006a, 0x80000040, 0x90000070, 0xb0000053, 0x30000008, + 0x00000043, 0xd0000072, 0xb0000010, 0xf0000062, 0xc0000042, 0x00000030, + 0xe0000042, 0x20000060, 0xe0000041, 0x20000050, 0xc0000041, 0xe0000072, + 0xa0000003, 0xc0000012, 0x60000041, 0xc0000032, 0x20000001, 0xc0000002, + 0xe0000042, 0x60000042, 0x80000002, 0x00000000, 0x00000000, 0x80000000, + 0x00000002, 0x00000040, 0x00000000, 0x80000040, 0x80000000, 0x00000040, + 0x80000001, 0x00000060, 0x80000003, 0x40000002, 0xc0000040, 0xc0000002, + 0x80000000, 0x80000000, 0x80000002, 0x00000040, 0x00000002, 0x80000000, + 0x80000000, 0x80000000, 0x00000002, 0x00000040, 0x00000000, 0x80000040, + 0x80000002, 0x00000000, 0x80000000, 0x80000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000004, 0x00000080, + 0x00000004, 0x00000009, 0x00000105, 0x00000089, 0x00000016, 0x0000020b, + 0x0000011b, 0x0000012d, 0x0000041e, 0x00000224, 0x00000050, 0x0000092e, + 0x0000046c, 0x000005b6}, + }, + { + DvType: 2, DvK: 50, DvB: 0, TestT: 65, MaskI: 0, MaskB: 23, + Dm: [CheckSize]uint32{ + 0xb400001c, 0x3c000004, 0xbc00001a, 0x20000010, 0x2400001c, 0xec000014, + 0x0c000002, 0xc0000010, 0xb400001c, 0x2c000004, 0xbc000018, 0xb0000010, + 0x0000000c, 0xb8000010, 0x08000018, 0x78000010, 0x08000014, 0x70000010, + 0xb800001c, 0xe8000000, 0xb0000004, 0x58000010, 0xb000000c, 0x48000000, + 0xb0000000, 0xb8000010, 0x98000010, 0xa0000000, 0x00000000, 0x00000000, + 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0x20000000, + 0x00000010, 0x60000000, 0x00000018, 0xe0000000, 0x90000000, 0x30000010, + 0xb0000000, 0x20000000, 0x20000000, 0xa0000000, 0x00000010, 0x80000000, + 0x20000000, 0x20000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, + 0x20000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, + 0x00000020, 0x00000001, 0x40000002, 0x40000041, 0x40000022, 0x80000005, + 0xc0000082, 0xc0000046, 0x4000004b, 0x80000107, 0x00000089, 0x00000014, + 0x8000024b, 0x0000011b}, + }, + { + DvType: 2, DvK: 50, DvB: 2, TestT: 65, MaskI: 0, MaskB: 24, + Dm: [CheckSize]uint32{ + 0xd0000072, 0xf0000010, 0xf000006a, 0x80000040, 0x90000070, 0xb0000053, + 0x30000008, 0x00000043, 0xd0000072, 0xb0000010, 0xf0000062, 0xc0000042, + 0x00000030, 0xe0000042, 0x20000060, 0xe0000041, 0x20000050, 0xc0000041, + 0xe0000072, 0xa0000003, 0xc0000012, 0x60000041, 0xc0000032, 0x20000001, + 0xc0000002, 0xe0000042, 0x60000042, 0x80000002, 0x00000000, 0x00000000, + 0x80000000, 0x00000002, 0x00000040, 0x00000000, 0x80000040, 0x80000000, + 0x00000040, 0x80000001, 0x00000060, 0x80000003, 0x40000002, 0xc0000040, + 0xc0000002, 0x80000000, 0x80000000, 0x80000002, 0x00000040, 0x00000002, + 0x80000000, 0x80000000, 0x80000000, 0x00000002, 0x00000040, 0x00000000, + 0x80000040, 0x80000002, 0x00000000, 0x80000000, 0x80000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 
0x00000000, 0x00000000, 0x00000004, + 0x00000080, 0x00000004, 0x00000009, 0x00000105, 0x00000089, 0x00000016, + 0x0000020b, 0x0000011b, 0x0000012d, 0x0000041e, 0x00000224, 0x00000050, + 0x0000092e, 0x0000046c}, + }, + { + DvType: 2, DvK: 51, DvB: 0, TestT: 65, MaskI: 0, MaskB: 25, + Dm: [CheckSize]uint32{ + 0xc0000010, 0xb400001c, 0x3c000004, 0xbc00001a, 0x20000010, 0x2400001c, + 0xec000014, 0x0c000002, 0xc0000010, 0xb400001c, 0x2c000004, 0xbc000018, + 0xb0000010, 0x0000000c, 0xb8000010, 0x08000018, 0x78000010, 0x08000014, + 0x70000010, 0xb800001c, 0xe8000000, 0xb0000004, 0x58000010, 0xb000000c, + 0x48000000, 0xb0000000, 0xb8000010, 0x98000010, 0xa0000000, 0x00000000, + 0x00000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, + 0x20000000, 0x00000010, 0x60000000, 0x00000018, 0xe0000000, 0x90000000, + 0x30000010, 0xb0000000, 0x20000000, 0x20000000, 0xa0000000, 0x00000010, + 0x80000000, 0x20000000, 0x20000000, 0x20000000, 0x80000000, 0x00000010, + 0x00000000, 0x20000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000001, 0x00000020, 0x00000001, 0x40000002, 0x40000041, 0x40000022, + 0x80000005, 0xc0000082, 0xc0000046, 0x4000004b, 0x80000107, 0x00000089, + 0x00000014, 0x8000024b}, + }, + { + DvType: 2, DvK: 51, DvB: 2, TestT: 65, MaskI: 0, MaskB: 26, + Dm: [CheckSize]uint32{ + 0x00000043, 0xd0000072, 0xf0000010, 0xf000006a, 0x80000040, 0x90000070, + 0xb0000053, 0x30000008, 0x00000043, 0xd0000072, 0xb0000010, 0xf0000062, + 0xc0000042, 0x00000030, 0xe0000042, 0x20000060, 0xe0000041, 0x20000050, + 0xc0000041, 0xe0000072, 0xa0000003, 0xc0000012, 0x60000041, 0xc0000032, + 0x20000001, 0xc0000002, 0xe0000042, 0x60000042, 0x80000002, 0x00000000, + 0x00000000, 0x80000000, 0x00000002, 0x00000040, 0x00000000, 0x80000040, + 0x80000000, 0x00000040, 0x80000001, 0x00000060, 0x80000003, 0x40000002, + 0xc0000040, 0xc0000002, 0x80000000, 0x80000000, 0x80000002, 0x00000040, + 0x00000002, 0x80000000, 0x80000000, 0x80000000, 0x00000002, 0x00000040, + 0x00000000, 0x80000040, 0x80000002, 0x00000000, 0x80000000, 0x80000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000004, 0x00000080, 0x00000004, 0x00000009, 0x00000105, 0x00000089, + 0x00000016, 0x0000020b, 0x0000011b, 0x0000012d, 0x0000041e, 0x00000224, + 0x00000050, 0x0000092e}, + }, + { + DvType: 2, DvK: 52, DvB: 0, TestT: 65, MaskI: 0, MaskB: 27, + Dm: [CheckSize]uint32{ + 0x0c000002, 0xc0000010, 0xb400001c, 0x3c000004, 0xbc00001a, 0x20000010, + 0x2400001c, 0xec000014, 0x0c000002, 0xc0000010, 0xb400001c, 0x2c000004, + 0xbc000018, 0xb0000010, 0x0000000c, 0xb8000010, 0x08000018, 0x78000010, + 0x08000014, 0x70000010, 0xb800001c, 0xe8000000, 0xb0000004, 0x58000010, + 0xb000000c, 0x48000000, 0xb0000000, 0xb8000010, 0x98000010, 0xa0000000, + 0x00000000, 0x00000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, + 0x20000010, 0x20000000, 0x00000010, 0x60000000, 0x00000018, 0xe0000000, + 0x90000000, 0x30000010, 0xb0000000, 0x20000000, 0x20000000, 0xa0000000, + 0x00000010, 0x80000000, 0x20000000, 0x20000000, 0x20000000, 0x80000000, + 0x00000010, 0x00000000, 0x20000010, 0xa0000000, 0x00000000, 0x20000000, + 0x20000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, 0x40000041, + 0x40000022, 0x80000005, 0xc0000082, 0xc0000046, 0x4000004b, 0x80000107, + 0x00000089, 0x00000014}, + }, + { + DvType: 2, DvK: 53, DvB: 0, TestT: 65, MaskI: 0, MaskB: 28, + Dm: 
[CheckSize]uint32{ + 0xcc000014, 0x0c000002, 0xc0000010, 0xb400001c, 0x3c000004, 0xbc00001a, + 0x20000010, 0x2400001c, 0xec000014, 0x0c000002, 0xc0000010, 0xb400001c, + 0x2c000004, 0xbc000018, 0xb0000010, 0x0000000c, 0xb8000010, 0x08000018, + 0x78000010, 0x08000014, 0x70000010, 0xb800001c, 0xe8000000, 0xb0000004, + 0x58000010, 0xb000000c, 0x48000000, 0xb0000000, 0xb8000010, 0x98000010, + 0xa0000000, 0x00000000, 0x00000000, 0x20000000, 0x80000000, 0x00000010, + 0x00000000, 0x20000010, 0x20000000, 0x00000010, 0x60000000, 0x00000018, + 0xe0000000, 0x90000000, 0x30000010, 0xb0000000, 0x20000000, 0x20000000, + 0xa0000000, 0x00000010, 0x80000000, 0x20000000, 0x20000000, 0x20000000, + 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0xa0000000, 0x00000000, + 0x20000000, 0x20000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, 0x40000002, + 0x40000041, 0x40000022, 0x80000005, 0xc0000082, 0xc0000046, 0x4000004b, + 0x80000107, 0x00000089}, + }, + { + DvType: 2, DvK: 54, DvB: 0, TestT: 65, MaskI: 0, MaskB: 29, + Dm: [CheckSize]uint32{ + 0x0400001c, 0xcc000014, 0x0c000002, 0xc0000010, 0xb400001c, 0x3c000004, + 0xbc00001a, 0x20000010, 0x2400001c, 0xec000014, 0x0c000002, 0xc0000010, + 0xb400001c, 0x2c000004, 0xbc000018, 0xb0000010, 0x0000000c, 0xb8000010, + 0x08000018, 0x78000010, 0x08000014, 0x70000010, 0xb800001c, 0xe8000000, + 0xb0000004, 0x58000010, 0xb000000c, 0x48000000, 0xb0000000, 0xb8000010, + 0x98000010, 0xa0000000, 0x00000000, 0x00000000, 0x20000000, 0x80000000, + 0x00000010, 0x00000000, 0x20000010, 0x20000000, 0x00000010, 0x60000000, + 0x00000018, 0xe0000000, 0x90000000, 0x30000010, 0xb0000000, 0x20000000, + 0x20000000, 0xa0000000, 0x00000010, 0x80000000, 0x20000000, 0x20000000, + 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0xa0000000, + 0x00000000, 0x20000000, 0x20000000, 0x00000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, 0x00000001, + 0x40000002, 0x40000041, 0x40000022, 0x80000005, 0xc0000082, 0xc0000046, + 0x4000004b, 0x80000107}, + }, + { + DvType: 2, DvK: 55, DvB: 0, TestT: 65, MaskI: 0, MaskB: 30, + Dm: [CheckSize]uint32{ + 0x00000010, 0x0400001c, 0xcc000014, 0x0c000002, 0xc0000010, 0xb400001c, + 0x3c000004, 0xbc00001a, 0x20000010, 0x2400001c, 0xec000014, 0x0c000002, + 0xc0000010, 0xb400001c, 0x2c000004, 0xbc000018, 0xb0000010, 0x0000000c, + 0xb8000010, 0x08000018, 0x78000010, 0x08000014, 0x70000010, 0xb800001c, + 0xe8000000, 0xb0000004, 0x58000010, 0xb000000c, 0x48000000, 0xb0000000, + 0xb8000010, 0x98000010, 0xa0000000, 0x00000000, 0x00000000, 0x20000000, + 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0x20000000, 0x00000010, + 0x60000000, 0x00000018, 0xe0000000, 0x90000000, 0x30000010, 0xb0000000, + 0x20000000, 0x20000000, 0xa0000000, 0x00000010, 0x80000000, 0x20000000, + 0x20000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, + 0xa0000000, 0x00000000, 0x20000000, 0x20000000, 0x00000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000020, + 0x00000001, 0x40000002, 0x40000041, 0x40000022, 0x80000005, 0xc0000082, + 0xc0000046, 0x4000004b}, + }, + { + DvType: 2, DvK: 56, DvB: 0, TestT: 65, MaskI: 0, MaskB: 31, + Dm: [CheckSize]uint32{ + 0x2600001a, 0x00000010, 0x0400001c, 0xcc000014, 0x0c000002, 0xc0000010, + 0xb400001c, 0x3c000004, 0xbc00001a, 0x20000010, 0x2400001c, 0xec000014, + 0x0c000002, 0xc0000010, 0xb400001c, 0x2c000004, 0xbc000018, 0xb0000010, + 0x0000000c, 0xb8000010, 0x08000018, 0x78000010, 
0x08000014, 0x70000010, + 0xb800001c, 0xe8000000, 0xb0000004, 0x58000010, 0xb000000c, 0x48000000, + 0xb0000000, 0xb8000010, 0x98000010, 0xa0000000, 0x00000000, 0x00000000, + 0x20000000, 0x80000000, 0x00000010, 0x00000000, 0x20000010, 0x20000000, + 0x00000010, 0x60000000, 0x00000018, 0xe0000000, 0x90000000, 0x30000010, + 0xb0000000, 0x20000000, 0x20000000, 0xa0000000, 0x00000010, 0x80000000, + 0x20000000, 0x20000000, 0x20000000, 0x80000000, 0x00000010, 0x00000000, + 0x20000010, 0xa0000000, 0x00000000, 0x20000000, 0x20000000, 0x00000000, + 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000001, + 0x00000020, 0x00000001, 0x40000002, 0x40000041, 0x40000022, 0x80000005, + 0xc0000082, 0xc0000046}, + }, + { + DvType: 0, DvK: 0, DvB: 0, TestT: 0, MaskI: 0, MaskB: 0, + Dm: [CheckSize]uint32{ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0}, + }, +} diff --git a/vendor/github.com/pjbgf/sha1cd/ubc/doc.go b/vendor/github.com/pjbgf/sha1cd/ubc/doc.go new file mode 100644 index 00000000..0090e36b --- /dev/null +++ b/vendor/github.com/pjbgf/sha1cd/ubc/doc.go @@ -0,0 +1,3 @@ +// ubc package provides ways for SHA1 blocks to be checked for +// Unavoidable Bit Conditions that arise from crypto analysis attacks. +package ubc diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE new file mode 100644 index 00000000..6a66aea5 --- /dev/null +++ b/vendor/golang.org/x/tools/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS new file mode 100644 index 00000000..73309904 --- /dev/null +++ b/vendor/golang.org/x/tools/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. 
+ +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/tools/godoc/util/throttle.go b/vendor/golang.org/x/tools/godoc/util/throttle.go new file mode 100644 index 00000000..7852a328 --- /dev/null +++ b/vendor/golang.org/x/tools/godoc/util/throttle.go @@ -0,0 +1,85 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package util + +import "time" + +// A Throttle permits throttling of a goroutine by +// calling the Throttle method repeatedly. +type Throttle struct { + f float64 // f = (1-r)/r for 0 < r < 1 + dt time.Duration // minimum run time slice; >= 0 + tr time.Duration // accumulated time running + ts time.Duration // accumulated time stopped + tt time.Time // earliest throttle time (= time Throttle returned + tm) +} + +// NewThrottle creates a new Throttle with a throttle value r and +// a minimum allocated run time slice of dt: +// +// r == 0: "empty" throttle; the goroutine is always sleeping +// r == 1: full throttle; the goroutine is never sleeping +// +// A value of r == 0.6 throttles a goroutine such that it runs +// approx. 60% of the time, and sleeps approx. 40% of the time. +// Values of r < 0 or r > 1 are clamped down to values between 0 and 1. +// Values of dt < 0 are set to 0. +func NewThrottle(r float64, dt time.Duration) *Throttle { + var f float64 + switch { + case r <= 0: + f = -1 // indicates always sleep + case r >= 1: + f = 0 // assume r == 1 (never sleep) + default: + // 0 < r < 1 + f = (1 - r) / r + } + if dt < 0 { + dt = 0 + } + return &Throttle{f: f, dt: dt, tt: time.Now().Add(dt)} +} + +// Throttle calls time.Sleep such that over time the ratio tr/ts between +// accumulated run (tr) and sleep times (ts) approximates the value 1/(1-r) +// where r is the throttle value. Throttle returns immediately (w/o sleeping) +// if less than tm ns have passed since the last call to Throttle. 
+func (p *Throttle) Throttle() { + if p.f < 0 { + select {} // always sleep + } + + t0 := time.Now() + if t0.Before(p.tt) { + return // keep running (minimum time slice not exhausted yet) + } + + // accumulate running time + p.tr += t0.Sub(p.tt) + p.dt + + // compute sleep time + // Over time we want: + // + // tr/ts = r/(1-r) + // + // Thus: + // + // ts = tr*f with f = (1-r)/r + // + // After some incremental run time δr added to the total run time + // tr, the incremental sleep-time δs to get to the same ratio again + // after waking up from time.Sleep is: + if δs := time.Duration(float64(p.tr)*p.f) - p.ts; δs > 0 { + time.Sleep(δs) + } + + // accumulate (actual) sleep time + t1 := time.Now() + p.ts += t1.Sub(t0) + + // set earliest next throttle time + p.tt = t1.Add(p.dt) +} diff --git a/vendor/golang.org/x/tools/godoc/util/util.go b/vendor/golang.org/x/tools/godoc/util/util.go new file mode 100644 index 00000000..c08ca785 --- /dev/null +++ b/vendor/golang.org/x/tools/godoc/util/util.go @@ -0,0 +1,90 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package util contains utility types and functions for godoc. +package util // import "golang.org/x/tools/godoc/util" + +import ( + pathpkg "path" + "sync" + "time" + "unicode/utf8" + + "golang.org/x/tools/godoc/vfs" +) + +// An RWValue wraps a value and permits mutually exclusive +// access to it and records the time the value was last set. +type RWValue struct { + mutex sync.RWMutex + value interface{} + timestamp time.Time // time of last set() +} + +func (v *RWValue) Set(value interface{}) { + v.mutex.Lock() + v.value = value + v.timestamp = time.Now() + v.mutex.Unlock() +} + +func (v *RWValue) Get() (interface{}, time.Time) { + v.mutex.RLock() + defer v.mutex.RUnlock() + return v.value, v.timestamp +} + +// IsText reports whether a significant prefix of s looks like correct UTF-8; +// that is, if it is likely that s is human-readable text. +func IsText(s []byte) bool { + const max = 1024 // at least utf8.UTFMax + if len(s) > max { + s = s[0:max] + } + for i, c := range string(s) { + if i+utf8.UTFMax > len(s) { + // last char may be incomplete - ignore + break + } + if c == 0xFFFD || c < ' ' && c != '\n' && c != '\t' && c != '\f' { + // decoding error or control character - not a text file + return false + } + } + return true +} + +// textExt[x] is true if the extension x indicates a text file, and false otherwise. +var textExt = map[string]bool{ + ".css": false, // must be served raw + ".js": false, // must be served raw + ".svg": false, // must be served raw +} + +// IsTextFile reports whether the file has a known extension indicating +// a text file, or if a significant chunk of the specified file looks like +// correct UTF-8; that is, if it is likely that the file contains human- +// readable text. 
+func IsTextFile(fs vfs.Opener, filename string) bool { + // if the extension is known, use it for decision making + if isText, found := textExt[pathpkg.Ext(filename)]; found { + return isText + } + + // the extension is not known; read an initial chunk + // of the file and check if it looks like text + f, err := fs.Open(filename) + if err != nil { + return false + } + defer f.Close() + + var buf [1024]byte + n, err := f.Read(buf[0:]) + if err != nil { + return false + } + + return IsText(buf[0:n]) +} diff --git a/vendor/golang.org/x/tools/godoc/vfs/emptyvfs.go b/vendor/golang.org/x/tools/godoc/vfs/emptyvfs.go new file mode 100644 index 00000000..8712d5eb --- /dev/null +++ b/vendor/golang.org/x/tools/godoc/vfs/emptyvfs.go @@ -0,0 +1,89 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package vfs + +import ( + "fmt" + "os" + "time" +) + +// NewNameSpace returns a NameSpace pre-initialized with an empty +// emulated directory mounted on the root mount point "/". This +// allows directory traversal routines to work properly even if +// a folder is not explicitly mounted at root by the user. +func NewNameSpace() NameSpace { + ns := NameSpace{} + ns.Bind("/", &emptyVFS{}, "/", BindReplace) + return ns +} + +// type emptyVFS emulates a FileSystem consisting of an empty directory +type emptyVFS struct{} + +// Open implements Opener. Since emptyVFS is an empty directory, all +// attempts to open a file should returns errors. +func (e *emptyVFS) Open(path string) (ReadSeekCloser, error) { + if path == "/" { + return nil, fmt.Errorf("open: / is a directory") + } + return nil, os.ErrNotExist +} + +// Stat returns os.FileInfo for an empty directory if the path is +// is root "/" or error. os.FileInfo is implemented by emptyVFS +func (e *emptyVFS) Stat(path string) (os.FileInfo, error) { + if path == "/" { + return e, nil + } + return nil, os.ErrNotExist +} + +func (e *emptyVFS) Lstat(path string) (os.FileInfo, error) { + return e.Stat(path) +} + +// ReadDir returns an empty os.FileInfo slice for "/", else error. +func (e *emptyVFS) ReadDir(path string) ([]os.FileInfo, error) { + if path == "/" { + return []os.FileInfo{}, nil + } + return nil, os.ErrNotExist +} + +func (e *emptyVFS) String() string { + return "emptyVFS(/)" +} + +func (e *emptyVFS) RootType(path string) RootType { + return "" +} + +// These functions below implement os.FileInfo for the single +// empty emulated directory. + +func (e *emptyVFS) Name() string { + return "/" +} + +func (e *emptyVFS) Size() int64 { + return 0 +} + +func (e *emptyVFS) Mode() os.FileMode { + return os.ModeDir | os.ModePerm +} + +func (e *emptyVFS) ModTime() time.Time { + return time.Time{} +} + +func (e *emptyVFS) IsDir() bool { + return true +} + +func (e *emptyVFS) Sys() interface{} { + return nil +} diff --git a/vendor/golang.org/x/tools/godoc/vfs/fs.go b/vendor/golang.org/x/tools/godoc/vfs/fs.go new file mode 100644 index 00000000..f12d653f --- /dev/null +++ b/vendor/golang.org/x/tools/godoc/vfs/fs.go @@ -0,0 +1,80 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.16 +// +build go1.16 + +package vfs + +import ( + "io/fs" + "os" + "path" + "strings" +) + +// FromFS converts an fs.FS to the FileSystem interface. 
+func FromFS(fsys fs.FS) FileSystem { + return &fsysToFileSystem{fsys} +} + +type fsysToFileSystem struct { + fsys fs.FS +} + +func (f *fsysToFileSystem) fsPath(name string) string { + name = path.Clean(name) + if name == "/" { + return "." + } + return strings.TrimPrefix(name, "/") +} + +func (f *fsysToFileSystem) Open(name string) (ReadSeekCloser, error) { + file, err := f.fsys.Open(f.fsPath(name)) + if err != nil { + return nil, err + } + if rsc, ok := file.(ReadSeekCloser); ok { + return rsc, nil + } + return &noSeekFile{f.fsPath(name), file}, nil +} + +func (f *fsysToFileSystem) Lstat(name string) (os.FileInfo, error) { + return fs.Stat(f.fsys, f.fsPath(name)) +} + +func (f *fsysToFileSystem) Stat(name string) (os.FileInfo, error) { + return fs.Stat(f.fsys, f.fsPath(name)) +} + +func (f *fsysToFileSystem) RootType(name string) RootType { return "" } + +func (f *fsysToFileSystem) ReadDir(name string) ([]os.FileInfo, error) { + dirs, err := fs.ReadDir(f.fsys, f.fsPath(name)) + var infos []os.FileInfo + for _, d := range dirs { + info, err1 := d.Info() + if err1 != nil { + if err == nil { + err = err1 + } + continue + } + infos = append(infos, info) + } + return infos, err +} + +func (f *fsysToFileSystem) String() string { return "io/fs" } + +type noSeekFile struct { + path string + fs.File +} + +func (f *noSeekFile) Seek(offset int64, whence int) (int64, error) { + return 0, &fs.PathError{Op: "seek", Path: f.path, Err: fs.ErrInvalid} +} diff --git a/vendor/golang.org/x/tools/godoc/vfs/namespace.go b/vendor/golang.org/x/tools/godoc/vfs/namespace.go new file mode 100644 index 00000000..23dd9794 --- /dev/null +++ b/vendor/golang.org/x/tools/godoc/vfs/namespace.go @@ -0,0 +1,387 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package vfs + +import ( + "fmt" + "io" + "os" + pathpkg "path" + "sort" + "strings" + "time" +) + +// Setting debugNS = true will enable debugging prints about +// name space translations. +const debugNS = false + +// A NameSpace is a file system made up of other file systems +// mounted at specific locations in the name space. +// +// The representation is a map from mount point locations +// to the list of file systems mounted at that location. A traditional +// Unix mount table would use a single file system per mount point, +// but we want to be able to mount multiple file systems on a single +// mount point and have the system behave as if the union of those +// file systems were present at the mount point. +// For example, if the OS file system has a Go installation in +// c:\Go and additional Go path trees in d:\Work1 and d:\Work2, then +// this name space creates the view we want for the godoc server: +// +// NameSpace{ +// "/": { +// {old: "/", fs: OS(`c:\Go`), new: "/"}, +// }, +// "/src/pkg": { +// {old: "/src/pkg", fs: OS(`c:\Go`), new: "/src/pkg"}, +// {old: "/src/pkg", fs: OS(`d:\Work1`), new: "/src"}, +// {old: "/src/pkg", fs: OS(`d:\Work2`), new: "/src"}, +// }, +// } +// +// This is created by executing: +// +// ns := NameSpace{} +// ns.Bind("/", OS(`c:\Go`), "/", BindReplace) +// ns.Bind("/src/pkg", OS(`d:\Work1`), "/src", BindAfter) +// ns.Bind("/src/pkg", OS(`d:\Work2`), "/src", BindAfter) +// +// A particular mount point entry is a triple (old, fs, new), meaning that to +// operate on a path beginning with old, replace that prefix (old) with new +// and then pass that path to the FileSystem implementation fs. 
+// +// If you do not explicitly mount a FileSystem at the root mountpoint "/" of the +// NameSpace like above, Stat("/") will return a "not found" error which could +// break typical directory traversal routines. In such cases, use NewNameSpace() +// to get a NameSpace pre-initialized with an emulated empty directory at root. +// +// Given this name space, a ReadDir of /src/pkg/code will check each prefix +// of the path for a mount point (first /src/pkg/code, then /src/pkg, then /src, +// then /), stopping when it finds one. For the above example, /src/pkg/code +// will find the mount point at /src/pkg: +// +// {old: "/src/pkg", fs: OS(`c:\Go`), new: "/src/pkg"}, +// {old: "/src/pkg", fs: OS(`d:\Work1`), new: "/src"}, +// {old: "/src/pkg", fs: OS(`d:\Work2`), new: "/src"}, +// +// ReadDir will when execute these three calls and merge the results: +// +// OS(`c:\Go`).ReadDir("/src/pkg/code") +// OS(`d:\Work1').ReadDir("/src/code") +// OS(`d:\Work2').ReadDir("/src/code") +// +// Note that the "/src/pkg" in "/src/pkg/code" has been replaced by +// just "/src" in the final two calls. +// +// OS is itself an implementation of a file system: it implements +// OS(`c:\Go`).ReadDir("/src/pkg/code") as ioutil.ReadDir(`c:\Go\src\pkg\code`). +// +// Because the new path is evaluated by fs (here OS(root)), another way +// to read the mount table is to mentally combine fs+new, so that this table: +// +// {old: "/src/pkg", fs: OS(`c:\Go`), new: "/src/pkg"}, +// {old: "/src/pkg", fs: OS(`d:\Work1`), new: "/src"}, +// {old: "/src/pkg", fs: OS(`d:\Work2`), new: "/src"}, +// +// reads as: +// +// "/src/pkg" -> c:\Go\src\pkg +// "/src/pkg" -> d:\Work1\src +// "/src/pkg" -> d:\Work2\src +// +// An invariant (a redundancy) of the name space representation is that +// ns[mtpt][i].old is always equal to mtpt (in the example, ns["/src/pkg"]'s +// mount table entries always have old == "/src/pkg"). The 'old' field is +// useful to callers, because they receive just a []mountedFS and not any +// other indication of which mount point was found. +type NameSpace map[string][]mountedFS + +// A mountedFS handles requests for path by replacing +// a prefix 'old' with 'new' and then calling the fs methods. +type mountedFS struct { + old string + fs FileSystem + new string +} + +// hasPathPrefix reports whether x == y or x == y + "/" + more. +func hasPathPrefix(x, y string) bool { + return x == y || strings.HasPrefix(x, y) && (strings.HasSuffix(y, "/") || strings.HasPrefix(x[len(y):], "/")) +} + +// translate translates path for use in m, replacing old with new. +// +// mountedFS{"/src/pkg", fs, "/src"}.translate("/src/pkg/code") == "/src/code". +func (m mountedFS) translate(path string) string { + path = pathpkg.Clean("/" + path) + if !hasPathPrefix(path, m.old) { + panic("translate " + path + " but old=" + m.old) + } + return pathpkg.Join(m.new, path[len(m.old):]) +} + +func (NameSpace) String() string { + return "ns" +} + +// Fprint writes a text representation of the name space to w. +func (ns NameSpace) Fprint(w io.Writer) { + fmt.Fprint(w, "name space {\n") + var all []string + for mtpt := range ns { + all = append(all, mtpt) + } + sort.Strings(all) + for _, mtpt := range all { + fmt.Fprintf(w, "\t%s:\n", mtpt) + for _, m := range ns[mtpt] { + fmt.Fprintf(w, "\t\t%s %s\n", m.fs, m.new) + } + } + fmt.Fprint(w, "}\n") +} + +// clean returns a cleaned, rooted path for evaluation. +// It canonicalizes the path so that we can use string operations +// to analyze it. 
+func (NameSpace) clean(path string) string { + return pathpkg.Clean("/" + path) +} + +type BindMode int + +const ( + BindReplace BindMode = iota + BindBefore + BindAfter +) + +// Bind causes references to old to redirect to the path new in newfs. +// If mode is BindReplace, old redirections are discarded. +// If mode is BindBefore, this redirection takes priority over existing ones, +// but earlier ones are still consulted for paths that do not exist in newfs. +// If mode is BindAfter, this redirection happens only after existing ones +// have been tried and failed. +func (ns NameSpace) Bind(old string, newfs FileSystem, new string, mode BindMode) { + old = ns.clean(old) + new = ns.clean(new) + m := mountedFS{old, newfs, new} + var mtpt []mountedFS + switch mode { + case BindReplace: + mtpt = append(mtpt, m) + case BindAfter: + mtpt = append(mtpt, ns.resolve(old)...) + mtpt = append(mtpt, m) + case BindBefore: + mtpt = append(mtpt, m) + mtpt = append(mtpt, ns.resolve(old)...) + } + + // Extend m.old, m.new in inherited mount point entries. + for i := range mtpt { + m := &mtpt[i] + if m.old != old { + if !hasPathPrefix(old, m.old) { + // This should not happen. If it does, panic so + // that we can see the call trace that led to it. + panic(fmt.Sprintf("invalid Bind: old=%q m={%q, %s, %q}", old, m.old, m.fs.String(), m.new)) + } + suffix := old[len(m.old):] + m.old = pathpkg.Join(m.old, suffix) + m.new = pathpkg.Join(m.new, suffix) + } + } + + ns[old] = mtpt +} + +// resolve resolves a path to the list of mountedFS to use for path. +func (ns NameSpace) resolve(path string) []mountedFS { + path = ns.clean(path) + for { + if m := ns[path]; m != nil { + if debugNS { + fmt.Printf("resolve %s: %v\n", path, m) + } + return m + } + if path == "/" { + break + } + path = pathpkg.Dir(path) + } + return nil +} + +// Open implements the FileSystem Open method. +func (ns NameSpace) Open(path string) (ReadSeekCloser, error) { + var err error + for _, m := range ns.resolve(path) { + if debugNS { + fmt.Printf("tx %s: %v\n", path, m.translate(path)) + } + tp := m.translate(path) + r, err1 := m.fs.Open(tp) + if err1 == nil { + return r, nil + } + // IsNotExist errors in overlay FSes can mask real errors in + // the underlying FS, so ignore them if there is another error. + if err == nil || os.IsNotExist(err) { + err = err1 + } + } + if err == nil { + err = &os.PathError{Op: "open", Path: path, Err: os.ErrNotExist} + } + return nil, err +} + +// stat implements the FileSystem Stat and Lstat methods. +func (ns NameSpace) stat(path string, f func(FileSystem, string) (os.FileInfo, error)) (os.FileInfo, error) { + var err error + for _, m := range ns.resolve(path) { + fi, err1 := f(m.fs, m.translate(path)) + if err1 == nil { + return fi, nil + } + if err == nil { + err = err1 + } + } + if err == nil { + err = &os.PathError{Op: "stat", Path: path, Err: os.ErrNotExist} + } + return nil, err +} + +func (ns NameSpace) Stat(path string) (os.FileInfo, error) { + return ns.stat(path, FileSystem.Stat) +} + +func (ns NameSpace) Lstat(path string) (os.FileInfo, error) { + return ns.stat(path, FileSystem.Lstat) +} + +// dirInfo is a trivial implementation of os.FileInfo for a directory. 
+type dirInfo string + +func (d dirInfo) Name() string { return string(d) } +func (d dirInfo) Size() int64 { return 0 } +func (d dirInfo) Mode() os.FileMode { return os.ModeDir | 0555 } +func (d dirInfo) ModTime() time.Time { return startTime } +func (d dirInfo) IsDir() bool { return true } +func (d dirInfo) Sys() interface{} { return nil } + +var startTime = time.Now() + +// ReadDir implements the FileSystem ReadDir method. It's where most of the magic is. +// (The rest is in resolve.) +// +// Logically, ReadDir must return the union of all the directories that are named +// by path. In order to avoid misinterpreting Go packages, of all the directories +// that contain Go source code, we only include the files from the first, +// but we include subdirectories from all. +// +// ReadDir must also return directory entries needed to reach mount points. +// If the name space looks like the example in the type NameSpace comment, +// but c:\Go does not have a src/pkg subdirectory, we still want to be able +// to find that subdirectory, because we've mounted d:\Work1 and d:\Work2 +// there. So if we don't see "src" in the directory listing for c:\Go, we add an +// entry for it before returning. +func (ns NameSpace) ReadDir(path string) ([]os.FileInfo, error) { + path = ns.clean(path) + + // List matching directories and determine whether any of them contain + // Go files. + var ( + dirs [][]os.FileInfo + goDirIndex = -1 + readDirErr error + ) + + for _, m := range ns.resolve(path) { + dir, err := m.fs.ReadDir(m.translate(path)) + if err != nil { + if readDirErr == nil { + readDirErr = err + } + continue + } + + dirs = append(dirs, dir) + + if goDirIndex < 0 { + for _, f := range dir { + if !f.IsDir() && strings.HasSuffix(f.Name(), ".go") { + goDirIndex = len(dirs) - 1 + break + } + } + } + } + + // Build a list of files and subdirectories. If a directory contains Go files, + // only include files from that directory. Otherwise, include files from + // all directories. Include subdirectories from all directories regardless + // of whether Go files are present. + haveName := make(map[string]bool) + var all []os.FileInfo + for i, dir := range dirs { + for _, f := range dir { + name := f.Name() + if !haveName[name] && (f.IsDir() || goDirIndex < 0 || goDirIndex == i) { + all = append(all, f) + haveName[name] = true + } + } + } + + // Add any missing directories needed to reach mount points. + for old := range ns { + if hasPathPrefix(old, path) && old != path { + // Find next element after path in old. + elem := old[len(path):] + elem = strings.TrimPrefix(elem, "/") + if i := strings.Index(elem, "/"); i >= 0 { + elem = elem[:i] + } + if !haveName[elem] { + haveName[elem] = true + all = append(all, dirInfo(elem)) + } + } + } + + if len(all) == 0 { + return nil, readDirErr + } + + sort.Sort(byName(all)) + return all, nil +} + +// RootType returns the RootType for the given path in the namespace. +func (ns NameSpace) RootType(path string) RootType { + // We resolve the given path to a list of mountedFS and then return + // the root type for the filesystem which contains the path. + for _, m := range ns.resolve(path) { + _, err := m.fs.ReadDir(m.translate(path)) + // Found a match, return the filesystem's root type + if err == nil { + return m.fs.RootType(path) + } + } + return "" +} + +// byName implements sort.Interface. 
+type byName []os.FileInfo + +func (f byName) Len() int { return len(f) } +func (f byName) Less(i, j int) bool { return f[i].Name() < f[j].Name() } +func (f byName) Swap(i, j int) { f[i], f[j] = f[j], f[i] } diff --git a/vendor/golang.org/x/tools/godoc/vfs/os.go b/vendor/golang.org/x/tools/godoc/vfs/os.go new file mode 100644 index 00000000..35d05094 --- /dev/null +++ b/vendor/golang.org/x/tools/godoc/vfs/os.go @@ -0,0 +1,105 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package vfs + +import ( + "fmt" + "go/build" + "io/ioutil" + "os" + pathpkg "path" + "path/filepath" + "runtime" +) + +// We expose a new variable because otherwise we need to copy the findGOROOT logic again +// from cmd/godoc which is already copied twice from the standard library. + +// GOROOT is the GOROOT path under which the godoc binary is running. +// It is needed to check whether a filesystem root is under GOROOT or not. +// This is set from cmd/godoc/main.go. +var GOROOT = runtime.GOROOT() + +// OS returns an implementation of FileSystem reading from the +// tree rooted at root. Recording a root is convenient everywhere +// but necessary on Windows, because the slash-separated path +// passed to Open has no way to specify a drive letter. Using a root +// lets code refer to OS(`c:\`), OS(`d:\`) and so on. +func OS(root string) FileSystem { + var t RootType + switch { + case root == GOROOT: + t = RootTypeGoRoot + case isGoPath(root): + t = RootTypeGoPath + } + return osFS{rootPath: root, rootType: t} +} + +type osFS struct { + rootPath string + rootType RootType +} + +func isGoPath(path string) bool { + for _, bp := range filepath.SplitList(build.Default.GOPATH) { + for _, gp := range filepath.SplitList(path) { + if bp == gp { + return true + } + } + } + return false +} + +func (root osFS) String() string { return "os(" + root.rootPath + ")" } + +// RootType returns the root type for the filesystem. +// +// Note that we ignore the path argument because roottype is a property of +// this filesystem. But for other filesystems, the roottype might need to be +// dynamically deduced at call time. +func (root osFS) RootType(path string) RootType { + return root.rootType +} + +func (root osFS) resolve(path string) string { + // Clean the path so that it cannot possibly begin with ../. + // If it did, the result of filepath.Join would be outside the + // tree rooted at root. We probably won't ever see a path + // with .. in it, but be safe anyway. 
+ path = pathpkg.Clean("/" + path) + + return filepath.Join(root.rootPath, path) +} + +func (root osFS) Open(path string) (ReadSeekCloser, error) { + f, err := os.Open(root.resolve(path)) + if err != nil { + return nil, err + } + fi, err := f.Stat() + if err != nil { + f.Close() + return nil, err + } + if fi.IsDir() { + f.Close() + return nil, fmt.Errorf("Open: %s is a directory", path) + } + return f, nil +} + +func (root osFS) Lstat(path string) (os.FileInfo, error) { + return os.Lstat(root.resolve(path)) +} + +func (root osFS) Stat(path string) (os.FileInfo, error) { + return os.Stat(root.resolve(path)) +} + +func (root osFS) ReadDir(path string) ([]os.FileInfo, error) { + return ioutil.ReadDir(root.resolve(path)) // is sorted +} diff --git a/vendor/golang.org/x/tools/godoc/vfs/vfs.go b/vendor/golang.org/x/tools/godoc/vfs/vfs.go new file mode 100644 index 00000000..d70526d5 --- /dev/null +++ b/vendor/golang.org/x/tools/godoc/vfs/vfs.go @@ -0,0 +1,58 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package vfs defines types for abstract file system access and provides an +// implementation accessing the file system of the underlying OS. +package vfs // import "golang.org/x/tools/godoc/vfs" + +import ( + "io" + "io/ioutil" + "os" +) + +// RootType indicates the type of files contained within a directory. +// +// It is used to indicate whether a directory is the root +// of a GOROOT, a GOPATH, or neither. +// An empty string represents the case when a directory is neither. +type RootType string + +const ( + RootTypeGoRoot RootType = "GOROOT" + RootTypeGoPath RootType = "GOPATH" +) + +// The FileSystem interface specifies the methods godoc is using +// to access the file system for which it serves documentation. +type FileSystem interface { + Opener + Lstat(path string) (os.FileInfo, error) + Stat(path string) (os.FileInfo, error) + ReadDir(path string) ([]os.FileInfo, error) + RootType(path string) RootType + String() string +} + +// Opener is a minimal virtual filesystem that can only open regular files. +type Opener interface { + Open(name string) (ReadSeekCloser, error) +} + +// A ReadSeekCloser can Read, Seek, and Close. +type ReadSeekCloser interface { + io.Reader + io.Seeker + io.Closer +} + +// ReadFile reads the file named by path from fs and returns the contents. 
+func ReadFile(fs Opener, path string) ([]byte, error) { + rc, err := fs.Open(path) + if err != nil { + return nil, err + } + defer rc.Close() + return ioutil.ReadAll(rc) +} diff --git a/vendor/modules.txt b/vendor/modules.txt index b4021597..4fe8f022 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -16,6 +16,10 @@ github.com/davecgh/go-spew/spew # github.com/fsnotify/fsnotify v1.6.0 ## explicit; go 1.16 github.com/fsnotify/fsnotify +# github.com/go-git/go-git/v5 v5.7.0 +## explicit; go 1.18 +github.com/go-git/go-git/v5/plumbing +github.com/go-git/go-git/v5/plumbing/hash # github.com/golang/protobuf v1.5.3 ## explicit; go 1.9 github.com/golang/protobuf/proto @@ -78,15 +82,17 @@ github.com/launchdarkly/api-client-go/v7 # github.com/launchdarkly/json-patch v0.0.0-20180720210516-dd68d883319f ## explicit github.com/launchdarkly/json-patch -# github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230627211718-c0eec7327a20 +# github.com/launchdarkly/ld-find-code-refs/v2 v2.10.1-0.20230628134336-ed386199c5f4 ## explicit; go 1.18 github.com/launchdarkly/ld-find-code-refs/v2/aliases +github.com/launchdarkly/ld-find-code-refs/v2/flags github.com/launchdarkly/ld-find-code-refs/v2/internal/helpers github.com/launchdarkly/ld-find-code-refs/v2/internal/ld github.com/launchdarkly/ld-find-code-refs/v2/internal/log github.com/launchdarkly/ld-find-code-refs/v2/internal/validation github.com/launchdarkly/ld-find-code-refs/v2/internal/version github.com/launchdarkly/ld-find-code-refs/v2/options +github.com/launchdarkly/ld-find-code-refs/v2/search # github.com/magiconair/properties v1.8.7 ## explicit; go 1.19 github.com/magiconair/properties @@ -121,6 +127,14 @@ github.com/pelletier/go-toml/v2/internal/characters github.com/pelletier/go-toml/v2/internal/danger github.com/pelletier/go-toml/v2/internal/tracker github.com/pelletier/go-toml/v2/unstable +# github.com/petar-dambovaliev/aho-corasick v0.0.0-20211021192214-5ab2d9280aa9 +## explicit; go 1.15 +github.com/petar-dambovaliev/aho-corasick +# github.com/pjbgf/sha1cd v0.3.0 +## explicit; go 1.19 +github.com/pjbgf/sha1cd +github.com/pjbgf/sha1cd/internal +github.com/pjbgf/sha1cd/ubc # github.com/pmezard/go-difflib v1.0.0 ## explicit github.com/pmezard/go-difflib/difflib @@ -188,6 +202,10 @@ golang.org/x/sys/windows golang.org/x/text/runes golang.org/x/text/transform golang.org/x/text/unicode/norm +# golang.org/x/tools v0.10.0 +## explicit; go 1.18 +golang.org/x/tools/godoc/util +golang.org/x/tools/godoc/vfs # google.golang.org/appengine v1.6.7 ## explicit; go 1.11 google.golang.org/appengine/internal @@ -226,8 +244,6 @@ google.golang.org/protobuf/reflect/protoregistry google.golang.org/protobuf/runtime/protoiface google.golang.org/protobuf/runtime/protoimpl google.golang.org/protobuf/types/descriptorpb -# gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c -## explicit; go 1.11 # gopkg.in/ini.v1 v1.67.0 ## explicit gopkg.in/ini.v1
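
For context on the vendored golang.org/x/tools/godoc/vfs package added above: it exposes a small overlay-filesystem API (OS, NameSpace.Bind, ReadFile). The sketch below is illustrative only and is not part of this change; it assumes the signatures shown in this diff plus vfs.NewNameSpace from the same vendored package, and the /tmp paths and file name are placeholders.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/godoc/vfs"
)

func main() {
	// Overlay two OS-backed trees in one namespace. NewNameSpace comes from
	// the vendored vfs package; the directory paths here are placeholders.
	ns := vfs.NewNameSpace()
	ns.Bind("/", vfs.OS("/tmp/base"), "/", vfs.BindReplace)
	// BindBefore gives the overlay priority; the base tree is still
	// consulted for paths the overlay does not contain.
	ns.Bind("/", vfs.OS("/tmp/overlay"), "/", vfs.BindBefore)

	// ReadFile opens the path through the namespace, trying each bound
	// filesystem in order until one succeeds.
	data, err := vfs.ReadFile(ns, "/example.txt")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s", data)
}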