From 0020d7f12bc88ec998179c32116412745420ae1a Mon Sep 17 00:00:00 2001
From: Stewart Boyd
Date: Mon, 23 Sep 2024 10:16:21 -0700
Subject: [PATCH] Improve schema registry support (#12)

# Improve schema registry support

1. Added formatters that properly use schema registry (zfmt attempted this previously, but missed the mark)
2. Updated tests to be controlled with an envvar instead of build tags
3. Updated the golangci config to remove deprecated options
4. Updated `make` to add proto generation for the schema registry evolution tests
5. Because protobuf has protections against named type collisions, tests are run with a special envvar to ignore this warning
6. Added `LifecyclePostReadImmediate` for better confirmation of read errors
7. Updated ReaderOption to not update KReader directly and instead update an indirect settings object
8. Updated the producer clientId to be `clientid + topic`; this helps avoid accidental collisions seen in production with DLT'd consumers
9. Updated compose.yaml used for local testing to stand up schema registry
---
 .github/workflows/go.yml | 1 +
 .gitignore | 1 -
 .golangci.yml | 14 +-
 Makefile | 16 +-
 README.md | 16 +-
 changelog.md | 8 +
 client.go | 126 ++--
 client_test.go | 215 +++++--
 config.go | 50 +-
 config_test.go | 6 +-
 coverage.sh | 14 +-
 example/compose.yaml | 19 +-
 example/producer_avro/dummy_event.avsc | 13 +
 example/producer_avro/dummy_event_gen.go | 31 +
 example/producer_avro/go.mod | 26 +
 example/producer_avro/go.sum | 489 ++++++++++++++++
 example/producer_avro/main.go | 55 ++
 example/worker_avro/dummy_event.avsc | 11 +
 example/worker_avro/dummy_event_gen.go | 29 +
 example/worker_avro/go.mod | 38 ++
 example/worker_avro/go.sum | 507 ++++++++++++++++
 example/worker_avro/main.go | 85 +++
 formatter.go | 161 +++++-
 formatter_test.go | 10 +-
 go.mod | 18 +-
 go.sum | 95 +++
 heap_test.go | 2 +-
 lifecycle.go | 72 ++-
 lifecycle_test.go | 160 +++++-
 message.go | 17 +-
 message_test.go | 10 +-
 reader.go | 66 ++-
 reader_test.go | 132 ++++-
 schemareg.go | 146 +++++
 test/evolution/avro1/schema_1_gen.go | 29 +
 test/evolution/avro2/schema_2_gen.go | 30 +
 test/evolution/json1/schema_1.pb.go | 9 +
 test/evolution/json2/schema_2.go | 10 +
 test/evolution/proto1/schema_1.pb.go | 182 ++++++
 test/evolution/proto2/schema_2.pb.go | 192 +++++++
 test/evolution/schema_1.avsc | 11 +
 test/evolution/schema_1.proto | 13 +
 test/evolution/schema_2.avsc | 12 +
 test/evolution/schema_2.proto | 14 +
 test/integration_test.go | 203 ++++++-
 test/schema_registry_evo_test.go | 700 +++++++++++++++++++++++
 test/schema_registry_test.go | 211 +++++++
 test/worker_test.go | 52 +-
 testhelper.go | 66 ++-
 work.go | 26 +-
 work_test.go | 29 +-
 writer.go | 79 ++-
 writer_test.go | 81 +--
 53 files changed, 4252 insertions(+), 356 deletions(-)
 create mode 100644 example/producer_avro/dummy_event.avsc
 create mode 100644 example/producer_avro/dummy_event_gen.go
 create mode 100644 example/producer_avro/go.mod
 create mode 100644 example/producer_avro/go.sum
 create mode 100644 example/producer_avro/main.go
 create mode 100644 example/worker_avro/dummy_event.avsc
 create mode 100644 example/worker_avro/dummy_event_gen.go
 create mode 100644 example/worker_avro/go.mod
 create mode 100644 example/worker_avro/go.sum
 create mode 100644 example/worker_avro/main.go
 create mode 100644 schemareg.go
 create mode 100644 test/evolution/avro1/schema_1_gen.go
 create mode 100644 test/evolution/avro2/schema_2_gen.go
 create mode 100644 test/evolution/json1/schema_1.pb.go
 create mode 100644 test/evolution/json2/schema_2.go
 create mode 100644
test/evolution/proto1/schema_1.pb.go create mode 100644 test/evolution/proto2/schema_2.pb.go create mode 100644 test/evolution/schema_1.avsc create mode 100644 test/evolution/schema_1.proto create mode 100644 test/evolution/schema_2.avsc create mode 100644 test/evolution/schema_2.proto create mode 100644 test/schema_registry_evo_test.go create mode 100644 test/schema_registry_test.go diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b2c7d15..9a142b7 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -41,6 +41,7 @@ jobs: - name: Test env: KAFKA_BOOTSTRAP_SERVER: ${{ env.kafka_runner_address }}:9092 + ENABLE_KAFKA_BROKER_TESTS: true run: make cover - name: Upload coverage reports to Codecov diff --git a/.gitignore b/.gitignore index 6b401df..986e774 100644 --- a/.gitignore +++ b/.gitignore @@ -25,7 +25,6 @@ go.work.sum .idea/ .run/ zk-multiple-kafka-multiple/ -*.out *.res *.lsif *.prof diff --git a/.golangci.yml b/.golangci.yml index 33a5523..bc3b462 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,11 +1,11 @@ run: - skip-dirs: - - docs - - datadog - - kustomize - skip-files: - - 'wire_gen.go' tests: false + go: '1.22' +issues: + exclude-files: + - 'wire_gen.go' + exclude-dirs: + - docs linters-settings: errcheck: check-type-assertions: true @@ -14,8 +14,6 @@ linters-settings: sections: - standard - default - gosimple: - go: '1.17' depguard: rules: Main: diff --git a/Makefile b/Makefile index 1feb319..b6a2494 100644 --- a/Makefile +++ b/Makefile @@ -10,7 +10,7 @@ setup: # Assumes setup has been executed. Runs go test with coverage .PHONY: cover cover: - export GO_TAGS=--tags=integration; ./coverage.sh + ./coverage.sh # Runs setup and executes tests with coverage. .PHONY: test-local @@ -41,3 +41,17 @@ golangci-lint: (cd $(mod) && \ echo "[lint] golangci-lint: $(mod)" && \ golangci-lint run --path-prefix $(mod) ./...) &&) true + +.PHONY: gen +gen: protoc-exists + cd test/evolution; protoc --proto_path=. --go_out=./ ./schema_1.proto + cd test/evolution; protoc --proto_path=. --go_out=./ ./schema_2.proto + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p main -d ./example/producer_avro ./example/producer_avro/dummy_event.avsc + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p main -d ./example/worker_avro ./example/worker_avro/dummy_event.avsc + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p avro1 -d ./test/evolution/avro1 ./test/evolution/schema_1.avsc + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p avro2 -d ./test/evolution/avro2 ./test/evolution/schema_2.avsc + +# a forced dependency which fails (and prints) if `avro-tools` isn't installed +.PHONY: protoc-exists +protoc-exists: + @which protoc > /dev/null || (echo "protoc is not installed. Install via `brew install protobuf`"; exit 1) \ No newline at end of file diff --git a/README.md b/README.md index 3657f6b..6c9b9f4 100644 --- a/README.md +++ b/README.md @@ -270,13 +270,17 @@ special processing of these messages. ### SchemaRegistry Support: -There is limited support for schema registry in zkafka. A schemaID can be hardcoded via configuration. No -communication is done with schema registry, but some primitive checks can be conducted if a schemaID is specified via -configuration. +zkafka supports schema registry. 
It extends `zfmt` to enable this by adding three `zfmt.FormatterType` values: +``` + AvroSchemaRegistry zfmt.FormatterType = "avro_schema_registry" + ProtoSchemaRegistry zfmt.FormatterType = "proto_schema_registry" + JSONSchemaRegistry zfmt.FormatterType = "json_schema_registry" +``` + +These can be used in ProducerTopicConfig/ConsumerTopicConfig just like the other formatter types. Examples have been added in +`example/producer_avro` and `example/worker_avro`, which demonstrate the additional configuration (mostly there to enable the +required schema registry communication). -Below is a breakdown of schema registry interactions into two subcategories. One is `Raw Handling` where the configurable -foramtter is bypassed entirely in favor of operating with the value byte arrays directly. The other is `Native Support` which -attempts to create confluent compatible serializations, without communicating with schema registry directly. #### Producers diff --git a/changelog.md b/changelog.md index ca74bb1..7b62247 100644 --- a/changelog.md +++ b/changelog.md @@ -4,6 +4,14 @@ All notable changes to this project will be documented in this file. This project adheres to Semantic Versioning. +## 1.1.0 (Sep 22, 2024) + +1. Added support for schema registry (avro, proto, json). Extended `zfmt.FormatterType` types to include `avro_schema_registry`, `proto_schema_registry` and `json_schema_registry` +2. Added lifecycle function `LifecyclePostReadImmediate` +3. Added `workFactory.CreateWithFunc`, a convenience work factory method for creating work using a callback instead of an interface (can reduce boilerplate in some scenarios) +4. During the creation of readers/writers an error is now returned if bootstrap servers are empty + + ## 1.0.2 (Sep 6, 2024) 1. Updated `WithDeadLetterTopic` option to borrow username and password from ConsumerTopicConfig when those issues aren't specified on DeadLetterTopicConfig diff --git a/client.go b/client.go index c6f0b93..6e44ee9 100644 --- a/client.go +++ b/client.go @@ -36,13 +36,15 @@ type Client struct { tp trace.TracerProvider p propagation.TextMapPropagator - // confluent dependencies + srf *schemaRegistryFactory + producerProvider confluentProducerProvider consumerProvider confluentConsumerProvider } // NewClient instantiates a kafka client to get readers and writers func NewClient(conf Config, opts ...Option) *Client { + srf := newSchemaRegistryFactory() c := &Client{ conf: conf, readers: make(map[string]*KReader), @@ -51,6 +53,7 @@ func NewClient(conf Config, opts ...Option) *Client { producerProvider: defaultConfluentProducerProvider{}.NewProducer, consumerProvider: defaultConfluentConsumerProvider{}.NewConsumer, + srf: srf, } for _, opt := range opts { opt(c) @@ -79,16 +82,26 @@ func (c *Client) Reader(_ context.Context, topicConfig ConsumerTopicConfig, opts return r, nil } - reader, err := newReader(c.conf, topicConfig, c.consumerProvider, c.logger, c.groupPrefix) + formatter, err := c.getFormatter(formatterArgs{ + formatter: topicConfig.Formatter, + schemaID: topicConfig.SchemaID, + srCfg: topicConfig.SchemaRegistry, + }) if err != nil { return nil, err } - // copy settings from client first - reader.lifecycle = c.lifecycle - - // overwrite options if given - for _, opt := range opts { - opt(reader) + reader, err := newReader(readerArgs{ + cfg: c.conf, + cCfg: topicConfig, + consumerProvider: c.consumerProvider, + f: formatter, + l: c.logger, + prefix: c.groupPrefix, + hooks: c.lifecycle, + opts: opts, + }) + if err != nil { + return nil, err } c.readers[topicConfig.ClientID]
= reader return c.readers[topicConfig.ClientID], nil @@ -100,8 +113,9 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts if err != nil { return nil, err } + writerKey := getWriterKey(topicConfig) c.mu.RLock() - w, exist := c.writers[topicConfig.ClientID] + w, exist := c.writers[writerKey] if exist && !w.isClosed { c.mu.RUnlock() return w, nil @@ -110,39 +124,36 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts c.mu.Lock() defer c.mu.Unlock() - w, exist = c.writers[topicConfig.ClientID] + w, exist = c.writers[writerKey] if exist && !w.isClosed { return w, nil } - writer, err := newWriter(c.conf, topicConfig, c.producerProvider) + formatter, err := c.getFormatter(formatterArgs{ + formatter: topicConfig.Formatter, + schemaID: topicConfig.SchemaID, + srCfg: topicConfig.SchemaRegistry, + }) + if err != nil { return nil, err } - // copy settings from client first - writer.logger = c.logger - writer.tracer = getTracer(c.tp) - writer.p = c.p - writer.lifecycle = c.lifecycle - - // overwrite options if given - for _, opt := range opts { - opt(writer) + writer, err := newWriter(writerArgs{ + cfg: c.conf, + pCfg: topicConfig, + producerProvider: c.producerProvider, + f: formatter, + l: c.logger, + t: getTracer(c.tp), + p: c.p, + hooks: c.lifecycle, + opts: opts, + }) + if err != nil { + return nil, err } - c.writers[topicConfig.ClientID] = writer - return c.writers[topicConfig.ClientID], nil -} -func getFormatter(topicConfig TopicConfig) (zfmt.Formatter, error) { - switch topicConfig.GetFormatter() { - case CustomFmt: - return &noopFormatter{}, nil - default: - f, err := zfmt.GetFormatter(topicConfig.GetFormatter(), topicConfig.GetSchemaID()) - if err != nil { - return nil, fmt.Errorf("unsupported formatter %s", topicConfig.GetFormatter()) - } - return f, nil - } + c.writers[writerKey] = writer + return c.writers[writerKey], nil } // Close terminates all cached readers and writers gracefully. 
@@ -165,9 +176,56 @@ func (c *Client) Close() error { return err } +func (c *Client) getFormatter(args formatterArgs) (kFormatter, error) { + formatter := args.formatter + schemaID := args.schemaID + + switch formatter { + case AvroSchemaRegistry: + scl, err := c.srf.createAvro(args.srCfg) + if err != nil { + return nil, err + } + cf, err := newAvroSchemaRegistryFormatter(scl) + return cf, err + case ProtoSchemaRegistry: + scl, err := c.srf.createProto(args.srCfg) + if err != nil { + return nil, err + } + cf := newProtoSchemaRegistryFormatter(scl) + return cf, nil + case JSONSchemaRegistry: + scl, err := c.srf.createJson(args.srCfg) + if err != nil { + return nil, err + } + cf := newJsonSchemaRegistryFormatter(scl) + return cf, nil + case CustomFmt: + return &errFormatter{}, nil + default: + f, err := zfmt.GetFormatter(formatter, schemaID) + if err != nil { + return nil, fmt.Errorf("unsupported formatter %s", formatter) + } + return zfmtShim{F: f}, nil + } +} + func getTracer(tp trace.TracerProvider) trace.Tracer { if tp == nil { return nil } return tp.Tracer(instrumentationName, trace.WithInstrumentationVersion("v1.0.0")) } + +func getWriterKey(cfg ProducerTopicConfig) string { + return cfg.ClientID + "-" + cfg.Topic +} + +type formatterArgs struct { + formatter zfmt.FormatterType + schemaID int + srCfg SchemaRegistryConfig +} diff --git a/client_test.go b/client_test.go index d384bc4..6c67584 100644 --- a/client_test.go +++ b/client_test.go @@ -178,6 +178,7 @@ func TestClient_Reader(t *testing.T) { { name: "create new KReader for closed KReader", fields: fields{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, readers: map[string]*KReader{ "test-config": {isClosed: true}, }, @@ -202,14 +203,15 @@ func TestClient_Reader(t *testing.T) { SessionTimeoutMillis: ptr(61000), MaxPollIntervalMillis: ptr(61000), }, - logger: NoopLogger{}, - fmtter: &zfmt.AvroFormatter{}, + logger: NoopLogger{}, + formatter: zfmtShim{&zfmt.AvroFormatter{}}, }, wantErr: false, }, { name: "create new KReader for closed KReader with default overrides", fields: fields{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, readers: map[string]*KReader{ "test-config": {isClosed: true}, }, @@ -234,8 +236,8 @@ func TestClient_Reader(t *testing.T) { SessionTimeoutMillis: ptr(20000), MaxPollIntervalMillis: ptr(21000), }, - logger: NoopLogger{}, - fmtter: &zfmt.AvroFormatter{}, + logger: NoopLogger{}, + formatter: zfmtShim{&zfmt.AvroFormatter{}}, }, wantErr: false, }, @@ -294,7 +296,7 @@ func TestClient_Reader(t *testing.T) { assertEqual(t, a, b, cmpopts.IgnoreUnexported(MockKafkaConsumer{})) } assertEqual(t, gotReader.logger, tt.want.logger) - assertEqual(t, gotReader.fmtter, tt.want.fmtter) + assertEqual(t, gotReader.formatter, tt.want.formatter) } }) } @@ -342,6 +344,7 @@ func TestClient_Writer(t *testing.T) { { name: "create new KWriter for closed writer", fields: fields{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, writers: map[string]*KWriter{ "test-id": {isClosed: true}, }, @@ -361,16 +364,17 @@ func TestClient_Writer(t *testing.T) { NagleDisable: ptr(true), LingerMillis: 0, }, - logger: NoopLogger{}, - tracer: noop.TracerProvider{}.Tracer(""), - p: propagation.TraceContext{}, - fmtter: &zfmt.ProtobufRawFormatter{}, + logger: NoopLogger{}, + tracer: noop.TracerProvider{}.Tracer(""), + p: propagation.TraceContext{}, + formatter: zfmtShim{&zfmt.ProtobufRawFormatter{}}, }, wantErr: false, }, { name: "create new KWriter for closed writer with default overrides", fields: fields{ + conf: 
Config{BootstrapServers: []string{"localhost:9092"}}, writers: map[string]*KWriter{ "test-id": {isClosed: true}, }, @@ -390,10 +394,10 @@ func TestClient_Writer(t *testing.T) { NagleDisable: ptr(false), LingerMillis: 1, }, - logger: NoopLogger{}, - tracer: noop.TracerProvider{}.Tracer(""), - p: propagation.TraceContext{}, - fmtter: &zfmt.ProtobufRawFormatter{}, + logger: NoopLogger{}, + tracer: noop.TracerProvider{}.Tracer(""), + p: propagation.TraceContext{}, + formatter: zfmtShim{&zfmt.ProtobufRawFormatter{}}, }, wantErr: false, }, @@ -408,7 +412,7 @@ func TestClient_Writer(t *testing.T) { name: "get from cache", fields: fields{ writers: map[string]*KWriter{ - "test-id": {}, + "test-id-topic": {}, }, }, args: args{ @@ -442,7 +446,7 @@ func TestClient_Writer(t *testing.T) { assertEqual(t, gotKWriter.topicConfig, tt.want.topicConfig) assertEqual(t, gotKWriter.logger, tt.want.logger) - assertEqual(t, gotKWriter.fmtter, tt.want.fmtter) + assertEqual(t, gotKWriter.formatter, tt.want.formatter) }) } } @@ -460,13 +464,25 @@ func TestClient_Close(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r1, err := newReader(Config{}, ConsumerTopicConfig{ - Formatter: zfmt.StringFmt, - }, m, &NoopLogger{}, "") + r1, err := newReader(readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: ConsumerTopicConfig{ + Formatter: zfmt.StringFmt, + }, + consumerProvider: m, + f: zfmtShim{F: &zfmt.StringFormatter{}}, + l: &NoopLogger{}, + }) require.NoError(t, err) - r2, err := newReader(Config{}, ConsumerTopicConfig{ - Formatter: zfmt.StringFmt, - }, m, &NoopLogger{}, "") + r2, err := newReader(readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: ConsumerTopicConfig{ + Formatter: zfmt.StringFmt, + }, + consumerProvider: m, + f: zfmtShim{F: &zfmt.StringFormatter{}}, + l: &NoopLogger{}, + }) require.NoError(t, err) tests := []struct { name string @@ -509,16 +525,16 @@ func TestClient_Close(t *testing.T) { for _, w := range c.writers { require.True(t, w.isClosed, "clients writer should be closed") } - for _, reader := range c.readers { - require.True(t, reader.isClosed, "clients reader should be closed") + for _, r := range c.readers { + require.True(t, r.isClosed, "clients reader should be closed") } }) } } -func Test_getFormatter(t *testing.T) { +func Test_getFormatter_Consumer(t *testing.T) { type args struct { - topicConfig TopicConfig + topicConfig ConsumerTopicConfig } tests := []struct { name string @@ -555,12 +571,6 @@ func Test_getFormatter(t *testing.T) { want: &zfmt.AvroFormatter{}, wantErr: false, }, - { - name: "confluent avro with schema ClientID", - args: args{topicConfig: ProducerTopicConfig{Formatter: zfmt.FormatterType("avro_schema")}}, - want: &zfmt.SchematizedAvroFormatter{}, - wantErr: false, - }, { name: "confluent avro with inferred schema ClientID", args: args{topicConfig: ConsumerTopicConfig{Formatter: zfmt.FormatterType("avro_schema"), SchemaID: 10}}, @@ -585,27 +595,68 @@ func Test_getFormatter(t *testing.T) { want: &zfmt.SchematizedProtoFormatterDeprecated{}, wantErr: false, }, + { + name: "unsupported", + args: args{topicConfig: ConsumerTopicConfig{Formatter: zfmt.FormatterType("what"), SchemaID: 10}}, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + defer recoverThenFail(t) + args := formatterArgs{ + formatter: tt.args.topicConfig.Formatter, + schemaID: tt.args.topicConfig.SchemaID, + } + c := Client{} + got, err := c.getFormatter(args) + if tt.wantErr { + 
require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, zfmtShim{tt.want}, got) + } + }) + } +} + +func Test_getFormatter_Producer(t *testing.T) { + type args struct { + topicConfig ProducerTopicConfig + } + tests := []struct { + name string + args args + want zfmt.Formatter + wantErr bool + }{ + { + name: "confluent avro with schema ClientID", + args: args{topicConfig: ProducerTopicConfig{Formatter: zfmt.FormatterType("avro_schema")}}, + want: &zfmt.SchematizedAvroFormatter{}, + wantErr: false, + }, { name: "confluent json with inferred schema ID", args: args{topicConfig: ProducerTopicConfig{Formatter: zfmt.FormatterType("proto_schema_deprecated"), SchemaID: 10}}, want: &zfmt.SchematizedProtoFormatterDeprecated{SchemaID: 10}, wantErr: false, }, - { - name: "unsupported", - args: args{topicConfig: ConsumerTopicConfig{Formatter: zfmt.FormatterType("what"), SchemaID: 10}}, - wantErr: true, - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, err := getFormatter(tt.args.topicConfig) + args := formatterArgs{ + formatter: tt.args.topicConfig.Formatter, + schemaID: tt.args.topicConfig.SchemaID, + } + c := Client{} + got, err := c.getFormatter(args) if tt.wantErr { require.Error(t, err) } else { require.NoError(t, err) - require.Equal(t, tt.want, got) + require.Equal(t, zfmtShim{tt.want}, got) } }) } @@ -731,10 +782,27 @@ func Test_makeConfig_Consumer(t *testing.T) { prefix string } tests := []struct { - name string - args args - want kafka.ConfigMap + name string + args args + want kafka.ConfigMap + wantErr string }{ + { + name: "missing bootstrap", + args: args{ + conf: Config{}, + topicConfig: ConsumerTopicConfig{ + ClientID: "clientid", + GroupID: "group", + Topic: "", + Formatter: "", + SchemaID: 0, + Transaction: true, + }, + }, + wantErr: "invalid consumer config, missing bootstrap server addresses", + }, + { name: "with transaction", args: args{ @@ -946,8 +1014,13 @@ func Test_makeConfig_Consumer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got := makeConsumerConfig(tt.args.conf, tt.args.topicConfig, tt.args.prefix) - assertEqual(t, got, tt.want) + got, err := makeConsumerConfig(tt.args.conf, tt.args.topicConfig, tt.args.prefix) + if tt.wantErr == "" { + require.NoError(t, err) + assertEqual(t, got, tt.want) + } else { + require.ErrorContains(t, err, tt.wantErr) + } }) } } @@ -958,10 +1031,24 @@ func Test_makeConfig_Producer(t *testing.T) { topicConfig ProducerTopicConfig } tests := []struct { - name string - args args - want kafka.ConfigMap + name string + args args + want kafka.ConfigMap + wantErr string }{ + { + name: "with missing bootstrap config", + args: args{ + conf: Config{}, + topicConfig: ProducerTopicConfig{ + ClientID: "clientid", + Topic: "yyy", + Transaction: true, + }, + }, + wantErr: "invalid producer config, missing bootstrap server addresses", + }, + { name: "with transaction", args: args{ @@ -970,9 +1057,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", - Topic: "", - Formatter: "", - SchemaID: 0, + Topic: "yyy", Transaction: true, }, }, @@ -981,7 +1066,7 @@ func Test_makeConfig_Producer(t *testing.T) { "enable.idempotence": true, "request.required.acks": -1, "max.in.flight.requests.per.connection": 1, - "client.id": "clientid", + "client.id": "clientid-yyy", "linger.ms": 0, }, }, @@ -993,12 +1078,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: 
ProducerTopicConfig{ ClientID: "clientid", + Topic: "zzz", DeliveryTimeoutMs: ptr(100), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080,https://localhost:8081", - "client.id": "clientid", + "client.id": "clientid-zzz", "delivery.timeout.ms": 100, "enable.idempotence": true, "linger.ms": 0, @@ -1012,6 +1098,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "zzz", DeliveryTimeoutMs: ptr(100), AdditionalProps: map[string]any{ "stewarts.random.property.not.included.in.topicconfig": 123, @@ -1021,7 +1108,7 @@ func Test_makeConfig_Producer(t *testing.T) { want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", "enable.idempotence": true, - "client.id": "clientid", + "client.id": "clientid-zzz", "delivery.timeout.ms": 100, "stewarts.random.property.not.included.in.topicconfig": 123, "linger.ms": 0, @@ -1037,6 +1124,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "abc", DeliveryTimeoutMs: ptr(100), EnableIdempotence: ptr(false), RequestRequiredAcks: ptr("all"), @@ -1048,7 +1136,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-abc", "enable.idempotence": false, "delivery.timeout.ms": 100, "auto.commit.interval.ms": 20, @@ -1070,12 +1158,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslUsername: ptr(""), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, "linger.ms": 0, }, @@ -1090,12 +1179,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslUsername: ptr("usernameOverride"), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, "sasl.mechanism": "SCRAM-SHA-256", "sasl.password": "password", @@ -1114,12 +1204,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslPassword: ptr("passwordOverride"), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, "sasl.mechanism": "SCRAM-SHA-256", "sasl.password": "passwordOverride", @@ -1138,13 +1229,14 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslUsername: ptr("usernameOverride"), SaslPassword: ptr("passwordOverride"), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, "sasl.mechanism": "SCRAM-SHA-256", "sasl.password": "passwordOverride", @@ -1157,8 +1249,13 @@ func Test_makeConfig_Producer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got := makeProducerConfig(tt.args.conf, tt.args.topicConfig) - assertEqual(t, got, tt.want) + got, err := makeProducerConfig(tt.args.conf, tt.args.topicConfig) + if tt.wantErr == "" { + require.NoError(t, err) + assertEqual(t, got, tt.want) + } else { + require.ErrorContains(t, err, tt.wantErr) + } }) } } diff --git a/config.go 
b/config.go index c84ffa8..6d12b5d 100644 --- a/config.go +++ b/config.go @@ -78,7 +78,9 @@ type ConsumerTopicConfig struct { // Formatter is json if not defined Formatter zfmt.FormatterType - // SchemaID defines the schema registered with Confluent Schema Registry + SchemaRegistry SchemaRegistryConfig + + // SchemaID defines the schema registered with Confluent Schema Registry // Default value is 0, and it implies that both Writer and Reader do not care about schema validation // and should encode/decode the message based on data type provided. // Currently, this only works with SchematizedAvroFormatter @@ -172,7 +174,11 @@ type ProducerTopicConfig struct { // Formatter is json if not defined Formatter zfmt.FormatterType - // SchemaID defines the schema registered with Confluent Schema Registry + // SchemaRegistry provides details about connecting to a schema registry, including its URL + // as well as other options. + SchemaRegistry SchemaRegistryConfig + + // SchemaID defines the schema registered with Confluent Schema Registry // Default value is 0, and it implies that both Writer and Reader do not care about schema validation // and should encode/decode the message based on data type provided. // Currently, this only works with SchematizedAvroFormatter @@ -207,9 +213,26 @@ func (p ProducerTopicConfig) GetSchemaID() int { return p.SchemaID } -type TopicConfig interface { - GetFormatter() zfmt.FormatterType - GetSchemaID() int +type SchemaRegistryConfig struct { + // URL is the schema registry URL. During serialization and deserialization + // schema registry is consulted to confirm schema compatibility. + URL string + // Serialization provides additional information used by schema registry formatters during serialization (data write) + Serialization SerializationConfig + // Deserialization provides additional information used by schema registry formatters during deserialization (data read) + Deserialization DeserializationConfig +} + +type SerializationConfig struct { + // AutoRegisterSchemas indicates whether new schemas (those that evolve existing schemas or are brand new) should be registered + // with schema registry dynamically. This feature is typically not used for production workloads. + AutoRegisterSchemas bool + // Schema is used exclusively by the avro schema registry formatter today. It's necessary to provide the proper schema evolution properties + // expected by typical use cases. + Schema string +} + +type DeserializationConfig struct { } func getDefaultConsumerTopicConfig(topicConfig *ConsumerTopicConfig) error { @@ -271,7 +294,7 @@ func getDefaultProducerTopicConfig(topicConfig *ProducerTopicConfig) error { // makeConsumerConfig creates a kafka configMap from the specified strongly typed Config and TopicConfig. // TopicConfig specifies a way to specify config values that aren't strongly typed via AdditionalProps field. // Those values are overwritten if specified in strongly typed TopicConfig fields.
-func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix string) kafka.ConfigMap { +func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix string) (kafka.ConfigMap, error) { configMap := kafka.ConfigMap{} configMap[clientID] = topicConfig.ClientID @@ -309,6 +332,9 @@ func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix str if len(topicConfig.BootstrapServers) != 0 { addresses = topicConfig.BootstrapServers } + if len(addresses) == 0 { + return nil, errors.New("invalid consumer config, missing bootstrap server addresses") + } configMap[bootstrapServers] = strings.Join(addresses, ",") saslUname := conf.SaslUsername @@ -340,16 +366,16 @@ func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix str configMap[key] = kafka.ConfigValue(v) } } - return configMap + return configMap, nil } // makeProducerConfig creates a kafka configMap from the specified strongly typed Config and TopicConfig. // TopicConfig specifies a way to specify config values that aren't strongly typed via AdditionalProps field. // Those values are overwritten if specified in strongly typed TopicConfig fields. -func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) kafka.ConfigMap { +func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) (kafka.ConfigMap, error) { configMap := kafka.ConfigMap{} - configMap[clientID] = topicConfig.ClientID + configMap[clientID] = getWriterKey(topicConfig) if topicConfig.RequestRequiredAcks != nil { configMap[requestRequiredAcks] = *topicConfig.RequestRequiredAcks @@ -386,6 +412,10 @@ func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) kafka.Conf if len(topicConfig.BootstrapServers) != 0 { addresses = topicConfig.BootstrapServers } + if len(addresses) == 0 { + return nil, errors.New("invalid producer config, missing bootstrap server addresses") + } + configMap[bootstrapServers] = strings.Join(addresses, ",") saslUname := conf.SaslUsername @@ -417,5 +447,5 @@ func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) kafka.Conf configMap[key] = kafka.ConfigValue(v) } } - return configMap + return configMap, nil } diff --git a/config_test.go b/config_test.go index f30f255..12cf25c 100644 --- a/config_test.go +++ b/config_test.go @@ -28,7 +28,7 @@ func Test_getDefaultConsumerTopicConfig(t *testing.T) { wantErr: true, }, { - name: "missing required field (Topic) => error", + name: "missing required field (topic) => error", args: args{conf: &ConsumerTopicConfig{ GroupID: "test_group", ClientID: "test", @@ -36,7 +36,7 @@ func Test_getDefaultConsumerTopicConfig(t *testing.T) { wantErr: true, }, { - name: "missing required non empty fields (Topic and or Topics) => error", + name: "missing required non empty fields (topic and or Topics) => error", args: args{conf: &ConsumerTopicConfig{ GroupID: "test_group", ClientID: "test", @@ -126,7 +126,7 @@ func Test_getDefaultProducerTopicConfig(t *testing.T) { wantErr bool }{ { - name: "missing required field (Topic) => error", + name: "missing required field (topic) => error", args: args{conf: &ProducerTopicConfig{ ClientID: "test", }}, diff --git a/coverage.sh b/coverage.sh index ee8252c..4485c4b 100755 --- a/coverage.sh +++ b/coverage.sh @@ -1,10 +1,10 @@ #!/usr/bin/env bash set -x -# allows for GO test args to be passed in (Specifically added to control whether or not to pass in `--tags=integration`). 
-go_tags=$GO_TAGS -go_tags="${go_tags:---tags=unit}" - +# Protobuf schema registry schema evolution tests register the same message type +# in the same package. These are compiled in the same binary (the test), and by default +# proto panics in such a situation. Setting this envvar ignores that check +export GOLANG_PROTOBUF_REGISTRATION_CONFLICT=ignore # golang packages that will be used for either testing or will be assessed for coverage pck1=github.com/zillow/zkafka pck2=$pck1/test @@ -27,11 +27,11 @@ function quit() { } # change to example directory for execution (because it uses hardcoded filepaths, and the testable # examples don't work when executed outside of that directory -go test $go_tags -c -coverpkg=$pck1 -covermode=atomic -o "$root_res" $pck1 +go test --tags=evolution_test -c -coverpkg=$pck1 -covermode=atomic -o "$root_res" $pck1 # convert binary to go formatted -go tool test2json -t "$root_res" -test.v -test.coverprofile "$root_out" +go tool test2json -t "$root_res" -test.v -test.coverprofile "$root_out" -go test $go_tags -c -coverpkg=$pck1 -covermode=atomic -o "$source_res" $pck2 +go test --tags=evolution_test -c -coverpkg=$pck1 -covermode=atomic -o "$source_res" $pck2 go tool test2json -t "$source_res" -test.v -test.coverprofile "$source_out" # delete aggregate file diff --git a/example/compose.yaml b/example/compose.yaml index bd28a32..f197d20 100644 --- a/example/compose.yaml +++ b/example/compose.yaml @@ -1,6 +1,6 @@ services: zookeeper: - image: confluentinc/cp-zookeeper:latest + image: confluentinc/cp-zookeeper:7.7.1 container_name: zkafka-zookeeper environment: ZOOKEEPER_CLIENT_PORT: 2181 @@ -8,7 +8,7 @@ services: ports: - "22181:2181" kafka: - image: confluentinc/cp-kafka:latest + image: confluentinc/cp-kafka:7.7.1 container_name: zkafka-broker depends_on: - zookeeper @@ -22,3 +22,18 @@ services: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + schemaregistry: + image: confluentinc/cp-schema-registry:7.7.1 + depends_on: + - kafka + - zookeeper + ports: + - "8081:8081" + environment: + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper:2181 + SCHEMA_REGISTRY_HOST_NAME: schemaregistry + SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081 + SCHEMA_REGISTRY_DEBUG: true + SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092 + + diff --git a/example/producer_avro/dummy_event.avsc b/example/producer_avro/dummy_event.avsc new file mode 100644 index 0000000..1f4112d --- /dev/null +++ b/example/producer_avro/dummy_event.avsc @@ -0,0 +1,13 @@ +{ + "type": "record", + "name": "DummyEvent", + "fields": [ + {"name": "IntField", "type": "int"}, + {"name": "DoubleField", "type": "double"}, + {"name": "StringField", "type": "string"}, + {"name": "BoolField", "type": "boolean"}, + {"name": "BytesField", "type": "bytes"}, + {"name": "NewFieldWithDefault", "type": ["null", "string"], "default": null }, + {"name": "NewFieldWithDefault2", "type": ["null", "string"], "default": null } + ] +} \ No newline at end of file diff --git a/example/producer_avro/dummy_event_gen.go b/example/producer_avro/dummy_event_gen.go new file mode 100644 index 0000000..083961e --- /dev/null +++ b/example/producer_avro/dummy_event_gen.go @@ -0,0 +1,31 @@ +// Code generated by avrogen. DO NOT EDIT. 
+ +package main + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte + NewFieldWithDefault *string + NewFieldWithDefault2 *string +} + +// AvroRecord implements the avro.AvroRecord interface. +func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"},{"default":null,"name":"NewFieldWithDefault","type":["null","string"]},{"default":null,"name":"NewFieldWithDefault2","type":["null","string"]}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +} diff --git a/example/producer_avro/go.mod b/example/producer_avro/go.mod new file mode 100644 index 0000000..818a939 --- /dev/null +++ b/example/producer_avro/go.mod @@ -0,0 +1,26 @@ +module github.com/zillow/zkafka/example/producer_avro + +go 1.23.1 + +replace github.com/zillow/zkafka v1.0.0 => ../.. + + +require ( + github.com/zillow/zkafka v1.0.0 + github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect + github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/hamba/avro/v2 v2.20.1 // indirect + github.com/heetch/avro v0.4.5 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/sony/gobreaker v1.0.0 // indirect + github.com/zillow/zfmt v1.0.1 // indirect + go.opentelemetry.io/otel v1.28.0 // indirect + go.opentelemetry.io/otel/trace v1.28.0 // indirect + golang.org/x/sync v0.7.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect +) diff --git a/example/producer_avro/go.sum b/example/producer_avro/go.sum new file mode 100644 index 0000000..e8d3c72 --- /dev/null +++ b/example/producer_avro/go.sum @@ -0,0 +1,489 @@ +cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU= +cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= +github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ= 
+github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0 h1:DRiANoJTiW6obBQe3SqZizkuV1PEgfiiGivmVocDy64= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0/go.mod h1:qLIye2hwb/ZouqhpSD9Zn3SJipvpEnz1Ywl3VUk9Y0s= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= +github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8= +github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= +github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930= +github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= +github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= +github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= +github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= +github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10/go.mod h1:6t3sucOaYDwDssHQa0ojH1RpmVmF5/jArkye1b2FKMI= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 h1:FVJ0r5XTHSmIHJV6KuDmdYhEpvlHpiSd38RQWhut5J4= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1/go.mod h1:zusuAeqezXzAB24LGuzuekqMAEgWkVYukBec3kr3jUg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 
h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1 h1:SBn4I0fJXF9FYOVRSVMWuhvEKoAHDikjGpS3wlmw5DE= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1/go.mod h1:2snWQJQUKsbN66vAawJuOGX7dr37pfOq9hb0tZDGIqQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n2HZPkcKgPAi1phU= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= +github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= +github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= +github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c= +github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/compose-spec/compose-go/v2 v2.1.0 h1:qdW2qISQlCQG8v1O2TChcdxgAWTUGgUX/CPSO+ES9+E= +github.com/compose-spec/compose-go/v2 v2.1.0/go.mod h1:bEPizBkIojlQ20pi2vNluBa58tevvj0Y18oUSHPyfdc= +github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 h1:PM18lA9g6u6Qcz06DpXmGRlxXTvWlHqnlAkQi1chPUo= +github.com/confluentinc/confluent-kafka-go/v2 v2.5.0/go.mod h1:Hyo+IIQ/tmsfkOcRP8T6VlSeOW3T33v0Me8Xvq4u90Y= +github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro= +github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk= +github.com/containerd/containerd v1.7.15 h1:afEHXdil9iAm03BmhjzKyXnnEBtjaLJefdU7DV0IFes= +github.com/containerd/containerd v1.7.15/go.mod h1:ISzRRTMF8EXNpJlTzyr2XMhN+j9K302C21/+cr3kUnY= +github.com/containerd/continuity v0.4.3 
h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8= +github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/ttrpc v1.2.3 h1:4jlhbXIGvijRtNC8F/5CpuJZ7yKOBFGFOOXg1bkISz0= +github.com/containerd/ttrpc v1.2.3/go.mod h1:ieWsXucbb8Mj9PH0rXCw1i8IunRbbAiDkpXkbfflWBM= +github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4= +github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/buildx v0.14.0 h1:FxqcfE7xgeEC4oQlKLpuvfobRDVDXrHE3jByM+mdyqk= +github.com/docker/buildx v0.14.0/go.mod h1:Vy/2lC9QsJvo33+7KKkN/GDE5WxnVqW0/dpcN7ZqPJY= +github.com/docker/cli v26.1.0+incompatible h1:+nwRy8Ocd8cYNQ60mozDDICICD8aoFGtlPXifX/UQ3Y= +github.com/docker/cli v26.1.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/compose/v2 v2.27.0 h1:FKyClQdErCxUZULC2zo6Jn5ve+epFPe/Y0HaxjmUzNg= +github.com/docker/compose/v2 v2.27.0/go.mod h1:uaqwmY6haO8wXWHk+LAsqqDapX6boH4izRKqj/E7+Bo= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v26.1.0+incompatible h1:W1G9MPNbskA6VZWL7b3ZljTh0pXI68FpINx0GKaOdaM= +github.com/docker/docker v26.1.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8= +github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8= +github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJtLmY22n99HaZTz+r2Z51xUPi01m3wg= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg= +github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g= 
+github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/frankban/quicktest v1.14.0 h1:+cqqvzZV87b4adx/5ayVOaYZ2CrvM4ejQvUdBzPPUss= +github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= +github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvnkw= +github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc= +github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= +github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= +github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= +github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= +github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE= +github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= +github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.4 
h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/cel-go v0.20.1 h1:nDx9r8S3L4pE61eDdt8igGj8rf5kjYR3ILxWIpWNi84= +github.com/google/cel-go v0.20.1/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg= +github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= +github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA= +github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hamba/avro/v2 v2.20.1 h1:3WByQiVn7wT7d27WQq6pvBRC00FVOrniP6u67FLA/2E= +github.com/hamba/avro/v2 v2.20.1/go.mod h1:xHiKXbISpb3Ovc809XdzWow+XGTn+Oyf/F9aZbTLAig= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.7.5 h1:bJj+Pj19UZMIweq/iie+1u5YCdGrnxCT9yvm0e+Nd5M= +github.com/hashicorp/go-retryablehttp v0.7.5/go.mod 
h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 h1:om4Al8Oy7kCm/B86rLCLah4Dt5Aa0Fr5rYBG60OzwHQ= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= +github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= +github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= +github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/vault/api v1.12.1 h1:WzGN4X5jrJdNO39g6Sa55djNio3I9DxEBOTmCZE7tm0= +github.com/hashicorp/vault/api v1.12.1/go.mod h1:1pqP/sErScodde+ybJCyP+ONC4jzEg7Dmawg/QLWo1k= +github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU= +github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws= +github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= +github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= +github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY= +github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= +github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= 
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= +github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= +github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/buildkit v0.13.1 h1:L8afOFhPq2RPJJSr/VyzbufwID7jquZVB7oFHbPRcPE= +github.com/moby/buildkit v0.13.1/go.mod h1:aNmNQKLBFYAOFuzQjR3VA27/FijlvtBD1pjNwTSN37k= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg= +github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= 
+github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g= +github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI= +github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= +github.com/moby/sys/symlink v0.2.0 h1:tk1rOM+Ljp0nFmfOIBtlV3rTDlWOwFRhjEeAhZB0nZc= +github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs= +github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod 
h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= +github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= +github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o= +github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= +github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= +github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= +github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc= +github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= +github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= +github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= +github.com/sony/gobreaker v1.0.0 h1:feX5fGGXSl3dYd4aHZItw+FpHLvvoaqkawKjVNiFMNQ= +github.com/sony/gobreaker v1.0.0/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= +github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag 
v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/testcontainers/testcontainers-go v0.31.0 h1:W0VwIhcEVhRflwL9as3dhY6jXjVCA27AkmbnZ+UTh3U= +github.com/testcontainers/testcontainers-go v0.31.0/go.mod h1:D2lAoA0zUFiSY+eAflqK5mcUx/A5hrrORaEQrd0SefI= +github.com/testcontainers/testcontainers-go/modules/compose v0.31.0 h1:H74o3HisnApIDQx7sWibGzOl/Oo0By8DjyVeUf3qd6I= +github.com/testcontainers/testcontainers-go/modules/compose v0.31.0/go.mod h1:z1JAsvL2/pNFy40yJX0VX9Yk+hzOCIO5DydxBJHBbCY= +github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c= +github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0 h1:A/2tIdYXqUuVZeWy0Yq/PWKsXgebzMyh5mLbpNEMVUo= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0/go.mod h1:QXPc/i5yUEWWZ4lbe2WOam1kDdrXjGHRjl0Lzo7IQDU= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0 h1:REG5YX2omhgPmiIT7GLqmzWFnIksZsog1FHJ+Pi1xJE= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0/go.mod h1:OJLS+EYJo/BTViJj7EBG5deKLeQfYwVNW8HMS1qHAAo= +github.com/tink-crypto/tink-go/v2 v2.1.0 h1:QXFBguwMwTIaU17EgZpEJWsUSc60b1BAGTzBIoMdmok= +github.com/tink-crypto/tink-go/v2 v2.1.0/go.mod h1:y1TnYFt1i2eZVfx4OGc+C+EMp4CoKWAw2VSEuoicHHI= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5 h1:oZS8KCqAg62sxJkEq/Ppzqrb6EooqzWtL8Oaex7bc5c= +github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5/go.mod h1:vbbYqJlnswsbJqWUcJN8fKtBhnEgldDrcagTgnBVKKM= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= +github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs= +github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference 
v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiatechs/jsonata-go v1.8.5 h1:m1NaokPKD6LPaTPRl674EQz5mpkJvM3ymjdReDEP6/A= +github.com/xiatechs/jsonata-go v1.8.5/go.mod h1:yGEvviiftcdVfhSRhRSpgyTel89T58f+690iB0fp2Vk= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zillow/zfmt v1.0.1 h1:JLN5WaxoqqoEPUpVWer83uhXhDPAA2nZkfQqgKnWp+w= +github.com/zillow/zfmt v1.0.1/go.mod h1:0PpKh4rWh+5Ghr2bbuN5UvEcqEz6PkHfE0Idgjyxy7Y= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1/go.mod h1:GnOaBaFQ2we3b9AGWJpsBa7v1S5RlQzlC3O7dRMxZhM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= +go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0/go.mod h1:UVAO61+umUsHLtYb8KXXRoHtxUkdOPkYidzW3gipRLQ= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0 h1:wNMDy/LVGLj2h3p6zg4d0gypKfWKSWI14E1C4smOgl8= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0/go.mod h1:YfbDdXAAkemWJK3H/DshvlrxqFB2rtW4rY6ky/3x/H0= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 h1:cl5P5/GIfFh4t6xyruOgJP5QiA1pw4fYYdv6nc6CBWw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0/go.mod h1:zgBdWWAu7oEEMC06MMKc5NLbA/1YDXV1sMpSqEeLQLg= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 h1:tIqheXEFWAZ7O8A7m+J0aPTmpJN3YQ7qetUAdkkkKpk= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0/go.mod h1:nUeKExfxAQVbiVFn32YXpXZZHZ61Cc3s3Rn1pDBGAb0= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkEZCJWobwBqMwC0cwCq8/wkkRy/OowZg5OArWZrM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I= +go.opentelemetry.io/otel/exporters/prometheus v0.42.0 h1:jwV9iQdvp38fxXi8ZC+lNpxjK16MRcZlpDYvbuO1FiA= +go.opentelemetry.io/otel/exporters/prometheus v0.42.0/go.mod 
h1:f3bYiqNqhoPxkvI2LrXqQVC546K7BuRDL/kKuxkujhA= +go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= +go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= +go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= +go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= +go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0= +go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q= +go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= +go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= +go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= +go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= +go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= +golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o= +golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ= +golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY= +google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= +google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0= +google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa/go.mod h1:CnZenrTdRJb7jc+jOm0Rkywq+9wh0QC4U8tyiRbEPPM= +google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:RFiFrvy37/mpSpdySBDrUdipW/dHwsRwh3J3+A9VgT4= +google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= +google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= +google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y= +gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +k8s.io/api v0.29.2 h1:hBC7B9+MU+ptchxEqTNW2DkUosJpp1P+Wn6YncZ474A= +k8s.io/api v0.29.2/go.mod h1:sdIaaKuU7P44aoyyLlikSLayT6Vb7bvJNCX105xZXY0= +k8s.io/apimachinery v0.29.2 h1:EWGpfJ856oj11C52NRCHuU7rFDwxev48z+6DSlGNsV8= +k8s.io/apimachinery v0.29.2/go.mod h1:6HVkd1FwxIagpYrHSwJlQqZI3G9LfYWRPAkUvLnXTKU= +k8s.io/apiserver v0.29.2 h1:+Z9S0dSNr+CjnVXQePG8TcBWHr3Q7BmAr7NraHvsMiQ= +k8s.io/apiserver v0.29.2/go.mod h1:B0LieKVoyU7ykQvPFm7XSdIHaCHSzCzQWPFa5bqbeMQ= +k8s.io/client-go v0.29.2 h1:FEg85el1TeZp+/vYJM7hkDlSTFZ+c5nnK44DJ4FyoRg= +k8s.io/client-go v0.29.2/go.mod h1:knlvFZE58VpqbQpJNbCbctTVXcd35mMyAAwBdpt4jrA= +k8s.io/klog/v2 v2.110.1 h1:U/Af64HJf7FcwMcXyKm2RPM22WZzyR7OSpYj5tg3cL0= +k8s.io/klog/v2 v2.110.1/go.mod h1:YGtd1984u+GgbuZ7e08/yBuAfKLSO0+uR1Fhi6ExXjo= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 h1:aVUu9fTY98ivBPKR9Y5w/AuzbMm96cd3YHRTU83I780= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00/go.mod h1:AsvuZPBlUDVuCdzJ87iajxtXuR9oktsTctW/R9wwouA= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= +tags.cncf.io/container-device-interface v0.6.2 h1:dThE6dtp/93ZDGhqaED2Pu374SOeUkBfuvkLuiTdwzg= +tags.cncf.io/container-device-interface v0.6.2/go.mod h1:Shusyhjs1A5Na/kqPVLL0KqnHQHuunol9LFeUNkuGVE= diff --git a/example/producer_avro/main.go b/example/producer_avro/main.go new file mode 100644 index 0000000..f9472a5 --- /dev/null +++ b/example/producer_avro/main.go @@ -0,0 +1,55 @@ +package main + +import ( + "context" + _ "embed" + "log" + "math/rand" + "time" + + "github.com/zillow/zkafka" +) + +//go:embed dummy_event.avsc +var dummyEventSchema string + +func main() { + ctx := context.Background() + writer, err := zkafka.NewClient(zkafka.Config{ + BootstrapServers: []string{"localhost:29092"}, + 
}).Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: "example", + Topic: "zkafka-example-topic", + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + // This likely isn't needed in production, where a typical workflow registers the + // schema ahead of time. For this local example, auto-registration saves that setup step. + AutoRegisterSchemas: true, + // When using the avro schema registry formatter, you must specify the schema. Here we pass + // the same schema that was used to generate the Go type. + // The heetch-generated struct also embeds the schema (and, unlike some other generators, + // it isn't lossy; one library dropped default values, for example), so that copy could be used instead. + Schema: dummyEventSchema, + }, + }, + }) + randomNames := []string{"stewy", "lydia", "asif", "mike", "justin"} + if err != nil { + log.Panic(err) + } + for { + event := DummyEvent{ + IntField: rand.Intn(100), + StringField: randomNames[rand.Intn(len(randomNames))], + } + + resp, err := writer.Write(ctx, event) + if err != nil { + log.Panic(err) + } + log.Printf("resp: %+v\n", resp) + time.Sleep(time.Second) + } +}
diff --git a/example/worker_avro/dummy_event.avsc b/example/worker_avro/dummy_event.avsc new file mode 100644 index 0000000..03ea6e7 --- /dev/null +++ b/example/worker_avro/dummy_event.avsc @@ -0,0 +1,11 @@ +{ + "type": "record", + "name": "DummyEvent", + "fields": [ + {"name": "IntField", "type": "int"}, + {"name": "DoubleField", "type": "double"}, + {"name": "StringField", "type": "string"}, + {"name": "BoolField", "type": "boolean"}, + {"name": "BytesField", "type": "bytes"} + ] +} \ No newline at end of file
diff --git a/example/worker_avro/dummy_event_gen.go b/example/worker_avro/dummy_event_gen.go new file mode 100644 index 0000000..e5245e1 --- /dev/null +++ b/example/worker_avro/dummy_event_gen.go @@ -0,0 +1,29 @@ +// Code generated by avrogen. DO NOT EDIT. + +package main + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte +} + +// AvroRecord implements the avro.AvroRecord interface. +func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +}
diff --git a/example/worker_avro/go.mod b/example/worker_avro/go.mod new file mode 100644 index 0000000..a5646ae --- /dev/null +++ b/example/worker_avro/go.mod @@ -0,0 +1,38 @@ +module github.com/zillow/zkafka/example/worker_avro + +go 1.23.1 + +replace github.com/zillow/zkafka v1.0.0 => ../..
+ +require ( + github.com/google/uuid v1.6.0 + github.com/heetch/avro v0.4.5 + github.com/zillow/zkafka v1.0.0 +) + +require ( + github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/bufbuild/protocompile v0.8.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect + github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/hamba/avro/v2 v2.20.1 // indirect + github.com/invopop/jsonschema v0.12.0 // indirect + github.com/jhump/protoreflect v1.15.6 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 // indirect + github.com/sony/gobreaker v1.0.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect + github.com/zillow/zfmt v1.0.1 // indirect + go.opentelemetry.io/otel v1.28.0 // indirect + go.opentelemetry.io/otel/trace v1.28.0 // indirect + golang.org/x/sync v0.7.0 // indirect + google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa // indirect + google.golang.org/protobuf v1.34.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/example/worker_avro/go.sum b/example/worker_avro/go.sum new file mode 100644 index 0000000..ef03f36 --- /dev/null +++ b/example/worker_avro/go.sum @@ -0,0 +1,507 @@ +cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM= +cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU= +cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= +github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0 h1:DRiANoJTiW6obBQe3SqZizkuV1PEgfiiGivmVocDy64= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0/go.mod h1:qLIye2hwb/ZouqhpSD9Zn3SJipvpEnz1Ywl3VUk9Y0s= 
+github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= +github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8= +github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= +github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930= +github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= +github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= +github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= +github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= +github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10/go.mod h1:6t3sucOaYDwDssHQa0ojH1RpmVmF5/jArkye1b2FKMI= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 h1:FVJ0r5XTHSmIHJV6KuDmdYhEpvlHpiSd38RQWhut5J4= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1/go.mod h1:zusuAeqezXzAB24LGuzuekqMAEgWkVYukBec3kr3jUg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod 
h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1 h1:SBn4I0fJXF9FYOVRSVMWuhvEKoAHDikjGpS3wlmw5DE= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1/go.mod h1:2snWQJQUKsbN66vAawJuOGX7dr37pfOq9hb0tZDGIqQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n2HZPkcKgPAi1phU= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= +github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= +github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bufbuild/protocompile v0.8.0 h1:9Kp1q6OkS9L4nM3FYbr8vlJnEwtbpDPQlQOVXfR+78s= +github.com/bufbuild/protocompile v0.8.0/go.mod h1:+Etjg4guZoAqzVk2czwEQP12yaxLJ8DxuqCJ9qHdH94= +github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= +github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c= +github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/compose-spec/compose-go/v2 v2.1.0 h1:qdW2qISQlCQG8v1O2TChcdxgAWTUGgUX/CPSO+ES9+E= +github.com/compose-spec/compose-go/v2 v2.1.0/go.mod h1:bEPizBkIojlQ20pi2vNluBa58tevvj0Y18oUSHPyfdc= +github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 h1:PM18lA9g6u6Qcz06DpXmGRlxXTvWlHqnlAkQi1chPUo= +github.com/confluentinc/confluent-kafka-go/v2 v2.5.0/go.mod h1:Hyo+IIQ/tmsfkOcRP8T6VlSeOW3T33v0Me8Xvq4u90Y= +github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro= +github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk= +github.com/containerd/containerd v1.7.15 h1:afEHXdil9iAm03BmhjzKyXnnEBtjaLJefdU7DV0IFes= +github.com/containerd/containerd v1.7.15/go.mod h1:ISzRRTMF8EXNpJlTzyr2XMhN+j9K302C21/+cr3kUnY= +github.com/containerd/continuity v0.4.3 
h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8= +github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/ttrpc v1.2.3 h1:4jlhbXIGvijRtNC8F/5CpuJZ7yKOBFGFOOXg1bkISz0= +github.com/containerd/ttrpc v1.2.3/go.mod h1:ieWsXucbb8Mj9PH0rXCw1i8IunRbbAiDkpXkbfflWBM= +github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4= +github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= +github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/buildx v0.14.0 h1:FxqcfE7xgeEC4oQlKLpuvfobRDVDXrHE3jByM+mdyqk= +github.com/docker/buildx v0.14.0/go.mod h1:Vy/2lC9QsJvo33+7KKkN/GDE5WxnVqW0/dpcN7ZqPJY= +github.com/docker/cli v26.1.0+incompatible h1:+nwRy8Ocd8cYNQ60mozDDICICD8aoFGtlPXifX/UQ3Y= +github.com/docker/cli v26.1.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/compose/v2 v2.27.0 h1:FKyClQdErCxUZULC2zo6Jn5ve+epFPe/Y0HaxjmUzNg= +github.com/docker/compose/v2 v2.27.0/go.mod h1:uaqwmY6haO8wXWHk+LAsqqDapX6boH4izRKqj/E7+Bo= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v26.1.0+incompatible h1:W1G9MPNbskA6VZWL7b3ZljTh0pXI68FpINx0GKaOdaM= +github.com/docker/docker v26.1.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8= +github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8= +github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJtLmY22n99HaZTz+r2Z51xUPi01m3wg= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg= +github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g= 
+github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/frankban/quicktest v1.14.0 h1:+cqqvzZV87b4adx/5ayVOaYZ2CrvM4ejQvUdBzPPUss= +github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= +github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvnkw= +github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc= +github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= +github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= +github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= +github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= +github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE= +github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= +github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.4 
h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/cel-go v0.20.1 h1:nDx9r8S3L4pE61eDdt8igGj8rf5kjYR3ILxWIpWNi84= +github.com/google/cel-go v0.20.1/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg= +github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= +github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA= +github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hamba/avro/v2 v2.20.1 h1:3WByQiVn7wT7d27WQq6pvBRC00FVOrniP6u67FLA/2E= +github.com/hamba/avro/v2 v2.20.1/go.mod h1:xHiKXbISpb3Ovc809XdzWow+XGTn+Oyf/F9aZbTLAig= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.7.5 h1:bJj+Pj19UZMIweq/iie+1u5YCdGrnxCT9yvm0e+Nd5M= +github.com/hashicorp/go-retryablehttp v0.7.5/go.mod 
h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 h1:om4Al8Oy7kCm/B86rLCLah4Dt5Aa0Fr5rYBG60OzwHQ= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= +github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= +github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= +github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/vault/api v1.12.1 h1:WzGN4X5jrJdNO39g6Sa55djNio3I9DxEBOTmCZE7tm0= +github.com/hashicorp/vault/api v1.12.1/go.mod h1:1pqP/sErScodde+ybJCyP+ONC4jzEg7Dmawg/QLWo1k= +github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU= +github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws= +github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= +github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= +github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY= +github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= +github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= +github.com/jhump/protoreflect v1.15.6 h1:WMYJbw2Wo+KOWwZFvgY0jMoVHM6i4XIvRs2RcBj5VmI= +github.com/jhump/protoreflect v1.15.6/go.mod h1:jCHoyYQIJnaabEYnbGwyo9hUqfyUMTbJw/tAut5t97E= +github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= +github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod 
h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= +github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= +github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/buildkit v0.13.1 h1:L8afOFhPq2RPJJSr/VyzbufwID7jquZVB7oFHbPRcPE= +github.com/moby/buildkit v0.13.1/go.mod h1:aNmNQKLBFYAOFuzQjR3VA27/FijlvtBD1pjNwTSN37k= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg= +github.com/moby/locker v1.0.1/go.mod 
h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= +github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g= +github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI= +github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= +github.com/moby/sys/symlink v0.2.0 h1:tk1rOM+Ljp0nFmfOIBtlV3rTDlWOwFRhjEeAhZB0nZc= +github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs= +github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= +github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= +github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o= +github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= +github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 h1:uIkTLo0AGRc8l7h5l9r+GcYi9qfVPt6lD4/bhmzfiKo= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= +github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= +github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc= +github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= +github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 
h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= +github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= +github.com/sony/gobreaker v1.0.0 h1:feX5fGGXSl3dYd4aHZItw+FpHLvvoaqkawKjVNiFMNQ= +github.com/sony/gobreaker v1.0.0/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= +github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/testcontainers/testcontainers-go v0.31.0 h1:W0VwIhcEVhRflwL9as3dhY6jXjVCA27AkmbnZ+UTh3U= +github.com/testcontainers/testcontainers-go v0.31.0/go.mod h1:D2lAoA0zUFiSY+eAflqK5mcUx/A5hrrORaEQrd0SefI= +github.com/testcontainers/testcontainers-go/modules/compose v0.31.0 h1:H74o3HisnApIDQx7sWibGzOl/Oo0By8DjyVeUf3qd6I= +github.com/testcontainers/testcontainers-go/modules/compose v0.31.0/go.mod h1:z1JAsvL2/pNFy40yJX0VX9Yk+hzOCIO5DydxBJHBbCY= +github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c= +github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0 h1:A/2tIdYXqUuVZeWy0Yq/PWKsXgebzMyh5mLbpNEMVUo= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0/go.mod h1:QXPc/i5yUEWWZ4lbe2WOam1kDdrXjGHRjl0Lzo7IQDU= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0 h1:REG5YX2omhgPmiIT7GLqmzWFnIksZsog1FHJ+Pi1xJE= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0/go.mod h1:OJLS+EYJo/BTViJj7EBG5deKLeQfYwVNW8HMS1qHAAo= +github.com/tink-crypto/tink-go/v2 v2.1.0 h1:QXFBguwMwTIaU17EgZpEJWsUSc60b1BAGTzBIoMdmok= +github.com/tink-crypto/tink-go/v2 v2.1.0/go.mod h1:y1TnYFt1i2eZVfx4OGc+C+EMp4CoKWAw2VSEuoicHHI= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5 h1:oZS8KCqAg62sxJkEq/Ppzqrb6EooqzWtL8Oaex7bc5c= +github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5/go.mod h1:vbbYqJlnswsbJqWUcJN8fKtBhnEgldDrcagTgnBVKKM= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= +github.com/tonistiigi/vt100 
v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs= +github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc= +github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc= +github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiatechs/jsonata-go v1.8.5 h1:m1NaokPKD6LPaTPRl674EQz5mpkJvM3ymjdReDEP6/A= +github.com/xiatechs/jsonata-go v1.8.5/go.mod h1:yGEvviiftcdVfhSRhRSpgyTel89T58f+690iB0fp2Vk= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zillow/zfmt v1.0.1 h1:JLN5WaxoqqoEPUpVWer83uhXhDPAA2nZkfQqgKnWp+w= +github.com/zillow/zfmt v1.0.1/go.mod h1:0PpKh4rWh+5Ghr2bbuN5UvEcqEz6PkHfE0Idgjyxy7Y= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1/go.mod h1:GnOaBaFQ2we3b9AGWJpsBa7v1S5RlQzlC3O7dRMxZhM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= +go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0/go.mod h1:UVAO61+umUsHLtYb8KXXRoHtxUkdOPkYidzW3gipRLQ= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0 h1:wNMDy/LVGLj2h3p6zg4d0gypKfWKSWI14E1C4smOgl8= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0/go.mod h1:YfbDdXAAkemWJK3H/DshvlrxqFB2rtW4rY6ky/3x/H0= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 h1:cl5P5/GIfFh4t6xyruOgJP5QiA1pw4fYYdv6nc6CBWw= 
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0/go.mod h1:zgBdWWAu7oEEMC06MMKc5NLbA/1YDXV1sMpSqEeLQLg= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 h1:tIqheXEFWAZ7O8A7m+J0aPTmpJN3YQ7qetUAdkkkKpk= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0/go.mod h1:nUeKExfxAQVbiVFn32YXpXZZHZ61Cc3s3Rn1pDBGAb0= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkEZCJWobwBqMwC0cwCq8/wkkRy/OowZg5OArWZrM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I= +go.opentelemetry.io/otel/exporters/prometheus v0.42.0 h1:jwV9iQdvp38fxXi8ZC+lNpxjK16MRcZlpDYvbuO1FiA= +go.opentelemetry.io/otel/exporters/prometheus v0.42.0/go.mod h1:f3bYiqNqhoPxkvI2LrXqQVC546K7BuRDL/kKuxkujhA= +go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= +go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= +go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= +go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= +go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0= +go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q= +go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= +go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= +go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= +go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= +go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= +golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= +golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o= +golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ= +golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY= +google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg= 
+google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= +google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0= +google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa/go.mod h1:CnZenrTdRJb7jc+jOm0Rkywq+9wh0QC4U8tyiRbEPPM= +google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:RFiFrvy37/mpSpdySBDrUdipW/dHwsRwh3J3+A9VgT4= +google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= +google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= +google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y= +gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +k8s.io/api v0.29.2 h1:hBC7B9+MU+ptchxEqTNW2DkUosJpp1P+Wn6YncZ474A= +k8s.io/api v0.29.2/go.mod h1:sdIaaKuU7P44aoyyLlikSLayT6Vb7bvJNCX105xZXY0= +k8s.io/apimachinery v0.29.2 h1:EWGpfJ856oj11C52NRCHuU7rFDwxev48z+6DSlGNsV8= +k8s.io/apimachinery v0.29.2/go.mod h1:6HVkd1FwxIagpYrHSwJlQqZI3G9LfYWRPAkUvLnXTKU= +k8s.io/apiserver v0.29.2 h1:+Z9S0dSNr+CjnVXQePG8TcBWHr3Q7BmAr7NraHvsMiQ= +k8s.io/apiserver v0.29.2/go.mod h1:B0LieKVoyU7ykQvPFm7XSdIHaCHSzCzQWPFa5bqbeMQ= +k8s.io/client-go v0.29.2 h1:FEg85el1TeZp+/vYJM7hkDlSTFZ+c5nnK44DJ4FyoRg= +k8s.io/client-go v0.29.2/go.mod h1:knlvFZE58VpqbQpJNbCbctTVXcd35mMyAAwBdpt4jrA= +k8s.io/klog/v2 v2.110.1 h1:U/Af64HJf7FcwMcXyKm2RPM22WZzyR7OSpYj5tg3cL0= +k8s.io/klog/v2 v2.110.1/go.mod h1:YGtd1984u+GgbuZ7e08/yBuAfKLSO0+uR1Fhi6ExXjo= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 h1:aVUu9fTY98ivBPKR9Y5w/AuzbMm96cd3YHRTU83I780= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00/go.mod h1:AsvuZPBlUDVuCdzJ87iajxtXuR9oktsTctW/R9wwouA= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= +sigs.k8s.io/json 
v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= +tags.cncf.io/container-device-interface v0.6.2 h1:dThE6dtp/93ZDGhqaED2Pu374SOeUkBfuvkLuiTdwzg= +tags.cncf.io/container-device-interface v0.6.2/go.mod h1:Shusyhjs1A5Na/kqPVLL0KqnHQHuunol9LFeUNkuGVE= diff --git a/example/worker_avro/main.go b/example/worker_avro/main.go new file mode 100644 index 0000000..66c185e --- /dev/null +++ b/example/worker_avro/main.go @@ -0,0 +1,85 @@ +package main + +import ( + "context" + _ "embed" + "log" + "os" + "os/signal" + "syscall" + "time" + + "github.com/google/uuid" + "github.com/zillow/zkafka" +) + +// Demonstrates reading from a topic via the zkafka.Work struct which is more convenient, typically, than using the consumer directly +func main() { + ctx := context.Background() + client := zkafka.NewClient(zkafka.Config{ + BootstrapServers: []string{"localhost:29092"}, + }, + ) + // It's important to close the client after consumption to gracefully leave the consumer group + // (this commits completed work, and informs the broker that this consumer is leaving the group which yields a faster rebalance) + defer client.Close() + + topicConfig := zkafka.ConsumerTopicConfig{ + // ClientID is used for caching inside zkafka, and observability within streamz dashboards. But it's not an important + // part of consumer group semantics. A typical convention is to use the service name executing the kafka worker + ClientID: "service-name", + // GroupID is the consumer group. If multiple instances of the same consumer group read messages for the same + // topic the topic's partitions will be split between the collection. The broker remembers + // what offset has been committed for a consumer group, and therefore work can be picked up where it was left off + // across releases + GroupID: uuid.NewString(), + //GroupID: "zkafka/example/example-consumer", + Topic: "zkafka-example-topic", + // The formatter is registered internally to the `zkafka.Message` and used when calling `msg.Decode()` + // string fmt can be used for both binary and pure strings encoded in the value field of the kafka message. Other options include + // json, proto, avro, etc. + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Deserialization: zkafka.DeserializationConfig{}, + }, + AdditionalProps: map[string]any{ + // only important the first time a consumer group connects. Subsequent connections will start + // consuming messages + "auto.offset.reset": "earliest", + }, + } + // optionally set up a channel to signal when worker shutdown should occur. + // A nil channel is also acceptable, but this example demonstrates how to make utility of the signal. + // The channel should be closed, instead of simply written to, to properly broadcast to the separate worker threads. + stopCh := make(chan os.Signal, 1) + signal.Notify(stopCh, os.Interrupt, syscall.SIGTERM) + shutdown := make(chan struct{}) + + go func() { + <-stopCh + close(shutdown) + }() + + wf := zkafka.NewWorkFactory(client) + // Register a processor which is executed per message. + // Speedup is used to create multiple processor goroutines. 
Order is still maintained with this setup by way of `virtual partitions` + work := wf.CreateWithFunc(topicConfig, Process, zkafka.Speedup(1)) + if err := work.Run(ctx, shutdown); err != nil { + log.Panic(err) + } +} + +func Process(_ context.Context, msg *zkafka.Message) error { + // sleep to simulate the work of processing the message + time.Sleep(100 * time.Millisecond) + event := DummyEvent{} + err := msg.Decode(&event) + if err != nil { + log.Printf("error occurred during processing: %s", err) + return err + } + + log.Printf(" offset: %d, partition: %d. event.IntField: %d, event.StringField: %s\n", msg.Offset, msg.Partition, event.IntField, event.StringField) + return nil +} diff --git a/formatter.go b/formatter.go index ac08af4..a8b55d2 100644 --- a/formatter.go +++ b/formatter.go @@ -2,6 +2,7 @@ package zkafka import ( "errors" + "fmt" "github.com/zillow/zfmt" ) @@ -9,9 +10,21 @@ import ( const ( // CustomFmt indicates that the user would pass in their own Formatter later CustomFmt zfmt.FormatterType = "custom" + // AvroSchemaRegistry uses confluent's schema registry. It encodes a schemaID as the first 5 bytes and then avro serializes (binary) + // for the remaining part of the payload. It is the successor to `avro_schema` which ships with zfmt. + AvroSchemaRegistry zfmt.FormatterType = "avro_schema_registry" + + // ProtoSchemaRegistry uses confluent's schema registry. It encodes a schemaID as well as the message types as + // a payload prefix and then proto serializes (binary) for the remaining part of the payload. + // zfmt.ProtoSchemaDeprecatedFmt had a bug in its implementation and didn't work properly with confluent + ProtoSchemaRegistry zfmt.FormatterType = "proto_schema_registry" + + // JSONSchemaRegistry uses confluent's schema registry. It encodes a schemaID as the first 5 bytes and then json serializes (human readable) + // for the remaining part of the payload. It is the successor to `json_schema` which ships with zfmt. + JSONSchemaRegistry zfmt.FormatterType = "json_schema_registry" ) -var errMissingFmtter = errors.New("custom formatter is missing, did you forget to call WithFormatter()") +var errMissingFormatter = errors.New("custom formatter is missing, did you forget to call WithFormatter()") // Formatter allows the user to extend formatting capability to unsupported data types type Formatter interface { @@ -19,16 +32,146 @@ type Formatter interface { Unmarshal(b []byte, v any) error } -// noopFormatter is a formatter that returns error when called. The error will remind the user +type marshReq struct { + // topic is the kafka topic being written to + topic string + // subject is the data to be marshalled + subject any + // schema is currently only used for avro schematizations. It is necessary, + // because the confluent implementation reflects on the subject to get the schema to use for + // communicating with schema-registry, and backward compatible evolutions fail because of data loss during reflection. + // For example, if a field has a default value, the reflection doesn't pick this up. + schema string +} + +type unmarshReq struct { + // topic is the kafka topic being read from + topic string + // data is the message value which will be unmarshalled to a type + data []byte + // target is the struct which is to be hydrated by the contents of data + target any +} + +var _ kFormatter = (*avroSchemaRegistryFormatter)(nil) +var _ kFormatter = (*zfmtShim)(nil) + +// kFormatter is zkafka's special formatter. +// It extends zfmt options, and works with schema registry.
+type kFormatter interface { + marshall(req marshReq) ([]byte, error) + unmarshal(req unmarshReq) error +} + +// zfmtShim is a shim type which allows +// zfmt formatters to work with the kFormatter interface +type zfmtShim struct { + F zfmt.Formatter +} + +func (f zfmtShim) marshall(req marshReq) ([]byte, error) { + return f.F.Marshall(req.subject) +} + +func (f zfmtShim) unmarshal(req unmarshReq) error { + return f.F.Unmarshal(req.data, req.target) +} + +// errFormatter is a formatter that returns an error when called. The error will remind the user // to provide appropriate implementation -type noopFormatter struct{} +type errFormatter struct{} + +// marshall returns error with reminder +func (f errFormatter) marshall(_ marshReq) ([]byte, error) { + return nil, errMissingFormatter +} + +// unmarshal returns error with reminder +func (f errFormatter) unmarshal(_ unmarshReq) error { + return errMissingFormatter +} + +type avroSchemaRegistryFormatter struct { + afmt avroFmt + f zfmt.SchematizedAvroFormatter +} + +func newAvroSchemaRegistryFormatter(afmt avroFmt) (avroSchemaRegistryFormatter, error) { + return avroSchemaRegistryFormatter{ + afmt: afmt, + }, nil +} + +func (f avroSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { + if req.schema == "" { + return nil, errors.New("avro schema is required for schema registry formatter") + } + id, err := f.afmt.GetID(req.topic, req.schema) + if err != nil { + return nil, fmt.Errorf("failed to get avro schema id for topic %s: %w", req.topic, err) + } + f.f.SchemaID = id + data, err := f.f.Marshall(req.subject) + if err != nil { + return nil, fmt.Errorf("failed to marshal avro schema for topic %s: %w", req.topic, err) + } + return data, nil +} + +func (f avroSchemaRegistryFormatter) unmarshal(req unmarshReq) error { + err := f.afmt.Deserialize(req.topic, req.data, &req.target) + if err != nil { + return fmt.Errorf("failed to deserialize to confluent schema registry avro type: %w", err) + } + return nil +} + +type protoSchemaRegistryFormatter struct { + pfmt protoFmt +} + +func newProtoSchemaRegistryFormatter(pfmt protoFmt) protoSchemaRegistryFormatter { + return protoSchemaRegistryFormatter{ + pfmt: pfmt, + } +} + +func (f protoSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { + msgBytes, err := f.pfmt.ser.Serialize(req.topic, req.subject) + if err != nil { + return nil, fmt.Errorf("failed to proto serialize: %w", err) + } + return msgBytes, nil +} + +func (f protoSchemaRegistryFormatter) unmarshal(req unmarshReq) error { + if err := f.pfmt.deser.DeserializeInto(req.topic, req.data, req.target); err != nil { + return fmt.Errorf("failed to proto deserialize: %w", err) + } + return nil +} + +type jsonSchemaRegistryFormatter struct { + jfmt jsonFmt +} + +func newJsonSchemaRegistryFormatter(jfmt jsonFmt) jsonSchemaRegistryFormatter { + return jsonSchemaRegistryFormatter{ + jfmt: jfmt, + } +} -// Marshall returns error with reminder -func (f noopFormatter) Marshall(_ any) ([]byte, error) { - return nil, errMissingFmtter +func (f jsonSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { + msgBytes, err := f.jfmt.ser.Serialize(req.topic, req.subject) + if err != nil { + return nil, fmt.Errorf("failed to json schema serialize: %w", err) + } + return msgBytes, nil } -// Unmarshal returns error with reminder -func (f noopFormatter) Unmarshal(_ []byte, _ any) error { - return errMissingFmtter +func (f jsonSchemaRegistryFormatter) unmarshal(req unmarshReq) error { + if err := f.jfmt.deser.DeserializeInto(req.topic, req.data,
req.target); err != nil { + return fmt.Errorf("failed to json schema deserialize: %w", err) + } + return nil } diff --git a/formatter_test.go b/formatter_test.go index d9e6aa8..8c464b1 100644 --- a/formatter_test.go +++ b/formatter_test.go @@ -8,11 +8,11 @@ import ( func TestNoopFormatter_Marshall_Unmarshal(t *testing.T) { defer recoverThenFail(t) - fmtter := noopFormatter{} - _, err := fmtter.Marshall("anything") - require.ErrorIs(t, err, errMissingFmtter) + formatter := errFormatter{} + _, err := formatter.marshall(marshReq{subject: "anything"}) + require.ErrorIs(t, err, errMissingFormatter) var someInt int32 - err = fmtter.Unmarshal([]byte("test"), &someInt) - require.ErrorIs(t, err, errMissingFmtter) + err = formatter.unmarshal(unmarshReq{data: []byte("test"), target: &someInt}) + require.ErrorIs(t, err, errMissingFormatter) } diff --git a/go.mod b/go.mod index af79f0d..8d4abb0 100644 --- a/go.mod +++ b/go.mod @@ -7,23 +7,37 @@ require ( github.com/golang/mock v1.6.0 github.com/google/go-cmp v0.6.0 github.com/google/uuid v1.6.0 + github.com/heetch/avro v0.4.5 github.com/sony/gobreaker v1.0.0 github.com/stretchr/testify v1.9.0 github.com/zillow/zfmt v1.0.1 go.opentelemetry.io/otel v1.28.0 go.opentelemetry.io/otel/trace v1.28.0 golang.org/x/sync v0.7.0 + google.golang.org/protobuf v1.34.2 ) require ( github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/bufbuild/protocompile v0.8.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/protobuf v1.5.4 // indirect - github.com/heetch/avro v0.4.5 // indirect + github.com/hamba/avro/v2 v2.20.1 // indirect + github.com/invopop/jsonschema v0.12.0 // indirect + github.com/jhump/protoreflect v1.15.6 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect go.opentelemetry.io/otel/metric v1.28.0 // indirect - google.golang.org/protobuf v1.34.2 // indirect + google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 436b46a..ba5f7cf 100644 --- a/go.sum +++ b/go.sum @@ -1,11 +1,28 @@ +cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM= +cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU= +cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= 
github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0 h1:DRiANoJTiW6obBQe3SqZizkuV1PEgfiiGivmVocDy64= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0/go.mod h1:qLIye2hwb/ZouqhpSD9Zn3SJipvpEnz1Ywl3VUk9Y0s= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= @@ -16,6 +33,8 @@ github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpH github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930= github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= @@ -34,6 +53,8 @@ github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1x github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1 h1:SBn4I0fJXF9FYOVRSVMWuhvEKoAHDikjGpS3wlmw5DE= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1/go.mod 
h1:2snWQJQUKsbN66vAawJuOGX7dr37pfOq9hb0tZDGIqQ= github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8= github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE= @@ -42,10 +63,18 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bufbuild/protocompile v0.8.0 h1:9Kp1q6OkS9L4nM3FYbr8vlJnEwtbpDPQlQOVXfR+78s= +github.com/bufbuild/protocompile v0.8.0/go.mod h1:+Etjg4guZoAqzVk2czwEQP12yaxLJ8DxuqCJ9qHdH94= github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c= +github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= @@ -105,6 +134,8 @@ github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvn github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc= github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= +github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -118,28 +149,43 @@ github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2Kv github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= 
github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= +github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/cel-go v0.20.1 h1:nDx9r8S3L4pE61eDdt8igGj8rf5kjYR3ILxWIpWNi84= +github.com/google/cel-go v0.20.1/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg= github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA= +github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= @@ -148,14 +194,30 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaW github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= 
github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hamba/avro/v2 v2.20.1 h1:3WByQiVn7wT7d27WQq6pvBRC00FVOrniP6u67FLA/2E= +github.com/hamba/avro/v2 v2.20.1/go.mod h1:xHiKXbISpb3Ovc809XdzWow+XGTn+Oyf/F9aZbTLAig= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.7.5 h1:bJj+Pj19UZMIweq/iie+1u5YCdGrnxCT9yvm0e+Nd5M= +github.com/hashicorp/go-retryablehttp v0.7.5/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 h1:om4Al8Oy7kCm/B86rLCLah4Dt5Aa0Fr5rYBG60OzwHQ= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= +github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/vault/api v1.12.1 h1:WzGN4X5jrJdNO39g6Sa55djNio3I9DxEBOTmCZE7tm0= +github.com/hashicorp/vault/api v1.12.1/go.mod h1:1pqP/sErScodde+ybJCyP+ONC4jzEg7Dmawg/QLWo1k= github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU= github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws= github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= @@ -164,6 +226,10 @@ github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= +github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= +github.com/jhump/protoreflect v1.15.6 h1:WMYJbw2Wo+KOWwZFvgY0jMoVHM6i4XIvRs2RcBj5VmI= +github.com/jhump/protoreflect v1.15.6/go.mod h1:jCHoyYQIJnaabEYnbGwyo9hUqfyUMTbJw/tAut5t97E= github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= github.com/josharian/intern 
v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -178,6 +244,8 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -200,6 +268,8 @@ github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= @@ -226,6 +296,7 @@ github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= @@ -242,6 +313,8 @@ github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQ github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -262,6 +335,10 @@ github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= 
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= +github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 h1:uIkTLo0AGRc8l7h5l9r+GcYi9qfVPt6lD4/bhmzfiKo= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= @@ -272,6 +349,8 @@ github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11 github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= @@ -282,6 +361,8 @@ github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= @@ -294,6 +375,12 @@ github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4D github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0 h1:A/2tIdYXqUuVZeWy0Yq/PWKsXgebzMyh5mLbpNEMVUo= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0/go.mod h1:QXPc/i5yUEWWZ4lbe2WOam1kDdrXjGHRjl0Lzo7IQDU= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0 h1:REG5YX2omhgPmiIT7GLqmzWFnIksZsog1FHJ+Pi1xJE= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0/go.mod h1:OJLS+EYJo/BTViJj7EBG5deKLeQfYwVNW8HMS1qHAAo= +github.com/tink-crypto/tink-go/v2 v2.1.0 h1:QXFBguwMwTIaU17EgZpEJWsUSc60b1BAGTzBIoMdmok= +github.com/tink-crypto/tink-go/v2 v2.1.0/go.mod h1:y1TnYFt1i2eZVfx4OGc+C+EMp4CoKWAw2VSEuoicHHI= 
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= @@ -304,17 +391,23 @@ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs= github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc= +github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc= +github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiatechs/jsonata-go v1.8.5 h1:m1NaokPKD6LPaTPRl674EQz5mpkJvM3ymjdReDEP6/A= +github.com/xiatechs/jsonata-go v1.8.5/go.mod h1:yGEvviiftcdVfhSRhRSpgyTel89T58f+690iB0fp2Vk= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zillow/zfmt v1.0.1 h1:JLN5WaxoqqoEPUpVWer83uhXhDPAA2nZkfQqgKnWp+w= github.com/zillow/zfmt v1.0.1/go.mod h1:0PpKh4rWh+5Ghr2bbuN5UvEcqEz6PkHfE0Idgjyxy7Y= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A= @@ -389,6 +482,8 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY= +google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto 
v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0= diff --git a/heap_test.go b/heap_test.go index 2149110..36d7313 100644 --- a/heap_test.go +++ b/heap_test.go @@ -106,7 +106,7 @@ func Test_offsetHeap_SeekPop_DoesntImpactHeapOrdering(t *testing.T) { } got := heap.Pop() want := offsets[i] - require.Equal(t, want, got, "Expect pop to still pop minumums even after seek pops") + require.Equal(t, want, got, "Expect pop to still pop minimums even after seek pops") i++ } } diff --git a/lifecycle.go b/lifecycle.go index 3e5d414..44bd69e 100644 --- a/lifecycle.go +++ b/lifecycle.go @@ -9,9 +9,16 @@ import ( type LifecyclePostReadMeta struct { Topic string GroupID string - // Message that was read + // Message that was read (will be non nil) Message *Message } + +type LifecyclePostReadImmediateMeta struct { + // Message that was read (could be nil) + Message *Message + Err error +} + type LifecyclePreProcessingMeta struct { Topic string GroupID string @@ -47,9 +54,13 @@ type LifecyclePreWriteResp struct { } type LifecycleHooks struct { - // Called by work after reading a message, offers the ability to customize the context object (resulting context object passed to work processor) + // Called by work after reading a message (guaranteed non nil), offers the ability to customize the context object (resulting context object passed to work processor) PostRead func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) + // Called by work immediately after an attempt to read a message. Msg might be nil, if there was an error + // or no available messages. + PostReadImmediate func(ctx context.Context, meta LifecyclePostReadImmediateMeta) + // Called after receiving a message and before processing it. PreProcessing func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error) @@ -78,6 +89,32 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks { return hooks[0] } return LifecycleHooks{ + PostRead: func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) { + var allErrs error + + hookCtx := ctx + + for _, h := range hooks { + if h.PostRead != nil { + var err error + + hookCtx, err = h.PostRead(hookCtx, meta) + if err != nil { + allErrs = errors.Join(allErrs, err) + } + } + } + + return hookCtx, allErrs + + }, + PostReadImmediate: func(ctx context.Context, meta LifecyclePostReadImmediateMeta) { + for _, h := range hooks { + if h.PostRead != nil { + h.PostReadImmediate(ctx, meta) + } + } + }, PreProcessing: func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error) { var allErrs error @@ -96,7 +133,6 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks { return hookCtx, allErrs }, - PostProcessing: func(ctx context.Context, meta LifecyclePostProcessingMeta) error { var allErrs error @@ -111,7 +147,6 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks { return allErrs }, - PostAck: func(ctx context.Context, meta LifecyclePostAckMeta) error { var allErrs error @@ -126,5 +161,34 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks { return allErrs }, + PreWrite: func(ctx context.Context, meta LifecyclePreWriteMeta) (LifecyclePreWriteResp, error) { + var allErrs error + + out := LifecyclePreWriteResp{ + Headers: make(map[string][]byte), + } + for _, h := range hooks { + if h.PreProcessing != nil { + var err error + + resp, err := h.PreWrite(ctx, meta) + if err != nil { + allErrs = errors.Join(allErrs, err) + } + for k, v := range 
resp.Headers { + out.Headers[k] = v + } + } + } + + return out, allErrs + }, + PostFanout: func(ctx context.Context) { + for _, h := range hooks { + if h.PostRead != nil { + h.PostFanout(ctx) + } + } + }, } } diff --git a/lifecycle_test.go b/lifecycle_test.go index 2ba609e..180585e 100644 --- a/lifecycle_test.go +++ b/lifecycle_test.go @@ -11,6 +11,13 @@ func Test_LifecycleChainedHooksAreCalled(t *testing.T) { lhState := make(map[string]int) // Map from state to number of times called hooks1 := LifecycleHooks{ + PostRead: func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) { + lhState["hooks1-post-read"] += 1 + return ctx, nil + }, + PostReadImmediate: func(ctx context.Context, meta LifecyclePostReadImmediateMeta) { + lhState["hooks1-post-read-immediate"] += 1 + }, PreProcessing: func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error) { lhState["hooks1-pre-processing"] += 1 return ctx, nil @@ -23,9 +30,23 @@ func Test_LifecycleChainedHooksAreCalled(t *testing.T) { lhState["hooks1-post-ack"] += 1 return nil }, + PreWrite: func(ctx context.Context, meta LifecyclePreWriteMeta) (LifecyclePreWriteResp, error) { + lhState["hooks1-pre-write"] += 1 + return LifecyclePreWriteResp{}, nil + }, + PostFanout: func(ctx context.Context) { + lhState["hooks1-post-fanout"] += 1 + }, } hooks2 := LifecycleHooks{ + PostRead: func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) { + lhState["hooks2-post-read"] += 1 + return ctx, nil + }, + PostReadImmediate: func(ctx context.Context, meta LifecyclePostReadImmediateMeta) { + lhState["hooks2-post-read-immediate"] += 1 + }, PreProcessing: func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error) { lhState["hooks2-pre-processing"] += 1 return ctx, nil @@ -38,31 +59,134 @@ func Test_LifecycleChainedHooksAreCalled(t *testing.T) { lhState["hooks2-post-ack"] += 1 return nil }, + PreWrite: func(ctx context.Context, meta LifecyclePreWriteMeta) (LifecyclePreWriteResp, error) { + lhState["hooks2-pre-write"] += 1 + return LifecyclePreWriteResp{}, nil + }, + PostFanout: func(ctx context.Context) { + lhState["hooks2-post-fanout"] += 1 + }, } lh := ChainLifecycleHooks(hooks1, hooks2) lh.PreProcessing(context.Background(), LifecyclePreProcessingMeta{}) - require.Equal(t, 1, lhState["hooks1-pre-processing"], "hooks1-pre-processing not called") - require.Equal(t, 1, lhState["hooks2-pre-processing"], "hooks2-pre-processing not called") - require.Equal(t, 0, lhState["hooks1-post-processing"], "hooks1-post-processing called") - require.Equal(t, 0, lhState["hooks2-post-processing"], "hooks2-post-processing called") - require.Equal(t, 0, lhState["hooks1-post-ack"], "hooks1-post-ack called") - require.Equal(t, 0, lhState["hooks2-post-ack"], "hooks2-post-ack called") + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 0, lhState["hooks1-post-processing"]) + require.Equal(t, 0, lhState["hooks2-post-processing"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-post-read"]) + require.Equal(t, 0, lhState["hooks2-post-read"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, 
lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) lh.PostProcessing(context.Background(), LifecyclePostProcessingMeta{}) - require.Equal(t, 1, lhState["hooks1-pre-processing"], "hooks1-pre-processing not called") - require.Equal(t, 1, lhState["hooks2-pre-processing"], "hooks2-pre-processing not called") - require.Equal(t, 1, lhState["hooks1-post-processing"], "hooks1-post-processing not called") - require.Equal(t, 1, lhState["hooks2-post-processing"], "hooks2-post-processing not called") - require.Equal(t, 0, lhState["hooks1-post-ack"], "hooks1-post-ack called") - require.Equal(t, 0, lhState["hooks2-post-ack"], "hooks2-post-ack called") + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-post-read"]) + require.Equal(t, 0, lhState["hooks2-post-read"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PostRead(context.Background(), LifecyclePostReadMeta{}) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) lh.PostAck(context.Background(), LifecyclePostAckMeta{}) - require.Equal(t, 1, lhState["hooks1-pre-processing"], "hooks1-pre-processing not called") - require.Equal(t, 1, lhState["hooks2-pre-processing"], "hooks2-pre-processing not called") - require.Equal(t, 1, lhState["hooks1-post-processing"], "hooks1-post-processing not called") - require.Equal(t, 1, lhState["hooks2-post-processing"], "hooks2-post-processing not called") - require.Equal(t, 1, lhState["hooks1-post-ack"], "hooks1-post-ack not called") - require.Equal(t, 1, lhState["hooks2-post-ack"], "hooks2-post-ack not called") + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + 
require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PreWrite(context.Background(), LifecyclePreWriteMeta{}) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-pre-write"]) + require.Equal(t, 1, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PostFanout(context.Background()) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-pre-write"]) + require.Equal(t, 1, lhState["hooks2-pre-write"]) + require.Equal(t, 1, lhState["hooks1-post-fanout"]) + require.Equal(t, 1, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PostReadImmediate(context.Background(), LifecyclePostReadImmediateMeta{}) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-pre-write"]) + require.Equal(t, 1, lhState["hooks2-pre-write"]) + require.Equal(t, 1, lhState["hooks1-post-fanout"]) + require.Equal(t, 1, lhState["hooks2-post-fanout"]) + require.Equal(t, 1, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 1, lhState["hooks2-post-read-immediate"]) } diff --git a/message.go b/message.go index a78f638..6486ebd 100644 --- a/message.go +++ b/message.go @@ -7,7 +7,6 @@ import ( "time" "github.com/confluentinc/confluent-kafka-go/v2/kafka" - "github.com/zillow/zfmt" ) // Message is a container for kafka message @@ -24,7 +23,7 @@ type Message struct { TimeStamp time.Time value []byte topicPartition kafka.TopicPartition - fmt zfmt.Formatter + fmt kFormatter doneFunc func(ctx context.Context) doneOnce sync.Once } @@ -53,12 +52,18 @@ func (m *Message) Decode(v any) error { if m.value == nil { return errors.New("message is empty") } + return m.unmarshall(v) +} + +func (m *Message) unmarshall(target any) error { if 
m.fmt == nil { - // is error is most likely due to user calling KReader/KWriter - // with custom Formatter which can sometimes be nil - return errors.New("formatter is not set") + return errors.New("formatter is not supplied to decode kafka message") } - return m.fmt.Unmarshal(m.value, v) + return m.fmt.unmarshal(unmarshReq{ + topic: m.Topic, + data: m.value, + target: target, + }) } // Value returns a copy of the current value byte array. Useful for debugging diff --git a/message_test.go b/message_test.go index dd8d1c9..bad5441 100644 --- a/message_test.go +++ b/message_test.go @@ -26,7 +26,7 @@ func Test_makeProducerMessageRaw(t *testing.T) { hasHeaders bool }{ { - name: "has fmtter with valid input, no key, no partition", + name: "has formatter with valid input, no key, no partition", args: args{ serviceName: "concierge/test/test_group", topic: "test_topic", @@ -104,7 +104,7 @@ func TestMessage_Headers(t *testing.T) { func TestMessage_Decode(t *testing.T) { type fields struct { value []byte - fmt zfmt.Formatter + fmt kFormatter } type args struct { v any @@ -133,7 +133,7 @@ func TestMessage_Decode(t *testing.T) { name: "valid message, formatter, empty input => error", fields: fields{ value: []byte("test"), - fmt: &zfmt.StringFormatter{}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, }, args: args{}, wantErr: true, @@ -142,7 +142,7 @@ func TestMessage_Decode(t *testing.T) { name: "valid message, formatter, valid input => no error", fields: fields{ value: []byte("test"), - fmt: &zfmt.StringFormatter{}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, }, args: args{ v: &bytes.Buffer{}, @@ -194,7 +194,7 @@ func TestMessage_Done(t *testing.T) { Key: tt.fields.Key, Headers: tt.fields.Headers, value: tt.fields.value, - fmt: tt.fields.fmt, + fmt: zfmtShim{F: tt.fields.fmt}, doneFunc: func(ctx context.Context) { isCalled = true }, diff --git a/reader.go b/reader.go index 6750dc3..3ccd108 100644 --- a/reader.go +++ b/reader.go @@ -43,32 +43,59 @@ type KReader struct { topicConfig ConsumerTopicConfig isClosed bool - fmtter Formatter + formatter kFormatter + logger Logger lifecycle LifecycleHooks once sync.Once tCommitMgr *topicCommitMgr } +type readerArgs struct { + cfg Config + cCfg ConsumerTopicConfig + consumerProvider confluentConsumerProvider + f kFormatter + l Logger + prefix string + hooks LifecycleHooks + opts []ReaderOption +} + // newReader makes a new reader based on the configurations -func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string) (*KReader, error) { - confluentConfig := makeConsumerConfig(conf, topicConfig, prefix) - consumer, err := provider(confluentConfig) +func newReader(args readerArgs) (*KReader, error) { + conf := args.cfg + topicConfig := args.cCfg + prefix := args.prefix + provider := args.consumerProvider + formatter := args.f + logger := args.l + + confluentConfig, err := makeConsumerConfig(conf, topicConfig, prefix) if err != nil { return nil, err } - - fmtter, err := getFormatter(topicConfig) + consumer, err := provider(confluentConfig) if err != nil { return nil, err } - return &KReader{ + + r := &KReader{ consumer: consumer, - fmtter: fmtter, topicConfig: topicConfig, + formatter: formatter, logger: logger, + lifecycle: args.hooks, tCommitMgr: newTopicCommitMgr(), - }, nil + } + s := ReaderSettings{} + for _, opt := range args.opts { + opt(&s) + } + if s.formatter != nil { + r.formatter = s.formatter + } + return r, nil } // Read consumes a single message at a time. 
Blocks until a message is returned or some @@ -87,8 +114,9 @@ func (r *KReader) Read(ctx context.Context) (*Message, error) { } kmsg, err := r.consumer.ReadMessage(time.Duration(*r.topicConfig.ReadTimeoutMillis) * time.Millisecond) if err != nil { - switch v := err.(type) { - case kafka.Error: + var v kafka.Error + switch { + case errors.As(err, &v): // timeouts occur (because the assigned partitions aren't being written to, lack of activity, etc.). We'll // log them for debugging purposes if v.Code() == kafka.ErrTimedOut { @@ -198,7 +226,7 @@ func (r *KReader) mapMessage(_ context.Context, msg kafka.Message) *Message { } }, value: msg.Value, - fmt: r.fmtter, + fmt: r.formatter, } } @@ -284,14 +312,18 @@ func getTopicName(topicName *string) string { return topic } +type ReaderSettings struct { + formatter kFormatter +} + // ReaderOption is a function that modify the KReader configurations -type ReaderOption func(*KReader) +type ReaderOption func(*ReaderSettings) // RFormatterOption sets the formatter for this reader -func RFormatterOption(fmtter Formatter) ReaderOption { - return func(r *KReader) { - if fmtter != nil { - r.fmtter = fmtter +func RFormatterOption(formatter Formatter) ReaderOption { + return func(s *ReaderSettings) { + if formatter != nil { + s.formatter = zfmtShim{F: formatter} } } } diff --git a/reader_test.go b/reader_test.go index b4d536c..52bb81b 100644 --- a/reader_test.go +++ b/reader_test.go @@ -31,7 +31,14 @@ func TestReader_Read_NilReturn(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.NoError(t, err) @@ -59,7 +66,18 @@ func TestReader_Read(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + c := Client{} + f, err := c.getFormatter(formatterArgs{formatter: topicConfig.Formatter}) + require.NoError(t, err) + + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + f: f, + } + r, err := newReader(args) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.NoError(t, err) @@ -95,7 +113,14 @@ func TestReader_Read_Error(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.Error(t, err) @@ -127,7 +152,14 @@ func TestReader_Read_TimeoutError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.NoError(t, err, "expect no error to be returned on timeout") @@ -146,9 +178,15 @@ func TestReader_Read_SubscriberError(t *testing.T) { m := 
mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) + require.NoError(t, err) - _, err := r.Read(context.TODO()) + _, err = r.Read(context.TODO()) require.Error(t, err, "expect an error to bubble up on Read because of subscribe error") } @@ -166,10 +204,15 @@ func TestReader_Read_CloseError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") - - err := r.Close() + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) + require.NoError(t, err) + err = r.Close() require.Error(t, err) } @@ -187,9 +230,15 @@ func TestReader_ReadWhenConnectionIsClosed(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) + require.NoError(t, err) - err := r.Close() + err = r.Close() require.NoError(t, err) _, err = r.Read(context.TODO()) require.Error(t, err, "KReader.Read() message should return error due to connection lost") @@ -209,6 +258,7 @@ func Test_newReader(t *testing.T) { { name: "custom formatter, no error. It is implied that user will supply formatter later", args: args{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, topicConfig: ConsumerTopicConfig{ Formatter: zfmt.FormatterType("custom"), }, @@ -216,16 +266,6 @@ func Test_newReader(t *testing.T) { }, wantErr: false, }, - { - name: "invalid formatter", - args: args{ - consumeProvider: defaultConfluentConsumerProvider{}.NewConsumer, - topicConfig: ConsumerTopicConfig{ - Formatter: zfmt.FormatterType("invalid_fmt"), - }, - }, - wantErr: true, - }, { name: "valid formatter but has error when creating NewConsumer", args: args{ @@ -236,6 +276,7 @@ func Test_newReader(t *testing.T) { { name: "minimum config with formatter", args: args{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, consumeProvider: defaultConfluentConsumerProvider{}.NewConsumer, topicConfig: ConsumerTopicConfig{ Formatter: zfmt.StringFmt, @@ -247,7 +288,14 @@ func Test_newReader(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - _, err := newReader(tt.args.conf, tt.args.topicConfig, tt.args.consumeProvider, &NoopLogger{}, "") + args := readerArgs{ + cfg: tt.args.conf, + cCfg: tt.args.topicConfig, + consumerProvider: tt.args.consumeProvider, + l: &NoopLogger{}, + } + _, err := newReader(args) + if tt.wantErr { require.Error(t, err) } else { @@ -279,7 +327,15 @@ func Test_ProcessMessage(t *testing.T) { m := mockConfluentConsumerProvider{ c: mock_confluent.NewMockKafkaConsumer(ctrl), }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) + require.NoError(t, err) + got := r.mapMessage(context.Background(), dupMessage) require.Equal(t, got.Partition, dupMessage.TopicPartition.Partition) @@ -310,7 +366,16 @@ func 
Test_ProcessMultipleMessagesFromDifferentTopics_UpdatesInternalStateProperl m := mockConfluentConsumerProvider{ c: mock_confluent.NewMockKafkaConsumer(ctrl), }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) + require.NoError(t, err) + for _, msg := range msgs { got := r.mapMessage(context.Background(), msg) require.Equal(t, got.Partition, msg.TopicPartition.Partition) @@ -350,7 +415,14 @@ func Test_ProcessMessage_StoreOffsetError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) + require.NoError(t, err) mgr := newTopicCommitMgr() cmgr := mgr.get(*dupMessage.TopicPartition.Topic) @@ -397,7 +469,15 @@ func Test_ProcessMessage_SetError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, + cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) + require.NoError(t, err) + mgr := newTopicCommitMgr() cmgr := mgr.get(*dupMessage.TopicPartition.Topic) cmgr.PushInWork(dupMessage.TopicPartition) diff --git a/schemareg.go b/schemareg.go new file mode 100644 index 0000000..dbca923 --- /dev/null +++ b/schemareg.go @@ -0,0 +1,146 @@ +package zkafka + +import ( + "errors" + "fmt" + "sync" + + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/jsonschema" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" +) + +type schemaRegistryFactory struct { + m sync.Mutex + srCls map[string]schemaregistry.Client +} + +func newSchemaRegistryFactory() *schemaRegistryFactory { + return &schemaRegistryFactory{ + srCls: make(map[string]schemaregistry.Client), + } +} + +func (c *schemaRegistryFactory) createAvro(srConfig SchemaRegistryConfig) (avroFmt, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return avroFmt{}, err + } + + deserConfig := avrov2.NewDeserializerConfig() + deser, err := avrov2.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return avroFmt{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + serConfig := avrov2.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := avrov2.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return avroFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return avroFmt{ + ser: ser, + deser: deser, + }, nil +} + +func (c *schemaRegistryFactory) createProto(srConfig SchemaRegistryConfig) (protoFmt, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return protoFmt{}, err + } + + deserConfig := protobuf.NewDeserializerConfig() + deser, err := protobuf.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return protoFmt{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + 
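
Stitched together outside the factory, the avro path above amounts to the following standalone sketch. It mirrors the calls made in `createAvro`; the registry URL, topic, and the round-trip helper itself are placeholders of my own, not API introduced by this patch.

```go
package example

import (
	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2"
)

// avroRoundTrip serializes event and deserializes the payload back into target,
// using the same serde setup as createAvro (placeholder registry URL).
func avroRoundTrip(topic string, event any, target any) error {
	client, err := schemaregistry.NewClient(schemaregistry.NewConfig("http://localhost:8081"))
	if err != nil {
		return err
	}

	serCfg := avrov2.NewSerializerConfig()
	serCfg.AutoRegisterSchemas = true // zkafka sources this from SchemaRegistryConfig.Serialization
	serCfg.NormalizeSchemas = true
	ser, err := avrov2.NewSerializer(client, serde.ValueSerde, serCfg)
	if err != nil {
		return err
	}

	deser, err := avrov2.NewDeserializer(client, serde.ValueSerde, avrov2.NewDeserializerConfig())
	if err != nil {
		return err
	}

	// Serialize resolves the schema ID via the registry and prefixes it onto the payload.
	payload, err := ser.Serialize(topic, event)
	if err != nil {
		return err
	}
	return deser.DeserializeInto(topic, payload, target)
}
```
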
serConfig := protobuf.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := protobuf.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return protoFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return protoFmt{ + ser: ser, + deser: deser, + }, nil + +} + +func (c *schemaRegistryFactory) createJson(srConfig SchemaRegistryConfig) (jsonFmt, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return jsonFmt{}, err + } + + deserConfig := jsonschema.NewDeserializerConfig() + deser, err := jsonschema.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return jsonFmt{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + serConfig := jsonschema.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := jsonschema.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return jsonFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return jsonFmt{ + ser: ser, + deser: deser, + }, nil + +} + +func (c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { + c.m.Lock() + defer c.m.Unlock() + + url := srConfig.URL + if url == "" { + return nil, errors.New("no schema registry url provided") + } + if srCl, ok := c.srCls[url]; ok { + return srCl, nil + } + client, err := schemaregistry.NewClient(schemaregistry.NewConfig(url)) + if err != nil { + return nil, fmt.Errorf("failed to create schema registry client: %w", err) + } + c.srCls[url] = client + return client, nil +} + +type avroFmt struct { + ser *avrov2.Serializer + deser *avrov2.Deserializer +} + +func (s avroFmt) GetID(topic string, avroSchema string) (int, error) { + return s.ser.GetID(topic, nil, &schemaregistry.SchemaInfo{Schema: avroSchema}) +} + +func (s avroFmt) Deserialize(topic string, value []byte, target any) error { + return s.deser.DeserializeInto(topic, value, target) +} + +type protoFmt struct { + ser *protobuf.Serializer + deser *protobuf.Deserializer +} + +type jsonFmt struct { + ser *jsonschema.Serializer + deser *jsonschema.Deserializer +} diff --git a/test/evolution/avro1/schema_1_gen.go b/test/evolution/avro1/schema_1_gen.go new file mode 100644 index 0000000..832022c --- /dev/null +++ b/test/evolution/avro1/schema_1_gen.go @@ -0,0 +1,29 @@ +// Code generated by avrogen. DO NOT EDIT. + +package avro1 + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte +} + +// AvroRecord implements the avro.AvroRecord interface. +func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +} diff --git a/test/evolution/avro2/schema_2_gen.go b/test/evolution/avro2/schema_2_gen.go new file mode 100644 index 0000000..13d289c --- /dev/null +++ b/test/evolution/avro2/schema_2_gen.go @@ -0,0 +1,30 @@ +// Code generated by avrogen. DO NOT EDIT. 
+ +package avro2 + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte + NewFieldWithDefault *string +} + +// AvroRecord implements the avro.AvroRecord interface. +func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"},{"default":null,"name":"NewFieldWithDefault","type":["null","string"]}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +} diff --git a/test/evolution/json1/schema_1.pb.go b/test/evolution/json1/schema_1.pb.go new file mode 100644 index 0000000..e7e52be --- /dev/null +++ b/test/evolution/json1/schema_1.pb.go @@ -0,0 +1,9 @@ +package json1 + +type DummyEvent struct { + IntField int64 `json:"IntField,omitempty"` + DoubleField float32 `json:"DoubleField,omitempty"` + StringField string `json:"StringField,omitempty"` + BoolField bool `json:"BoolField,omitempty"` + BytesField []byte `json:"BytesField,omitempty"` +} diff --git a/test/evolution/json2/schema_2.go b/test/evolution/json2/schema_2.go new file mode 100644 index 0000000..5d26adb --- /dev/null +++ b/test/evolution/json2/schema_2.go @@ -0,0 +1,10 @@ +package json2 + +type DummyEvent struct { + IntField int64 `json:"IntField,omitempty"` + DoubleField float32 `json:"DoubleField,omitempty"` + StringField string `json:"StringField,omitempty"` + BoolField bool `json:"BoolField,omitempty"` + BytesField []byte `json:"BytesField,omitempty"` + NewField string `json:"NewField,omitempty"` +} diff --git a/test/evolution/proto1/schema_1.pb.go b/test/evolution/proto1/schema_1.pb.go new file mode 100644 index 0000000..4538169 --- /dev/null +++ b/test/evolution/proto1/schema_1.pb.go @@ -0,0 +1,182 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc v5.27.0 +// source: schema_1.proto + +package proto1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type DummyEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + IntField int64 `protobuf:"varint,1,opt,name=IntField,proto3" json:"IntField,omitempty"` + DoubleField float32 `protobuf:"fixed32,2,opt,name=DoubleField,proto3" json:"DoubleField,omitempty"` + StringField string `protobuf:"bytes,3,opt,name=StringField,proto3" json:"StringField,omitempty"` + BoolField bool `protobuf:"varint,4,opt,name=BoolField,proto3" json:"BoolField,omitempty"` + BytesField []byte `protobuf:"bytes,5,opt,name=BytesField,proto3" json:"BytesField,omitempty"` +} + +func (x *DummyEvent) Reset() { + *x = DummyEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_schema_1_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DummyEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DummyEvent) ProtoMessage() {} + +func (x *DummyEvent) ProtoReflect() protoreflect.Message { + mi := &file_schema_1_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DummyEvent.ProtoReflect.Descriptor instead. +func (*DummyEvent) Descriptor() ([]byte, []int) { + return file_schema_1_proto_rawDescGZIP(), []int{0} +} + +func (x *DummyEvent) GetIntField() int64 { + if x != nil { + return x.IntField + } + return 0 +} + +func (x *DummyEvent) GetDoubleField() float32 { + if x != nil { + return x.DoubleField + } + return 0 +} + +func (x *DummyEvent) GetStringField() string { + if x != nil { + return x.StringField + } + return "" +} + +func (x *DummyEvent) GetBoolField() bool { + if x != nil { + return x.BoolField + } + return false +} + +func (x *DummyEvent) GetBytesField() []byte { + if x != nil { + return x.BytesField + } + return nil +} + +var File_schema_1_proto protoreflect.FileDescriptor + +var file_schema_1_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x09, 0x65, 0x76, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xaa, 0x01, 0x0a, 0x0a, + 0x44, 0x75, 0x6d, 0x6d, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x44, 0x6f, 0x75, + 0x62, 0x6c, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x53, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x42, 0x6f, + 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x42, + 0x6f, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x09, 0x5a, 0x07, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x31, 0x2f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_schema_1_proto_rawDescOnce sync.Once + 
file_schema_1_proto_rawDescData = file_schema_1_proto_rawDesc +) + +func file_schema_1_proto_rawDescGZIP() []byte { + file_schema_1_proto_rawDescOnce.Do(func() { + file_schema_1_proto_rawDescData = protoimpl.X.CompressGZIP(file_schema_1_proto_rawDescData) + }) + return file_schema_1_proto_rawDescData +} + +var file_schema_1_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_schema_1_proto_goTypes = []interface{}{ + (*DummyEvent)(nil), // 0: evolution.DummyEvent +} +var file_schema_1_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_schema_1_proto_init() } +func file_schema_1_proto_init() { + if File_schema_1_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_schema_1_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DummyEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_schema_1_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_schema_1_proto_goTypes, + DependencyIndexes: file_schema_1_proto_depIdxs, + MessageInfos: file_schema_1_proto_msgTypes, + }.Build() + File_schema_1_proto = out.File + file_schema_1_proto_rawDesc = nil + file_schema_1_proto_goTypes = nil + file_schema_1_proto_depIdxs = nil +} diff --git a/test/evolution/proto2/schema_2.pb.go b/test/evolution/proto2/schema_2.pb.go new file mode 100644 index 0000000..01156d5 --- /dev/null +++ b/test/evolution/proto2/schema_2.pb.go @@ -0,0 +1,192 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc v5.27.0 +// source: schema_2.proto + +package proto2 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type DummyEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + IntField int64 `protobuf:"varint,1,opt,name=IntField,proto3" json:"IntField,omitempty"` + DoubleField float32 `protobuf:"fixed32,2,opt,name=DoubleField,proto3" json:"DoubleField,omitempty"` + StringField string `protobuf:"bytes,3,opt,name=StringField,proto3" json:"StringField,omitempty"` + BoolField bool `protobuf:"varint,4,opt,name=BoolField,proto3" json:"BoolField,omitempty"` + BytesField []byte `protobuf:"bytes,5,opt,name=BytesField,proto3" json:"BytesField,omitempty"` + NewField string `protobuf:"bytes,6,opt,name=NewField,proto3" json:"NewField,omitempty"` +} + +func (x *DummyEvent) Reset() { + *x = DummyEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_schema_2_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DummyEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DummyEvent) ProtoMessage() {} + +func (x *DummyEvent) ProtoReflect() protoreflect.Message { + mi := &file_schema_2_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DummyEvent.ProtoReflect.Descriptor instead. +func (*DummyEvent) Descriptor() ([]byte, []int) { + return file_schema_2_proto_rawDescGZIP(), []int{0} +} + +func (x *DummyEvent) GetIntField() int64 { + if x != nil { + return x.IntField + } + return 0 +} + +func (x *DummyEvent) GetDoubleField() float32 { + if x != nil { + return x.DoubleField + } + return 0 +} + +func (x *DummyEvent) GetStringField() string { + if x != nil { + return x.StringField + } + return "" +} + +func (x *DummyEvent) GetBoolField() bool { + if x != nil { + return x.BoolField + } + return false +} + +func (x *DummyEvent) GetBytesField() []byte { + if x != nil { + return x.BytesField + } + return nil +} + +func (x *DummyEvent) GetNewField() string { + if x != nil { + return x.NewField + } + return "" +} + +var File_schema_2_proto protoreflect.FileDescriptor + +var file_schema_2_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x32, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x09, 0x65, 0x76, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xc6, 0x01, 0x0a, 0x0a, + 0x44, 0x75, 0x6d, 0x6d, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x44, 0x6f, 0x75, + 0x62, 0x6c, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x53, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x42, 0x6f, + 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x42, + 0x6f, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 
0x1a, 0x0a, 0x08, 0x4e, 0x65, 0x77, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x4e, 0x65, 0x77, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x42, 0x09, 0x5a, 0x07, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x32, 0x2f, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_schema_2_proto_rawDescOnce sync.Once + file_schema_2_proto_rawDescData = file_schema_2_proto_rawDesc +) + +func file_schema_2_proto_rawDescGZIP() []byte { + file_schema_2_proto_rawDescOnce.Do(func() { + file_schema_2_proto_rawDescData = protoimpl.X.CompressGZIP(file_schema_2_proto_rawDescData) + }) + return file_schema_2_proto_rawDescData +} + +var file_schema_2_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_schema_2_proto_goTypes = []interface{}{ + (*DummyEvent)(nil), // 0: evolution.DummyEvent +} +var file_schema_2_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_schema_2_proto_init() } +func file_schema_2_proto_init() { + if File_schema_2_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_schema_2_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DummyEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_schema_2_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_schema_2_proto_goTypes, + DependencyIndexes: file_schema_2_proto_depIdxs, + MessageInfos: file_schema_2_proto_msgTypes, + }.Build() + File_schema_2_proto = out.File + file_schema_2_proto_rawDesc = nil + file_schema_2_proto_goTypes = nil + file_schema_2_proto_depIdxs = nil +} diff --git a/test/evolution/schema_1.avsc b/test/evolution/schema_1.avsc new file mode 100644 index 0000000..03ea6e7 --- /dev/null +++ b/test/evolution/schema_1.avsc @@ -0,0 +1,11 @@ +{ + "type": "record", + "name": "DummyEvent", + "fields": [ + {"name": "IntField", "type": "int"}, + {"name": "DoubleField", "type": "double"}, + {"name": "StringField", "type": "string"}, + {"name": "BoolField", "type": "boolean"}, + {"name": "BytesField", "type": "bytes"} + ] +} \ No newline at end of file diff --git a/test/evolution/schema_1.proto b/test/evolution/schema_1.proto new file mode 100644 index 0000000..46f90aa --- /dev/null +++ b/test/evolution/schema_1.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package evolution; + +option go_package = "proto1/"; + +message DummyEvent { + int64 IntField = 1; + float DoubleField = 2; + string StringField = 3; + bool BoolField = 4; + bytes BytesField = 5; +} \ No newline at end of file diff --git a/test/evolution/schema_2.avsc b/test/evolution/schema_2.avsc new file mode 100644 index 0000000..8355d50 --- /dev/null +++ b/test/evolution/schema_2.avsc @@ -0,0 +1,12 @@ +{ + "type": "record", + "name": "DummyEvent", + "fields": [ + {"name": "IntField", "type": "int"}, + {"name": "DoubleField", "type": "double"}, + {"name": "StringField", "type": "string"}, + {"name": "BoolField", "type": "boolean"}, + {"name": "BytesField", "type": "bytes"}, + {"name": "NewFieldWithDefault", "type": ["null", "string"], "default": null } + 
] +} \ No newline at end of file diff --git a/test/evolution/schema_2.proto b/test/evolution/schema_2.proto new file mode 100644 index 0000000..810d6c8 --- /dev/null +++ b/test/evolution/schema_2.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package evolution; + +option go_package = "proto2/"; + +message DummyEvent { + int64 IntField = 1; + float DoubleField = 2; + string StringField = 3; + bool BoolField = 4; + bytes BytesField = 5; + string NewField = 6; +} \ No newline at end of file diff --git a/test/integration_test.go b/test/integration_test.go index 83bc3e9..40552f4 100644 --- a/test/integration_test.go +++ b/test/integration_test.go @@ -1,6 +1,3 @@ -//go:build integration -// +build integration - package test import ( @@ -11,6 +8,7 @@ import ( "os" "slices" "sync" + "sync/atomic" "testing" "time" @@ -35,6 +33,8 @@ import ( // 1. Restart a consumer (being sure to reuse the same consumer group from before) // 1. Read another message. Assert its the second written message (first was already read and committed) func TestKafkaClientsCanReadOwnWritesAndBehaveProperlyAfterRestart(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() topic := "integration-test-topic-2" + uuid.NewString() bootstrapServer := getBootstrap() @@ -166,6 +166,8 @@ func TestKafkaClientsCanReadOwnWritesAndBehaveProperlyAfterRestart(t *testing.T) // This is in response to a noted issue where rebalance was prone to replayed messages. // There are multiple versions of the tests which vary the processing duration func Test_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + type testCase struct { name string processingDuration time.Duration @@ -350,6 +352,8 @@ func Test_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { // when a consumer joins and starts consuming messages and later when another consumer joins // then there are no duplicate messages processed. func Test_WithMultipleTopics_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + type testCase struct { name string processingDuration time.Duration @@ -406,7 +410,7 @@ func Test_WithMultipleTopics_RebalanceDoesntCauseDuplicateMessages(t *testing.T) Val: "sdfds", } - t.Log("Begin writing to Test Topic") + t.Log("Begin writing to Test topic") // write N messages to topic1 msgCount := tc.messageCount for i := 0; i < msgCount; i++ { @@ -523,6 +527,8 @@ func Test_WithMultipleTopics_RebalanceDoesntCauseDuplicateMessages(t *testing.T) // The consumer's processing times are set to a range as opposed to a specific duration. This allows lookahead processing (where messages // of higher offsets are processed and completed, potentially, before lower offsets func Test_WithConcurrentProcessing_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + type testCase struct { name string processingDurationMinMillis int @@ -677,6 +683,8 @@ func Test_WithConcurrentProcessing_RebalanceDoesntCauseDuplicateMessages(t *test // The rebalances are handled during the Poll call under the hood (which is only called while a KReader is in the attempt of Reading. 
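
With the build-tag gating removed, the broker-dependent tests above now begin with `checkShouldSkipTest(t, enableKafkaBrokerTest)`. That helper is defined in testhelper.go rather than in this hunk; a rough equivalent consistent with these call sites might look like the sketch below. The environment-variable name and skip message are assumptions, not taken from this patch.

```go
// Hypothetical sketch of the helper; the real implementation lives in
// testhelper.go and may differ in detail.
package test

import (
	"os"
	"testing"
)

const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS" // assumed variable name

// checkShouldSkipTest skips the test unless every listed environment variable
// is set to "true", letting a plain `go test ./...` pass without a running broker.
func checkShouldSkipTest(t *testing.T, envVars ...string) {
	t.Helper()
	for _, v := range envVars {
		if os.Getenv(v) != "true" {
			t.Skipf("skipping: set %s=true to run broker-backed tests", v)
		}
	}
}
```
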
// So as we simulate two members of a group we'll need to keep calling from both consumers so the rebalance eventually occurs func Test_AssignmentsReflectsConsumerAssignments(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() @@ -793,6 +801,8 @@ func Test_AssignmentsReflectsConsumerAssignments(t *testing.T) { // when the second consumer joins and causes a rebalance // then the first isn't infinitely blocked in its rebalance func Test_UnfinishableWorkDoesntBlockWorkIndefinitely(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() @@ -887,6 +897,8 @@ func Test_UnfinishableWorkDoesntBlockWorkIndefinitely(t *testing.T) { // when processing that message errors and a deadletter is configured // then the errored message will be written to the dlt func Test_KafkaClientsCanWriteToTheirDeadLetterTopic(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + bootstrapServer := getBootstrap() topic := "topic1" + uuid.NewString() dlt := "deadlettertopic1" + uuid.NewString() @@ -906,6 +918,7 @@ func Test_KafkaClientsCanWriteToTheirDeadLetterTopic(t *testing.T) { Topic: topic, Formatter: zfmt.JSONFmt, }) + require.NoError(t, err) consumerTopicConfig := zkafka.ConsumerTopicConfig{ ClientID: fmt.Sprintf("worker-%s-%s", t.Name(), uuid.NewString()), @@ -979,6 +992,8 @@ func Test_KafkaClientsCanWriteToTheirDeadLetterTopic(t *testing.T) { } func Test_WorkDelay_GuaranteesProcessingDelayedAtLeastSpecifiedDelayDurationFromWhenMessageWritten(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() @@ -1085,6 +1100,8 @@ func Test_WorkDelay_GuaranteesProcessingDelayedAtLeastSpecifiedDelayDurationFrom // 2. It also asserts that the time between the first and last message is very short. // This is expected in a backlog situation, since the worker will delay once, and with monotonically increasing timestamps won't have to delay again func Test_WorkDelay_DoesntHaveDurationStackEffect(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() @@ -1185,6 +1202,184 @@ func Test_WorkDelay_DoesntHaveDurationStackEffect(t *testing.T) { require.WithinDuration(t, last.processingInstant, first.processingInstant, time.Duration(processDelayMillis/2)*time.Millisecond, "Time since first and last processed message should be very short, since processing just updates an in memory slice. This should take on the order of microseconds, but to account for scheduling drift the assertion is half the delay") } +// Test_DeadletterClientDoesntCollideWithProducer tests a common configuration scenario +// where a worker consumer has a clientID and the dead letter producer is implicitily configured +// with a clientid. For example, say the client id waw `service-x`, previously, the deadletter producer +// inherited the same clientID and would create a publisher with that name. +// The issue was if the processor was acting as a connector and published to another topic, it might be +// common to have explicitly configured a producer with the name `service-x`. This +// resulted in a collission for the cached producer clients, and the effect was that all messages +// would be written to the topic that happened to be registered in the cient cache first (this would have +// been the dead letter producer). 
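
To make the collision described above concrete: the problematic shape is two producers that end up sharing a ClientID, the implicitly configured dead-letter producer and an explicitly configured egress producer. A hedged illustration follows, using the config types from this repo with made-up topic and ID values; the test below then exercises the same shape against a real broker.

```go
package example

import "github.com/zillow/zkafka"

// Illustration only; topic and ID values are placeholders. Previously, both
// producers below resolved to the same cached kafka client keyed by ClientID.
func collidingConfigs() (zkafka.ConsumerTopicConfig, zkafka.ProducerTopicConfig) {
	consumerCfg := zkafka.ConsumerTopicConfig{
		ClientID: "service-x",
		GroupID:  "service-x-group",
		Topic:    "ingress-topic",
		// No ClientID set on the DLT config, so it inherited "service-x".
		DeadLetterTopicConfig: &zkafka.ProducerTopicConfig{
			Topic: "ingress-topic-dlt",
		},
	}
	egressCfg := zkafka.ProducerTopicConfig{
		ClientID: "service-x", // same ClientID as the inherited DLT producer => cache collision
		Topic:    "egress-topic",
	}
	return consumerCfg, egressCfg
}
```
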
+// +// This test shows that when a connector processes N messages half of which error (deadletter) and half of which connect +// to an egress topic, that messages end up in both targets (as opposed to exclusively in the deadletter) +func Test_DeadletterClientDoesntCollideWithProducer(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + + bootstrapServer := getBootstrap() + + topicIngress := "integration-test-topic-1" + uuid.NewString() + createTopic(t, bootstrapServer, topicIngress, 1) + topicEgress := "integration-test-topic-2" + uuid.NewString() + createTopic(t, bootstrapServer, topicEgress, 1) + topicDLT := "integration-test-topic-dlt" + uuid.NewString() + createTopic(t, bootstrapServer, topicDLT, 1) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + writer, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topicIngress, + }) + clientID1 := fmt.Sprintf("service-x-%s-%s", t.Name(), uuid.NewString()) + ingressTopicReaderConfig := zkafka.ConsumerTopicConfig{ + ClientID: clientID1, + Topic: topicIngress, + GroupID: groupID, + // deadlettertopic uses implicit clientid + DeadLetterTopicConfig: &zkafka.ProducerTopicConfig{ + Topic: topicDLT, + }, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + processorWriter, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: clientID1, + Topic: topicEgress, + }) + + // start the reader before we write messages (otherwise, since its a new consumer group, auto.offset.reset=latest will be started at an offset later than the just written messages). + // Loop in the reader until msg1 appears + msg := Msg{Val: "1"} + + // write 3 messages to ingress topic + _, err = writer.Write(ctx, msg) + require.NoError(t, err) + _, err = writer.Write(ctx, msg) + require.NoError(t, err) + _, err = writer.Write(ctx, msg) + require.NoError(t, err) + + ctx, cancel := context.WithCancel(ctx) + defer cancel() + msgCount := atomic.Int64{} + wf := zkafka.NewWorkFactory(client, zkafka.WithWorkLifecycleHooks(zkafka.LifecycleHooks{ + PostProcessing: func(ctx context.Context, meta zkafka.LifecyclePostProcessingMeta) error { + if msgCount.Load() == 3 { + cancel() + } + + return nil + }, + })) + w := wf.CreateWithFunc(ingressTopicReaderConfig, func(ctx context.Context, msg *zkafka.Message) error { + t.Log("Processing message from ingress topic") + msgCount.Add(1) + if msgCount.Load()%2 == 0 { + return errors.New("random error occurred") + } + _, err := processorWriter.WriteRaw(ctx, nil, msg.Value()) + + return err + }) + + t.Log("Begin primary work loop") + err = w.Run(ctx, nil) + require.NoError(t, err) + t.Log("Exit primary work loop. 
Assess proper side effects") + + egressTopicReaderConfig := zkafka.ConsumerTopicConfig{ + ClientID: uuid.NewString(), + Topic: topicEgress, + GroupID: uuid.NewString(), + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + + ctx1, cancel1 := context.WithTimeout(context.Background(), time.Minute) + defer cancel1() + egressCount := atomic.Int64{} + w1 := wf.CreateWithFunc(egressTopicReaderConfig, func(_ context.Context, msg *zkafka.Message) error { + egressCount.Add(1) + cancel1() + return nil + }) + + dltTopicReaderConfig := zkafka.ConsumerTopicConfig{ + ClientID: uuid.NewString(), + Topic: topicDLT, + GroupID: uuid.NewString(), + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + ctx2, cancel2 := context.WithTimeout(context.Background(), time.Minute) + defer cancel2() + dltCount := atomic.Int64{} + w2 := wf.CreateWithFunc(dltTopicReaderConfig, func(_ context.Context, msg *zkafka.Message) error { + dltCount.Add(1) + cancel2() + return nil + }) + + t.Log("Start work running. Looking for egress event and DLT topic event") + require.NoError(t, w1.Run(ctx1, nil)) + require.NoError(t, w2.Run(ctx2, nil)) + require.Equal(t, int64(1), egressCount.Load(), "Expected a message to be written to the egress topic") + require.Equal(t, int64(1), dltCount.Load(), "Expected a message to be written to the DLT topic") +} + +func Test_MissingBootstrap_ShouldGiveClearError(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{}}, + zkafka.LoggerOption(stdLogger{}), + ) + defer func() { require.NoError(t, client.Close()) }() + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + ctx, cancel := context.WithTimeout(context.Background(), 100*time.Second) + defer cancel() + + var readErr error + wf := zkafka.NewWorkFactory(client, zkafka.WithLogger(stdLogger{}), + zkafka.WithWorkLifecycleHooks(zkafka.LifecycleHooks{ + PostReadImmediate: func(ctx context.Context, meta zkafka.LifecyclePostReadImmediateMeta) { + readErr = meta.Err + cancel() + }, + }), + ) + w := wf.CreateWithFunc(consumerTopicConfig, func(_ context.Context, msg *zkafka.Message) error { + return nil + }) + err := w.Run(context.Background(), ctx.Done()) + require.NoError(t, err) + require.ErrorContains(t, readErr, "invalid consumer config, missing bootstrap server addresses") +} + func createTopic(t *testing.T, bootstrapServer, topic string, partitions int) { t.Helper() aclient, err := kafka.NewAdminClient(&kafka.ConfigMap{"bootstrap.servers": bootstrapServer}) diff --git a/test/schema_registry_evo_test.go b/test/schema_registry_evo_test.go new file mode 100644 index 0000000..5a82e08 --- /dev/null +++ b/test/schema_registry_evo_test.go @@ -0,0 +1,700 @@ +// Build tag is added here because the proto evolution test creates a package loading runtime error. +// In the pipeline, this error is suppressed with an envvar. However, this repo wants to remain idiomatic +// and devs should be able to run `go test ./...` without the package loading runtime error. 
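For local runs the evolution suite is gated twice: by the `evolution_test` build tag that follows and by the environment flags declared in `schema_registry_test.go` later in this patch. Below is a minimal sketch of a gated test, with an assumed invocation in its comment; the compose stack providing the broker and schema registry, and whichever envvar the pipeline uses to suppress the protobuf registration conflict, are assumptions and are not shown in this hunk.

```go
//go:build evolution_test

package test

import "testing"

// Assumed invocation (both gates must be satisfied):
//
//	ENABLE_KAFKA_BROKER_TESTS=true ENABLE_SCHEMA_REGISTRY_TESTS=true \
//	    go test -tags evolution_test ./test/...
func Test_Evolution_Sketch(t *testing.T) {
	// Skips unless both env flags are set to "true"; checkShouldSkipTest is defined
	// in schema_registry_test.go later in this patch.
	checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest)
	// ... exercise writers/readers against the schema registry at http://localhost:8081 ...
}
```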
+//go:build evolution_test +// +build evolution_test + +package test + +import ( + "context" + _ "embed" + "fmt" + "math/rand" + "testing" + + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "github.com/zillow/zkafka" + "github.com/zillow/zkafka/test/evolution/avro1" + "github.com/zillow/zkafka/test/evolution/avro2" + "github.com/zillow/zkafka/test/evolution/json1" + "github.com/zillow/zkafka/test/evolution/json2" + "github.com/zillow/zkafka/test/evolution/proto1" + "github.com/zillow/zkafka/test/evolution/proto2" +) + +// Test_SchemaRegistryReal_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom tests +// the `avro_schema_registry` formatter which uses schema registry. +// +// two schemas exists, which are backwards compatible with one another. +// The default behavior of the confluent-kafka-go doesn't handle this well, since the new field in schema2 +// +// which has a default value and is nullable has the default value omitted in schema registration and is therefore +// +// found to be incompatible. Auto registration isn't typically used in production environments, +// but this behavior is still problematic because the implicit schema resolution is used to communicate with schema registry +// and determine the appropriate schemaID to embed. The omitted default means the correct schema isn't found. +// +// The two message successfully writing means the two schemas are registered. +// We then test we can use the confluent deserializer to decode the messages. For both schema1 and schema2. +// This confirms that backwards/forward compatible evolution is possible and old schemas can still read messages from new. +func Test_SchemaRegistryReal_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema2, + }, + }, + }) + require.NoError(t, err) + + evt1 := avro1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, evt1) + require.NoError(t, err) + + evt2 := avro2.DummyEvent{ + IntField: 
int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewFieldWithDefault: ptr(uuid.NewString()), + } + _, err = writer2.Write(ctx, evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := avro1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1) + + receivedEvt2Schema1 := avro1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := avro1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1) + + receivedEvt2Schema2 := avro2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2) +} + +func Test_SchemaRegistryReal_Proto_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + //Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + //Schema: dummyEventSchema2, + }, + }, + }) + require.NoError(t, err) + + evt1 := proto1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := proto2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = 
writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := proto1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema1 := proto1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := proto1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema2 := proto2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(proto2.DummyEvent{})) +} + +func Test_SchemaRegistryReal_JSON_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := json1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := json2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: 
fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := json1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema1 := json1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := json1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema2 := json2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(json2.DummyEvent{})) +} + +func Test_SchemaRegistry_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema2, + }, + }, + }) + require.NoError(t, err) + + evt1 := avro1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, evt1) + require.NoError(t, err) + + evt2 := avro2.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewFieldWithDefault: ptr(uuid.NewString()), + } + _, err = writer2.Write(ctx, evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + 
SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := avro1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1) + + receivedEvt2Schema1 := avro1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := avro1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1) + + receivedEvt2Schema2 := avro2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2) +} + +func Test_SchemaRegistry_Proto_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := proto1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := proto2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, 
err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := proto1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema1 := proto1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := proto1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema2 := proto2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(proto2.DummyEvent{})) +} + +func Test_SchemaRegistry_JSON_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := json1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := json2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := json1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, 
cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema1 := json1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := json1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema2 := json2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(json2.DummyEvent{})) +} diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go new file mode 100644 index 0000000..3d4e3ca --- /dev/null +++ b/test/schema_registry_test.go @@ -0,0 +1,211 @@ +package test + +import ( + "context" + _ "embed" + "fmt" + "math/rand" + "os" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "github.com/zillow/zfmt" + "github.com/zillow/zkafka" + "github.com/zillow/zkafka/test/evolution/avro1" +) + +//go:embed evolution/schema_1.avsc +var dummyEventSchema1 string + +//go:embed evolution/schema_2.avsc +var dummyEventSchema2 string + +const enableSchemaRegistryTest = "ENABLE_SCHEMA_REGISTRY_TESTS" +const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS" + +func Test_SchemaRegistry_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: false, + Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + evt1 := avro1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, evt1) + require.ErrorContains(t, err, "failed to get avro schema by id") +} + +// Its possible not specify a schema for your producer. 
+// In this case, the underlying lib does +func Test_SchemaRegistry_Avro_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + // don't specify schema uses implicit handling + Schema: "", + }, + }, + }) + require.NoError(t, err) + + evt1 := avro1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, evt1) + require.ErrorContains(t, err, "avro schema is required for schema registry formatter") +} + +// Test_SchemaNotRegistered_ResultsInWorkerDecodeError demonstrates the behavior when a worker reads +// a message for a schema that doesn't exist in shcema registry. This test shows that such a situation would result in a decode error +func Test_SchemaNotRegistered_ResultsInWorkerDecodeError(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + ctx, cancel := context.WithCancel(ctx) + defer cancel() + + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer - no schema registration") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zfmt.AvroSchemaFmt, + SchemaID: 1, + }) + require.NoError(t, err) + + evt1 := avro1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1 + _, err = writer1.Write(ctx, evt1) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + var gotErr error + wf := zkafka.NewWorkFactory(client) + w := wf.CreateWithFunc(consumerTopicConfig, func(_ context.Context, msg *zkafka.Message) error { + defer cancel() + gotErr = msg.Decode(&avro1.DummyEvent{}) + return gotErr + }) + + t.Log("Begin reading messages") + err = w.Run(ctx, nil) + require.NoError(t, err) + require.ErrorContains(t, gotErr, "Subject Not Found") +} + +func 
checkShouldSkipTest(t *testing.T, flags ...string) { + t.Helper() + for _, flag := range flags { + if os.Getenv(flag) != "true" { + t.Skipf("Skipping test. To execute. Set envvar '%s' to true", flag) + } + } +} + +func readMessages(reader zkafka.Reader, count int) (<-chan *zkafka.Message, error) { + + responses := make(chan *zkafka.Message, count) + + seen := 0 + for { + func() { + ctx := context.Background() + rmsg, err := reader.Read(ctx) + defer func() { + if rmsg == nil { + return + } + rmsg.DoneWithContext(ctx) + }() + if err != nil || rmsg == nil { + return + } + responses <- rmsg + seen++ + }() + if seen >= count { + close(responses) + return responses, nil + } + } +} diff --git a/test/worker_test.go b/test/worker_test.go index cab8e52..a16292c 100644 --- a/test/worker_test.go +++ b/test/worker_test.go @@ -82,8 +82,7 @@ func TestWork_Run_FailsWithLogsWhenGotNilReader(t *testing.T) { l.EXPECT().Warnw(gomock.Any(), "Kafka worker read message failed", "error", gomock.Any(), "topics", gomock.Any()).Times(1) l.EXPECT().Debugw(gomock.Any(), gomock.Any()).AnyTimes() - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(nil, nil) + kcp := zkafka.FakeClient{R: nil} ctx, cancel := context.WithCancel(ctx) defer cancel() @@ -111,8 +110,7 @@ func TestWork_Run_FailsWithLogsForReadError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).Times(1).Return(nil, errors.New("error occurred during read")) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} ctx, cancel := context.WithCancel(ctx) defer cancel() @@ -139,8 +137,7 @@ func TestWork_Run_CircuitBreakerOpensOnReadError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, errors.New("error occurred during read")) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} kwf := zkafka.NewWorkFactory(kcp, zkafka.WithLogger(l)) @@ -192,8 +189,7 @@ func TestWork_Run_CircuitBreaksOnProcessError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := &fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -251,8 +247,7 @@ func TestWork_Run_DoNotSkipCircuitBreak(t *testing.T) { r.EXPECT().Read(gomock.Any()).Return(failureMessage, nil).AnyTimes() - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := &fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -314,8 +309,7 @@ func TestWork_Run_DoSkipCircuitBreak(t *testing.T) { r.EXPECT().Read(gomock.Any()).Return(failureMessage, nil).AnyTimes() - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -376,8 +370,7 @@ func TestWork_Run_CircuitBreaksOnProcessPanicInsideProcessorGoRoutine(t *testing r := zkafka_mocks.NewMockReader(ctrl) 
r.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := &fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -442,8 +435,7 @@ func TestWork_Run_DisabledCircuitBreakerContinueReadError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).MinTimes(4).Return(nil, errors.New("error occurred on read")) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} kwf := zkafka.NewWorkFactory(kcp, zkafka.WithLogger(l)) @@ -496,8 +488,7 @@ func TestWork_Run_SpedUpIsFaster(t *testing.T) { }).AnyTimes() mockReader.EXPECT().Close().Return(nil).AnyTimes() - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(2).Return(mockReader, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader} kwf := zkafka.NewWorkFactory(mockClientProvider, zkafka.WithLogger(zkafka.NoopLogger{})) slow := fakeProcessor{ @@ -578,8 +569,7 @@ func TestKafkaWork_ProcessorReturnsErrorIsLoggedAsWarning(t *testing.T) { }) mockReader := zkafka_mocks.NewMockReader(ctrl) mockReader.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(mockReader, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader} processor := fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -630,8 +620,7 @@ func TestKafkaWork_ProcessorTimeoutCausesContextCancellation(t *testing.T) { mockReader := zkafka_mocks.NewMockReader(ctrl) mockReader.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(mockReader, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader} wf := zkafka.NewWorkFactory(mockClientProvider, zkafka.WithLogger(l)) @@ -690,9 +679,7 @@ func TestWork_WithDeadLetterTopic_NoMessagesWrittenToDLTSinceNoErrorsOccurred(t mockWriter.EXPECT().Write(gomock.Any(), gomock.Any()).Times(0) mockWriter.EXPECT().Close().AnyTimes() - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(mockReader, nil) - mockClientProvider.EXPECT().Writer(gomock.Any(), gomock.Any()).Times(2).Return(mockWriter, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader, W: mockWriter} kwf := zkafka.NewWorkFactory(mockClientProvider, zkafka.WithLogger(l)) @@ -1162,8 +1149,7 @@ func TestWork_Run_OnDoneCallbackCalledOnProcessorError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} kwf := zkafka.NewWorkFactory(kcp, zkafka.WithLogger(l)) @@ -1224,8 +1210,7 @@ func TestWork_Run_WritesMetrics(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).MinTimes(1).Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := 
zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ @@ -1286,8 +1271,7 @@ func TestWork_LifecycleHooksCalledForEachItem_Reader(t *testing.T) { r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, nil), ) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ @@ -1349,8 +1333,7 @@ func TestWork_LifecycleHooksPostReadCanUpdateContext(t *testing.T) { r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, nil), ) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ @@ -1410,8 +1393,7 @@ func TestWork_LifecycleHooksPostReadErrorDoesntHaltProcessing(t *testing.T) { r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, nil), ) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ diff --git a/testhelper.go b/testhelper.go index 59c57eb..a65e6b4 100644 --- a/testhelper.go +++ b/testhelper.go @@ -45,47 +45,69 @@ type FakeMessage struct { } // GetMsgFromFake allows the construction of a Message object (allowing the specification of some private fields). -func GetMsgFromFake(msg *FakeMessage) *Message { - if msg == nil { +func GetMsgFromFake(input *FakeMessage) *Message { + if input == nil { return nil } key := "" - if msg.Key != nil { - key = *msg.Key + if input.Key != nil { + key = *input.Key } timeStamp := time.Now() - if !msg.TimeStamp.IsZero() { - timeStamp = msg.TimeStamp + if !input.TimeStamp.IsZero() { + timeStamp = input.TimeStamp } doneFunc := func(ctx context.Context) {} - if msg.DoneFunc != nil { - doneFunc = msg.DoneFunc + if input.DoneFunc != nil { + doneFunc = input.DoneFunc } var val []byte - if msg.Value != nil { - val = msg.Value + if input.Value != nil { + val = input.Value } - if msg.ValueData != nil { + if input.ValueData != nil { //nolint:errcheck // To simplify this helper function's api, we'll suppress marshalling errors. - val, _ = msg.Fmt.Marshall(msg.ValueData) + val, _ = input.Fmt.Marshall(input.ValueData) } return &Message{ Key: key, - isKeyNil: msg.Key == nil, - Headers: msg.Headers, - Offset: msg.Offset, - Partition: msg.Partition, - Topic: msg.Topic, - GroupID: msg.GroupID, + isKeyNil: input.Key == nil, + Headers: input.Headers, + Offset: input.Offset, + Partition: input.Partition, + Topic: input.Topic, + GroupID: input.GroupID, TimeStamp: timeStamp, value: val, topicPartition: kafka.TopicPartition{ - Topic: &msg.Topic, - Partition: msg.Partition, - Offset: kafka.Offset(msg.Offset), + Topic: &input.Topic, + Partition: input.Partition, + Offset: kafka.Offset(input.Offset), }, - fmt: msg.Fmt, + fmt: zfmtShim{F: input.Fmt}, doneFunc: doneFunc, doneOnce: sync.Once{}, } } + +var _ ClientProvider = (*FakeClient)(nil) + +// FakeClient is a convenience struct for testing purposes. +// It allows the specification of your own Reader/Writer while implementing the `ClientProvider` interface, +// which makes it compatible with a work factory. 
+type FakeClient struct { + R Reader + W Writer +} + +func (f FakeClient) Reader(_ context.Context, _ ConsumerTopicConfig, _ ...ReaderOption) (Reader, error) { + return f.R, nil +} + +func (f FakeClient) Writer(_ context.Context, _ ProducerTopicConfig, _ ...WriterOption) (Writer, error) { + return f.W, nil +} + +func (f FakeClient) Close() error { + return nil +} diff --git a/work.go b/work.go index 1a5de2a..cf08523 100644 --- a/work.go +++ b/work.go @@ -172,7 +172,7 @@ func (w *Work) execProcessors(ctx context.Context, shutdown <-chan struct{}) { // initiateProcessors creates a buffered channel for each virtual partition, of size poolSize. That way // a particular virtual partition never blocks because of its own capacity issue (and instead the goroutinepool is used -// to limit indefinte growth of processing goroutines). +// to limit indefinite growth of processing goroutines). func (w *Work) initiateProcessors(_ context.Context) { poolSize := w.getPoolSize() w.virtualPartitions = make([]chan workUnit, poolSize) @@ -204,6 +204,14 @@ func (w *Work) fanOut(ctx context.Context, shutdown <-chan struct{}) { return } msg, err := w.readMessage(ctx, shutdown) + + if w.lifecycle.PostReadImmediate != nil { + w.lifecycle.PostReadImmediate(ctx, LifecyclePostReadImmediateMeta{ + Message: msg, + Err: err, + }) + } + if err != nil { w.logger.Warnw(ctx, "Kafka worker read message failed", "error", err, @@ -645,6 +653,12 @@ func NewWorkFactory( return factory } +// CreateWithFunc creates a new Work instance, but allows for the processor to be specified as a callback function +// instead of an interface +func (f WorkFactory) CreateWithFunc(topicConfig ConsumerTopicConfig, p func(_ context.Context, msg *Message) error, options ...WorkOption) *Work { + return f.Create(topicConfig, processorAdapter{p: p}, options...) +} + // Create creates a new Work instance. 
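Taken together, `FakeClient` and `CreateWithFunc` remove most of the gomock `ClientProvider` boilerplate seen in the worker tests above. A minimal sketch of the intended wiring follows; the reader `r` is assumed to be any `zkafka.Reader` (for example one of the gomock-generated mocks this patch already uses), and the topic name is illustrative.

```go
package test

import (
	"context"
	"testing"
	"time"

	"github.com/stretchr/testify/require"
	"github.com/zillow/zkafka"
)

// runWorkViaFakeClient is a sketch, not part of the patch: FakeClient stands in for
// ClientProvider, and CreateWithFunc accepts a plain callback (wrapped in processorAdapter).
func runWorkViaFakeClient(t *testing.T, r zkafka.Reader) {
	kcp := zkafka.FakeClient{R: r} // no gomock expectations needed for the client itself
	wf := zkafka.NewWorkFactory(kcp)

	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	w := wf.CreateWithFunc(zkafka.ConsumerTopicConfig{Topic: "example-topic"}, func(_ context.Context, m *zkafka.Message) error {
		t.Logf("processed message at offset %d", m.Offset)
		return nil
	})

	// Run exits once ctx is done; no real broker is involved.
	require.NoError(t, w.Run(ctx, nil))
}
```

This mirrors the conversions in `test/worker_test.go` above, where `MockClientProvider` expectations are replaced by plain `FakeClient` values.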
func (f WorkFactory) Create(topicConfig ConsumerTopicConfig, processor processor, options ...WorkOption) *Work { work := &Work{ @@ -825,3 +839,13 @@ func (c *delayCalculator) remaining(targetDelay time.Duration, msgTimeStamp time // this piece makes sure the return isn't possibly greater than the target return min(targetDelay-observedDelay, targetDelay) } + +var _ processor = (*processorAdapter)(nil) + +type processorAdapter struct { + p func(_ context.Context, _ *Message) error +} + +func (a processorAdapter) Process(ctx context.Context, message *Message) error { + return a.p(ctx, message) +} diff --git a/work_test.go b/work_test.go index 9eb7024..1dfc6b5 100644 --- a/work_test.go +++ b/work_test.go @@ -71,7 +71,7 @@ func TestWork_WithOptions(t *testing.T) { tp := noop.TracerProvider{} propagator := propagation.TraceContext{} - wf := NewWorkFactory(mockClientProvider{}, WithTracerProvider(tp), WithTextMapPropagator(propagator)) + wf := NewWorkFactory(FakeClient{}, WithTracerProvider(tp), WithTextMapPropagator(propagator)) work := wf.Create(ConsumerTopicConfig{}, &timeDelayProcessor{}) @@ -741,7 +741,7 @@ func Fuzz_AnySpeedupInputAlwaysCreatesABufferedChannel(f *testing.F) { f.Add(uint16(9)) f.Fuzz(func(t *testing.T, speedup uint16) { - wf := NewWorkFactory(mockClientProvider{}) + wf := NewWorkFactory(FakeClient{}) p := timeDelayProcessor{} w := wf.Create(ConsumerTopicConfig{}, &p, Speedup(speedup)) require.Greater(t, cap(w.messageBuffer), 0) @@ -783,20 +783,6 @@ func (m *timeDelayProcessor) Process(_ context.Context, message *Message) error return nil } -type mockClientProvider struct{} - -func (mockClientProvider) Reader(ctx context.Context, topicConfig ConsumerTopicConfig, opts ...ReaderOption) (Reader, error) { - return nil, nil -} - -func (mockClientProvider) Writer(ctx context.Context, topicConfig ProducerTopicConfig, opts ...WriterOption) (Writer, error) { - return nil, nil -} - -func (mockClientProvider) Close() error { - return nil -} - func assertContains(t *testing.T, wantIn kafka.TopicPartition, options []kafka.TopicPartition) { t.Helper() for _, want := range options { @@ -815,3 +801,14 @@ type workSettings struct { func (w *workSettings) ShutdownSig() <-chan struct{} { return w.shutdownSig } + +type fakeProcessor struct { + process func(context.Context, *Message) error +} + +func (p *fakeProcessor) Process(ctx context.Context, msg *Message) error { + if p.process != nil { + return p.process(ctx, msg) + } + return nil +} diff --git a/writer.go b/writer.go index 3558f91..4b706a1 100644 --- a/writer.go +++ b/writer.go @@ -46,7 +46,7 @@ type KWriter struct { mu sync.Mutex producer KafkaProducer topicConfig ProducerTopicConfig - fmtter Formatter + formatter kFormatter logger Logger tracer trace.Tracer p propagation.TextMapPropagator @@ -59,22 +59,51 @@ type keyValuePair struct { value any } -func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentProducerProvider) (*KWriter, error) { - confluentConfig := makeProducerConfig(conf, topicConfig) - p, err := producer(confluentConfig) +type writerArgs struct { + cfg Config + pCfg ProducerTopicConfig + producerProvider confluentProducerProvider + f kFormatter + l Logger + t trace.Tracer + p propagation.TextMapPropagator + hooks LifecycleHooks + opts []WriterOption +} + +func newWriter(args writerArgs) (*KWriter, error) { + conf := args.cfg + topicConfig := args.pCfg + producer := args.producerProvider + formatter := args.f + + confluentConfig, err := makeProducerConfig(conf, topicConfig) if err != nil { return nil, 
err } - fmtter, err := getFormatter(topicConfig) + + p, err := producer(confluentConfig) if err != nil { return nil, err } - return &KWriter{ + + w := &KWriter{ producer: p, - fmtter: fmtter, topicConfig: topicConfig, - logger: NoopLogger{}, - }, nil + formatter: formatter, + logger: args.l, + tracer: args.t, + p: args.p, + lifecycle: args.hooks, + } + s := WriterSettings{} + for _, opt := range args.opts { + opt(&s) + } + if s.f != nil { + w.formatter = s.f + } + return w, nil } // Write sends messages to kafka with message key set as nil. @@ -191,17 +220,25 @@ func (w *KWriter) startSpan(ctx context.Context, msg *kafka.Message) spanWrapper } func (w *KWriter) write(ctx context.Context, msg keyValuePair, opts ...WriteOption) (Response, error) { - if w.fmtter == nil { - return Response{}, errors.New("formatter is not supplied to produce kafka message") - } - value, err := w.fmtter.Marshall(msg.value) + value, err := w.marshall(ctx, msg.value, w.topicConfig.SchemaRegistry.Serialization.Schema) if err != nil { - return Response{}, fmt.Errorf("failed to marshall producer message: %w", err) + return Response{}, err } return w.WriteRaw(ctx, msg.key, value, opts...) } +func (w *KWriter) marshall(_ context.Context, value any, schema string) ([]byte, error) { + if w.formatter == nil { + return nil, errors.New("formatter or confluent formatter is not supplied to produce kafka message") + } + return w.formatter.marshall(marshReq{ + topic: w.topicConfig.Topic, + subject: value, + schema: schema, + }) +} + // Close terminates the writer gracefully and mark it as closed func (w *KWriter) Close() { w.mu.Lock() @@ -210,14 +247,18 @@ func (w *KWriter) Close() { w.isClosed = true } +type WriterSettings struct { + f kFormatter +} + // WriterOption is a function that modify the writer configurations -type WriterOption func(*KWriter) +type WriterOption func(*WriterSettings) // WFormatterOption sets the formatter for this writer -func WFormatterOption(fmtter Formatter) WriterOption { - return func(w *KWriter) { - if fmtter != nil { - w.fmtter = fmtter +func WFormatterOption(f Formatter) WriterOption { + return func(s *WriterSettings) { + if f != nil { + s.f = zfmtShim{F: f} } } } diff --git a/writer_test.go b/writer_test.go index 0fc56b7..dd56bb1 100644 --- a/writer_test.go +++ b/writer_test.go @@ -36,7 +36,7 @@ func TestWriter_Write(t *testing.T) { type fields struct { Mutex *sync.Mutex Producer KafkaProducer - fmt zfmt.Formatter + fmt kFormatter } type args struct { ctx context.Context @@ -50,8 +50,10 @@ func TestWriter_Write(t *testing.T) { wantErr bool }{ { - name: "formatter check at minimum", - fields: fields{}, + name: "formatter check at minimum", + fields: fields{ + fmt: nil, + }, args: args{ctx: context.TODO(), value: "1"}, want: Response{Partition: 0, Offset: 0}, wantErr: true, @@ -59,7 +61,7 @@ func TestWriter_Write(t *testing.T) { { name: "has formatter and producer", fields: fields{ - fmt: &zfmt.StringFormatter{}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, Producer: p, }, args: args{ctx: context.TODO(), value: "1"}, @@ -68,7 +70,7 @@ func TestWriter_Write(t *testing.T) { { name: "has formatter, producer, incompatible message type", fields: fields{ - fmt: &zfmt.StringFormatter{}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, Producer: p, }, args: args{ctx: context.TODO(), value: 5}, @@ -81,11 +83,11 @@ func TestWriter_Write(t *testing.T) { defer recoverThenFail(t) w := &KWriter{ - producer: tt.fields.Producer, - fmtter: tt.fields.fmt, - logger: NoopLogger{}, - tracer: noop.TracerProvider{}.Tracer(""), 
- p: propagation.TraceContext{}, + producer: tt.fields.Producer, + formatter: tt.fields.fmt, + logger: NoopLogger{}, + tracer: noop.TracerProvider{}.Tracer(""), + p: propagation.TraceContext{}, } got, err := w.Write(tt.args.ctx, tt.args.value) if tt.wantErr { @@ -162,7 +164,7 @@ func TestWriter_WriteKey(t *testing.T) { w := &KWriter{ producer: tt.fields.Producer, topicConfig: tt.fields.conf, - fmtter: tt.fields.fmt, + formatter: zfmtShim{tt.fields.fmt}, isClosed: tt.fields.isClosed, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), @@ -200,7 +202,7 @@ func TestWriter_WriteKeyReturnsImmediateError(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{}, isClosed: false, - fmtter: &zfmt.JSONFormatter{}, + formatter: zfmtShim{&zfmt.JSONFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -238,7 +240,7 @@ func TestWriter_WritesMetrics(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: &zfmt.StringFormatter{}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -301,11 +303,11 @@ func TestWriter_WriteSpecialCase(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { w := &KWriter{ - producer: tt.fields.Producer, - fmtter: tt.fields.fmt, - logger: NoopLogger{}, - tracer: noop.TracerProvider{}.Tracer(""), - p: propagation.TraceContext{}, + producer: tt.fields.Producer, + formatter: zfmtShim{tt.fields.fmt}, + logger: NoopLogger{}, + tracer: noop.TracerProvider{}.Tracer(""), + p: propagation.TraceContext{}, } got, err := w.Write(tt.args.ctx, tt.args.value) if tt.wantErr { @@ -346,7 +348,7 @@ func TestWriter_PreWriteLifecycleHookCanAugmentHeaders(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: &zfmt.StringFormatter{}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -375,7 +377,7 @@ func TestWriter_WithHeadersWriteOptionCanAugmentHeaders(t *testing.T) { wr := &KWriter{ producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, - fmtter: &zfmt.StringFormatter{}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -433,7 +435,7 @@ func TestWriter_PreWriteLifecycleHookErrorDoesntHaltProcessing(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: &zfmt.StringFormatter{}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -504,6 +506,7 @@ func Test_newWriter(t *testing.T) { { name: "custom formatter, no error. 
It is implied that user will supply formatter later", args: args{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, topicConfig: ProducerTopicConfig{ Formatter: zfmt.FormatterType("custom"), }, @@ -511,16 +514,16 @@ func Test_newWriter(t *testing.T) { }, wantErr: false, }, - { - name: "invalid formatter", - args: args{ - producerP: defaultConfluentProducerProvider{}.NewProducer, - topicConfig: ProducerTopicConfig{ - Formatter: zfmt.FormatterType("invalid_fmt"), - }, - }, - wantErr: true, - }, + //{ + // name: "invalid formatter", + // args: args{ + // producerP: defaultConfluentProducerProvider{}.NewProducer, + // topicConfig: ProducerTopicConfig{ + // Formatter: zfmt.FormatterType("invalid_fmt"), + // }, + // }, + // wantErr: true, + //}, { name: "valid formatter but has error from confluent producer constructor", args: args{ @@ -529,8 +532,10 @@ func Test_newWriter(t *testing.T) { wantErr: true, }, { + name: "minimum config with formatter", args: args{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, producerP: defaultConfluentProducerProvider{}.NewProducer, topicConfig: ProducerTopicConfig{ Formatter: zfmt.StringFmt, @@ -542,7 +547,12 @@ func Test_newWriter(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { recoverThenFail(t) - w, err := newWriter(tt.args.conf, tt.args.topicConfig, tt.args.producerP) + args := writerArgs{ + cfg: tt.args.conf, + pCfg: tt.args.topicConfig, + producerProvider: tt.args.producerP, + } + w, err := newWriter(args) if tt.wantErr { require.Error(t, err, "expected error for newWriter()") } else { @@ -556,10 +566,11 @@ func Test_newWriter(t *testing.T) { func TestWriter_WithOptions(t *testing.T) { recoverThenFail(t) w := &KWriter{} - require.Nil(t, w.fmtter, "expected nil formatter") + require.Nil(t, w.formatter, "expected nil formatter") - WFormatterOption(&zfmt.StringFormatter{})(w) - require.NotNil(t, w.fmtter, "expected non-nil formatter") + settings := WriterSettings{} + WFormatterOption(&zfmt.StringFormatter{})(&settings) + require.NotNil(t, settings.f, "expected non-nil formatter") } func Test_writeAttributeCarrier_Set(t *testing.T) {