From c38e50956406b8abb34df51e8626cf27c65ac8be Mon Sep 17 00:00:00 2001 From: Jerome Bidault Date: Sat, 23 Sep 2023 18:03:05 +0700 Subject: [PATCH 1/2] Implemented recordNameStrategy for protobuf, JSON, and Avro in Schemaregistry --- examples/schemaregistry_example/avro/Makefile | 7 + .../avro/recordNameStrategyGenericAvro.go | 297 +++++++++++++++ .../avro/recordNameStrategySpecificAvro.go | 283 +++++++++++++++ .../schemaregistry_example/avro/schemas.avsc | 32 ++ .../avro/schemas/address.go | 173 +++++++++ .../avro/schemas/person.go | 173 +++++++++ .../avro/schemas/union_person_address.go | 145 ++++++++ .../avro/topicNameStrategyGenericAvro.go | 262 +++++++++++++ .../avro/topicNameStrategySpecificAvro.go | 258 +++++++++++++ .../docker-compose.yaml | 84 +++++ .../json/recordNameStrategy.go | 343 ++++++++++++++++++ .../json/topicNameStrategy.go | 261 +++++++++++++ .../protobuf/api/v1/proto/Address.pb.go | 152 ++++++++ .../protobuf/api/v1/proto/Address.proto | 10 + .../protobuf/api/v1/proto/Person.pb.go | 254 +++++++++++++ .../protobuf/api/v1/proto/Person.proto | 20 + .../protobuf/recordNameStrategy.go | 294 +++++++++++++++ .../protobuf/topicNameStrategy.go | 245 +++++++++++++ go.mod | 2 + go.sum | 5 + schemaregistry/mock_schemaregistry_client.go | 132 ++++++- schemaregistry/schemaregistry_client.go | 136 +++++-- schemaregistry/serde/avro/avro_generic.go | 190 ++++++++++ .../serde/avro/avro_generic_test.go | 306 ++++++++++++++++ schemaregistry/serde/avro/avro_specific.go | 244 +++++++++++++ .../serde/avro/avro_specific_test.go | 291 +++++++++++++++ schemaregistry/serde/config.go | 14 + .../serde/jsonschema/json_schema.go | 250 ++++++++++++- .../serde/jsonschema/json_schema_test.go | 313 ++++++++++++++++ schemaregistry/serde/protobuf/protobuf.go | 169 ++++++++- .../serde/protobuf/protobuf_test.go | 343 ++++++++++++++++++ schemaregistry/serde/serde.go | 61 +++- .../test/avro/recordname/advanced.go | 249 +++++++++++++ .../test/avro/recordname/basic_person.go | 182 ++++++++++ schemaregistry/test/avro/recordname/bytes.go | 86 +++++ .../test/avro/recordname/demo_schema.go | 260 +++++++++++++ .../test/avro/recordname/map_basic_person.go | 71 ++++ .../test/avro/recordname/union_long_null.go | 142 ++++++++ .../test/avro/recordname/union_string.go | 130 +++++++ .../test/proto/recordname/cycle.pb.go | 155 ++++++++ .../test/proto/recordname/cycle.proto | 9 + .../test/proto/recordname/example.pb.go | 233 ++++++++++++ .../test/proto/recordname/example.proto | 15 + 43 files changed, 7197 insertions(+), 84 deletions(-) create mode 100644 examples/schemaregistry_example/avro/Makefile create mode 100644 examples/schemaregistry_example/avro/recordNameStrategyGenericAvro.go create mode 100644 examples/schemaregistry_example/avro/recordNameStrategySpecificAvro.go create mode 100644 examples/schemaregistry_example/avro/schemas.avsc create mode 100644 examples/schemaregistry_example/avro/schemas/address.go create mode 100644 examples/schemaregistry_example/avro/schemas/person.go create mode 100644 examples/schemaregistry_example/avro/schemas/union_person_address.go create mode 100644 examples/schemaregistry_example/avro/topicNameStrategyGenericAvro.go create mode 100644 examples/schemaregistry_example/avro/topicNameStrategySpecificAvro.go create mode 100644 examples/schemaregistry_example/docker-compose.yaml create mode 100644 examples/schemaregistry_example/json/recordNameStrategy.go create mode 100644 examples/schemaregistry_example/json/topicNameStrategy.go create mode 100644 
examples/schemaregistry_example/protobuf/api/v1/proto/Address.pb.go create mode 100644 examples/schemaregistry_example/protobuf/api/v1/proto/Address.proto create mode 100644 examples/schemaregistry_example/protobuf/api/v1/proto/Person.pb.go create mode 100644 examples/schemaregistry_example/protobuf/api/v1/proto/Person.proto create mode 100644 examples/schemaregistry_example/protobuf/recordNameStrategy.go create mode 100644 examples/schemaregistry_example/protobuf/topicNameStrategy.go create mode 100644 schemaregistry/test/avro/recordname/advanced.go create mode 100644 schemaregistry/test/avro/recordname/basic_person.go create mode 100644 schemaregistry/test/avro/recordname/bytes.go create mode 100644 schemaregistry/test/avro/recordname/demo_schema.go create mode 100644 schemaregistry/test/avro/recordname/map_basic_person.go create mode 100644 schemaregistry/test/avro/recordname/union_long_null.go create mode 100644 schemaregistry/test/avro/recordname/union_string.go create mode 100644 schemaregistry/test/proto/recordname/cycle.pb.go create mode 100644 schemaregistry/test/proto/recordname/cycle.proto create mode 100644 schemaregistry/test/proto/recordname/example.pb.go create mode 100644 schemaregistry/test/proto/recordname/example.proto diff --git a/examples/schemaregistry_example/avro/Makefile b/examples/schemaregistry_example/avro/Makefile new file mode 100644 index 000000000..5f4d7c079 --- /dev/null +++ b/examples/schemaregistry_example/avro/Makefile @@ -0,0 +1,7 @@ +SCHEMA_PATH = $(shell pwd)/schemas +SCHEMA_FILE = schemas.avsc +SCHEMAS = schemas + +.PHONY: compile +compile: + gogen-avro --containers=false --sources-comment=false --short-unions=false --package=${SCHEMAS} ${SCHEMA_PATH} ${SCHEMA_FILE} diff --git a/examples/schemaregistry_example/avro/recordNameStrategyGenericAvro.go b/examples/schemaregistry_example/avro/recordNameStrategyGenericAvro.go new file mode 100644 index 000000000..0aca0db63 --- /dev/null +++ b/examples/schemaregistry_example/avro/recordNameStrategyGenericAvro.go @@ -0,0 +1,297 @@ +package main + +import ( + "fmt" + "os" + "reflect" + "strings" + + "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro" + "log" + "time" +) + +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 +) + +type Person struct { + Name string `avro:"name"` + Age int `avro:"age"` +} + +type Address struct { + Street string `avro:"street"` + City string `avro:"city"` +} + +// func init() { +// compiler.LoggingEnabled = true +// } + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. 
Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := Person{ + Name: "robert", + Age: 23, + } + + addr := Address{ + Street: "rue de la soif", + City: "Rennes", + } + + for { + offset, err := producer.ProduceMessage(msg, topic, reflect.TypeOf(msg).String()) + if err != nil { + log.Println("Error producing Message: ", err) + } + + offset, err = producer.ProduceMessage(addr, topic, reflect.TypeOf(addr).String()) + if err != nil { + log.Println("Error producing Message: ", err) + } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg interface{}, topic, subject string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := avro.NewGenericSerializer(c, serde.ValueSerde, avro.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg interface{}, topic, subject string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + payload, err := p.serializer.SerializeRecordName(msg) + // or payload, err := p.serializer.SerializeRecordName(msg, subject) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ +func consumer() { + consumer, err := NewConsumer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + err = consumer.Run(topic) + if err != nil { + log.Println("ConsumerRun Error: ", err) + } + +} + +// SRConsumer interface +type SRConsumer interface { + Run(topic string) error + Close() +} + +type srConsumer struct { + consumer *kafka.Consumer + deserializer *avro.GenericDeserializer +} + +// NewConsumer returns new consumer with schema registry +func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) { + c, err := kafka.NewConsumer(&kafka.ConfigMap{ + "bootstrap.servers": kafkaURL, + "group.id": consumerGroupID, + "session.timeout.ms": defaultSessionTimeout, + "enable.auto.commit": false, + }) + if err != nil { + return nil, err + } + + sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + + d, err := avro.NewGenericDeserializer(sr, serde.ValueSerde, avro.NewDeserializerConfig()) 
+	if err != nil {
+		return nil, err
+	}
+	return &srConsumer{
+		consumer:     c,
+		deserializer: d,
+	}, nil
+}
+
+// RegisterMessageFactory returns a factory that maps a record's fully qualified
+// name to a pointer to the receiver object the deserializer unmarshals the payload into.
+func (c *srConsumer) RegisterMessageFactory() func(string, string) (interface{}, error) {
+	return func(subject string, name string) (interface{}, error) {
+		switch name {
+		case "main.Person":
+			return &Person{}, nil
+		case "main.Address":
+			return &Address{}, nil
+		}
+		return nil, fmt.Errorf("unknown record name %q", name)
+		// return receiver, nil
+	}
+}
+
+// func (c *srConsumer) RegisterMessageFactoryWithMap(subjectTypes map[string]interface{}) func(string, string) (interface{}, error) {
+// 	return func(subject string, name string) (interface{}, error) {
+// 		if tp, ok := subjectTypes[subject]; !ok {
+// 			return nil, errors.New("invalid receiver")
+// 		} else {
+// 			return tp, nil
+// 		}
+// 	}
+// }
+
+// Run consumer
+// func (c *srConsumer) Run(messageType protoreflect.MessageType, topic string) error {
+func (c *srConsumer) Run(topic string) error {
+	if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil {
+		return err
+	}
+
+	// case DeserializeRecordName (registering the MessageFactory is optional)
+	c.deserializer.MessageFactory = c.RegisterMessageFactory()
+
+	// case DeserializeIntoRecordName (no MessageFactory needed)
+	ref := make(map[string]interface{})
+	px := Person{}
+	addr := Address{}
+	// the Avro schema has no 'namespace' here, so the fully qualified record name defaults to the Go type name (e.g. main.Person)
+	msgFQN := reflect.TypeOf(px).String()
+	addrFQN := reflect.TypeOf(addr).String()
+	ref[msgFQN] = &px
+	ref[addrFQN] = &addr
+
+	for {
+		kafkaMsg, err := c.consumer.ReadMessage(noTimeout)
+		if err != nil {
+			return err
+		}
+
+		// get a msg of type interface{}
+		msg, err := c.deserializer.DeserializeRecordName(kafkaMsg.Value)
+		if err != nil {
+			return err
+		}
+		if _, ok := msg.(*Person); ok {
+			fmt.Println("Person: ", msg.(*Person).Name, " - ", msg.(*Person).Age)
+		} else {
+			fmt.Println("Address: ", msg.(*Address).City, " - ", msg.(*Address).Street)
+		}
+		// c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset))
+
+		// // use deserializer.DeserializeIntoRecordName to get a struct back
+		// err = c.deserializer.DeserializeIntoRecordName(ref, kafkaMsg.Value)
+		// if err != nil {
+		// 	return err
+		// }
+		// fmt.Println("See the Person struct: ", px.Name, " - ", px.Age)
+		// fmt.Println("See the Address struct: ", addr.Street, " - ", addr.City)
+
+		if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil {
+			return err
+		}
+	}
+}
+
+func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) {
+	fmt.Printf("message %v with offset %d\n", message, offset)
+}
+
+// Close all connections
+func (c *srConsumer) Close() {
+	if err := c.consumer.Close(); err != nil {
+		log.Fatal(err)
+	}
+	c.deserializer.Close()
+}
diff --git a/examples/schemaregistry_example/avro/recordNameStrategySpecificAvro.go b/examples/schemaregistry_example/avro/recordNameStrategySpecificAvro.go
new file mode 100644
index 000000000..6ef7e7cab
--- /dev/null
+++ b/examples/schemaregistry_example/avro/recordNameStrategySpecificAvro.go
@@ -0,0 +1,283 @@
+package main
+
+import (
+	"fmt"
+	"os"
+	"strings"
+
+	avSch "avroexample/schemas"
+	"github.com/confluentinc/confluent-kafka-go/v2/kafka"
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro"
+	"log"
+	"time"
+)
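+
+// This example shows the record name strategy with gogen-avro generated types:
+// SerializeRecordName uses each record's fully qualified name
+// ("personrecord.Person", "addressrecord.Address") as the subject instead of
+// the "<topic>-value" subject, so both record types can be produced to the
+// same topic. The consumer resolves the concrete type either through a
+// MessageFactory (DeserializeRecordName) or through a receiver map keyed by
+// the same names (DeserializeIntoRecordName).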
+ +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 +) + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := &avSch.Person{ + Name: "robert", + Age: 23, + } + + addr := &avSch.Address{ + Street: "rue de la soif", + City: "Rennes", + } + + for { + offset, err := producer.ProduceMessage(msg, topic, "personrecord.Person") + if err != nil { + log.Println("Error producing Message: ", err) + } + + offset, err = producer.ProduceMessage(addr, topic, "addressrecord.Address") + if err != nil { + log.Println("Error producing Message: ", err) + } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg interface{}, topic, subject string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := avro.NewSpecificSerializer(c, serde.ValueSerde, avro.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg interface{}, topic, subject string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + payload, err := p.serializer.SerializeRecordName(msg, subject) + // or payload, err := p.serializer.SerializeRecordName(msg) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ +func consumer() { + consumer, err := NewConsumer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + err = consumer.Run(topic) + if err != nil { + log.Println("ConsumerRun Error: ", err) + } + +} + +// SRConsumer interface +type SRConsumer interface { + Run(topic string) error + Close() +} + +type srConsumer struct { + consumer *kafka.Consumer + deserializer *avro.SpecificDeserializer +} + +// 
NewConsumer returns new consumer with schema registry +func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) { + c, err := kafka.NewConsumer(&kafka.ConfigMap{ + "bootstrap.servers": kafkaURL, + "group.id": consumerGroupID, + "session.timeout.ms": defaultSessionTimeout, + "enable.auto.commit": false, + }) + if err != nil { + return nil, err + } + + sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + + d, err := avro.NewSpecificDeserializer(sr, serde.ValueSerde, avro.NewDeserializerConfig()) + if err != nil { + return nil, err + } + return &srConsumer{ + consumer: c, + deserializer: d, + }, nil +} + +// RegisterMessageFactory Pass a pointer to the receiver object for the SR to unmarshal the payload into +func (c *srConsumer) RegisterMessageFactory() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case "personrecord.Person": + return &avSch.Person{}, nil + case "addressrecord.Address": + return &avSch.Address{}, nil + } + return nil, fmt.Errorf("Err RegisterMessageFactory") + // return receiver, nil + } +} + +// // NOTE doing like so make sure the event subject match the expected receiver's subject +// func (c *srConsumer) RegisterMessageFactoryWithMap(subjectTypes map[string]interface{}) func(string, string) (interface{}, error) { +// return func(subject string, name string) (interface{}, error) { +// if tp, ok := subjectTypes[name]; !ok { +// return nil, errors.New("Invalid receiver") +// } else { +// return tp, nil +// } +// } +// } + +// Run consumer +// func (c *srConsumer) Run(messageType protoreflect.MessageType, topic string) error { +func (c *srConsumer) Run(topic string) error { + if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil { + return err + } + + // case DeserializeRecordName + // c.deserializer.MessageFactory = c.RegisterMessageFactory() + + // case DeserializeIntoRecordName(no need RegisterMessageFactory) + ref := make(map[string]interface{}) + px := avSch.Person{} + addr := avSch.Address{} + msgFQN := "personrecord.Person" + addrFQN := "addressrecord.Address" + ref[msgFQN] = &px + ref[addrFQN] = &addr + + for { + kafkaMsg, err := c.consumer.ReadMessage(noTimeout) + if err != nil { + return err + } + + // get a msg of type interface{} + msg, err := c.deserializer.DeserializeRecordName(kafkaMsg.Value) + if err != nil { + return err + } + // if _, ok := msg.(*avSch.Person); ok { + // fmt.Println("Person: ", msg.(*avSch.Person).Name, " - ", msg.(*avSch.Person).Age) + // } else { + // fmt.Println("Address: ", msg.(*avSch.Address).City, " - ", msg.(*avSch.Address).Street) + // } + c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset)) + + // // use deserializer.DeserializeInto to get a struct back + // err = c.deserializer.DeserializeIntoRecordName(ref, kafkaMsg.Value) + // if err != nil { + // return err + // } + // fmt.Println("See the Person struct: ", px.Name, " - ", px.Age) + // fmt.Println("See the Address struct: ", addr.Street, " - ", addr.City) + + if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil { + return err + } + } +} + +func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) { + fmt.Printf("message %v with offset %d\n", message, offset) + +} + +// Close all connections +func (c *srConsumer) Close() { + if err := c.consumer.Close(); err != nil { + log.Fatal(err) + } + c.deserializer.Close() +} diff --git 
a/examples/schemaregistry_example/avro/schemas.avsc b/examples/schemaregistry_example/avro/schemas.avsc new file mode 100644 index 000000000..e6de3cb50 --- /dev/null +++ b/examples/schemaregistry_example/avro/schemas.avsc @@ -0,0 +1,32 @@ +[ + { + "type": "record", + "name": "Person", + "namespace": "personrecord", + "fields": [ + { + "name": "name", + "type": "string" + }, + { + "name": "age", + "type": "int" + } + ] + }, + { + "type": "record", + "name": "Address", + "namespace": "addressrecord", + "fields": [ + { + "name": "street", + "type": "string" + }, + { + "name": "city", + "type": "string" + } + ] + } +] diff --git a/examples/schemaregistry_example/avro/schemas/address.go b/examples/schemaregistry_example/avro/schemas/address.go new file mode 100644 index 000000000..aba425c04 --- /dev/null +++ b/examples/schemaregistry_example/avro/schemas/address.go @@ -0,0 +1,173 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. +package schemas + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +var _ = fmt.Printf + +type Address struct { + Street string `json:"street"` + + City string `json:"city"` +} + +const AddressAvroCRC64Fingerprint = "\xefy\xd9\xf4\xd3\xeb.Y" + +func NewAddress() Address { + r := Address{} + return r +} + +func DeserializeAddress(r io.Reader) (Address, error) { + t := NewAddress() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func DeserializeAddressFromSchema(r io.Reader, schema string) (Address, error) { + t := NewAddress() + + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func writeAddress(r Address, w io.Writer) error { + var err error + err = vm.WriteString(r.Street, w) + if err != nil { + return err + } + err = vm.WriteString(r.City, w) + if err != nil { + return err + } + return err +} + +func (r Address) Serialize(w io.Writer) error { + return writeAddress(r, w) +} + +func (r Address) Schema() string { + return "{\"fields\":[{\"name\":\"street\",\"type\":\"string\"},{\"name\":\"city\",\"type\":\"string\"}],\"name\":\"addressrecord.Address\",\"type\":\"record\"}" +} + +func (r Address) SchemaName() string { + return "addressrecord.Address" +} + +func (_ Address) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ Address) SetInt(v int32) { panic("Unsupported operation") } +func (_ Address) SetLong(v int64) { panic("Unsupported operation") } +func (_ Address) SetFloat(v float32) { panic("Unsupported operation") } +func (_ Address) SetDouble(v float64) { panic("Unsupported operation") } +func (_ Address) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ Address) SetString(v string) { panic("Unsupported operation") } +func (_ Address) SetUnionElem(v int64) { panic("Unsupported operation") } + +func (r *Address) Get(i int) types.Field { + switch i { + case 0: + w := types.String{Target: &r.Street} + + return w + + case 1: + w := types.String{Target: &r.City} + + return w + + } + panic("Unknown field index") +} + +func (r *Address) SetDefault(i int) { + switch i { + } + panic("Unknown field index") +} + +func (r *Address) NullField(i int) { + switch i { + } + panic("Not a nullable field index") +} + +func (_ Address) 
AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ Address) AppendArray() types.Field { panic("Unsupported operation") } +func (_ Address) HintSize(int) { panic("Unsupported operation") } +func (_ Address) Finalize() {} + +func (_ Address) AvroCRC64Fingerprint() []byte { + return []byte(AddressAvroCRC64Fingerprint) +} + +func (r Address) MarshalJSON() ([]byte, error) { + var err error + output := make(map[string]json.RawMessage) + output["street"], err = json.Marshal(r.Street) + if err != nil { + return nil, err + } + output["city"], err = json.Marshal(r.City) + if err != nil { + return nil, err + } + return json.Marshal(output) +} + +func (r *Address) UnmarshalJSON(data []byte) error { + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + + var val json.RawMessage + val = func() json.RawMessage { + if v, ok := fields["street"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Street); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for street") + } + val = func() json.RawMessage { + if v, ok := fields["city"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.City); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for city") + } + return nil +} diff --git a/examples/schemaregistry_example/avro/schemas/person.go b/examples/schemaregistry_example/avro/schemas/person.go new file mode 100644 index 000000000..bc730a600 --- /dev/null +++ b/examples/schemaregistry_example/avro/schemas/person.go @@ -0,0 +1,173 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. +package schemas + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +var _ = fmt.Printf + +type Person struct { + Name string `json:"name"` + + Age int32 `json:"age"` +} + +const PersonAvroCRC64Fingerprint = "\xd3\x16^\a3\xbeM\xd7" + +func NewPerson() Person { + r := Person{} + return r +} + +func DeserializePerson(r io.Reader) (Person, error) { + t := NewPerson() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func DeserializePersonFromSchema(r io.Reader, schema string) (Person, error) { + t := NewPerson() + + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func writePerson(r Person, w io.Writer) error { + var err error + err = vm.WriteString(r.Name, w) + if err != nil { + return err + } + err = vm.WriteInt(r.Age, w) + if err != nil { + return err + } + return err +} + +func (r Person) Serialize(w io.Writer) error { + return writePerson(r, w) +} + +func (r Person) Schema() string { + return "{\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"age\",\"type\":\"int\"}],\"name\":\"personrecord.Person\",\"type\":\"record\"}" +} + +func (r Person) SchemaName() string { + return "personrecord.Person" +} + +func (_ Person) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ Person) SetInt(v int32) { panic("Unsupported operation") } +func (_ Person) SetLong(v int64) { panic("Unsupported operation") } +func (_ Person) SetFloat(v float32) { panic("Unsupported 
operation") } +func (_ Person) SetDouble(v float64) { panic("Unsupported operation") } +func (_ Person) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ Person) SetString(v string) { panic("Unsupported operation") } +func (_ Person) SetUnionElem(v int64) { panic("Unsupported operation") } + +func (r *Person) Get(i int) types.Field { + switch i { + case 0: + w := types.String{Target: &r.Name} + + return w + + case 1: + w := types.Int{Target: &r.Age} + + return w + + } + panic("Unknown field index") +} + +func (r *Person) SetDefault(i int) { + switch i { + } + panic("Unknown field index") +} + +func (r *Person) NullField(i int) { + switch i { + } + panic("Not a nullable field index") +} + +func (_ Person) AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ Person) AppendArray() types.Field { panic("Unsupported operation") } +func (_ Person) HintSize(int) { panic("Unsupported operation") } +func (_ Person) Finalize() {} + +func (_ Person) AvroCRC64Fingerprint() []byte { + return []byte(PersonAvroCRC64Fingerprint) +} + +func (r Person) MarshalJSON() ([]byte, error) { + var err error + output := make(map[string]json.RawMessage) + output["name"], err = json.Marshal(r.Name) + if err != nil { + return nil, err + } + output["age"], err = json.Marshal(r.Age) + if err != nil { + return nil, err + } + return json.Marshal(output) +} + +func (r *Person) UnmarshalJSON(data []byte) error { + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + + var val json.RawMessage + val = func() json.RawMessage { + if v, ok := fields["name"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Name); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for name") + } + val = func() json.RawMessage { + if v, ok := fields["age"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Age); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for age") + } + return nil +} diff --git a/examples/schemaregistry_example/avro/schemas/union_person_address.go b/examples/schemaregistry_example/avro/schemas/union_person_address.go new file mode 100644 index 000000000..087ad478d --- /dev/null +++ b/examples/schemaregistry_example/avro/schemas/union_person_address.go @@ -0,0 +1,145 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. 
+package schemas + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +type UnionPersonAddressTypeEnum int + +const ( + UnionPersonAddressTypeEnumPerson UnionPersonAddressTypeEnum = 0 + + UnionPersonAddressTypeEnumAddress UnionPersonAddressTypeEnum = 1 +) + +type UnionPersonAddress struct { + Person Person + Address Address + UnionType UnionPersonAddressTypeEnum +} + +func writeUnionPersonAddress(r UnionPersonAddress, w io.Writer) error { + + err := vm.WriteLong(int64(r.UnionType), w) + if err != nil { + return err + } + switch r.UnionType { + case UnionPersonAddressTypeEnumPerson: + return writePerson(r.Person, w) + case UnionPersonAddressTypeEnumAddress: + return writeAddress(r.Address, w) + } + return fmt.Errorf("invalid value for UnionPersonAddress") +} + +func NewUnionPersonAddress() UnionPersonAddress { + return UnionPersonAddress{} +} + +func (r UnionPersonAddress) Serialize(w io.Writer) error { + return writeUnionPersonAddress(r, w) +} + +func DeserializeUnionPersonAddress(r io.Reader) (UnionPersonAddress, error) { + t := NewUnionPersonAddress() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + + if err != nil { + return t, err + } + return t, err +} + +func DeserializeUnionPersonAddressFromSchema(r io.Reader, schema string) (UnionPersonAddress, error) { + t := NewUnionPersonAddress() + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + + if err != nil { + return t, err + } + return t, err +} + +func (r UnionPersonAddress) Schema() string { + return "[{\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"age\",\"type\":\"int\"}],\"name\":\"Person\",\"namespace\":\"personrecord\",\"type\":\"record\"},{\"fields\":[{\"name\":\"street\",\"type\":\"string\"},{\"name\":\"city\",\"type\":\"string\"}],\"name\":\"Address\",\"namespace\":\"addressrecord\",\"type\":\"record\"}]" +} + +func (_ UnionPersonAddress) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ UnionPersonAddress) SetInt(v int32) { panic("Unsupported operation") } +func (_ UnionPersonAddress) SetFloat(v float32) { panic("Unsupported operation") } +func (_ UnionPersonAddress) SetDouble(v float64) { panic("Unsupported operation") } +func (_ UnionPersonAddress) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ UnionPersonAddress) SetString(v string) { panic("Unsupported operation") } + +func (r *UnionPersonAddress) SetLong(v int64) { + + r.UnionType = (UnionPersonAddressTypeEnum)(v) +} + +func (r *UnionPersonAddress) Get(i int) types.Field { + + switch i { + case 0: + r.Person = NewPerson() + return &types.Record{Target: (&r.Person)} + case 1: + r.Address = NewAddress() + return &types.Record{Target: (&r.Address)} + } + panic("Unknown field index") +} +func (_ UnionPersonAddress) NullField(i int) { panic("Unsupported operation") } +func (_ UnionPersonAddress) HintSize(i int) { panic("Unsupported operation") } +func (_ UnionPersonAddress) SetDefault(i int) { panic("Unsupported operation") } +func (_ UnionPersonAddress) AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ UnionPersonAddress) AppendArray() types.Field { panic("Unsupported operation") } +func (_ UnionPersonAddress) Finalize() {} + +func (r UnionPersonAddress) 
MarshalJSON() ([]byte, error) { + + switch r.UnionType { + case UnionPersonAddressTypeEnumPerson: + return json.Marshal(map[string]interface{}{"personrecord.Person": r.Person}) + case UnionPersonAddressTypeEnumAddress: + return json.Marshal(map[string]interface{}{"addressrecord.Address": r.Address}) + } + return nil, fmt.Errorf("invalid value for UnionPersonAddress") +} + +func (r *UnionPersonAddress) UnmarshalJSON(data []byte) error { + + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + if len(fields) > 1 { + return fmt.Errorf("more than one type supplied for union") + } + if value, ok := fields["personrecord.Person"]; ok { + r.UnionType = 0 + return json.Unmarshal([]byte(value), &r.Person) + } + if value, ok := fields["addressrecord.Address"]; ok { + r.UnionType = 1 + return json.Unmarshal([]byte(value), &r.Address) + } + return fmt.Errorf("invalid value for UnionPersonAddress") +} diff --git a/examples/schemaregistry_example/avro/topicNameStrategyGenericAvro.go b/examples/schemaregistry_example/avro/topicNameStrategyGenericAvro.go new file mode 100644 index 000000000..553255bd2 --- /dev/null +++ b/examples/schemaregistry_example/avro/topicNameStrategyGenericAvro.go @@ -0,0 +1,262 @@ +package main + +import ( + "fmt" + "os" + "strings" + + "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro" + "log" + "time" +) + +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 +) + +type Person struct { + Name string `avro:"name"` + Age int `avro:"age"` +} + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. 
Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := &Person{ + Name: "robert", + Age: 23, + } + + for { + offset, err := producer.ProduceMessage(msg, topic) + if err != nil { + log.Println("Error producing Message: ", err) + } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg interface{}, topic string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := avro.NewGenericSerializer(c, serde.ValueSerde, avro.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg interface{}, topic string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + payload, err := p.serializer.Serialize(topic, msg) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ +func consumer() { + consumer, err := NewConsumer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + err = consumer.Run(topic) + if err != nil { + log.Println("ConsumerRun Error: ", err) + } + +} + +// SRConsumer interface +type SRConsumer interface { + Run(topic string) error + Close() +} + +type srConsumer struct { + consumer *kafka.Consumer + deserializer *avro.GenericDeserializer +} + +// NewConsumer returns new consumer with schema registry +func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) { + c, err := kafka.NewConsumer(&kafka.ConfigMap{ + "bootstrap.servers": kafkaURL, + "group.id": consumerGroupID, + "session.timeout.ms": defaultSessionTimeout, + "enable.auto.commit": false, + }) + if err != nil { + return nil, err + } + + sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + + d, err := avro.NewGenericDeserializer(sr, serde.ValueSerde, avro.NewDeserializerConfig()) + if err != nil { + return nil, err + } + return &srConsumer{ + consumer: c, + deserializer: d, + }, nil +} + +// RegisterMessageFactory Pass a pointer to the receiver object for the SR to unmarshal the payload into +func (c *srConsumer) RegisterMessageFactory(receiver interface{}) func(string, string) (interface{}, error) { + return 
func(subject string, name string) (interface{}, error) { + return receiver, nil + } +} + +// NOTE doing like so make sure the event subject match the expected receiver's subject +func (c *srConsumer) RegisterMessageFactoryWithMap(subjectTypes map[string]interface{}) func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + // !!! in avro name is the object Name(Person in this ex) + if tp, ok := subjectTypes[subject]; !ok { + return nil, fmt.Errorf("Invalid receiver") + } else { + return tp, nil + } + } +} + +// Run consumer +// func (c *srConsumer) Run(messageType protoreflect.MessageType, topic string) error { +func (c *srConsumer) Run(topic string) error { + if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil { + return err + } + + // receivers := make(map[string]interface{}) + // receivers[fmt.Sprintf("%v-value", topic)] = &Person{} + // c.deserializer.MessageFactory = c.RegisterMessageFactoryWithMap(receivers) + + // c.deserializer.MessageFactory = c.RegisterMessageFactory(&Person{}) + + for { + kafkaMsg, err := c.consumer.ReadMessage(noTimeout) + if err != nil { + return err + } + + // // get a msg of type interface{} + // msg, err := c.deserializer.Deserialize(topic, kafkaMsg.Value) + // if err != nil { + // return err + // } + // if _, ok := msg.(*Person); ok { + // fmt.Println("Person: ", msg.(*Person).Name, " - ", msg.(*Person).Age) + // } + // c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset)) + + // use deserializer.DeserializeInto to get a struct back + person := &Person{} + err = c.deserializer.DeserializeInto(topic, kafkaMsg.Value, person) + if err != nil { + return err + } + fmt.Println("See the struct: ", person.Name, " - ", person.Age) + + if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil { + return err + } + } +} + +func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) { + fmt.Printf("message %v with offset %d\n", message, offset) + +} + +// Close all connections +func (c *srConsumer) Close() { + if err := c.consumer.Close(); err != nil { + log.Fatal(err) + } + c.deserializer.Close() +} diff --git a/examples/schemaregistry_example/avro/topicNameStrategySpecificAvro.go b/examples/schemaregistry_example/avro/topicNameStrategySpecificAvro.go new file mode 100644 index 000000000..62911fbce --- /dev/null +++ b/examples/schemaregistry_example/avro/topicNameStrategySpecificAvro.go @@ -0,0 +1,258 @@ +package main + +import ( + "fmt" + "os" + "strings" + + avSch "avroexample/schemas" + "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro" + "log" + "time" +) + +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 +) + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. 
Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := &avSch.Person{ + Name: "robert", + Age: 23, + } + + for { + offset, err := producer.ProduceMessage(msg, topic) + if err != nil { + log.Println("Error producing Message: ", err) + } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg interface{}, topic string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := avro.NewSpecificSerializer(c, serde.ValueSerde, avro.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg interface{}, topic string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + payload, err := p.serializer.Serialize(topic, msg) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ +func consumer() { + consumer, err := NewConsumer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + err = consumer.Run(topic) + if err != nil { + log.Println("ConsumerRun Error: ", err) + } + +} + +// SRConsumer interface +type SRConsumer interface { + Run(topic string) error + Close() +} + +type srConsumer struct { + consumer *kafka.Consumer + deserializer *avro.SpecificDeserializer +} + +// NewConsumer returns new consumer with schema registry +func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) { + c, err := kafka.NewConsumer(&kafka.ConfigMap{ + "bootstrap.servers": kafkaURL, + "group.id": consumerGroupID, + "session.timeout.ms": defaultSessionTimeout, + "enable.auto.commit": false, + }) + if err != nil { + return nil, err + } + + sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + + d, err := avro.NewSpecificDeserializer(sr, serde.ValueSerde, avro.NewDeserializerConfig()) + if err != nil { + return nil, err + } + return &srConsumer{ + consumer: c, + deserializer: d, + }, nil +} + +// RegisterMessageFactory Pass a pointer to the receiver object for the SR to unmarshal the payload into +func (c *srConsumer) RegisterMessageFactory(receiver interface{}) func(string, string) (interface{}, error) { + return 
func(subject string, name string) (interface{}, error) { + return receiver, nil + } +} + +// NOTE doing like so make sure the event subject match the expected receiver's subject +func (c *srConsumer) RegisterMessageFactoryWithMap(subjectTypes map[string]interface{}) func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + // !!! in avro name is the object Name(Person in this ex) + if tp, ok := subjectTypes[subject]; !ok { + return nil, fmt.Errorf("Invalid receiver") + } else { + return tp, nil + } + } +} + +// Run consumer +// func (c *srConsumer) Run(messageType protoreflect.MessageType, topic string) error { +func (c *srConsumer) Run(topic string) error { + if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil { + return err + } + + receivers := make(map[string]interface{}) + receivers[fmt.Sprintf("%v-value", topic)] = &avSch.Person{} + c.deserializer.MessageFactory = c.RegisterMessageFactoryWithMap(receivers) + + // c.deserializer.MessageFactory = c.RegisterMessageFactory(&avSch.Person{}) + + for { + kafkaMsg, err := c.consumer.ReadMessage(noTimeout) + if err != nil { + return err + } + + // get a msg of type interface{} + msg, err := c.deserializer.Deserialize(topic, kafkaMsg.Value) + if err != nil { + return err + } + if _, ok := msg.(*avSch.Person); ok { + fmt.Println("Person: ", msg.(*avSch.Person).Name, " - ", msg.(*avSch.Person).Age) + } + // c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset)) + + // // use deserializer.DeserializeInto to get a struct back + // person := &avSch.Person{} + // err = c.deserializer.DeserializeInto(topic, kafkaMsg.Value, person) + // if err != nil { + // return err + // } + // fmt.Println("See the struct: ", person.Name, " - ", person.Age) + + if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil { + return err + } + } +} + +func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) { + fmt.Printf("message %v with offset %d\n", message, offset) + +} + +// Close all connections +func (c *srConsumer) Close() { + if err := c.consumer.Close(); err != nil { + log.Fatal(err) + } + c.deserializer.Close() +} diff --git a/examples/schemaregistry_example/docker-compose.yaml b/examples/schemaregistry_example/docker-compose.yaml new file mode 100644 index 000000000..e67ed58d8 --- /dev/null +++ b/examples/schemaregistry_example/docker-compose.yaml @@ -0,0 +1,84 @@ +version: '2' + +# do not forget to create a kafka directory + +services: + zookeeper: + image: confluentinc/cp-zookeeper:7.3.2 + hostname: zookeeper + container_name: zookeeper + user: root + ports: + - "2181:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + volumes: + - ./kafka/zk-data:/var/lib/zookeeper/data + - ./kafka/zk-logs:/var/lib/zookeeper/log + + kafka: + image: confluentinc/cp-server:7.0.1 + hostname: kafka + container_name: kafka + user: root + depends_on: + - zookeeper + ports: + - "9092:9092" + - "29092:29092" + environment: + KAFKA_HEAP_OPTS: '-Xms1g -Xmx1g' + KAFKA_BROKER_ID: -1 + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: | + PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: | + PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092 + KAFKA_DEFAULT_REPLICATION_FACTOR: 1 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1 + 
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: http://schema-registry:8081 + KAFKA_NUM_RECOVERY_THREADS_PER_DATA_DIR: 2 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false' + LOG_DIR: /var/log/kafka + KAFKA_DATA_DIR: /var/lib/kafka + CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1 + CONFLUENT_METRICS_ENABLE: 'false' + CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous' + CONFLUENT_REPORTERS_TELEMETRY_AUTO_ENABLE: "false" + healthcheck: + test: ["CMD", "nc", "-vz", "localhost", "9092"] + interval: 30s + timeout: 10s + retries: 4 + volumes: + - ./kafka/kafka-data:/var/lib/kafka/data + - ./kafka/kafka-log:/var/log/kafka + + schema-registry: + image: confluentinc/cp-schema-registry:7.1.0 + hostname: schema-registry + container_name: schema-registry + user: root + depends_on: + kafka: + condition: service_healthy + ports: + - "8081:8081" + environment: + SCHEMA_REGISTRY_HOST_NAME: schema-registry + SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'kafka:9092' + SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081 + healthcheck: + test: nc -z localhost 8081 || exit -1 + interval: 10s + timeout: 5s + retries: 6 + start_period: 30s + volumes: + - ./kafka/registry:/etc/schema-registry + diff --git a/examples/schemaregistry_example/json/recordNameStrategy.go b/examples/schemaregistry_example/json/recordNameStrategy.go new file mode 100644 index 000000000..e1ae22a83 --- /dev/null +++ b/examples/schemaregistry_example/json/recordNameStrategy.go @@ -0,0 +1,343 @@ +package main + +import ( + "errors" + "fmt" + "os" + "reflect" + "strings" + + "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/jsonschema" + "log" + "time" +) + +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 +) + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. 
Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +type Person struct { + Name string `json:"name"` + Age int `json:"age"` +} + +type Address struct { + Street string `json:"street"` + City string `json:"city"` +} + +type EmbeddedPax struct { + Name string `json:"name"` + Age int `json:"age"` + Address Address `json:"address"` +} + +type Embedded struct { + Pax EmbeddedPax `json:"pax"` + Status string `json:"status"` +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := Person{ + Name: "robert", + Age: 23, + } + + addr := Address{ + Street: "myStreet", + City: "Berlin", + } + + // px := EmbeddedPax{ + // Name: "robert", + // Age: 23, + // Address: addr, + // } + + // embedded := Embedded{ + // Pax: px, + // Status: "embedded", + // } + + for { + offset, err := producer.ProduceMessage(msg, topic, reflect.TypeOf(msg).String()) + if err != nil { + log.Println("Error producing Message: ", err) + } + + offset, err = producer.ProduceMessage(addr, topic, reflect.TypeOf(addr).String()) + if err != nil { + log.Println("Error producing Message: ", err) + } + + // offset, err = producer.ProduceMessage(px, topic) + // if err != nil { + // log.Println("Error producing Message: ", err) + // } + + // offset, err = producer.ProduceMessage(embedded, topic) + // if err != nil { + // log.Println("Error producing Message: ", err) + // } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg interface{}, topic, subject string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := jsonschema.NewSerializer( + c, + serde.ValueSerde, + jsonschema.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg interface{}, topic, subject string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + typeName := reflect.TypeOf(msg).String() + // log.Println("recordnameStrategy.go - see the fully qualify class name of person", typeName) + + payload, err := p.serializer.SerializeRecordName(msg, typeName) + // or payload, err := p.serializer.SerializeRecordName(msg) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ +func consumer() { + 
consumer, err := NewConsumer(kafkaURL, srURL)
+	if err != nil {
+		log.Fatal("Cannot create consumer: ", err)
+	}
+
+	err = consumer.Run(topic)
+	if err != nil {
+		log.Println("ConsumerRun Error: ", err)
+	}
+}
+
+// SRConsumer interface
+type SRConsumer interface {
+	Run(topic string) error
+	Close()
+}
+
+type srConsumer struct {
+	consumer     *kafka.Consumer
+	deserializer *jsonschema.Deserializer
+}
+
+// NewConsumer returns a new consumer with schema registry
+func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) {
+	c, err := kafka.NewConsumer(&kafka.ConfigMap{
+		"bootstrap.servers":  kafkaURL,
+		"group.id":           consumerGroupID,
+		"session.timeout.ms": defaultSessionTimeout,
+		"enable.auto.commit": false,
+	})
+	if err != nil {
+		return nil, err
+	}
+
+	sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL))
+	if err != nil {
+		return nil, err
+	}
+
+	d, err := jsonschema.NewDeserializer(sr, serde.ValueSerde, jsonschema.NewDeserializerConfig())
+	if err != nil {
+		return nil, err
+	}
+	return &srConsumer{
+		consumer:     c,
+		deserializer: d,
+	}, nil
+}
+
+// func (c *srConsumer) RegisterMessageFactory(subjectTypes map[string]interface{}) func(string, string) (interface{}, error) {
+//
+// 	return func(subject string, name string) (interface{}, error) {
+//
+// 		// subjects have the form: package.Type-value
+// 		if v, ok := subjectTypes[subject]; !ok {
+// 			return nil, errors.New("invalid receiver")
+// 		} else {
+// 			return v, nil
+// 		}
+// 	}
+// }
+
+func (c *srConsumer) RegisterMessageFactory() func(string, string) (interface{}, error) {
+	return func(subject string, name string) (interface{}, error) {
+		switch name {
+		case "main.Person":
+			return &Person{}, nil
+		case "main.Address":
+			return &Address{}, nil
+		case "main.Embedded":
+			return &Embedded{}, nil
+		case "main.EmbeddedPax":
+			return &EmbeddedPax{}, nil
+		}
+		return nil, errors.New("no matching receiver")
+	}
+}
+
+// Run consumer
+// func (c *srConsumer) Run(messageType protoreflect.MessageType, topic string) error {
+func (c *srConsumer) Run(topic string) error {
+	if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil {
+		return err
+	}
+
+	// registering the MessageFactory is optional,
+	// but it is useful because it lets the event receiver be an initialized object
+	c.deserializer.MessageFactory = c.RegisterMessageFactory()
+
+	// case DeserializeIntoRecordName
+	px := Person{}
+	addr := Address{}
+	embPax := EmbeddedPax{}
+	emb := Embedded{}
+	msgFQN := reflect.TypeOf(px).String()
+	addrFQN := reflect.TypeOf(addr).String()
+	embPaxFQN := reflect.TypeOf(embPax).String()
+	embFQN := reflect.TypeOf(emb).String()
+	ref := make(map[string]interface{})
+	ref[msgFQN] = &px
+	ref[addrFQN] = &addr
+	ref[embPaxFQN] = &embPax
+	ref[embFQN] = &emb
+
+	for {
+		kafkaMsg, err := c.consumer.ReadMessage(noTimeout)
+		if err != nil {
+			return err
+		}
+
+		// get a msg of type interface{}
+		msg, err := c.deserializer.DeserializeRecordName(kafkaMsg.Value)
+		if err != nil {
+			return err
+		}
+		// c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset))
+		if _, ok := msg.(*Person); ok {
+			fmt.Println("Person: ", msg.(*Person).Name, " - ", msg.(*Person).Age)
+		} else {
+			fmt.Println("Address: ", msg.(*Address).City, " - ", msg.(*Address).Street)
+		}
+
+		// // use deserializer.DeserializeIntoRecordName to get a struct back
+		// err = c.deserializer.DeserializeIntoRecordName(ref, kafkaMsg.Value)
+		// if err != nil {
+		// 	return err
+		// }
+		// fmt.Println("message deserialized into: ", px.Name, " - ", addr.Street)
+
// // fmt.Println("message deserialized into EmbeddedPax: ", embPax.Name, " - ", embPax.Address.Street) + // // fmt.Println("message deserialized into Emb: ", emb.Pax.Name, " - ", emb.Pax.Address.Street, " - ", emb.Status) + + if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil { + return err + } + } +} + +func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) { + fmt.Printf("message %v with offset %d\n", message, offset) +} + +// Close all connections +func (c *srConsumer) Close() { + if err := c.consumer.Close(); err != nil { + log.Fatal(err) + } + c.deserializer.Close() +} diff --git a/examples/schemaregistry_example/json/topicNameStrategy.go b/examples/schemaregistry_example/json/topicNameStrategy.go new file mode 100644 index 000000000..e856b6cf6 --- /dev/null +++ b/examples/schemaregistry_example/json/topicNameStrategy.go @@ -0,0 +1,261 @@ +package main + +import ( + // "errors" + "fmt" + "os" + "strings" + + "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/jsonschema" + "log" + "time" +) + +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 +) + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. 
Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +type Person struct { + Name string `json:"name"` + Age int `json:"age"` +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := Person{ + Name: "robert", + Age: 23, + } + + for { + offset, err := producer.ProduceMessage(msg, topic) + if err != nil { + log.Println("Error producing Message: ", err) + } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg interface{}, topic string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := jsonschema.NewSerializer(c, serde.ValueSerde, jsonschema.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg interface{}, topic string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + payload, err := p.serializer.Serialize(topic, msg) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ +func consumer() { + consumer, err := NewConsumer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + err = consumer.Run(topic) + if err != nil { + log.Println("ConsumerRun Error: ", err) + } + +} + +// SRConsumer interface +type SRConsumer interface { + Run(topic string) error + Close() +} + +type srConsumer struct { + consumer *kafka.Consumer + deserializer *jsonschema.Deserializer +} + +// NewConsumer returns new consumer with schema registry +func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) { + c, err := kafka.NewConsumer(&kafka.ConfigMap{ + "bootstrap.servers": kafkaURL, + "group.id": consumerGroupID, + "session.timeout.ms": defaultSessionTimeout, + "enable.auto.commit": false, + }) + if err != nil { + return nil, err + } + + sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + + d, err := jsonschema.NewDeserializer(sr, serde.ValueSerde, jsonschema.NewDeserializerConfig()) + if err != nil { + return nil, err + } + return &srConsumer{ + consumer: c, + deserializer: d, + }, nil +} + +// // RegisterMessageFactory Pass a pointer to the receiver object for the SR to unmarshal the payload into +// func (c *srConsumer) 
RegisterMessageFactory(receiver interface{}) func(string, string) (interface{}, error) { +// return func(subject string, name string) (interface{}, error) { +// return receiver, nil +// } +// } + +// NOTE doing like so make sure the event subject match the expected receiver's subject +func (c *srConsumer) RegisterMessageFactoryWithMap(subjectTypes map[string]interface{}) func(string, string) (interface{}, error) { + // !!! in json there is no 'name' passed into the MessageFactory + // we only can rely on the subject + return func(subject string, name string) (interface{}, error) { + if tp, ok := subjectTypes[subject]; !ok { + return nil, fmt.Errorf("Invalid receiver") + } else { + return tp, nil + } + } +} + +// Run consumer +// func (c *srConsumer) Run(messageType protoreflect.MessageType, topic string) error { +func (c *srConsumer) Run(topic string) error { + if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil { + return err + } + + // receivers := make(map[string]interface{}) + // receivers[fmt.Sprintf("%v-value", topic)] = &Person{} + // c.deserializer.MessageFactory = c.RegisterMessageFactoryWithMap(receivers) + // c.deserializer.MessageFactory = c.RegisterMessageFactory(&Person{}) + + for { + kafkaMsg, err := c.consumer.ReadMessage(noTimeout) + if err != nil { + return err + } + + // get a msg of type interface{} + msg, err := c.deserializer.Deserialize(topic, kafkaMsg.Value) + if err != nil { + return err + } + // fmt.Println("with MessageFactory: ", msg.(*Person).Name) + c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset)) + + // use deserializer.DeserializeInto to get a struct back + person := &Person{} + err = c.deserializer.DeserializeInto(topic, kafkaMsg.Value, person) + if err != nil { + return err + } + fmt.Println("See the struct: ", person.Name, " - ", person.Age) + + if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil { + return err + } + } +} + +func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) { + fmt.Printf("message %v with offset %d\n", message, offset) + +} + +// Close all connections +func (c *srConsumer) Close() { + if err := c.consumer.Close(); err != nil { + log.Fatal(err) + } + c.deserializer.Close() +} diff --git a/examples/schemaregistry_example/protobuf/api/v1/proto/Address.pb.go b/examples/schemaregistry_example/protobuf/api/v1/proto/Address.pb.go new file mode 100644 index 000000000..e9030607e --- /dev/null +++ b/examples/schemaregistry_example/protobuf/api/v1/proto/Address.pb.go @@ -0,0 +1,152 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v4.23.4 +// source: api/v1/proto/Address.proto + +package proto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Address struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Street string `protobuf:"bytes,1,opt,name=street,proto3" json:"street,omitempty"` + City string `protobuf:"bytes,2,opt,name=city,proto3" json:"city,omitempty"` +} + +func (x *Address) Reset() { + *x = Address{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_proto_Address_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Address) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Address) ProtoMessage() {} + +func (x *Address) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_proto_Address_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Address.ProtoReflect.Descriptor instead. +func (*Address) Descriptor() ([]byte, []int) { + return file_api_v1_proto_Address_proto_rawDescGZIP(), []int{0} +} + +func (x *Address) GetStreet() string { + if x != nil { + return x.Street + } + return "" +} + +func (x *Address) GetCity() string { + if x != nil { + return x.City + } + return "" +} + +var File_api_v1_proto_Address_proto protoreflect.FileDescriptor + +var file_api_v1_proto_Address_proto_rawDesc = []byte{ + 0x0a, 0x1a, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x41, + 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x61, 0x6e, + 0x6f, 0x74, 0x68, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x22, 0x35, 0x0a, 0x07, 0x41, 0x64, 0x64, 0x72, + 0x65, 0x73, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x65, 0x65, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x72, 0x65, 0x65, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, + 0x69, 0x74, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x69, 0x74, 0x79, 0x42, + 0x10, 0x5a, 0x0e, 0x2e, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_v1_proto_Address_proto_rawDescOnce sync.Once + file_api_v1_proto_Address_proto_rawDescData = file_api_v1_proto_Address_proto_rawDesc +) + +func file_api_v1_proto_Address_proto_rawDescGZIP() []byte { + file_api_v1_proto_Address_proto_rawDescOnce.Do(func() { + file_api_v1_proto_Address_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_v1_proto_Address_proto_rawDescData) + }) + return file_api_v1_proto_Address_proto_rawDescData +} + +var file_api_v1_proto_Address_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_api_v1_proto_Address_proto_goTypes = []interface{}{ + (*Address)(nil), // 0: another.v1.Address +} +var file_api_v1_proto_Address_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_v1_proto_Address_proto_init() } +func file_api_v1_proto_Address_proto_init() { + if File_api_v1_proto_Address_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_v1_proto_Address_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Address); i { + case 
0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_v1_proto_Address_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_api_v1_proto_Address_proto_goTypes, + DependencyIndexes: file_api_v1_proto_Address_proto_depIdxs, + MessageInfos: file_api_v1_proto_Address_proto_msgTypes, + }.Build() + File_api_v1_proto_Address_proto = out.File + file_api_v1_proto_Address_proto_rawDesc = nil + file_api_v1_proto_Address_proto_goTypes = nil + file_api_v1_proto_Address_proto_depIdxs = nil +} diff --git a/examples/schemaregistry_example/protobuf/api/v1/proto/Address.proto b/examples/schemaregistry_example/protobuf/api/v1/proto/Address.proto new file mode 100644 index 000000000..f9bfbcb3c --- /dev/null +++ b/examples/schemaregistry_example/protobuf/api/v1/proto/Address.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package another.v1; + +option go_package = "./api/v1/proto"; + +message Address { + string street = 1; + string city = 2; +} diff --git a/examples/schemaregistry_example/protobuf/api/v1/proto/Person.pb.go b/examples/schemaregistry_example/protobuf/api/v1/proto/Person.pb.go new file mode 100644 index 000000000..0bdd4da69 --- /dev/null +++ b/examples/schemaregistry_example/protobuf/api/v1/proto/Person.pb.go @@ -0,0 +1,254 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v4.23.4 +// source: api/v1/proto/Person.proto + +package proto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Person struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Age float32 `protobuf:"fixed32,2,opt,name=age,proto3" json:"age,omitempty"` + Address string `protobuf:"bytes,3,opt,name=address,proto3" json:"address,omitempty"` + CodePostal int32 `protobuf:"varint,4,opt,name=code_postal,json=codePostal,proto3" json:"code_postal,omitempty"` + Firstname string `protobuf:"bytes,5,opt,name=firstname,proto3" json:"firstname,omitempty"` + Mytest *Test `protobuf:"bytes,6,opt,name=mytest,proto3" json:"mytest,omitempty"` +} + +func (x *Person) Reset() { + *x = Person{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_proto_Person_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Person) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Person) ProtoMessage() {} + +func (x *Person) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_proto_Person_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Person.ProtoReflect.Descriptor instead. 
+func (*Person) Descriptor() ([]byte, []int) { + return file_api_v1_proto_Person_proto_rawDescGZIP(), []int{0} +} + +func (x *Person) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Person) GetAge() float32 { + if x != nil { + return x.Age + } + return 0 +} + +func (x *Person) GetAddress() string { + if x != nil { + return x.Address + } + return "" +} + +func (x *Person) GetCodePostal() int32 { + if x != nil { + return x.CodePostal + } + return 0 +} + +func (x *Person) GetFirstname() string { + if x != nil { + return x.Firstname + } + return "" +} + +func (x *Person) GetMytest() *Test { + if x != nil { + return x.Mytest + } + return nil +} + +type Test struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` +} + +func (x *Test) Reset() { + *x = Test{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_proto_Person_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Test) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Test) ProtoMessage() {} + +func (x *Test) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_proto_Person_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Test.ProtoReflect.Descriptor instead. +func (*Test) Descriptor() ([]byte, []int) { + return file_api_v1_proto_Person_proto_rawDescGZIP(), []int{1} +} + +func (x *Test) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +var File_api_v1_proto_Person_proto protoreflect.FileDescriptor + +var file_api_v1_proto_Person_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x50, + 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x07, 0x74, 0x65, 0x73, + 0x74, 0x2e, 0x76, 0x31, 0x22, 0xae, 0x01, 0x0a, 0x06, 0x50, 0x65, 0x72, 0x73, 0x6f, 0x6e, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, + 0x52, 0x03, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, + 0x1f, 0x0a, 0x0b, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x70, 0x6f, 0x73, 0x74, 0x61, 0x6c, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x63, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x73, 0x74, 0x61, 0x6c, + 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x69, 0x72, 0x73, 0x74, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x72, 0x73, 0x74, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x25, + 0x0a, 0x06, 0x6d, 0x79, 0x74, 0x65, 0x73, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, + 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x65, 0x73, 0x74, 0x52, 0x06, 0x6d, + 0x79, 0x74, 0x65, 0x73, 0x74, 0x22, 0x1a, 0x0a, 0x04, 0x54, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, + 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, + 0x74, 0x42, 0x10, 0x5a, 0x0e, 0x2e, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, 
+} + +var ( + file_api_v1_proto_Person_proto_rawDescOnce sync.Once + file_api_v1_proto_Person_proto_rawDescData = file_api_v1_proto_Person_proto_rawDesc +) + +func file_api_v1_proto_Person_proto_rawDescGZIP() []byte { + file_api_v1_proto_Person_proto_rawDescOnce.Do(func() { + file_api_v1_proto_Person_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_v1_proto_Person_proto_rawDescData) + }) + return file_api_v1_proto_Person_proto_rawDescData +} + +var file_api_v1_proto_Person_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_api_v1_proto_Person_proto_goTypes = []interface{}{ + (*Person)(nil), // 0: test.v1.Person + (*Test)(nil), // 1: test.v1.Test +} +var file_api_v1_proto_Person_proto_depIdxs = []int32{ + 1, // 0: test.v1.Person.mytest:type_name -> test.v1.Test + 1, // [1:1] is the sub-list for method output_type + 1, // [1:1] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_api_v1_proto_Person_proto_init() } +func file_api_v1_proto_Person_proto_init() { + if File_api_v1_proto_Person_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_api_v1_proto_Person_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Person); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_v1_proto_Person_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Test); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_v1_proto_Person_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_api_v1_proto_Person_proto_goTypes, + DependencyIndexes: file_api_v1_proto_Person_proto_depIdxs, + MessageInfos: file_api_v1_proto_Person_proto_msgTypes, + }.Build() + File_api_v1_proto_Person_proto = out.File + file_api_v1_proto_Person_proto_rawDesc = nil + file_api_v1_proto_Person_proto_goTypes = nil + file_api_v1_proto_Person_proto_depIdxs = nil +} diff --git a/examples/schemaregistry_example/protobuf/api/v1/proto/Person.proto b/examples/schemaregistry_example/protobuf/api/v1/proto/Person.proto new file mode 100644 index 000000000..0f5a4f171 --- /dev/null +++ b/examples/schemaregistry_example/protobuf/api/v1/proto/Person.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package test.v1; + +option go_package = "./api/v1/proto"; + +message Person { + string name = 1; + float age = 2; + string address = 3; + int32 code_postal = 4; + string firstname = 5; + Test mytest = 6; +}; + +message Test{ + string text = 1; +} + + diff --git a/examples/schemaregistry_example/protobuf/recordNameStrategy.go b/examples/schemaregistry_example/protobuf/recordNameStrategy.go new file mode 100644 index 000000000..3153d6754 --- /dev/null +++ b/examples/schemaregistry_example/protobuf/recordNameStrategy.go @@ -0,0 +1,294 @@ +package main + +import ( + "errors" + pb "examples/api/v1/proto" + "fmt" + "os" + "strings" + + "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + 
"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "log" + "time" +) + +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 + subjectPerson = "test.v1.Person" + subjectAddress = "another.v1.Address" +) + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := &pb.Person{ + Name: "robert", + Age: 23, + } + + city := &pb.Address{ + Street: "myStreet", + City: "Bangkok", + } + + for { + offset, err := producer.ProduceMessage(msg, topic, subjectPerson) + if err != nil { + log.Println("Error producing Message: ", err) + } + + offset, err = producer.ProduceMessage(city, topic, subjectAddress) + if err != nil { + log.Println("Error producing Message: ", err) + } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg proto.Message, topic, subject string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := protobuf.NewSerializer(c, serde.ValueSerde, protobuf.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg proto.Message, topic, subject string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + // convenient + // payload, err := p.serializer.SerializeRecordName(msg) + + // assert the fullyQualifiedName. 
log an err if mismatch + payload, err := p.serializer.SerializeRecordName(msg, subject) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ + +var person = &pb.Person{} +var address = &pb.Address{} + +func consumer() { + consumer, err := NewConsumer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + personType := (&pb.Person{}).ProtoReflect().Type() + addressType := (&pb.Address{}).ProtoReflect().Type() + + err = consumer.Run([]protoreflect.MessageType{personType, addressType}, topic) + if err != nil { + log.Println("ConsumerRun Error: ", err) + } + +} + +// SRConsumer interface +type SRConsumer interface { + Run(messagesType []protoreflect.MessageType, topic string) error + Close() +} + +type srConsumer struct { + consumer *kafka.Consumer + deserializer *protobuf.Deserializer +} + +// NewConsumer returns new consumer with schema registry +func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) { + c, err := kafka.NewConsumer(&kafka.ConfigMap{ + "bootstrap.servers": kafkaURL, + "group.id": consumerGroupID, + "session.timeout.ms": defaultSessionTimeout, + "enable.auto.commit": false, + }) + if err != nil { + return nil, err + } + + sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + + d, err := protobuf.NewDeserializer(sr, serde.ValueSerde, protobuf.NewDeserializerConfig()) + if err != nil { + return nil, err + } + return &srConsumer{ + consumer: c, + deserializer: d, + }, nil +} + +// RegisterMessageFactory will overwrite the default one +// In this case &pb.Person{} is the "msg" at "msg, err := c.deserializer.DeserializeRecordName()" +func (c *srConsumer) RegisterMessageFactory() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case subjectPerson: + return &pb.Person{}, nil + case subjectAddress: + return &pb.Address{}, nil + } + return nil, errors.New("No matching receiver") + } +} + +// Run consumer +func (c *srConsumer) Run(messagesType []protoreflect.MessageType, topic string) error { + if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil { + return err + } + + if len(messagesType) > 0 { + for _, mt := range messagesType { + if err := c.deserializer.ProtoRegistry.RegisterMessage(mt); err != nil { + + return err + } + } + } + + // register the MessageFactory is facultatif + // but is it useful to allow the event receiver to be an initialized object + c.deserializer.MessageFactory = c.RegisterMessageFactory() + + for { + kafkaMsg, err := c.consumer.ReadMessage(noTimeout) + if err != nil { + return err + } + + msg, err := c.deserializer.DeserializeRecordName(kafkaMsg.Value) + if err != nil { + return err + } + // without RegisterMessageFactory() + // c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset)) + + // with RegisterMessageFactory() + if _, ok := 
msg.(*pb.Person); ok { + fmt.Println("Person: ", msg.(*pb.Person).Name, " - ", msg.(*pb.Person).Age) + } else { + + fmt.Println("Address: ", msg.(*pb.Address).City, " - ", msg.(*pb.Address).Street) + } + + // // Deserialize into a struct + // // receivers for DeserializeIntoRecordName + // subjects := make(map[string]interface{}) + // subjects[subjectPerson] = person + // subjects[subjectAddress] = address + // err = c.deserializer.DeserializeIntoRecordName(subjects, kafkaMsg.Value) + // if err != nil { + // return err + // } + + // fmt.Println("person: ", person.Name, " - ", person.Age) + // fmt.Println("address: ", address.City, " - ", address.Street) + + if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil { + return err + } + } +} + +func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) { + fmt.Printf("message %v with offset %d\n", message, offset) +} + +// Close all connections +func (c *srConsumer) Close() { + if err := c.consumer.Close(); err != nil { + log.Fatal(err) + } + c.deserializer.Close() +} diff --git a/examples/schemaregistry_example/protobuf/topicNameStrategy.go b/examples/schemaregistry_example/protobuf/topicNameStrategy.go new file mode 100644 index 000000000..b13cf1100 --- /dev/null +++ b/examples/schemaregistry_example/protobuf/topicNameStrategy.go @@ -0,0 +1,245 @@ +package main + +// // from Nina Pakshina +// // https://medium.com/@ninucium/is-using-kafka-with-schema-registry-and-protobuf-worth-it-part-1-1c4a9995a5d3 +// +import ( + pb "examples/api/v1/proto" + "fmt" + "os" + "strings" + + "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" + "github.com/golang/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "log" + "time" +) + +const ( + producerMode string = "producer" + consumerMode string = "consumer" + nullOffset = -1 + topic = "my-topic" + kafkaURL = "127.0.0.1:29092" + srURL = "http://127.0.0.1:8081" + schemaFile string = "./api/v1/proto/Person.proto" + consumerGroupID = "test-consumer" + defaultSessionTimeout = 6000 + noTimeout = -1 +) + +func main() { + + clientMode := os.Args[1] + + if strings.Compare(clientMode, producerMode) == 0 { + producer() + } else if strings.Compare(clientMode, consumerMode) == 0 { + consumer() + } else { + fmt.Printf("Invalid option. 
Valid options are '%s' and '%s'.", + producerMode, consumerMode) + } +} + +func producer() { + producer, err := NewProducer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + msg := &pb.Person{ + Name: "robert", + Age: 23, + } + + for { + offset, err := producer.ProduceMessage(msg, topic) + if err != nil { + log.Println("Error producing Message: ", err) + } + + log.Println("Message produced, offset is: ", offset) + time.Sleep(2 * time.Second) + } +} + +// SRProducer interface +type SRProducer interface { + ProduceMessage(msg proto.Message, topic string) (int64, error) + Close() +} + +type srProducer struct { + producer *kafka.Producer + serializer serde.Serializer +} + +// NewProducer returns kafka producer with schema registry +func NewProducer(kafkaURL, srURL string) (SRProducer, error) { + p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": kafkaURL}) + if err != nil { + return nil, err + } + c, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + s, err := protobuf.NewSerializer(c, serde.ValueSerde, protobuf.NewSerializerConfig()) + if err != nil { + return nil, err + } + return &srProducer{ + producer: p, + serializer: s, + }, nil +} + +// ProduceMessage sends serialized message to kafka using schema registry +func (p *srProducer) ProduceMessage(msg proto.Message, topic string) (int64, error) { + kafkaChan := make(chan kafka.Event) + defer close(kafkaChan) + + payload, err := p.serializer.Serialize(topic, msg) + if err != nil { + return nullOffset, err + } + if err = p.producer.Produce(&kafka.Message{ + TopicPartition: kafka.TopicPartition{Topic: &topic}, + Value: payload, + }, kafkaChan); err != nil { + return nullOffset, err + } + e := <-kafkaChan + switch ev := e.(type) { + case *kafka.Message: + log.Println("message sent: ", string(ev.Value)) + return int64(ev.TopicPartition.Offset), nil + case kafka.Error: + return nullOffset, err + } + return nullOffset, nil +} + +// Close schema registry and Kafka +func (p *srProducer) Close() { + p.serializer.Close() + p.producer.Close() +} + +/* +* =============================== +* CONSUMER +* =============================== +**/ +func consumer() { + consumer, err := NewConsumer(kafkaURL, srURL) + if err != nil { + log.Fatal("Can not create producer: ", err) + } + + personType := (&pb.Person{}).ProtoReflect().Type() + + err = consumer.Run(personType, topic) + if err != nil { + log.Println("ConsumerRun Error: ", err) + } + +} + +// SRConsumer interface +type SRConsumer interface { + Run(messagesType protoreflect.MessageType, topic string) error + Close() +} + +type srConsumer struct { + consumer *kafka.Consumer + deserializer *protobuf.Deserializer +} + +// NewConsumer returns new consumer with schema registry +func NewConsumer(kafkaURL, srURL string) (SRConsumer, error) { + c, err := kafka.NewConsumer(&kafka.ConfigMap{ + "bootstrap.servers": kafkaURL, + "group.id": consumerGroupID, + "session.timeout.ms": defaultSessionTimeout, + "enable.auto.commit": false, + }) + if err != nil { + return nil, err + } + + sr, err := schemaregistry.NewClient(schemaregistry.NewConfig(srURL)) + if err != nil { + return nil, err + } + + d, err := protobuf.NewDeserializer(sr, serde.ValueSerde, protobuf.NewDeserializerConfig()) + if err != nil { + return nil, err + } + return &srConsumer{ + consumer: c, + deserializer: d, + }, nil +} + +// RegisterMessage add simpleHandler and register schema in SR +func (c *srConsumer) RegisterMessage(messageType 
protoreflect.MessageType) error { + return nil +} + +// Run consumer +func (c *srConsumer) Run(messageType protoreflect.MessageType, topic string) error { + if err := c.consumer.SubscribeTopics([]string{topic}, nil); err != nil { + return err + } + + if err := c.deserializer.ProtoRegistry.RegisterMessage(messageType); err != nil { + return err + } + + for { + kafkaMsg, err := c.consumer.ReadMessage(noTimeout) + if err != nil { + return err + } + + // get a msg of type interface{} + msg, err := c.deserializer.Deserialize(topic, kafkaMsg.Value) + if err != nil { + return err + } + c.handleMessageAsInterface(msg, int64(kafkaMsg.TopicPartition.Offset)) + + // use deserializer.DeserializeInto to get a struct back + person := &pb.Person{} + err = c.deserializer.DeserializeInto(topic, kafkaMsg.Value, person) + if err != nil { + return err + } + fmt.Println("See the struct: ", person.Name, " - ", person.Age) + + if _, err = c.consumer.CommitMessage(kafkaMsg); err != nil { + return err + } + } +} + +func (c *srConsumer) handleMessageAsInterface(message interface{}, offset int64) { + fmt.Printf("message %v with offset %d\n", message, offset) + +} + +// Close all connections +func (c *srConsumer) Close() { + if err := c.consumer.Close(); err != nil { + log.Fatal(err) + } + c.deserializer.Close() +} diff --git a/go.mod b/go.mod index bbf5a4b6f..81d38b214 100644 --- a/go.mod +++ b/go.mod @@ -8,9 +8,11 @@ require ( github.com/heetch/avro v0.4.4 github.com/invopop/jsonschema v0.7.0 github.com/jhump/protoreflect v1.14.1 + github.com/linkedin/goavro v2.1.0+incompatible github.com/santhosh-tekuri/jsonschema/v5 v5.2.0 github.com/stretchr/testify v1.8.2 github.com/testcontainers/testcontainers-go v0.14.0 google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633 google.golang.org/protobuf v1.30.0 + gopkg.in/linkedin/goavro.v1 v1.0.5 // indirect ) diff --git a/go.sum b/go.sum index 4d4cd17b5..136cb12e0 100644 --- a/go.sum +++ b/go.sum @@ -1041,6 +1041,7 @@ github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -1228,6 +1229,8 @@ github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/linkedin/goavro v2.1.0+incompatible h1:DV2aUlj2xZiuxQyvag8Dy7zjY69ENjS66bWkSfdpddY= +github.com/linkedin/goavro v2.1.0+incompatible/go.mod h1:bBCwI2eGYpUI/4820s67MElg9tdeLbINjLjiM2xZFYM= github.com/linkedin/goavro/v2 v2.11.1/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA= github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q95whyfWQpmGZTu3gk3v2YkMi05HEzl7Tf7YEo= github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= @@ -2343,6 +2346,8 @@ 
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKW gopkg.in/httprequest.v1 v1.2.1/go.mod h1:x2Otw96yda5+8+6ZeWwHIJTFkEHWP/qP8pJOzqEtWPM= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/linkedin/goavro.v1 v1.0.5 h1:BJa69CDh0awSsLUmZ9+BowBdokpduDZSM9Zk8oKHfN4= +gopkg.in/linkedin/goavro.v1 v1.0.5/go.mod h1:Aw5GdAbizjOEl0kAMHV9iHmA8reZzW/OKuJAl4Hb9F0= gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= diff --git a/schemaregistry/mock_schemaregistry_client.go b/schemaregistry/mock_schemaregistry_client.go index 7f86fb1f9..b6e184af7 100644 --- a/schemaregistry/mock_schemaregistry_client.go +++ b/schemaregistry/mock_schemaregistry_client.go @@ -22,6 +22,7 @@ import ( "net/url" "reflect" "sort" + "strings" "sync" ) @@ -54,10 +55,12 @@ type idCacheEntry struct { type mockclient struct { sync.Mutex url *url.URL - schemaToIDCache map[subjectJSON]idCacheEntry - schemaToIDCacheLock sync.RWMutex + schemaToIdCache map[subjectJSON]idCacheEntry + schemaToIdCacheLock sync.RWMutex idToSchemaCache map[subjectID]*SchemaInfo idToSchemaCacheLock sync.RWMutex + idCache map[subjectOnlyID]*SchemaInfo + idCacheLock sync.RWMutex schemaToVersionCache map[subjectJSON]versionCacheEntry schemaToVersionCacheLock sync.RWMutex compatibilityCache map[string]Compatibility @@ -73,27 +76,92 @@ func (c *mockclient) Register(subject string, schema SchemaInfo, normalize bool) if err != nil { return -1, err } + cacheKey := subjectJSON{ subject: subject, json: string(schemaJSON), } - c.schemaToIDCacheLock.RLock() - idCacheEntryVal, ok := c.schemaToIDCache[cacheKey] + c.schemaToIdCacheLock.RLock() + idCacheEntryVal, ok := c.schemaToIdCache[cacheKey] if idCacheEntryVal.softDeleted { ok = false } - c.schemaToIDCacheLock.RUnlock() + c.schemaToIdCacheLock.RUnlock() if ok { return idCacheEntryVal.id, nil } - id, err = c.getIDFromRegistry(subject, schema) + // extract the fullyQualifiedName from the subject + parts := strings.Split(subject, ".") + var fullQualifName string + if len(parts) == 2 { + fullQualifName = parts[0] + } else if len(parts) > 2 { + for i := 0; i < len(parts)-1; i++ { + if i == 0 { + fullQualifName = parts[i] + } else { + fullQualifName += fmt.Sprintf(".%v", parts[i]) + } + } + } + if parts[0] == "jsonschema" || + fullQualifName == "avro" || + fullQualifName == "recordname" || + fullQualifName == "python.test.advanced" { + + // case of recordName(id c.schemaToIdCache[cacheKey] unfound id == 0) + id, err = c.getIDFromRegistryRecordName(subject, idCacheEntryVal.id, schema) + if err != nil { + return -1, err + } + } else { + + id, err = c.getIDFromRegistry(subject, schema) + if err != nil { + return -1, err + } + } + + c.schemaToIdCacheLock.Lock() + c.schemaToIdCache[cacheKey] = idCacheEntry{id, false} + c.schemaToIdCacheLock.Unlock() + + return id, nil +} + +func (c *mockclient) getIDFromRegistryRecordName(subject string, id int, schema SchemaInfo) (int, error) { + + if id > 0 { + c.idCacheLock.RLock() + for key, _ := range c.idCache { + if key.id == id { + id = key.id + break + } + } + c.idCacheLock.RUnlock() + } + + err := c.generateVersion(subject, schema) if err != nil { return -1, err } - c.schemaToIDCacheLock.Lock() - 
c.schemaToIDCache[cacheKey] = idCacheEntry{id, false} - c.schemaToIDCacheLock.Unlock() + if id < 1 { + id = c.counter.increment() + id += len(c.idCache) + idCacheKey := subjectOnlyID{ + id: id, + } + + c.idCacheLock.Lock() + if c.idCache == nil { + c.idCache = make(map[subjectOnlyID]*SchemaInfo) + } + c.idCache[idCacheKey] = &schema + c.idCacheLock.Unlock() + } + return id, nil } @@ -146,6 +214,30 @@ func (c *mockclient) generateVersion(subject string, schema SchemaInfo) error { return nil } +// TODO to implement +func (c *mockclient) GetByID(id int) (schema SchemaInfo, err error) { + cacheKey := subjectOnlyID{ + id: id, + } + c.idCacheLock.RLock() + info, ok := c.idCache[cacheKey] + c.idCacheLock.RUnlock() + if ok { + return *info, nil + } + subject := "" + posErr := url.Error{ + Op: "GET", + // TODO + // URL: c.url.String() + fmt.Sprintf(schemasByID, id, id), + URL: c.url.String() + fmt.Sprintf(schemasBySubject, id, url.QueryEscape(subject)), + Err: errors.New("Subject Not Found"), + } + return SchemaInfo{}, &posErr + + // return SchemaInfo{}, nil +} + // GetBySubjectAndID returns the schema identified by id // Returns Schema object on success func (c *mockclient) GetBySubjectAndID(subject string, id int) (schema SchemaInfo, err error) { @@ -177,12 +269,12 @@ func (c *mockclient) GetID(subject string, schema SchemaInfo, normalize bool) (i subject: subject, json: string(schemaJSON), } - c.schemaToIDCacheLock.RLock() - idCacheEntryVal, ok := c.schemaToIDCache[cacheKey] + c.schemaToIdCacheLock.RLock() + idCacheEntryVal, ok := c.schemaToIdCache[cacheKey] if idCacheEntryVal.softDeleted { ok = false } - c.schemaToIDCacheLock.RUnlock() + c.schemaToIdCacheLock.RUnlock() if ok { return idCacheEntryVal.id, nil } @@ -308,9 +400,9 @@ func (c *mockclient) deleteVersion(key subjectJSON, version int, permanent bool) func (c *mockclient) deleteID(key subjectJSON, id int, permanent bool) { if permanent { - delete(c.schemaToIDCache, key) + delete(c.schemaToIdCache, key) } else { - c.schemaToIDCache[key] = idCacheEntry{id, true} + c.schemaToIdCache[key] = idCacheEntry{id, true} } } @@ -360,13 +452,13 @@ func (c *mockclient) GetAllSubjects() ([]string, error) { // Deletes provided Subject from registry // Returns integer slice of versions removed by delete func (c *mockclient) DeleteSubject(subject string, permanent bool) (deleted []int, err error) { - c.schemaToIDCacheLock.Lock() - for key, value := range c.schemaToIDCache { + c.schemaToIdCacheLock.Lock() + for key, value := range c.schemaToIdCache { if key.subject == subject && (!value.softDeleted || permanent) { c.deleteID(key, value.id, permanent) } } - c.schemaToIDCacheLock.Unlock() + c.schemaToIdCacheLock.Unlock() c.schemaToVersionCacheLock.Lock() for key, value := range c.schemaToVersionCache { if key.subject == subject && (!value.softDeleted || permanent) { @@ -402,12 +494,12 @@ func (c *mockclient) DeleteSubjectVersion(subject string, version int, permanent subject: subject, json: string(schemaJSON), } - c.schemaToIDCacheLock.Lock() - idSchemaEntryVal, ok := c.schemaToIDCache[cacheKeySchema] + c.schemaToIdCacheLock.Lock() + idSchemaEntryVal, ok := c.schemaToIdCache[cacheKeySchema] if ok { c.deleteID(key, idSchemaEntryVal.id, permanent) } - c.schemaToIDCacheLock.Unlock() + c.schemaToIdCacheLock.Unlock() if permanent && ok { c.idToSchemaCacheLock.Lock() cacheKeyID := subjectID{ diff --git a/schemaregistry/schemaregistry_client.go b/schemaregistry/schemaregistry_client.go index afe0ce2f4..ba8450b3d 100644 --- a/schemaregistry/schemaregistry_client.go 
+++ b/schemaregistry/schemaregistry_client.go @@ -78,6 +78,8 @@ type SchemaInfo struct { Schema string `json:"schema,omitempty"` SchemaType string `json:"schemaType,omitempty"` References []Reference `json:"references,omitempty"` + // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` //added + Subject string `json:"subject,omitempty"` } // MarshalJSON implements the json.Marshaler interface @@ -86,10 +88,14 @@ func (sd *SchemaInfo) MarshalJSON() ([]byte, error) { Schema string `json:"schema,omitempty"` SchemaType string `json:"schemaType,omitempty"` References []Reference `json:"references,omitempty"` + // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` + Subject string `json:"subject,omitempty"` }{ sd.Schema, sd.SchemaType, sd.References, + // sd.SchemaFullyQualifiedName, // added + sd.Subject, }) } @@ -100,6 +106,8 @@ func (sd *SchemaInfo) UnmarshalJSON(b []byte) error { Schema string `json:"schema,omitempty"` SchemaType string `json:"schemaType,omitempty"` References []Reference `json:"references,omitempty"` + // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` + Subject string `json:"subject,omitempty"` } err = json.Unmarshal(b, &tmp) @@ -107,6 +115,8 @@ func (sd *SchemaInfo) UnmarshalJSON(b []byte) error { sd.Schema = tmp.Schema sd.SchemaType = tmp.SchemaType sd.References = tmp.References + // sd.SchemaFullyQualifiedName = tmp.SchemaFullyQualifiedName // added + sd.Subject = tmp.Subject return err } @@ -128,6 +138,7 @@ func (sd *SchemaMetadata) MarshalJSON() ([]byte, error) { ID int `json:"id,omitempty"` Subject string `json:"subject,omitempty"` Version int `json:"version,omitempty"` + // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` }{ sd.Schema, sd.SchemaType, @@ -135,6 +146,7 @@ func (sd *SchemaMetadata) MarshalJSON() ([]byte, error) { sd.ID, sd.Subject, sd.Version, + // sd.SchemaFullyQualifiedName, }) } @@ -148,6 +160,7 @@ func (sd *SchemaMetadata) UnmarshalJSON(b []byte) error { ID int `json:"id,omitempty"` Subject string `json:"subject,omitempty"` Version int `json:"version,omitempty"` + // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` } err = json.Unmarshal(b, &tmp) @@ -158,6 +171,7 @@ func (sd *SchemaMetadata) UnmarshalJSON(b []byte) error { sd.ID = tmp.ID sd.Subject = tmp.Subject sd.Version = tmp.Version + // sd.SchemaFullyQualifiedName = tmp.SchemaFullyQualifiedName return err } @@ -177,12 +191,16 @@ type subjectVersion struct { version int } +type subjectOnlyID struct { + id int +} + /* HTTP(S) Schema Registry Client and schema caches */ type client struct { sync.Mutex restService *restService - schemaToIDCache cache.Cache - schemaToIDCacheLock sync.RWMutex + schemaToIdCache cache.Cache + schemaToIdCacheLock sync.RWMutex idToSchemaCache cache.Cache idToSchemaCacheLock sync.RWMutex schemaToVersionCache cache.Cache @@ -198,6 +216,7 @@ var _ Client = new(client) // https://github.com/confluentinc/schema-registry/blob/master/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java type Client interface { Register(subject string, schema SchemaInfo, normalize bool) (id int, err error) + GetByID(id int) (schema SchemaInfo, err error) GetBySubjectAndID(subject string, id int) (schema SchemaInfo, err error) GetID(subject string, schema SchemaInfo, normalize bool) (id int, err error) GetLatestSchemaMetadata(subject string) (SchemaMetadata, error) @@ -225,7 +244,7 @@ func NewClient(conf *Config) (Client, error) { } 
mock := &mockclient{ url: url, - schemaToIDCache: make(map[subjectJSON]idCacheEntry), + schemaToIdCache: make(map[subjectJSON]idCacheEntry), idToSchemaCache: make(map[subjectID]*SchemaInfo), schemaToVersionCache: make(map[subjectJSON]versionCacheEntry), compatibilityCache: make(map[string]Compatibility), @@ -238,12 +257,12 @@ func NewClient(conf *Config) (Client, error) { return nil, err } - var schemaToIDCache cache.Cache + var schemaToIdCache cache.Cache var idToSchemaCache cache.Cache var schemaToVersionCache cache.Cache var versionToSchemaCache cache.Cache if conf.CacheCapacity != 0 { - schemaToIDCache, err = cache.NewLRUCache(conf.CacheCapacity) + schemaToIdCache, err = cache.NewLRUCache(conf.CacheCapacity) if err != nil { return nil, err } @@ -260,14 +279,14 @@ func NewClient(conf *Config) (Client, error) { return nil, err } } else { - schemaToIDCache = cache.NewMapCache() + schemaToIdCache = cache.NewMapCache() idToSchemaCache = cache.NewMapCache() schemaToVersionCache = cache.NewMapCache() versionToSchemaCache = cache.NewMapCache() } handle := &client{ restService: restService, - schemaToIDCache: schemaToIDCache, + schemaToIdCache: schemaToIdCache, idToSchemaCache: idToSchemaCache, schemaToVersionCache: schemaToVersionCache, versionToSchemaCache: versionToSchemaCache, @@ -281,13 +300,15 @@ func (c *client) Register(subject string, schema SchemaInfo, normalize bool) (id if err != nil { return -1, err } + cacheKey := subjectJSON{ subject: subject, json: string(schemaJSON), } - c.schemaToIDCacheLock.RLock() - idValue, ok := c.schemaToIDCache.Get(cacheKey) - c.schemaToIDCacheLock.RUnlock() + + c.schemaToIdCacheLock.RLock() + idValue, ok := c.schemaToIdCache.Get(cacheKey) + c.schemaToIdCacheLock.RUnlock() if ok { return idValue.(int), nil } @@ -295,23 +316,66 @@ func (c *client) Register(subject string, schema SchemaInfo, normalize bool) (id metadata := SchemaMetadata{ SchemaInfo: schema, } - c.schemaToIDCacheLock.Lock() + + c.schemaToIdCacheLock.Lock() // another goroutine could have already put it in cache - idValue, ok = c.schemaToIDCache.Get(cacheKey) + idValue, ok = c.schemaToIdCache.Get(cacheKey) if !ok { err = c.restService.handleRequest(newRequest("POST", versionNormalize, &metadata, url.PathEscape(subject), normalize), &metadata) if err == nil { - c.schemaToIDCache.Put(cacheKey, metadata.ID) + c.schemaToIdCache.Put(cacheKey, metadata.ID) } else { metadata.ID = -1 } } else { metadata.ID = idValue.(int) } - c.schemaToIDCacheLock.Unlock() + c.schemaToIdCacheLock.Unlock() return metadata.ID, err } +// GetByID returns the schema identified by id +// Returns Schema object on success +func (c *client) GetByID(id int) (schema SchemaInfo, err error) { + + cacheKey := subjectOnlyID{id} + + c.idToSchemaCacheLock.RLock() + subjIDPayload, ok := c.idToSchemaCache.Get(cacheKey) + c.idToSchemaCacheLock.RUnlock() + + if ok { + return *subjIDPayload.(*SchemaInfo), nil + } + + metadata := SchemaMetadata{} + newInfo := &SchemaInfo{} + c.idToSchemaCacheLock.Lock() + // another goroutine could have already put it in cache + subjIDPayload, ok = c.idToSchemaCache.Get(cacheKey) + if !ok { + var err error + err = c.restService.handleRequest(newRequest("GET", schemas, nil, id), &metadata) + if err == nil { + + newInfo.Schema = metadata.Schema + newInfo.SchemaType = metadata.SchemaType + newInfo.References = metadata.References + + c.idToSchemaCache.Put(cacheKey, newInfo) + } else { + return *newInfo, fmt.Errorf("Invalid server error") + } + + } else { + // newInfo = 
subjIDPayload.(subjectOnlyIDPayload).SchemaInfo + newInfo = subjIDPayload.(*SchemaInfo) + } + + c.idToSchemaCacheLock.Unlock() + return *newInfo, err +} + // GetBySubjectAndID returns the schema identified by id // Returns Schema object on success func (c *client) GetBySubjectAndID(subject string, id int) (schema SchemaInfo, err error) { @@ -338,11 +402,11 @@ func (c *client) GetBySubjectAndID(subject string, id int) (schema SchemaInfo, e err = c.restService.handleRequest(newRequest("GET", schemas, nil, id), &metadata) } if err == nil { - newInfo = &SchemaInfo{ - Schema: metadata.Schema, - SchemaType: metadata.SchemaType, - References: metadata.References, - } + // newInfo = &SchemaInfo{ + newInfo.Schema = metadata.Schema + newInfo.SchemaType = metadata.SchemaType + newInfo.References = metadata.References + //} c.idToSchemaCache.Put(cacheKey, newInfo) } } else { @@ -358,13 +422,16 @@ func (c *client) GetID(subject string, schema SchemaInfo, normalize bool) (id in if err != nil { return -1, err } + cacheKey := subjectJSON{ subject: subject, json: string(schemaJSON), } - c.schemaToIDCacheLock.RLock() - idValue, ok := c.schemaToIDCache.Get(cacheKey) - c.schemaToIDCacheLock.RUnlock() + + c.schemaToIdCacheLock.RLock() + idValue, ok := c.schemaToIdCache.Get(cacheKey) + // log.Println("schemaregistry_client.go - GetID - idValue from cache: ", idValue) + c.schemaToIdCacheLock.RUnlock() if ok { return idValue.(int), nil } @@ -372,20 +439,21 @@ func (c *client) GetID(subject string, schema SchemaInfo, normalize bool) (id in metadata := SchemaMetadata{ SchemaInfo: schema, } - c.schemaToIDCacheLock.Lock() + + c.schemaToIdCacheLock.Lock() // another goroutine could have already put it in cache - idValue, ok = c.schemaToIDCache.Get(cacheKey) + idValue, ok = c.schemaToIdCache.Get(cacheKey) if !ok { err = c.restService.handleRequest(newRequest("POST", subjectsNormalize, &metadata, url.PathEscape(subject), normalize), &metadata) if err == nil { - c.schemaToIDCache.Put(cacheKey, metadata.ID) + c.schemaToIdCache.Put(cacheKey, metadata.ID) } else { metadata.ID = -1 } } else { metadata.ID = idValue.(int) } - c.schemaToIDCacheLock.Unlock() + c.schemaToIdCacheLock.Unlock() return metadata.ID, err } @@ -485,14 +553,14 @@ func (c *client) GetAllSubjects() ([]string, error) { // Deletes provided Subject from registry // Returns integer slice of versions removed by delete func (c *client) DeleteSubject(subject string, permanent bool) (deleted []int, err error) { - c.schemaToIDCacheLock.Lock() - for keyValue := range c.schemaToIDCache.ToMap() { + c.schemaToIdCacheLock.Lock() + for keyValue := range c.schemaToIdCache.ToMap() { key := keyValue.(subjectJSON) if key.subject == subject { - c.schemaToIDCache.Delete(key) + c.schemaToIdCache.Delete(key) } } - c.schemaToIDCacheLock.Unlock() + c.schemaToIdCacheLock.Unlock() c.schemaToVersionCacheLock.Lock() for keyValue := range c.schemaToVersionCache.ToMap() { key := keyValue.(subjectJSON) @@ -535,12 +603,12 @@ func (c *client) DeleteSubjectVersion(subject string, version int, permanent boo subject: subject, json: string(schemaJSON), } - c.schemaToIDCacheLock.Lock() - idValue, ok := c.schemaToIDCache.Get(cacheKeySchema) + c.schemaToIdCacheLock.Lock() + idValue, ok := c.schemaToIdCache.Get(cacheKeySchema) if ok { - c.schemaToIDCache.Delete(cacheKeySchema) + c.schemaToIdCache.Delete(cacheKeySchema) } - c.schemaToIDCacheLock.Unlock() + c.schemaToIdCacheLock.Unlock() if ok { id := idValue.(int) c.idToSchemaCacheLock.Lock() diff --git a/schemaregistry/serde/avro/avro_generic.go 
b/schemaregistry/serde/avro/avro_generic.go index add2e30f2..f8fb6afe6 100644 --- a/schemaregistry/serde/avro/avro_generic.go +++ b/schemaregistry/serde/avro/avro_generic.go @@ -17,7 +17,10 @@ package avro import ( + "encoding/json" + "fmt" "reflect" + "strings" "unsafe" "github.com/actgardner/gogen-avro/v10/parser" @@ -25,6 +28,7 @@ import ( "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" "github.com/heetch/avro" + "github.com/linkedin/goavro" ) // GenericSerializer represents a generic Avro serializer @@ -50,6 +54,81 @@ func NewGenericSerializer(client schemaregistry.Client, serdeType serde.Type, co return s, nil } +func (s *GenericSerializer) addFullyQualifiedNameToSchema(avroStr, msgFQN string) ([]byte, error) { + var data map[string]interface{} + if err := json.Unmarshal([]byte(avroStr), &data); err != nil { + return nil, err + } + + parts := strings.Split(msgFQN, ".") + if len(parts) > 0 { + var namespace string + if len(parts) == 2 { + namespace = parts[0] + } else if len(parts) > 2 { + for i := 0; i < len(parts)-1; i++ { + if i == 0 { + namespace += parts[0] + } else { + namespace += fmt.Sprintf(".%v", parts[i]) + } + } + + } + data["namespace"] = namespace + } + return json.Marshal(data) +} + +// Serialize implements serialization of generic Avro data +func (s *GenericSerializer) SerializeRecordName(msg interface{}, subject ...string) ([]byte, error) { + if msg == nil { + return nil, nil + } + + msgFQN := reflect.TypeOf(msg).String() + msgFQN = strings.TrimLeft(msgFQN, "*") // in case + + if len(subject) > 0 { + if msgFQN != subject[0] { + return nil, fmt.Errorf(`the payload's fullyQualifiedName: '%v' does not match the subject: '%v'`, msgFQN, subject[0]) + } + } + + val := reflect.ValueOf(msg) + if val.Kind() == reflect.Ptr { + // avro.TypeOf expects an interface containing a non-pointer + msg = val.Elem().Interface() + } + avroType, err := avro.TypeOf(msg) + if err != nil { + return nil, err + } + + modifiedJSON, err := s.addFullyQualifiedNameToSchema(avroType.String(), msgFQN) + if err != nil { + return nil, err + } + + info := schemaregistry.SchemaInfo{ + Schema: string(modifiedJSON), + } + + id, err := s.GetID(msgFQN, msg, info) + if err != nil { + return nil, err + } + msgBytes, _, err := avro.Marshal(msg) + if err != nil { + return nil, err + } + payload, err := s.WriteBytes(id, msgBytes) + if err != nil { + return nil, err + } + return payload, nil +} + // Serialize implements serialization of generic Avro data func (s *GenericSerializer) Serialize(topic string, msg interface{}) ([]byte, error) { if msg == nil { @@ -64,6 +143,7 @@ func (s *GenericSerializer) Serialize(topic string, msg interface{}) ([]byte, er if err != nil { return nil, err } + info := schemaregistry.SchemaInfo{ Schema: avroType.String(), } @@ -89,9 +169,98 @@ func NewGenericDeserializer(client schemaregistry.Client, serdeType serde.Type, if err != nil { return nil, err } + s.MessageFactory = s.avroMessageFactory return s, nil } +func (s *GenericDeserializer) DeserializeRecordName(payload []byte) (interface{}, error) { + if payload == nil { + return nil, nil + } + + info, err := s.GetSchema("", payload) + if err != nil { + return nil, err + } + + // recreate the fullyQualifiedName + var data map[string]interface{} + if err := json.Unmarshal([]byte(info.Schema), &data); err != nil { + return nil, err + } + name := data["name"].(string) + namespace := data["namespace"].(string) + fullyQualifiedName := fmt.Sprintf("%s.%s", 
namespace, name) + + // fmt.Println("see the info schema: ", info.Schema) + + writer, name, err := s.toType(info) + if err != nil { + return nil, err + } + + subject, err := s.SubjectNameStrategy(fullyQualifiedName, s.SerdeType, info) + if err != nil { + return nil, err + } + + msg, err := s.MessageFactory(subject, fullyQualifiedName) + if err != nil { + return nil, err + } + + if msg == struct{}{} { + codec, err := goavro.NewCodec(info.Schema) + if err != nil { + return nil, err + } + + native, _, err := codec.NativeFromBinary(payload[5:]) + if err != nil { + return nil, err + } + + return native, nil + } + + _, err = avro.Unmarshal(payload[5:], msg, writer) + return msg, err + +} + +func (s *GenericDeserializer) DeserializeIntoRecordName(subjects map[string]interface{}, payload []byte) error { + if payload == nil { + return fmt.Errorf("Empty payload") + } + + info, err := s.GetSchema("", payload) + if err != nil { + return err + } + + // recreate the fullyQualifiedName + var data map[string]interface{} + if err := json.Unmarshal([]byte(info.Schema), &data); err != nil { + return err + } + name := data["name"].(string) + namespace := data["namespace"].(string) + fullyQualifiedName := fmt.Sprintf("%s.%s", namespace, name) + + v, ok := subjects[fullyQualifiedName] + if !ok { + return fmt.Errorf("unfound subject declaration") + } + + writer, name, err := s.toType(info) + if err != nil { + return err + } + + _, err = avro.Unmarshal(payload[5:], v, writer) + return err +} + // Deserialize implements deserialization of generic Avro data func (s *GenericDeserializer) Deserialize(topic string, payload []byte) (interface{}, error) { if payload == nil { @@ -101,6 +270,7 @@ func (s *GenericDeserializer) Deserialize(topic string, payload []byte) (interfa if err != nil { return nil, err } + writer, name, err := s.toType(info) if err != nil { return nil, err @@ -113,6 +283,21 @@ func (s *GenericDeserializer) Deserialize(topic string, payload []byte) (interfa if err != nil { return nil, err } + + if msg == struct{}{} { + codec, err := goavro.NewCodec(info.Schema) + if err != nil { + return nil, err + } + + native, _, err := codec.NativeFromBinary(payload[5:]) + if err != nil { + return nil, err + } + + return native, nil + } + _, err = avro.Unmarshal(payload[5:], msg, writer) return msg, err } @@ -149,6 +334,11 @@ func (s *GenericDeserializer) toAvroType(schema schemaregistry.SchemaInfo) (sche return resolveAvroReferences(s.Client, schema, ns) } +func (s *GenericDeserializer) avroMessageFactory(subject string, name string) (interface{}, error) { + + return struct{}{}, nil +} + // From https://stackoverflow.com/questions/42664837/how-to-access-unexported-struct-fields/43918797#43918797 func setPrivateAvroType(t *avro.Type, avroType schema.AvroType) { rt := reflect.ValueOf(t).Elem() diff --git a/schemaregistry/serde/avro/avro_generic_test.go b/schemaregistry/serde/avro/avro_generic_test.go index 86e6e57cd..84bd548a0 100644 --- a/schemaregistry/serde/avro/avro_generic_test.go +++ b/schemaregistry/serde/avro/avro_generic_test.go @@ -18,6 +18,7 @@ package avro import ( "errors" + "fmt" "testing" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" @@ -167,3 +168,308 @@ type GenericLinkedList struct { Value int32 Next *GenericLinkedList } + +const ( + linkedList = "avro.LinkedList" + pizza = "avro.Pizza" + invalidSchema = "invalidSchema" +) + +type LinkedList struct { + Value int +} + +type Pizza struct { + Size string + Toppings []string +} + +type Author struct { + Name string +} + +var ( + inner 
= LinkedList{ + Value: 100, + } + + obj = Pizza{ + Size: "Extra extra large", + Toppings: []string{"anchovies", "mushrooms"}, + } +) + +func TestAvroGenericSerdeDeserializeRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj, pizza) + serde.MaybeFail("serialization", err) + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `map[Value:100]`)) + // access the newobj payload + if obj, ok := newobj.(map[string]interface{}); ok { + if value, ok := obj["Value"].(interface{}); ok { + serde.MaybeFail("deserialization", serde.Expect(value.(int64), int64(100))) + } else { + fmt.Println("Value is not of type int") + } + } + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `map[Size:Extra extra large Toppings:[anchovies mushrooms]]`)) +} + +func RegisterMessageFactory() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case linkedList: + return &LinkedList{}, nil + case pizza: + return &Pizza{}, nil + } + return nil, fmt.Errorf("No matching receiver") + } +} + +func RegisterMessageFactoryNoReceiver() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + return nil, fmt.Errorf("No matching receiver") + } +} + +func RegisterMessageFactoryInvalidReceiver() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case pizza: + return &LinkedList{}, nil + case linkedList: + return "", nil + } + return nil, fmt.Errorf("No matching receiver") + } +} + +func TestAvroGenericSerdeDeserializeRecordNameWithHandler(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner, linkedList) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + deser.MessageFactory = RegisterMessageFactory() + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*LinkedList).Value, inner.Value)) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*Pizza).Size, obj.Size)) + 
serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*Pizza).Toppings[0], obj.Toppings[0])) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*Pizza).Toppings[1], obj.Toppings[1])) +} + +func TestAvroGenericSerdeDeserializeRecordNameWithHandlerNoReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid receiver + deser.MessageFactory = RegisterMessageFactoryNoReceiver() + + newobj, err := deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "No matching receiver")) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(newobj, nil)) +} + +func TestAvroGenericSerdeDeserializeRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid schema + deser.MessageFactory = RegisterMessageFactoryInvalidReceiver() + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(newobj, "")) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "destination is not a pointer string")) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserializeInvalidReceiver", err) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `&{0}`)) +} + +func TestAvroGenericSerdeDeserializeIntoRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj, pizza) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[linkedList] = &LinkedList{} + receivers[pizza] = &Pizza{} + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + 
serde.MaybeFail("deserialization", err, serde.Expect(int(receivers[linkedList].(*LinkedList).Value), 100)) + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[pizza].(*Pizza).Toppings[0], obj.Toppings[0])) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[pizza].(*Pizza).Toppings[1], obj.Toppings[1])) +} + +func TestAvroGenericSerdeDeserializeIntoRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[invalidSchema] = &Pizza{} + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "unfound subject declaration")) + serde.MaybeFail("deserialization", serde.Expect(receivers[invalidSchema].(*Pizza).Size, "")) +} + +func TestAvroGenericSerdeDeserializeIntoRecordNameWithInvalidReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + aut := Author{ + Name: "aut", + } + bytesAut, err := ser.SerializeRecordName(&aut, "avro.Author") + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[pizza] = &LinkedList{} + receivers[linkedList] = "" + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprint(receivers[pizza]), `&{0}`)) + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "destination is not a pointer string")) + err = deser.DeserializeIntoRecordName(receivers, bytesAut) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "unfound subject declaration")) +} + +func TestAvroGenericSerdeRecordNamePayloadMismatchSubject(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewGenericSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + _, err = ser.SerializeRecordName(&obj, "test.Pizza") + serde.MaybeFail("serialization", serde.Expect(err.Error(), "the payload's fullyQualifiedName: 
'avro.Pizza' does not match the subject: 'test.Pizza'"))
+}
diff --git a/schemaregistry/serde/avro/avro_specific.go b/schemaregistry/serde/avro/avro_specific.go
index 5304d43c3..5d2abf55a 100644
--- a/schemaregistry/serde/avro/avro_specific.go
+++ b/schemaregistry/serde/avro/avro_specific.go
@@ -18,8 +18,11 @@ package avro
 
 import (
 	"bytes"
+	"encoding/json"
 	"fmt"
 	"io"
+	"reflect"
+	"strings"
 
 	"github.com/actgardner/gogen-avro/v10/compiler"
 	"github.com/actgardner/gogen-avro/v10/parser"
@@ -28,6 +31,7 @@ import (
 	"github.com/actgardner/gogen-avro/v10/vm/types"
 	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
 	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
+	"github.com/linkedin/goavro"
 )
 
 // SpecificSerializer represents a specific Avro serializer
@@ -92,6 +96,102 @@ func (s *SpecificSerializer) Serialize(topic string, msg interface{}) ([]byte, e
 	return payload, nil
 }
 
+func (s *SpecificSerializer) addFullyQualifiedNameToSchema(avroStr string, msg interface{}) ([]byte, string, error) {
+	var data map[string]interface{}
+	if err := json.Unmarshal([]byte(avroStr), &data); err != nil {
+		return nil, "", err
+	}
+
+	var fullyQualifiedName string
+	parts := strings.Split(data["name"].(string), ".")
+	if len(parts) > 0 {
+		var namespace string
+		if len(parts) == 1 {
+			// avro schema does not define a namespace, use the Go namespace
+			msgFQNGo := reflect.TypeOf(msg).String()
+			msgFQNGo = strings.TrimLeft(msgFQNGo, "*")
+			partsMsg := strings.Split(msgFQNGo, ".")
+			if len(partsMsg) > 2 {
+				for i := 0; i < len(partsMsg)-1; i++ {
+					if i == 0 {
+						namespace += partsMsg[0]
+					} else {
+						namespace += fmt.Sprintf(".%v", partsMsg[i])
+					}
+				}
+			} else {
+				namespace = partsMsg[0]
+			}
+		} else if len(parts) == 2 {
+			namespace = parts[0]
+		} else if len(parts) > 2 {
+			for i := 0; i < len(parts)-1; i++ {
+				if i == 0 {
+					namespace += parts[0]
+				} else {
+					namespace += fmt.Sprintf(".%v", parts[i])
+				}
+			}
+
+		}
+		data["name"] = parts[len(parts)-1]
+		data["namespace"] = namespace
+		fullyQualifiedName = fmt.Sprintf("%v.%v", namespace, data["name"])
+	}
+	modifiedJSON, err := json.Marshal(data)
+	if err != nil {
+		return nil, fullyQualifiedName, err
+	}
+
+	return modifiedJSON, fullyQualifiedName, nil
+}
+
+// SerializeRecordName implements serialization of specific Avro data, using the record's fully qualified name as the subject
+func (s *SpecificSerializer) SerializeRecordName(msg interface{}, subject ...string) ([]byte, error) {
+	if msg == nil {
+		return nil, nil
+	}
+
+	var avroMsg SpecificAvroMessage
+	switch t := msg.(type) {
+	case SpecificAvroMessage:
+		avroMsg = t
+	default:
+		return nil, fmt.Errorf("serialization target must be an avro message. Got '%v'", t)
Got '%v'", t) + } + + modifiedJSON, msgFQN, err := s.addFullyQualifiedNameToSchema(avroMsg.Schema(), msg) + if err != nil { + fmt.Println("Error marshaling JSON when adding fullyQualifiedName:", err) + } + + if len(subject) > 0 { + if msgFQN != subject[0] { + return nil, fmt.Errorf(`the payload's fullyQualifiedName: '%v' does not match the subject: '%v'`, msgFQN, subject[0]) + } + } + + var id = 0 + info := schemaregistry.SchemaInfo{ + Schema: string(modifiedJSON), + } + + id, err = s.GetID(msgFQN, avroMsg, info) + if err != nil { + return nil, err + } + var buf bytes.Buffer + err = avroMsg.Serialize(&buf) + if err != nil { + return nil, err + } + payload, err := s.WriteBytes(id, buf.Bytes()) + if err != nil { + return nil, err + } + return payload, nil +} + // NewSpecificDeserializer creates an Avro deserializer for Avro-generated objects func NewSpecificDeserializer(client schemaregistry.Client, serdeType serde.Type, conf *DeserializerConfig) (*SpecificDeserializer, error) { s := &SpecificDeserializer{} @@ -99,9 +199,134 @@ func NewSpecificDeserializer(client schemaregistry.Client, serdeType serde.Type, if err != nil { return nil, err } + s.MessageFactory = s.avroMessageFactory return s, nil } +func (s *SpecificDeserializer) DeserializeRecordName(payload []byte) (interface{}, error) { + if payload == nil { + return nil, nil + } + + info, err := s.GetSchema("", payload) + if err != nil { + return nil, err + } + + // recreate the fullyQualifiedName + var data map[string]interface{} + if err := json.Unmarshal([]byte(info.Schema), &data); err != nil { + fmt.Println("Error unmarshaling JSON:", err) + } + name := data["name"].(string) + namespace := data["namespace"].(string) + fullyQualifiedName := fmt.Sprintf("%s.%s", namespace, name) + + writer, err := s.toAvroType(info) + if err != nil { + return nil, err + } + + subject, err := s.SubjectNameStrategy(fullyQualifiedName, s.SerdeType, info) + if err != nil { + return nil, err + } + + msg, err := s.MessageFactory(subject, fullyQualifiedName) + if err != nil { + return nil, err + } + + if msg == struct{}{} { + codec, err := goavro.NewCodec(info.Schema) + if err != nil { + return nil, err + } + + native, _, err := codec.NativeFromBinary(payload[5:]) + if err != nil { + return nil, err + } + + return native, nil + } + + var avroMsg SpecificAvroMessage + switch t := msg.(type) { + case SpecificAvroMessage: + avroMsg = t + default: + return nil, fmt.Errorf("deserialization target must be an avro message. 
Got '%v'", t) + } + reader, err := s.toAvroType(schemaregistry.SchemaInfo{Schema: avroMsg.Schema()}) + if err != nil { + return nil, err + } + deser, err := compiler.Compile(writer, reader) + if err != nil { + return nil, err + } + r := bytes.NewReader(payload[5:]) + + if err = vm.Eval(r, deser, avroMsg); err != nil { + return nil, err + } + return avroMsg, nil +} + +func (s *SpecificDeserializer) DeserializeIntoRecordName(subjects map[string]interface{}, payload []byte) error { + if payload == nil { + return nil + } + + info, err := s.GetSchema("", payload) + if err != nil { + return err + } + + // recreate the fullyQualifiedName + var data map[string]interface{} + if err := json.Unmarshal([]byte(info.Schema), &data); err != nil { + return err + } + name := data["name"].(string) + namespace := data["namespace"].(string) + fullyQualifiedName := fmt.Sprintf("%s.%s", namespace, name) + + v, ok := subjects[fullyQualifiedName] + if !ok { + return fmt.Errorf("unfound subject declaration") + } + + writer, err := s.toAvroType(info) + if err != nil { + return err + } + + var avroMsg SpecificAvroMessage + switch t := v.(type) { + case SpecificAvroMessage: + avroMsg = t + default: + return fmt.Errorf("deserialization target must be an avro message. Got '%v'", t) + } + reader, err := s.toAvroType(schemaregistry.SchemaInfo{Schema: avroMsg.Schema()}) + if err != nil { + return err + } + deser, err := compiler.Compile(writer, reader) + if err != nil { + return err + } + r := bytes.NewReader(payload[5:]) + + if err = vm.Eval(r, deser, avroMsg); err != nil { + return err + } + + return nil +} + // Deserialize implements deserialization of specific Avro data func (s *SpecificDeserializer) Deserialize(topic string, payload []byte) (interface{}, error) { if payload == nil { @@ -123,6 +348,21 @@ func (s *SpecificDeserializer) Deserialize(topic string, payload []byte) (interf if err != nil { return nil, err } + + if msg == struct{}{} { + codec, err := goavro.NewCodec(info.Schema) + if err != nil { + return nil, err + } + + native, _, err := codec.NativeFromBinary(payload[5:]) + if err != nil { + return nil, err + } + + return native, nil + } + var avroMsg SpecificAvroMessage switch t := msg.(type) { case SpecificAvroMessage: @@ -182,3 +422,7 @@ func (s *SpecificDeserializer) toAvroType(schema schemaregistry.SchemaInfo) (sch ns := parser.NewNamespace(false) return resolveAvroReferences(s.Client, schema, ns) } + +func (s *SpecificDeserializer) avroMessageFactory(subject string, name string) (interface{}, error) { + return struct{}{}, nil +} diff --git a/schemaregistry/serde/avro/avro_specific_test.go b/schemaregistry/serde/avro/avro_specific_test.go index 4335e4f22..a5ec184b0 100644 --- a/schemaregistry/serde/avro/avro_specific_test.go +++ b/schemaregistry/serde/avro/avro_specific_test.go @@ -18,11 +18,13 @@ package avro import ( "errors" + "fmt" "testing" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/test" + rn "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/test/avro/recordname" ) func testMessageFactorySpecific(subject string, name string) (interface{}, error) { @@ -153,3 +155,292 @@ func TestSpecificAvroSerdeWithCycle(t *testing.T) { msg, err := deser.Deserialize("topic1", bytes) serde.MaybeFail("deserialization", err, serde.Expect(msg, &obj)) } + +// as the avro schema does not define namespace +// use the Go namespace recordname.DemoSchema +var 
exampleNamespace = "recordname.DemoSchema" +var example = &rn.DemoSchema{ + StringField: "demoSchema from example", +} + +// Declare mapBP as a global variable +var mapBP = map[string]rn.BasicPerson{ + "first": { + Number: &rn.UnionLongNull{Long: 1}, + Name: rn.UnionString{String: "Flo"}, + }, + "second": { + Number: &rn.UnionLongNull{Long: 2}, + Name: rn.UnionString{String: "Paul"}, + }, +} + +// namespace is python.test.advanced.advanced +var complexDTNamespace = "python.test.advanced.advanced" +var complexDT = &rn.Advanced{ + Number: &rn.UnionLongNull{Long: 10}, + Name: rn.UnionString{String: "Ari"}, + Friends: mapBP, +} + +func TestAvroSpecificSerdeDeserializeRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(example, exampleNamespace) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(complexDT, complexDTNamespace) + serde.MaybeFail("serialization", err) + + deser, err := NewSpecificDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `map[BoolField:false BytesField:[] DoubleField:0 IntField:0 StringField:demoSchema from example]`)) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `map[family:map[] friends:map[first:map[name:map[string:Flo] number:map[long:1]] second:map[name:map[string:Paul] number:map[long:2]]] name:map[string:Ari] number:map[long:10]]`)) +} + +func RegisterMessageFactorySpecific() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case exampleNamespace: + return &rn.DemoSchema{}, nil + case complexDTNamespace: + return &rn.Advanced{}, nil + } + return nil, fmt.Errorf("No matching receiver") + } +} + +func RegisterMessageFactoryNoReceiverSpecific() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + return nil, fmt.Errorf("No matching receiver") + } +} + +func RegisterMessageFactoryInvalidReceiverSpecific() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case pizza: + return &LinkedList{}, nil + case linkedList: + return "", nil + } + return nil, fmt.Errorf("No matching receiver") + } +} + +func TestAvroSpecificSerdeDeserializeRecordNameWithHandler(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(example) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(complexDT) + serde.MaybeFail("serialization", err) + + deser, err := 
NewSpecificDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + deser.MessageFactory = RegisterMessageFactorySpecific() + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*rn.DemoSchema).StringField, example.StringField)) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*rn.Advanced).Number.Long, complexDT.Number.Long)) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*rn.Advanced).Name.String, complexDT.Name.String)) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*rn.Advanced).Friends["first"].Name.String, complexDT.Friends["first"].Name.String)) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*rn.Advanced).Friends["second"].Number.Long, complexDT.Friends["second"].Number.Long)) +} + +func TestAvroSpecificSerdeDeserializeRecordNameWithHandlerNoReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(example) + serde.MaybeFail("serialization", err) + + deser, err := NewSpecificDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid receiver + deser.MessageFactory = RegisterMessageFactoryNoReceiverSpecific() + + newobj, err := deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "No matching receiver")) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(newobj, nil)) +} + +func TestAvroSpecificSerdeDeserializeRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(example) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(complexDT) + serde.MaybeFail("serialization", err) + + deser, err := NewSpecificDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid schema + deser.MessageFactory = RegisterMessageFactoryInvalidReceiverSpecific() + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "No matching receiver")) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "No matching receiver")) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(newobj, nil)) +} + +func TestAvroSpecificSerdeDeserializeIntoRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + 
serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(example) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(complexDT) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[exampleNamespace] = &rn.DemoSchema{} + receivers[complexDTNamespace] = &rn.Advanced{} + + deser, err := NewSpecificDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[exampleNamespace].(*rn.DemoSchema).StringField, example.StringField)) + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[complexDTNamespace].(*rn.Advanced).Number.Long, complexDT.Number.Long)) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[complexDTNamespace].(*rn.Advanced).Name.String, complexDT.Name.String)) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[complexDTNamespace].(*rn.Advanced).Friends["first"].Name.String, complexDT.Friends["first"].Name.String)) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[complexDTNamespace].(*rn.Advanced).Friends["second"].Number.Long, complexDT.Friends["second"].Number.Long)) +} + +func TestAvroSpecificSerdeDeserializeIntoRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(example) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[invalidSchema] = &rn.DemoSchema{} + + deser, err := NewSpecificDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "unfound subject declaration")) + serde.MaybeFail("deserialization", serde.Expect(fmt.Sprintf("%v", receivers[invalidSchema]), `&{0 0 false []}`)) +} + +func TestAvroSpecificSerdeDeserializeIntoRecordNameWithInvalidReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(example) + serde.MaybeFail("serialization", err) + + bytesInner, err := ser.SerializeRecordName(complexDT) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[exampleNamespace] = &rn.Advanced{} + receivers[complexDTNamespace] = "" + + deser, err := NewGenericDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + 
serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", receivers[exampleNamespace]), `&{ { 0} map[] map[]}`)) + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "destination is not a pointer string")) + serde.MaybeFail("deserialization", serde.Expect(receivers[complexDTNamespace], "")) +} + +func TestAvroSpecificSerdeRecordNamePayloadMismatchSubject(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSpecificSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + _, err = ser.SerializeRecordName(example, "test.Pizza") + serde.MaybeFail("serialization", serde.Expect(err.Error(), "the payload's fullyQualifiedName: 'recordname.DemoSchema' does not match the subject: 'test.Pizza'")) +} diff --git a/schemaregistry/serde/config.go b/schemaregistry/serde/config.go index e70a2ee89..172013e18 100644 --- a/schemaregistry/serde/config.go +++ b/schemaregistry/serde/config.go @@ -16,6 +16,10 @@ package serde +const ( + topicRecordNameStrategy = "topicRecordNameStrategy" +) + // SerializerConfig is used to pass multiple configuration options to the serializers. type SerializerConfig struct { // AutoRegisterSchemas determines whether to automatically register schemas during serialization @@ -26,6 +30,8 @@ type SerializerConfig struct { UseLatestVersion bool // NormalizeSchemas determines whether to normalize schemas during serialization NormalizeSchemas bool + + SubjectNameStrategy string } // NewSerializerConfig returns a new configuration instance with sane defaults. @@ -40,6 +46,14 @@ func NewSerializerConfig() *SerializerConfig { return c } +func NewSerializerConfigTopRecNameStrat() *SerializerConfig { + c := NewSerializerConfig() + + c.SubjectNameStrategy = topicRecordNameStrategy + + return c +} + // DeserializerConfig is used to pass multiple configuration options to the deserializers. 
type DeserializerConfig struct { } diff --git a/schemaregistry/serde/jsonschema/json_schema.go b/schemaregistry/serde/jsonschema/json_schema.go index 4d60e6a77..a2188877d 100644 --- a/schemaregistry/serde/jsonschema/json_schema.go +++ b/schemaregistry/serde/jsonschema/json_schema.go @@ -18,13 +18,14 @@ package jsonschema import ( "encoding/json" - "io" - "strings" - + "fmt" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" "github.com/invopop/jsonschema" jsonschema2 "github.com/santhosh-tekuri/jsonschema/v5" + "io" + "reflect" + "strings" ) // Serializer represents a JSON Schema serializer @@ -59,15 +60,19 @@ func (s *Serializer) Serialize(topic string, msg interface{}) ([]byte, error) { if msg == nil { return nil, nil } + jschema := jsonschema.Reflect(msg) + raw, err := json.Marshal(jschema) if err != nil { return nil, err } + info := schemaregistry.SchemaInfo{ Schema: string(raw), SchemaType: "JSON", } + id, err := s.GetID(topic, msg, info) if err != nil { return nil, err @@ -97,6 +102,103 @@ func (s *Serializer) Serialize(topic string, msg interface{}) ([]byte, error) { return nil, err } return payload, nil + +} + +func (s *Serializer) addFullyQualifiedNameToSchema(jsonBytes []byte, msgFQN string) ([]byte, error) { + var data map[string]interface{} + if err := json.Unmarshal(jsonBytes, &data); err != nil { + return nil, err + } + + parts := strings.Split(msgFQN, ".") + if len(parts) > 0 { + var namespace string + var name string + if len(parts) == 2 { + namespace = parts[0] + name = parts[1] + } else if len(parts) > 2 { + for i := 0; i < len(parts)-1; i++ { + if i == 0 { + namespace += parts[0] + } else { + namespace += fmt.Sprintf(".%v", parts[i]) + } + } + name = parts[len(parts)-1] + + } + data["name"] = name + data["namespace"] = namespace + } + return json.Marshal(data) +} + +// SerializeRecordName implements serialization of generic data to JSON +func (s *Serializer) SerializeRecordName(msg interface{}, subject ...string) ([]byte, error) { + if msg == nil { + return nil, nil + } + + // get the fully qualified name + msgFQN := reflect.TypeOf(msg).String() + msgFQN = strings.TrimLeft(msgFQN, "*") // in case + + if len(subject) > 0 { + if msgFQN != subject[0] { + return nil, fmt.Errorf(`the payload's fullyQualifiedName: '%v' does not match the subject: '%v'`, msgFQN, subject[0]) + } + } + + jschema := jsonschema.Reflect(msg) + + // Marshal the schema into a JSON []byte + schemaBytes, err := json.Marshal(jschema) + if err != nil { + return nil, err + } + + raw, err := s.addFullyQualifiedNameToSchema(schemaBytes, msgFQN) + if err != nil { + fmt.Println("Error marshaling JSON when adding fullyQualifiedName:", err) + } + + info := schemaregistry.SchemaInfo{ + Schema: string(raw), + SchemaType: "JSON", + } + + id, err := s.GetID(msgFQN, msg, info) + if err != nil { + return nil, err + } + raw, err = json.Marshal(msg) + if err != nil { + return nil, err + } + if s.validate { + // Need to unmarshal to pure interface + var obj interface{} + err = json.Unmarshal(raw, &obj) + if err != nil { + return nil, err + } + jschema, err := toJSONSchema(s.Client, info) + if err != nil { + return nil, err + } + err = jschema.Validate(obj) + if err != nil { + return nil, err + } + } + payload, err := s.WriteBytes(id, raw) + if err != nil { + return nil, err + } + return payload, nil + } // NewDeserializer creates a JSON deserializer for generic objects @@ -108,6 +210,7 @@ func NewDeserializer(client schemaregistry.Client, 
serdeType serde.Type, conf *D if err != nil { return nil, err } + s.MessageFactory = s.jsonMessageFactory return s, nil } @@ -120,6 +223,7 @@ func (s *Deserializer) Deserialize(topic string, payload []byte) (interface{}, e if err != nil { return nil, err } + if s.validate { // Need to unmarshal to pure interface var obj interface{} @@ -140,10 +244,12 @@ func (s *Deserializer) Deserialize(topic string, payload []byte) (interface{}, e if err != nil { return nil, err } + msg, err := s.MessageFactory(subject, "") if err != nil { return nil, err } + err = json.Unmarshal(payload[5:], msg) if err != nil { return nil, err @@ -151,6 +257,139 @@ func (s *Deserializer) Deserialize(topic string, payload []byte) (interface{}, e return msg, nil } +func (s *Deserializer) deserializeStringField(bytes []byte, fieldName string) (string, error) { + var fieldNameBytes []byte + var fieldValueBytes []byte + fieldNameLen := 0 + readingFieldName := true + + for _, b := range bytes { + if readingFieldName { + if fieldNameLen == 0 { + // The first byte of the field name indicates its length + fieldNameLen = int(b) + } else { + // Accumulate bytes for the field name + fieldNameBytes = append(fieldNameBytes, b) + if len(fieldNameBytes) == fieldNameLen { + readingFieldName = false + } + } + } else { + // Accumulate bytes for the field value + fieldValueBytes = append(fieldValueBytes, b) + } + } + + if fieldName != string(fieldNameBytes) { + return "", fmt.Errorf("field not found: %s", fieldName) + } + + return string(fieldValueBytes), nil +} + +// DeserializeRecordName deserialise bytes +func (s *Deserializer) DeserializeRecordName(payload []byte) (interface{}, error) { + if payload == nil { + return nil, nil + } + + info, err := s.GetSchema("", payload) + if err != nil { + return nil, err + } + + // recreate the fullyQualifiedName + var data map[string]interface{} + if err := json.Unmarshal([]byte(info.Schema), &data); err != nil { + fmt.Println("Error unmarshaling JSON:", err) + } + name := data["name"].(string) + namespace := data["namespace"].(string) + fullyQualifiedName := fmt.Sprintf("%s.%s", namespace, name) + + if s.validate { + // Need to unmarshal to pure interface + var obj interface{} + err = json.Unmarshal(payload[5:], &obj) + if err != nil { + return nil, err + } + jschema, err := toJSONSchema(s.Client, info) + if err != nil { + return nil, err + } + err = jschema.Validate(obj) + if err != nil { + return nil, err + } + } + + subject, err := s.SubjectNameStrategy(fullyQualifiedName, s.SerdeType, info) + if err != nil { + return nil, err + } + + msg, err := s.MessageFactory(subject, fullyQualifiedName) + if err != nil { + return nil, err + } + + err = json.Unmarshal(payload[5:], msg) + if err != nil { + return nil, err + } + return msg, nil +} + +// DeserializeIntoRecordName deserialize bytes into the map interface{} +func (s *Deserializer) DeserializeIntoRecordName(subjects map[string]interface{}, payload []byte) error { + if payload == nil { + return fmt.Errorf("Empty payload") + } + + info, err := s.GetSchema("", payload) + if err != nil { + return err + } + + // recreate the fullyQualifiedName + var data map[string]interface{} + if err := json.Unmarshal([]byte(info.Schema), &data); err != nil { + fmt.Println("Error unmarshaling JSON:", err) + } + fullyQualifiedName := fmt.Sprintf("%s.%s", data["namespace"].(string), data["name"].(string)) + + v, ok := subjects[fullyQualifiedName] + if !ok { + return fmt.Errorf("unfound subject declaration") + } + + if s.validate { + // Need to unmarshal to pure 
interface + var obj interface{} + err = json.Unmarshal(payload[5:], &obj) + if err != nil { + return err + } + jschema, err := toJSONSchema(s.Client, info) + if err != nil { + return err + } + err = jschema.Validate(obj) + if err != nil { + return err + } + } + + err = json.Unmarshal(payload[5:], v) + if err != nil { + return err + } + return nil + +} + // DeserializeInto implements deserialization of generic data from JSON to the given object func (s *Deserializer) DeserializeInto(topic string, payload []byte, msg interface{}) error { if payload == nil { @@ -199,3 +438,8 @@ func toJSONSchema(c schemaregistry.Client, schema schemaregistry.SchemaInfo) (*j } return compiler.Compile(url) } + +func (s *Deserializer) jsonMessageFactory(subject string, name string) (interface{}, error) { + var msg map[string]interface{} + return &msg, nil +} diff --git a/schemaregistry/serde/jsonschema/json_schema_test.go b/schemaregistry/serde/jsonschema/json_schema_test.go index 3f32a8b5c..b0ff1ce24 100644 --- a/schemaregistry/serde/jsonschema/json_schema_test.go +++ b/schemaregistry/serde/jsonschema/json_schema_test.go @@ -17,6 +17,7 @@ package jsonschema import ( + "fmt" "testing" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" @@ -105,3 +106,315 @@ type JSONLinkedList struct { Value int32 Next *JSONLinkedList } + +const ( + linkedList = "jsonschema.LinkedList" + pizza = "jsonschema.Pizza" + invalidSchema = "invalidSchema" +) + +type LinkedList struct { + Value int +} + +type Pizza struct { + Size string + Toppings []string +} + +type Author struct { + Name string +} + +var ( + inner = LinkedList{ + Value: 100, + } + + obj = Pizza{ + Size: "Extra extra large", + Toppings: []string{"anchovies", "mushrooms"}, + } +) + +func TestJSONSerdeDeserializeRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj, pizza) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `&map[Value:100]`)) + // access the newobj payload + if objPtr, ok := newobj.(*map[string]interface{}); ok { + // objPtr is now a pointer to a map[string]interface{} + if objPtr != nil { + // Dereference the pointer to access the map + obj := *objPtr + if value, ok := obj["Value"].(interface{}); ok { + serde.MaybeFail("deserialization", serde.Expect(value.(float64), float64(100))) + } else { + fmt.Println("Value is not of type int") + } + } else { + fmt.Println("objPtr is nil") + } + } + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `&map[Size:Extra extra large Toppings:[anchovies mushrooms]]`)) +} + +func RegisterMessageFactory() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case linkedList: + return &LinkedList{}, nil + case 
pizza: + return &Pizza{}, nil + } + return nil, fmt.Errorf("No matching receiver") + } +} + +func RegisterMessageFactoryNoReceiver() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + return nil, fmt.Errorf("No matching receiver") + } +} + +func RegisterMessageFactoryInvalidReceiver() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case pizza: + return &LinkedList{}, nil + case linkedList: + return "", nil + } + return nil, fmt.Errorf("No matching receiver") + } +} + +func TestJSONSerdeDeserializeRecordNameWithHandler(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner, linkedList) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + deser.MessageFactory = RegisterMessageFactory() + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*LinkedList).Value, inner.Value)) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*Pizza).Size, obj.Size)) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*Pizza).Toppings[0], obj.Toppings[0])) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*Pizza).Toppings[1], obj.Toppings[1])) +} + +func TestJSONSerdeDeserializeRecordNameWithHandlerNoReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid receiver + deser.MessageFactory = RegisterMessageFactoryNoReceiver() + + newobj, err := deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "No matching receiver")) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(newobj, nil)) +} + +func TestJSONSerdeDeserializeRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + 
deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid schema + deser.MessageFactory = RegisterMessageFactoryInvalidReceiver() + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(newobj, nil)) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "json: Unmarshal(non-pointer string)")) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserializeInvalidReceiver", err) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", newobj), `&{0}`)) +} + +func TestJSONSerdeDeserializeIntoRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj, pizza) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[linkedList] = &LinkedList{} + receivers[pizza] = &Pizza{} + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(int(receivers[linkedList].(*LinkedList).Value), 100)) + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[pizza].(*Pizza).Toppings[0], obj.Toppings[0])) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[pizza].(*Pizza).Toppings[1], obj.Toppings[1])) +} + +func TestJSONSerdeDeserializeIntoRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[invalidSchema] = &Pizza{} + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "unfound subject declaration")) + serde.MaybeFail("deserialization", serde.Expect(receivers[invalidSchema].(*Pizza).Size, "")) +} + +func TestJSONSerdeDeserializeIntoRecordNameWithInvalidReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + 
serde.MaybeFail("serialization", err) + + bytesInner, err := ser.SerializeRecordName(&inner, linkedList) + serde.MaybeFail("serialization", err) + + aut := Author{ + Name: "aut", + } + bytesAut, err := ser.SerializeRecordName(&aut) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[pizza] = &LinkedList{} + receivers[linkedList] = "" + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(fmt.Sprintf("%v", receivers[pizza]), `&{0}`)) + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "json: Unmarshal(non-pointer string)")) + err = deser.DeserializeIntoRecordName(receivers, bytesAut) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "unfound subject declaration")) +} + +func TestJSONSerdeRecordNamePayloadUnmatchSubject(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + _, err = ser.SerializeRecordName(&obj, "test.Pizza") + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "the payload's fullyQualifiedName: 'jsonschema.Pizza' does not match the subject: 'test.Pizza'")) +} diff --git a/schemaregistry/serde/protobuf/protobuf.go b/schemaregistry/serde/protobuf/protobuf.go index d74fcc966..f46e9d69d 100644 --- a/schemaregistry/serde/protobuf/protobuf.go +++ b/schemaregistry/serde/protobuf/protobuf.go @@ -21,6 +21,7 @@ import ( "fmt" "io" "log" + "reflect" "strings" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" @@ -150,8 +151,66 @@ func (s *Deserializer) ConfigureDeserializer(client schemaregistry.Client, serde return nil } +// SerializeRecordName serialize a protbuf data, here set to match the interface +func (s *Serializer) SerializeRecordName(msg interface{}, subject ...string) ([]byte, error) { + if msg == nil { + return nil, nil + } + var protoMsg proto.Message + switch t := msg.(type) { + case proto.Message: + protoMsg = t + default: + return nil, fmt.Errorf("serialization target must be a protobuf message. 
Got '%v'", t) + } + + messageDescriptor := protoMsg.ProtoReflect().Descriptor() + + fullName := string(messageDescriptor.FullName()) + + if len(subject) > 0 { + if fullName != subject[0] { + return nil, fmt.Errorf(`the payload's fullyQualifiedName: '%v' does not match the subject: '%v'`, fullName, subject[0]) + } + } + + autoRegister := s.Conf.AutoRegisterSchemas + normalize := s.Conf.NormalizeSchemas + fileDesc, deps, err := s.toProtobufSchema(protoMsg) + if err != nil { + return nil, err + } + metadata, err := s.resolveDependencies(fileDesc, deps, "", autoRegister, normalize) + if err != nil { + return nil, err + } + info := schemaregistry.SchemaInfo{ + Schema: metadata.Schema, + SchemaType: metadata.SchemaType, + References: metadata.References, + } + + id, err := s.GetID(fullName, protoMsg, info) + if err != nil { + return nil, err + } + msgIndexBytes := toMessageIndexBytes(protoMsg.ProtoReflect().Descriptor()) + msgBytes, err := proto.Marshal(protoMsg) + if err != nil { + return nil, err + } + + payload, err := s.WriteBytes(id, append(msgIndexBytes, msgBytes...)) + if err != nil { + return nil, err + } + return payload, nil + +} + // Serialize implements serialization of Protobuf data func (s *Serializer) Serialize(topic string, msg interface{}) ([]byte, error) { + if msg == nil { return nil, nil } @@ -162,6 +221,7 @@ func (s *Serializer) Serialize(topic string, msg interface{}) ([]byte, error) { default: return nil, fmt.Errorf("serialization target must be a protobuf message. Got '%v'", t) } + autoRegister := s.Conf.AutoRegisterSchemas normalize := s.Conf.NormalizeSchemas fileDesc, deps, err := s.toProtobufSchema(protoMsg) @@ -177,6 +237,7 @@ func (s *Serializer) Serialize(topic string, msg interface{}) ([]byte, error) { SchemaType: metadata.SchemaType, References: metadata.References, } + id, err := s.GetID(topic, protoMsg, info) if err != nil { return nil, err @@ -186,6 +247,7 @@ func (s *Serializer) Serialize(topic string, msg interface{}) ([]byte, error) { if err != nil { return nil, err } + payload, err := s.WriteBytes(id, append(msgIndexBytes, msgBytes...)) if err != nil { return nil, err @@ -345,32 +407,51 @@ func NewDeserializer(client schemaregistry.Client, serdeType serde.Type, conf *D return s, nil } -// Deserialize implements deserialization of Protobuf data +// Deserialize deserialize events with subjects register with the TopicNameStrategy func (s *Deserializer) Deserialize(topic string, payload []byte) (interface{}, error) { - if payload == nil { - return nil, nil - } - info, err := s.GetSchema(topic, payload) + bytesRead, messageDesc, info, err := s.setMessageDescriptor(topic, payload) if err != nil { return nil, err } - fd, err := s.toFileDesc(info) + + subject, err := s.SubjectNameStrategy(topic, s.SerdeType, info) if err != nil { return nil, err } - bytesRead, msgIndexes, err := readMessageIndexes(payload[5:]) + msg, err := s.MessageFactory(subject, messageDesc.GetFullyQualifiedName()) if err != nil { return nil, err } - messageDesc, err := toMessageDesc(fd, msgIndexes) + var protoMsg proto.Message + switch t := msg.(type) { + case proto.Message: + protoMsg = t + default: + return nil, fmt.Errorf("deserialization target must be a protobuf message. 
Got '%v'", t) + } + err = proto.Unmarshal(payload[5+bytesRead:], protoMsg) + return protoMsg, err +} + +// DeserializeRecordName deserialize events with subjects register with the RecordNameStrategy +func (s *Deserializer) DeserializeRecordName(payload []byte) (interface{}, error) { + if payload == nil { + return nil, nil + } + + bytesRead, messageDesc, info, err := s.setMessageDescriptor("", payload) if err != nil { return nil, err } - subject, err := s.SubjectNameStrategy(topic, s.SerdeType, info) + + msgFullyQlfName := messageDesc.GetFullyQualifiedName() + + subject, err := s.SubjectNameStrategy(msgFullyQlfName, s.SerdeType, info) if err != nil { return nil, err } - msg, err := s.MessageFactory(subject, messageDesc.GetFullyQualifiedName()) + + msg, err := s.MessageFactory(subject, msgFullyQlfName) if err != nil { return nil, err } @@ -385,11 +466,41 @@ func (s *Deserializer) Deserialize(topic string, payload []byte) (interface{}, e return protoMsg, err } +func (s *Deserializer) setMessageDescriptor(subject string, payload []byte) (int, *desc.MessageDescriptor, schemaregistry.SchemaInfo, error) { + + var info = schemaregistry.SchemaInfo{} + info, err := s.GetSchema(subject, payload) + if err != nil { + return 0, nil, info, err + } + + fd, err := s.toFileDesc(info) + if err != nil { + return 0, nil, info, err + } + bytesRead, msgIndexes, err := readMessageIndexes(payload[5:]) + if err != nil { + return 0, nil, info, err + } + messageDesc, err := toMessageDesc(fd, msgIndexes) + if err != nil { + return 0, nil, info, err + } + + return bytesRead, messageDesc, info, nil +} + // DeserializeInto implements deserialization of Protobuf data to the given object func (s *Deserializer) DeserializeInto(topic string, payload []byte, msg interface{}) error { if payload == nil { return nil } + + bytesRead, messageDesc, _, err := s.setMessageDescriptor(topic, payload) + if err != nil { + return err + } + var protoMsg proto.Message switch t := msg.(type) { case proto.Message: @@ -397,11 +508,45 @@ func (s *Deserializer) DeserializeInto(topic string, payload []byte, msg interfa default: return fmt.Errorf("deserialization target must be a protobuf message. Got '%v'", t) } - bytesRead, _, err := readMessageIndexes(payload[5:]) + + protoInfo := reflect.TypeOf(protoMsg).Elem() + if protoInfo.Name() != messageDesc.GetName() { + return fmt.Errorf("recipient proto object differs from incoming events") + } + + return proto.Unmarshal(payload[5+bytesRead:], protoMsg) +} + +// DeserializeIntoRecordName deserialize bytes with recordNameStrategy to some given objects +func (s *Deserializer) DeserializeIntoRecordName(subjects map[string]interface{}, payload []byte) error { + if payload == nil { + return nil + } + + bytesRead, messageDesc, _, err := s.setMessageDescriptor("", payload) if err != nil { return err } - return proto.Unmarshal(payload[5+bytesRead:], protoMsg) + + msgFullyQlfName := messageDesc.GetFullyQualifiedName() + if msg, ok := subjects[msgFullyQlfName]; ok { + var protoMsg proto.Message + switch t := msg.(type) { + case proto.Message: + protoMsg = t + default: + return fmt.Errorf("deserialization target must be a protobuf message. 
Got '%v'", t) + } + + protoInfo := reflect.TypeOf(protoMsg).Elem() + if protoInfo.Name() != messageDesc.GetName() { + return fmt.Errorf("recipient proto object differs from incoming events") + } + + return proto.Unmarshal(payload[5+bytesRead:], protoMsg) + } else { + return fmt.Errorf("unfound subject declaration") + } } func (s *Deserializer) toFileDesc(info schemaregistry.SchemaInfo) (*desc.FileDescriptor, error) { diff --git a/schemaregistry/serde/protobuf/protobuf_test.go b/schemaregistry/serde/protobuf/protobuf_test.go index 3d8dcdfd4..b67b74099 100644 --- a/schemaregistry/serde/protobuf/protobuf_test.go +++ b/schemaregistry/serde/protobuf/protobuf_test.go @@ -17,11 +17,13 @@ package protobuf import ( + "errors" "testing" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/test" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/test/proto/recordname" "google.golang.org/protobuf/proto" ) @@ -184,3 +186,344 @@ func TestProtobufSerdeWithCycle(t *testing.T) { newobj, err := deser.Deserialize("topic1", bytes) serde.MaybeFail("deserialization", err, serde.Expect(newobj.(proto.Message).ProtoReflect(), obj.ProtoReflect())) } + +// Test strategies +func TestProtobufSerdeDeserializeInto(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + obj := test.Pizza{ + Size: "Extra extra large", + Toppings: []string{"anchovies", "mushrooms"}, + } + + topic := "topic" + + bytesInner, err := ser.Serialize(topic, &obj) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + innerReceiver := &test.LinkedList{} + + err = deser.DeserializeInto(topic, bytesInner, innerReceiver) + serde.MaybeFail("deserializeRecordNameValidSchema", serde.Expect(err.Error(), "recipient proto object differs from incoming events")) +} + +const ( + linkedList = "recordname.LinkedList" + pizza = "recordname.Pizza" + invalidSchema = "invalidSchema" +) + +var ( + inner = recordname.LinkedList{ + Value: 100, + } + + obj = recordname.Pizza{ + Size: "Extra extra large", + Toppings: []string{"anchovies", "mushrooms"}, + } +) + +func TestProtobufSerdeDeserializeRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner, linkedList) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + deser.ProtoRegistry.RegisterMessage(inner.ProtoReflect().Type()) + 
deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(proto.Message).ProtoReflect(), inner.ProtoReflect())) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(proto.Message).ProtoReflect(), obj.ProtoReflect())) +} + +func RegisterMessageFactory() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case linkedList: + return &test.LinkedList{}, nil + case pizza: + return &test.Pizza{}, nil + } + return nil, errors.New("No matching receiver") + } +} + +func RegisterMessageFactoryNoReceiver() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + return nil, errors.New("No matching receiver") + } +} + +func RegisterMessageFactoryInvalidReceiver() func(string, string) (interface{}, error) { + return func(subject string, name string) (interface{}, error) { + switch name { + case pizza: + return &test.LinkedList{}, nil + case linkedList: + return "", nil + } + return nil, errors.New("No matching receiver") + } +} + +func TestProtobufSerdeDeserializeRecordNameWithHandler(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner, linkedList) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + deser.MessageFactory = RegisterMessageFactory() + + deser.ProtoRegistry.RegisterMessage(inner.ProtoReflect().Type()) + deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + newobj, err := deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*test.LinkedList).Value, inner.Value)) + + newobj, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*test.Pizza).Size, obj.Size)) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*test.Pizza).Toppings[0], obj.Toppings[0])) + serde.MaybeFail("deserialization", err, serde.Expect(newobj.(*test.Pizza).Toppings[1], obj.Toppings[1])) +} + +func TestProtobufSerdeDeserializeRecordNameWithHandlerNoReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj, pizza) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid receiver + deser.MessageFactory = RegisterMessageFactoryNoReceiver() + + 
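+	// RegisterMessageFactoryNoReceiver never returns a receiver, so the
+	// DeserializeRecordName call below is expected to fail with "No matching receiver"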
deser.ProtoRegistry.RegisterMessage(inner.ProtoReflect().Type()) + deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + newobj, err := deser.DeserializeRecordName(bytesObj) + + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "No matching receiver")) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(newobj, nil)) +} + +func TestProtobufSerdeDeserializeRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj, pizza) + serde.MaybeFail("serialization", err) + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + // register invalid schema + deser.MessageFactory = RegisterMessageFactoryInvalidReceiver() + + deser.ProtoRegistry.RegisterMessage(inner.ProtoReflect().Type()) + deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + _, err = deser.DeserializeRecordName(bytesObj) + serde.MaybeFail("deserialization", err) + + _, err = deser.DeserializeRecordName(bytesInner) + serde.MaybeFail("deserializeInvalidReceiver", serde.Expect(err.Error(), "deserialization target must be a protobuf message. Got ''")) +} + +func TestProtobufSerdeDeserializeIntoRecordName(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesInner, err := ser.SerializeRecordName(&inner, linkedList) + serde.MaybeFail("serialization", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[linkedList] = &test.LinkedList{} + receivers[pizza] = &test.Pizza{} + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + deser.ProtoRegistry.RegisterMessage(inner.ProtoReflect().Type()) + deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + serde.MaybeFail("deserialization", err, serde.Expect(int(receivers[linkedList].(*test.LinkedList).Value), 100)) + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[pizza].(*test.Pizza).Toppings[0], obj.Toppings[0])) + serde.MaybeFail("deserialization", err, serde.Expect(receivers[pizza].(*test.Pizza).Toppings[1], obj.Toppings[1])) +} + +func TestProtobufSerdeDeserializeIntoRecordNameWithInvalidSchema(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + 
serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj) + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[invalidSchema] = &test.Pizza{} + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + deser.ProtoRegistry.RegisterMessage(inner.ProtoReflect().Type()) + deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "unfound subject declaration")) + serde.MaybeFail("deserialization", serde.Expect(receivers[invalidSchema].(*test.Pizza).Size, "")) +} + +func TestProtobufSerdeDeserializeIntoRecordNameWithInvalidReceiver(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + bytesObj, err := ser.SerializeRecordName(&obj, pizza) + serde.MaybeFail("serialization", err) + + bytesInner, err := ser.SerializeRecordName(&inner) + serde.MaybeFail("serialization", err) + + aut := recordname.Author{ + Name: "aut", + } + bytesAut, err := ser.SerializeRecordName(&aut, "recordname.Author") + serde.MaybeFail("serialization", err) + + var receivers = make(map[string]interface{}) + receivers[pizza] = &test.LinkedList{} + receivers[linkedList] = "" + + deser, err := NewDeserializer(client, serde.ValueSerde, NewDeserializerConfig()) + + serde.MaybeFail("Deserializer configuration", err) + deser.Client = ser.Client + + deser.ProtoRegistry.RegisterMessage(inner.ProtoReflect().Type()) + deser.ProtoRegistry.RegisterMessage(obj.ProtoReflect().Type()) + + err = deser.DeserializeIntoRecordName(receivers, bytesObj) + // serde.MaybeFail("deserialization", serde.Expect(err.Error(), "deserialization target must be a protobuf message")) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "recipient proto object differs from incoming events")) + + err = deser.DeserializeIntoRecordName(receivers, bytesInner) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "deserialization target must be a protobuf message. 
Got ''")) + + err = deser.DeserializeIntoRecordName(receivers, bytesAut) + serde.MaybeFail("deserialization", serde.Expect(err.Error(), "unfound subject declaration")) +} + +func TestProtobufSerdeSubjectMismatchPayload(t *testing.T) { + serde.MaybeFail = serde.InitFailFunc(t) + var err error + conf := schemaregistry.NewConfig("mock://") + + client, err := schemaregistry.NewClient(conf) + serde.MaybeFail("Schema Registry configuration", err) + + ser, err := NewSerializer(client, serde.ValueSerde, NewSerializerConfig()) + serde.MaybeFail("Serializer configuration", err) + + _, err = ser.SerializeRecordName(&obj, "test.Pizza") + serde.MaybeFail("serialization", serde.Expect(err.Error(), "the payload's fullyQualifiedName: 'recordname.Pizza' does not match the subject: 'test.Pizza'")) +} diff --git a/schemaregistry/serde/serde.go b/schemaregistry/serde/serde.go index a57c4db5f..5b10036c0 100644 --- a/schemaregistry/serde/serde.go +++ b/schemaregistry/serde/serde.go @@ -57,6 +57,7 @@ type Serializer interface { // Serialize will serialize the given message, which should be a pointer. // For example, in Protobuf, messages are always a pointer to a struct and never just a struct. Serialize(topic string, msg interface{}) ([]byte, error) + SerializeRecordName(msg interface{}, subject ...string) ([]byte, error) Close() } @@ -64,10 +65,13 @@ type Serializer interface { type Deserializer interface { ConfigureDeserializer(client schemaregistry.Client, serdeType Type, conf *DeserializerConfig) error // Deserialize will call the MessageFactory to create an object - // into which we will unmarshal data. Deserialize(topic string, payload []byte) (interface{}, error) // DeserializeInto will unmarshal data into the given object. DeserializeInto(topic string, payload []byte, msg interface{}) error + + DeserializeRecordName(payload []byte) (interface{}, error) + DeserializeIntoRecordName(subjects map[string]interface{}, payload []byte) error + Close() } @@ -99,7 +103,13 @@ func (s *BaseSerializer) ConfigureSerializer(client schemaregistry.Client, serde s.Client = client s.Conf = conf s.SerdeType = serdeType - s.SubjectNameStrategy = TopicNameStrategy + + if conf.SubjectNameStrategy == topicRecordNameStrategy { + s.SubjectNameStrategy = TopicRecordNameStrategy + } else { + s.SubjectNameStrategy = TopicNameStrategy + } + return nil } @@ -127,29 +137,43 @@ func TopicNameStrategy(topic string, serdeType Type, schema schemaregistry.Schem return topic + suffix, nil } +// TopicRecordNameStrategy creates a subject name by appending -[key|value] to the topic name. 
+func TopicRecordNameStrategy(topic string, serdeType Type, schema schemaregistry.SchemaInfo) (string, error) { + suffix := "-value" + if serdeType == KeySerde { + suffix = "-key" + } + // TODO how to pass the fullyQualifiedName + // fullyQualifiedName := schema.Subject + // return fmt.Sprintf("%s-%s%s", topic, fullyQualifiedName, suffix), nil + return topic + suffix, nil +} + // GetID returns a schema ID for the given schema -func (s *BaseSerializer) GetID(topic string, msg interface{}, info schemaregistry.SchemaInfo) (int, error) { +func (s *BaseSerializer) GetID(subject string, msg interface{}, info schemaregistry.SchemaInfo) (int, error) { autoRegister := s.Conf.AutoRegisterSchemas useSchemaID := s.Conf.UseSchemaID useLatest := s.Conf.UseLatestVersion normalizeSchema := s.Conf.NormalizeSchemas var id = -1 - subject, err := s.SubjectNameStrategy(topic, s.SerdeType, info) + fullSubject, err := s.SubjectNameStrategy(subject, s.SerdeType, info) if err != nil { return -1, err } + if autoRegister { - id, err = s.Client.Register(subject, info, normalizeSchema) + id, err = s.Client.Register(fullSubject, info, normalizeSchema) if err != nil { return -1, err } } else if useSchemaID >= 0 { - info, err = s.Client.GetBySubjectAndID(subject, useSchemaID) + info, err = s.Client.GetBySubjectAndID(fullSubject, useSchemaID) if err != nil { return -1, err } - id, err = s.Client.GetID(subject, info, false) + + id, err = s.Client.GetID(fullSubject, info, false) if err != nil { return -1, err } @@ -157,7 +181,7 @@ func (s *BaseSerializer) GetID(topic string, msg interface{}, info schemaregistr return -1, fmt.Errorf("failed to match schema ID (%d != %d)", id, useSchemaID) } } else if useLatest { - metadata, err := s.Client.GetLatestSchemaMetadata(subject) + metadata, err := s.Client.GetLatestSchemaMetadata(fullSubject) if err != nil { return -1, err } @@ -166,16 +190,17 @@ func (s *BaseSerializer) GetID(topic string, msg interface{}, info schemaregistr SchemaType: metadata.SchemaType, References: metadata.References, } - id, err = s.Client.GetID(subject, info, false) + id, err = s.Client.GetID(fullSubject, info, false) if err != nil { return -1, err } } else { - id, err = s.Client.GetID(subject, info, normalizeSchema) + id, err = s.Client.GetID(fullSubject, info, normalizeSchema) if err != nil { return -1, err } } + return id, nil } @@ -200,17 +225,23 @@ func (s *BaseSerializer) WriteBytes(id int, msgBytes []byte) ([]byte, error) { } // GetSchema returns a schema for a payload -func (s *BaseDeserializer) GetSchema(topic string, payload []byte) (schemaregistry.SchemaInfo, error) { +func (s *BaseDeserializer) GetSchema(subject string, payload []byte) (schemaregistry.SchemaInfo, error) { info := schemaregistry.SchemaInfo{} if payload[0] != magicByte { return info, fmt.Errorf("unknown magic byte") } id := binary.BigEndian.Uint32(payload[1:5]) - subject, err := s.SubjectNameStrategy(topic, s.SerdeType, info) - if err != nil { - return info, err + if subject != "" { + var err error + subject, err = s.SubjectNameStrategy(subject, s.SerdeType, info) + if err != nil { + return info, err + } + + return s.Client.GetBySubjectAndID(subject, int(id)) + } else { + return s.Client.GetByID(int(id)) } - return s.Client.GetBySubjectAndID(subject, int(id)) } // ResolveReferences resolves schema references diff --git a/schemaregistry/test/avro/recordname/advanced.go b/schemaregistry/test/avro/recordname/advanced.go new file mode 100644 index 000000000..40c980f70 --- /dev/null +++ b/schemaregistry/test/avro/recordname/advanced.go 
@@ -0,0 +1,249 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. +package recordname + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +var _ = fmt.Printf + +// advanced schema for tests +type Advanced struct { + // age + Number *UnionLongNull `json:"number"` + // a name + Name UnionString `json:"name"` + // friends + Friends map[string]BasicPerson `json:"friends"` + // family + Family map[string]BasicPerson `json:"family"` +} + +const AdvancedAvroCRC64Fingerprint = "\x05\x9aT\x12\xf0z\x0f\xfa" + +func NewAdvanced() Advanced { + r := Advanced{} + r.Number = NewUnionLongNull() + + r.Name = NewUnionString() + + r.Friends = make(map[string]BasicPerson) + + r.Family = make(map[string]BasicPerson) + + return r +} + +func DeserializeAdvanced(r io.Reader) (Advanced, error) { + t := NewAdvanced() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func DeserializeAdvancedFromSchema(r io.Reader, schema string) (Advanced, error) { + t := NewAdvanced() + + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func writeAdvanced(r Advanced, w io.Writer) error { + var err error + err = writeUnionLongNull(r.Number, w) + if err != nil { + return err + } + err = writeUnionString(r.Name, w) + if err != nil { + return err + } + err = writeMapBasicPerson(r.Friends, w) + if err != nil { + return err + } + err = writeMapBasicPerson(r.Family, w) + if err != nil { + return err + } + return err +} + +func (r Advanced) Serialize(w io.Writer) error { + return writeAdvanced(r, w) +} + +func (r Advanced) Schema() string { + return "{\"doc\":\"advanced schema for tests\",\"fields\":[{\"doc\":\"age\",\"name\":\"number\",\"type\":[\"long\",\"null\"]},{\"doc\":\"a name\",\"name\":\"name\",\"type\":[\"string\"]},{\"doc\":\"friends\",\"name\":\"friends\",\"type\":{\"type\":\"map\",\"values\":{\"fields\":[{\"doc\":\"friend age\",\"name\":\"number\",\"type\":[\"long\",\"null\"]},{\"doc\":\"friend name\",\"name\":\"name\",\"type\":[\"string\"]}],\"name\":\"basicPerson\",\"namespace\":\"python.test.advanced\",\"type\":\"record\"}}},{\"doc\":\"family\",\"name\":\"family\",\"type\":{\"namespace\":\"python.test.advanced\",\"type\":\"map\",\"values\":\"python.test.advanced.basicPerson\"}}],\"name\":\"python.test.advanced.advanced\",\"type\":\"record\"}" +} + +func (r Advanced) SchemaName() string { + return "python.test.advanced.advanced" +} + +func (_ Advanced) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ Advanced) SetInt(v int32) { panic("Unsupported operation") } +func (_ Advanced) SetLong(v int64) { panic("Unsupported operation") } +func (_ Advanced) SetFloat(v float32) { panic("Unsupported operation") } +func (_ Advanced) SetDouble(v float64) { panic("Unsupported operation") } +func (_ Advanced) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ Advanced) SetString(v string) { panic("Unsupported operation") } +func (_ Advanced) SetUnionElem(v int64) { panic("Unsupported operation") } + +func (r *Advanced) Get(i int) types.Field { + switch i { + case 0: + r.Number = NewUnionLongNull() + + return r.Number + case 1: + r.Name = NewUnionString() + + w := types.Record{Target: &r.Name} + + return w 
+ + case 2: + r.Friends = make(map[string]BasicPerson) + + w := MapBasicPersonWrapper{Target: &r.Friends} + + return &w + + case 3: + r.Family = make(map[string]BasicPerson) + + w := MapBasicPersonWrapper{Target: &r.Family} + + return &w + + } + panic("Unknown field index") +} + +func (r *Advanced) SetDefault(i int) { + switch i { + } + panic("Unknown field index") +} + +func (r *Advanced) NullField(i int) { + switch i { + case 0: + r.Number = nil + return + } + panic("Not a nullable field index") +} + +func (_ Advanced) AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ Advanced) AppendArray() types.Field { panic("Unsupported operation") } +func (_ Advanced) HintSize(int) { panic("Unsupported operation") } +func (_ Advanced) Finalize() {} + +func (_ Advanced) AvroCRC64Fingerprint() []byte { + return []byte(AdvancedAvroCRC64Fingerprint) +} + +func (r Advanced) MarshalJSON() ([]byte, error) { + var err error + output := make(map[string]json.RawMessage) + output["number"], err = json.Marshal(r.Number) + if err != nil { + return nil, err + } + output["name"], err = json.Marshal(r.Name) + if err != nil { + return nil, err + } + output["friends"], err = json.Marshal(r.Friends) + if err != nil { + return nil, err + } + output["family"], err = json.Marshal(r.Family) + if err != nil { + return nil, err + } + return json.Marshal(output) +} + +func (r *Advanced) UnmarshalJSON(data []byte) error { + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + + var val json.RawMessage + val = func() json.RawMessage { + if v, ok := fields["number"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Number); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for number") + } + val = func() json.RawMessage { + if v, ok := fields["name"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Name); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for name") + } + val = func() json.RawMessage { + if v, ok := fields["friends"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Friends); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for friends") + } + val = func() json.RawMessage { + if v, ok := fields["family"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Family); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for family") + } + return nil +} diff --git a/schemaregistry/test/avro/recordname/basic_person.go b/schemaregistry/test/avro/recordname/basic_person.go new file mode 100644 index 000000000..d11c72818 --- /dev/null +++ b/schemaregistry/test/avro/recordname/basic_person.go @@ -0,0 +1,182 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. 
+package recordname + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +var _ = fmt.Printf + +type BasicPerson struct { + // friend age + Number *UnionLongNull `json:"number"` + // friend name + Name UnionString `json:"name"` +} + +const BasicPersonAvroCRC64Fingerprint = "Y0\xfc0\xae\x13kW" + +func NewBasicPerson() BasicPerson { + r := BasicPerson{} + r.Number = NewUnionLongNull() + + r.Name = NewUnionString() + + return r +} + +func DeserializeBasicPerson(r io.Reader) (BasicPerson, error) { + t := NewBasicPerson() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func DeserializeBasicPersonFromSchema(r io.Reader, schema string) (BasicPerson, error) { + t := NewBasicPerson() + + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func writeBasicPerson(r BasicPerson, w io.Writer) error { + var err error + err = writeUnionLongNull(r.Number, w) + if err != nil { + return err + } + err = writeUnionString(r.Name, w) + if err != nil { + return err + } + return err +} + +func (r BasicPerson) Serialize(w io.Writer) error { + return writeBasicPerson(r, w) +} + +func (r BasicPerson) Schema() string { + return "{\"fields\":[{\"doc\":\"friend age\",\"name\":\"number\",\"type\":[\"long\",\"null\"]},{\"doc\":\"friend name\",\"name\":\"name\",\"type\":[\"string\"]}],\"name\":\"python.test.advanced.basicPerson\",\"type\":\"record\"}" +} + +func (r BasicPerson) SchemaName() string { + return "python.test.advanced.basicPerson" +} + +func (_ BasicPerson) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ BasicPerson) SetInt(v int32) { panic("Unsupported operation") } +func (_ BasicPerson) SetLong(v int64) { panic("Unsupported operation") } +func (_ BasicPerson) SetFloat(v float32) { panic("Unsupported operation") } +func (_ BasicPerson) SetDouble(v float64) { panic("Unsupported operation") } +func (_ BasicPerson) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ BasicPerson) SetString(v string) { panic("Unsupported operation") } +func (_ BasicPerson) SetUnionElem(v int64) { panic("Unsupported operation") } + +func (r *BasicPerson) Get(i int) types.Field { + switch i { + case 0: + r.Number = NewUnionLongNull() + + return r.Number + case 1: + r.Name = NewUnionString() + + w := types.Record{Target: &r.Name} + + return w + + } + panic("Unknown field index") +} + +func (r *BasicPerson) SetDefault(i int) { + switch i { + } + panic("Unknown field index") +} + +func (r *BasicPerson) NullField(i int) { + switch i { + case 0: + r.Number = nil + return + } + panic("Not a nullable field index") +} + +func (_ BasicPerson) AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ BasicPerson) AppendArray() types.Field { panic("Unsupported operation") } +func (_ BasicPerson) HintSize(int) { panic("Unsupported operation") } +func (_ BasicPerson) Finalize() {} + +func (_ BasicPerson) AvroCRC64Fingerprint() []byte { + return []byte(BasicPersonAvroCRC64Fingerprint) +} + +func (r BasicPerson) MarshalJSON() ([]byte, error) { + var err error + output := make(map[string]json.RawMessage) + output["number"], err = json.Marshal(r.Number) + if err != nil { + return nil, err + } + output["name"], err = 
json.Marshal(r.Name) + if err != nil { + return nil, err + } + return json.Marshal(output) +} + +func (r *BasicPerson) UnmarshalJSON(data []byte) error { + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + + var val json.RawMessage + val = func() json.RawMessage { + if v, ok := fields["number"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Number); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for number") + } + val = func() json.RawMessage { + if v, ok := fields["name"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.Name); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for name") + } + return nil +} diff --git a/schemaregistry/test/avro/recordname/bytes.go b/schemaregistry/test/avro/recordname/bytes.go new file mode 100644 index 000000000..2cd2a34a0 --- /dev/null +++ b/schemaregistry/test/avro/recordname/bytes.go @@ -0,0 +1,86 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. +package recordname + +import ( + "encoding/json" + + "github.com/actgardner/gogen-avro/v10/util" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +type Bytes []byte + +func (b *Bytes) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + *b = util.DecodeByteString(s) + return nil +} + +func (b Bytes) MarshalJSON() ([]byte, error) { + return []byte(util.EncodeByteString(b)), nil +} + +type BytesWrapper struct { + Target *Bytes +} + +func (b BytesWrapper) SetBoolean(v bool) { + panic("Unable to assign bytes to bytes field") +} + +func (b BytesWrapper) SetInt(v int32) { + panic("Unable to assign int to bytes field") +} + +func (b BytesWrapper) SetLong(v int64) { + panic("Unable to assign long to bytes field") +} + +func (b BytesWrapper) SetFloat(v float32) { + panic("Unable to assign float to bytes field") +} + +func (b BytesWrapper) SetDouble(v float64) { + panic("Unable to assign double to bytes field") +} + +func (b BytesWrapper) SetUnionElem(v int64) { + panic("Unable to assign union elem to bytes field") +} + +func (b BytesWrapper) SetBytes(v []byte) { + *(b.Target) = v +} + +func (b BytesWrapper) SetString(v string) { + *(b.Target) = []byte(v) +} + +func (b BytesWrapper) Get(i int) types.Field { + panic("Unable to get field from bytes field") +} + +func (b BytesWrapper) SetDefault(i int) { + panic("Unable to set default on bytes field") +} + +func (b BytesWrapper) AppendMap(key string) types.Field { + panic("Unable to append map key to from bytes field") +} + +func (b BytesWrapper) AppendArray() types.Field { + panic("Unable to append array element to from bytes field") +} + +func (b BytesWrapper) NullField(int) { + panic("Unable to null field in bytes field") +} + +func (b BytesWrapper) HintSize(int) { + panic("Unable to hint size in bytes field") +} + +func (b BytesWrapper) Finalize() {} diff --git a/schemaregistry/test/avro/recordname/demo_schema.go b/schemaregistry/test/avro/recordname/demo_schema.go new file mode 100644 index 000000000..19a54442a --- /dev/null +++ b/schemaregistry/test/avro/recordname/demo_schema.go @@ -0,0 +1,260 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. 
+package recordname + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +var _ = fmt.Printf + +type DemoSchema struct { + IntField int32 `json:"IntField"` + + DoubleField float64 `json:"DoubleField"` + + StringField string `json:"StringField"` + + BoolField bool `json:"BoolField"` + + BytesField Bytes `json:"BytesField"` +} + +const DemoSchemaAvroCRC64Fingerprint = "\xc4V\xa9\x04ʛf\xad" + +func NewDemoSchema() DemoSchema { + r := DemoSchema{} + return r +} + +func DeserializeDemoSchema(r io.Reader) (DemoSchema, error) { + t := NewDemoSchema() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func DeserializeDemoSchemaFromSchema(r io.Reader, schema string) (DemoSchema, error) { + t := NewDemoSchema() + + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + return t, err +} + +func writeDemoSchema(r DemoSchema, w io.Writer) error { + var err error + err = vm.WriteInt(r.IntField, w) + if err != nil { + return err + } + err = vm.WriteDouble(r.DoubleField, w) + if err != nil { + return err + } + err = vm.WriteString(r.StringField, w) + if err != nil { + return err + } + err = vm.WriteBool(r.BoolField, w) + if err != nil { + return err + } + err = vm.WriteBytes(r.BytesField, w) + if err != nil { + return err + } + return err +} + +func (r DemoSchema) Serialize(w io.Writer) error { + return writeDemoSchema(r, w) +} + +func (r DemoSchema) Schema() string { + return "{\"fields\":[{\"name\":\"IntField\",\"type\":\"int\"},{\"name\":\"DoubleField\",\"type\":\"double\"},{\"name\":\"StringField\",\"type\":\"string\"},{\"name\":\"BoolField\",\"type\":\"boolean\"},{\"name\":\"BytesField\",\"type\":\"bytes\"}],\"name\":\"DemoSchema\",\"type\":\"record\"}" +} + +func (r DemoSchema) SchemaName() string { + return "DemoSchema" +} + +func (_ DemoSchema) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ DemoSchema) SetInt(v int32) { panic("Unsupported operation") } +func (_ DemoSchema) SetLong(v int64) { panic("Unsupported operation") } +func (_ DemoSchema) SetFloat(v float32) { panic("Unsupported operation") } +func (_ DemoSchema) SetDouble(v float64) { panic("Unsupported operation") } +func (_ DemoSchema) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ DemoSchema) SetString(v string) { panic("Unsupported operation") } +func (_ DemoSchema) SetUnionElem(v int64) { panic("Unsupported operation") } + +func (r *DemoSchema) Get(i int) types.Field { + switch i { + case 0: + w := types.Int{Target: &r.IntField} + + return w + + case 1: + w := types.Double{Target: &r.DoubleField} + + return w + + case 2: + w := types.String{Target: &r.StringField} + + return w + + case 3: + w := types.Boolean{Target: &r.BoolField} + + return w + + case 4: + w := BytesWrapper{Target: &r.BytesField} + + return w + + } + panic("Unknown field index") +} + +func (r *DemoSchema) SetDefault(i int) { + switch i { + } + panic("Unknown field index") +} + +func (r *DemoSchema) NullField(i int) { + switch i { + } + panic("Not a nullable field index") +} + +func (_ DemoSchema) AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ DemoSchema) AppendArray() types.Field { panic("Unsupported operation") } +func (_ DemoSchema) HintSize(int) { 
panic("Unsupported operation") } +func (_ DemoSchema) Finalize() {} + +func (_ DemoSchema) AvroCRC64Fingerprint() []byte { + return []byte(DemoSchemaAvroCRC64Fingerprint) +} + +func (r DemoSchema) MarshalJSON() ([]byte, error) { + var err error + output := make(map[string]json.RawMessage) + output["IntField"], err = json.Marshal(r.IntField) + if err != nil { + return nil, err + } + output["DoubleField"], err = json.Marshal(r.DoubleField) + if err != nil { + return nil, err + } + output["StringField"], err = json.Marshal(r.StringField) + if err != nil { + return nil, err + } + output["BoolField"], err = json.Marshal(r.BoolField) + if err != nil { + return nil, err + } + output["BytesField"], err = json.Marshal(r.BytesField) + if err != nil { + return nil, err + } + return json.Marshal(output) +} + +func (r *DemoSchema) UnmarshalJSON(data []byte) error { + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + + var val json.RawMessage + val = func() json.RawMessage { + if v, ok := fields["IntField"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.IntField); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for IntField") + } + val = func() json.RawMessage { + if v, ok := fields["DoubleField"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.DoubleField); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for DoubleField") + } + val = func() json.RawMessage { + if v, ok := fields["StringField"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.StringField); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for StringField") + } + val = func() json.RawMessage { + if v, ok := fields["BoolField"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.BoolField); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for BoolField") + } + val = func() json.RawMessage { + if v, ok := fields["BytesField"]; ok { + return v + } + return nil + }() + + if val != nil { + if err := json.Unmarshal([]byte(val), &r.BytesField); err != nil { + return err + } + } else { + return fmt.Errorf("no value specified for BytesField") + } + return nil +} diff --git a/schemaregistry/test/avro/recordname/map_basic_person.go b/schemaregistry/test/avro/recordname/map_basic_person.go new file mode 100644 index 000000000..752a71827 --- /dev/null +++ b/schemaregistry/test/avro/recordname/map_basic_person.go @@ -0,0 +1,71 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. 
+package recordname + +import ( + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" + "io" +) + +func writeMapBasicPerson(r map[string]BasicPerson, w io.Writer) error { + err := vm.WriteLong(int64(len(r)), w) + if err != nil || len(r) == 0 { + return err + } + for k, e := range r { + err = vm.WriteString(k, w) + if err != nil { + return err + } + err = writeBasicPerson(e, w) + if err != nil { + return err + } + } + return vm.WriteLong(0, w) +} + +type MapBasicPersonWrapper struct { + Target *map[string]BasicPerson + keys []string + values []BasicPerson +} + +func (_ *MapBasicPersonWrapper) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetInt(v int32) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetLong(v int64) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetFloat(v float32) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetDouble(v float64) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetString(v string) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetUnionElem(v int64) { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) Get(i int) types.Field { panic("Unsupported operation") } +func (_ *MapBasicPersonWrapper) SetDefault(i int) { panic("Unsupported operation") } + +func (r *MapBasicPersonWrapper) HintSize(s int) { + if r.keys == nil { + r.keys = make([]string, 0, s) + r.values = make([]BasicPerson, 0, s) + } +} + +func (r *MapBasicPersonWrapper) NullField(_ int) { + panic("Unsupported operation") +} + +func (r *MapBasicPersonWrapper) Finalize() { + for i := range r.keys { + (*r.Target)[r.keys[i]] = r.values[i] + } +} + +func (r *MapBasicPersonWrapper) AppendMap(key string) types.Field { + r.keys = append(r.keys, key) + var v BasicPerson + v = NewBasicPerson() + + r.values = append(r.values, v) + return &types.Record{Target: &r.values[len(r.values)-1]} +} + +func (_ *MapBasicPersonWrapper) AppendArray() types.Field { panic("Unsupported operation") } diff --git a/schemaregistry/test/avro/recordname/union_long_null.go b/schemaregistry/test/avro/recordname/union_long_null.go new file mode 100644 index 000000000..ee09fc58d --- /dev/null +++ b/schemaregistry/test/avro/recordname/union_long_null.go @@ -0,0 +1,142 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. 
+package recordname + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +type UnionLongNullTypeEnum int + +const ( + UnionLongNullTypeEnumLong UnionLongNullTypeEnum = 0 +) + +type UnionLongNull struct { + Long int64 + Null *types.NullVal + UnionType UnionLongNullTypeEnum +} + +func writeUnionLongNull(r *UnionLongNull, w io.Writer) error { + + if r == nil { + err := vm.WriteLong(1, w) + return err + } + + err := vm.WriteLong(int64(r.UnionType), w) + if err != nil { + return err + } + switch r.UnionType { + case UnionLongNullTypeEnumLong: + return vm.WriteLong(r.Long, w) + } + return fmt.Errorf("invalid value for *UnionLongNull") +} + +func NewUnionLongNull() *UnionLongNull { + return &UnionLongNull{} +} + +func (r *UnionLongNull) Serialize(w io.Writer) error { + return writeUnionLongNull(r, w) +} + +func DeserializeUnionLongNull(r io.Reader) (*UnionLongNull, error) { + t := NewUnionLongNull() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, t) + + if err != nil { + return t, err + } + return t, err +} + +func DeserializeUnionLongNullFromSchema(r io.Reader, schema string) (*UnionLongNull, error) { + t := NewUnionLongNull() + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, t) + + if err != nil { + return t, err + } + return t, err +} + +func (r *UnionLongNull) Schema() string { + return "[\"long\",\"null\"]" +} + +func (_ *UnionLongNull) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ *UnionLongNull) SetInt(v int32) { panic("Unsupported operation") } +func (_ *UnionLongNull) SetFloat(v float32) { panic("Unsupported operation") } +func (_ *UnionLongNull) SetDouble(v float64) { panic("Unsupported operation") } +func (_ *UnionLongNull) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ *UnionLongNull) SetString(v string) { panic("Unsupported operation") } + +func (r *UnionLongNull) SetLong(v int64) { + + r.UnionType = (UnionLongNullTypeEnum)(v) +} + +func (r *UnionLongNull) Get(i int) types.Field { + + switch i { + case 0: + return &types.Long{Target: (&r.Long)} + case 1: + return r.Null + } + panic("Unknown field index") +} +func (_ *UnionLongNull) NullField(i int) { panic("Unsupported operation") } +func (_ *UnionLongNull) HintSize(i int) { panic("Unsupported operation") } +func (_ *UnionLongNull) SetDefault(i int) { panic("Unsupported operation") } +func (_ *UnionLongNull) AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ *UnionLongNull) AppendArray() types.Field { panic("Unsupported operation") } +func (_ *UnionLongNull) Finalize() {} + +func (r *UnionLongNull) MarshalJSON() ([]byte, error) { + + if r == nil { + return []byte("null"), nil + } + + switch r.UnionType { + case UnionLongNullTypeEnumLong: + return json.Marshal(map[string]interface{}{"long": r.Long}) + } + return nil, fmt.Errorf("invalid value for *UnionLongNull") +} + +func (r *UnionLongNull) UnmarshalJSON(data []byte) error { + + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + if len(fields) > 1 { + return fmt.Errorf("more than one type supplied for union") + } + if value, ok := fields["long"]; ok { + r.UnionType = 0 + return json.Unmarshal([]byte(value), &r.Long) + } + return 
fmt.Errorf("invalid value for *UnionLongNull") +} diff --git a/schemaregistry/test/avro/recordname/union_string.go b/schemaregistry/test/avro/recordname/union_string.go new file mode 100644 index 000000000..275abd91b --- /dev/null +++ b/schemaregistry/test/avro/recordname/union_string.go @@ -0,0 +1,130 @@ +// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT. +package recordname + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/actgardner/gogen-avro/v10/compiler" + "github.com/actgardner/gogen-avro/v10/vm" + "github.com/actgardner/gogen-avro/v10/vm/types" +) + +type UnionStringTypeEnum int + +const ( + UnionStringTypeEnumString UnionStringTypeEnum = 0 +) + +type UnionString struct { + String string + UnionType UnionStringTypeEnum +} + +func writeUnionString(r UnionString, w io.Writer) error { + + err := vm.WriteLong(int64(r.UnionType), w) + if err != nil { + return err + } + switch r.UnionType { + case UnionStringTypeEnumString: + return vm.WriteString(r.String, w) + } + return fmt.Errorf("invalid value for UnionString") +} + +func NewUnionString() UnionString { + return UnionString{} +} + +func (r UnionString) Serialize(w io.Writer) error { + return writeUnionString(r, w) +} + +func DeserializeUnionString(r io.Reader) (UnionString, error) { + t := NewUnionString() + deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + + if err != nil { + return t, err + } + return t, err +} + +func DeserializeUnionStringFromSchema(r io.Reader, schema string) (UnionString, error) { + t := NewUnionString() + deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema())) + if err != nil { + return t, err + } + + err = vm.Eval(r, deser, &t) + + if err != nil { + return t, err + } + return t, err +} + +func (r UnionString) Schema() string { + return "[\"string\"]" +} + +func (_ UnionString) SetBoolean(v bool) { panic("Unsupported operation") } +func (_ UnionString) SetInt(v int32) { panic("Unsupported operation") } +func (_ UnionString) SetFloat(v float32) { panic("Unsupported operation") } +func (_ UnionString) SetDouble(v float64) { panic("Unsupported operation") } +func (_ UnionString) SetBytes(v []byte) { panic("Unsupported operation") } +func (_ UnionString) SetString(v string) { panic("Unsupported operation") } + +func (r *UnionString) SetLong(v int64) { + + r.UnionType = (UnionStringTypeEnum)(v) +} + +func (r *UnionString) Get(i int) types.Field { + + switch i { + case 0: + return &types.String{Target: (&r.String)} + } + panic("Unknown field index") +} +func (_ UnionString) NullField(i int) { panic("Unsupported operation") } +func (_ UnionString) HintSize(i int) { panic("Unsupported operation") } +func (_ UnionString) SetDefault(i int) { panic("Unsupported operation") } +func (_ UnionString) AppendMap(key string) types.Field { panic("Unsupported operation") } +func (_ UnionString) AppendArray() types.Field { panic("Unsupported operation") } +func (_ UnionString) Finalize() {} + +func (r UnionString) MarshalJSON() ([]byte, error) { + + switch r.UnionType { + case UnionStringTypeEnumString: + return json.Marshal(map[string]interface{}{"string": r.String}) + } + return nil, fmt.Errorf("invalid value for UnionString") +} + +func (r *UnionString) UnmarshalJSON(data []byte) error { + + var fields map[string]json.RawMessage + if err := json.Unmarshal(data, &fields); err != nil { + return err + } + if len(fields) > 1 { + return fmt.Errorf("more than one type supplied 
for union") + } + if value, ok := fields["string"]; ok { + r.UnionType = 0 + return json.Unmarshal([]byte(value), &r.String) + } + return fmt.Errorf("invalid value for UnionString") +} diff --git a/schemaregistry/test/proto/recordname/cycle.pb.go b/schemaregistry/test/proto/recordname/cycle.pb.go new file mode 100644 index 000000000..99520d5d3 --- /dev/null +++ b/schemaregistry/test/proto/recordname/cycle.pb.go @@ -0,0 +1,155 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v4.23.4 +// source: recordname/cycle.proto + +package recordname + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type LinkedList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Value int32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` + Next *LinkedList `protobuf:"bytes,10,opt,name=next,proto3" json:"next,omitempty"` +} + +func (x *LinkedList) Reset() { + *x = LinkedList{} + if protoimpl.UnsafeEnabled { + mi := &file_recordname_cycle_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LinkedList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LinkedList) ProtoMessage() {} + +func (x *LinkedList) ProtoReflect() protoreflect.Message { + mi := &file_recordname_cycle_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LinkedList.ProtoReflect.Descriptor instead. 
+func (*LinkedList) Descriptor() ([]byte, []int) { + return file_recordname_cycle_proto_rawDescGZIP(), []int{0} +} + +func (x *LinkedList) GetValue() int32 { + if x != nil { + return x.Value + } + return 0 +} + +func (x *LinkedList) GetNext() *LinkedList { + if x != nil { + return x.Next + } + return nil +} + +var File_recordname_cycle_proto protoreflect.FileDescriptor + +var file_recordname_cycle_proto_rawDesc = []byte{ + 0x0a, 0x16, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x6e, 0x61, 0x6d, 0x65, 0x2f, 0x63, 0x79, 0x63, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, + 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x4e, 0x0a, 0x0a, 0x4c, 0x69, 0x6e, 0x6b, 0x65, 0x64, 0x4c, 0x69, + 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x05, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2a, 0x0a, 0x04, 0x6e, 0x65, 0x78, 0x74, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x6e, + 0x61, 0x6d, 0x65, 0x2e, 0x4c, 0x69, 0x6e, 0x6b, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x04, + 0x6e, 0x65, 0x78, 0x74, 0x42, 0x17, 0x5a, 0x15, 0x74, 0x65, 0x73, 0x74, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x6e, 0x61, 0x6d, 0x65, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_recordname_cycle_proto_rawDescOnce sync.Once + file_recordname_cycle_proto_rawDescData = file_recordname_cycle_proto_rawDesc +) + +func file_recordname_cycle_proto_rawDescGZIP() []byte { + file_recordname_cycle_proto_rawDescOnce.Do(func() { + file_recordname_cycle_proto_rawDescData = protoimpl.X.CompressGZIP(file_recordname_cycle_proto_rawDescData) + }) + return file_recordname_cycle_proto_rawDescData +} + +var file_recordname_cycle_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_recordname_cycle_proto_goTypes = []interface{}{ + (*LinkedList)(nil), // 0: recordname.LinkedList +} +var file_recordname_cycle_proto_depIdxs = []int32{ + 0, // 0: recordname.LinkedList.next:type_name -> recordname.LinkedList + 1, // [1:1] is the sub-list for method output_type + 1, // [1:1] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_recordname_cycle_proto_init() } +func file_recordname_cycle_proto_init() { + if File_recordname_cycle_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_recordname_cycle_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LinkedList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_recordname_cycle_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_recordname_cycle_proto_goTypes, + DependencyIndexes: file_recordname_cycle_proto_depIdxs, + MessageInfos: file_recordname_cycle_proto_msgTypes, + }.Build() + File_recordname_cycle_proto = out.File + file_recordname_cycle_proto_rawDesc = nil + file_recordname_cycle_proto_goTypes = nil + file_recordname_cycle_proto_depIdxs = nil +} diff --git a/schemaregistry/test/proto/recordname/cycle.proto b/schemaregistry/test/proto/recordname/cycle.proto new file mode 100644 index 
000000000..c0914786c --- /dev/null +++ b/schemaregistry/test/proto/recordname/cycle.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package recordname; +option go_package="test/proto/recordname"; + +message LinkedList { + int32 value = 1; + LinkedList next = 10; +} diff --git a/schemaregistry/test/proto/recordname/example.pb.go b/schemaregistry/test/proto/recordname/example.pb.go new file mode 100644 index 000000000..66e656f18 --- /dev/null +++ b/schemaregistry/test/proto/recordname/example.pb.go @@ -0,0 +1,233 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v4.23.4 +// source: recordname/example.proto + +package recordname + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Author struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Id int32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + Works []string `protobuf:"bytes,4,rep,name=works,proto3" json:"works,omitempty"` +} + +func (x *Author) Reset() { + *x = Author{} + if protoimpl.UnsafeEnabled { + mi := &file_recordname_example_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Author) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Author) ProtoMessage() {} + +func (x *Author) ProtoReflect() protoreflect.Message { + mi := &file_recordname_example_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Author.ProtoReflect.Descriptor instead. 
+func (*Author) Descriptor() ([]byte, []int) { + return file_recordname_example_proto_rawDescGZIP(), []int{0} +} + +func (x *Author) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Author) GetId() int32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Author) GetWorks() []string { + if x != nil { + return x.Works + } + return nil +} + +type Pizza struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Size string `protobuf:"bytes,1,opt,name=size,proto3" json:"size,omitempty"` + Toppings []string `protobuf:"bytes,2,rep,name=toppings,proto3" json:"toppings,omitempty"` +} + +func (x *Pizza) Reset() { + *x = Pizza{} + if protoimpl.UnsafeEnabled { + mi := &file_recordname_example_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Pizza) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Pizza) ProtoMessage() {} + +func (x *Pizza) ProtoReflect() protoreflect.Message { + mi := &file_recordname_example_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Pizza.ProtoReflect.Descriptor instead. +func (*Pizza) Descriptor() ([]byte, []int) { + return file_recordname_example_proto_rawDescGZIP(), []int{1} +} + +func (x *Pizza) GetSize() string { + if x != nil { + return x.Size + } + return "" +} + +func (x *Pizza) GetToppings() []string { + if x != nil { + return x.Toppings + } + return nil +} + +var File_recordname_example_proto protoreflect.FileDescriptor + +var file_recordname_example_proto_rawDesc = []byte{ + 0x0a, 0x18, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x6e, 0x61, 0x6d, 0x65, 0x2f, 0x65, 0x78, 0x61, + 0x6d, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x72, 0x65, 0x63, 0x6f, + 0x72, 0x64, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x42, 0x0a, 0x06, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, + 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x09, 0x52, 0x05, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x22, 0x37, 0x0a, 0x05, 0x50, 0x69, + 0x7a, 0x7a, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x6f, 0x70, 0x70, 0x69, + 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x74, 0x6f, 0x70, 0x70, 0x69, + 0x6e, 0x67, 0x73, 0x42, 0x17, 0x5a, 0x15, 0x74, 0x65, 0x73, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x6e, 0x61, 0x6d, 0x65, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_recordname_example_proto_rawDescOnce sync.Once + file_recordname_example_proto_rawDescData = file_recordname_example_proto_rawDesc +) + +func file_recordname_example_proto_rawDescGZIP() []byte { + file_recordname_example_proto_rawDescOnce.Do(func() { + file_recordname_example_proto_rawDescData = protoimpl.X.CompressGZIP(file_recordname_example_proto_rawDescData) + }) + return file_recordname_example_proto_rawDescData +} + +var file_recordname_example_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var 
file_recordname_example_proto_goTypes = []interface{}{ + (*Author)(nil), // 0: recordname.Author + (*Pizza)(nil), // 1: recordname.Pizza +} +var file_recordname_example_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_recordname_example_proto_init() } +func file_recordname_example_proto_init() { + if File_recordname_example_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_recordname_example_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Author); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_recordname_example_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Pizza); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_recordname_example_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_recordname_example_proto_goTypes, + DependencyIndexes: file_recordname_example_proto_depIdxs, + MessageInfos: file_recordname_example_proto_msgTypes, + }.Build() + File_recordname_example_proto = out.File + file_recordname_example_proto_rawDesc = nil + file_recordname_example_proto_goTypes = nil + file_recordname_example_proto_depIdxs = nil +} diff --git a/schemaregistry/test/proto/recordname/example.proto b/schemaregistry/test/proto/recordname/example.proto new file mode 100644 index 000000000..9fa7d713b --- /dev/null +++ b/schemaregistry/test/proto/recordname/example.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package recordname; +option go_package="test/proto/recordname"; + +message Author { + string name = 1; + int32 id = 2; + repeated string works = 4; +} + +message Pizza { + string size = 1; + repeated string toppings = 2; +} From 1fd081063581f5af54a08abd50d1db83daff3287 Mon Sep 17 00:00:00 2001 From: Jerome Bidault Date: Sat, 23 Sep 2023 19:19:07 +0700 Subject: [PATCH 2/2] Cleanup and add comments in schemaregistry --- schemaregistry/schemaregistry_client.go | 19 ++++--------------- schemaregistry/serde/avro/avro_generic.go | 5 ++--- schemaregistry/serde/avro/avro_specific.go | 4 +++- schemaregistry/serde/config.go | 1 + 4 files changed, 10 insertions(+), 19 deletions(-) diff --git a/schemaregistry/schemaregistry_client.go b/schemaregistry/schemaregistry_client.go index ba8450b3d..1cbba970e 100644 --- a/schemaregistry/schemaregistry_client.go +++ b/schemaregistry/schemaregistry_client.go @@ -88,13 +88,11 @@ func (sd *SchemaInfo) MarshalJSON() ([]byte, error) { Schema string `json:"schema,omitempty"` SchemaType string `json:"schemaType,omitempty"` References []Reference `json:"references,omitempty"` - // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` - Subject string `json:"subject,omitempty"` + Subject string `json:"subject,omitempty"` }{ sd.Schema, sd.SchemaType, sd.References, - // sd.SchemaFullyQualifiedName, // added sd.Subject, }) } @@ -106,8 +104,7 @@ func (sd *SchemaInfo) UnmarshalJSON(b []byte) error { Schema string 
`json:"schema,omitempty"` SchemaType string `json:"schemaType,omitempty"` References []Reference `json:"references,omitempty"` - // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` - Subject string `json:"subject,omitempty"` + Subject string `json:"subject,omitempty"` } err = json.Unmarshal(b, &tmp) @@ -115,7 +112,6 @@ func (sd *SchemaInfo) UnmarshalJSON(b []byte) error { sd.Schema = tmp.Schema sd.SchemaType = tmp.SchemaType sd.References = tmp.References - // sd.SchemaFullyQualifiedName = tmp.SchemaFullyQualifiedName // added sd.Subject = tmp.Subject return err @@ -138,7 +134,6 @@ func (sd *SchemaMetadata) MarshalJSON() ([]byte, error) { ID int `json:"id,omitempty"` Subject string `json:"subject,omitempty"` Version int `json:"version,omitempty"` - // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` }{ sd.Schema, sd.SchemaType, @@ -146,7 +141,6 @@ func (sd *SchemaMetadata) MarshalJSON() ([]byte, error) { sd.ID, sd.Subject, sd.Version, - // sd.SchemaFullyQualifiedName, }) } @@ -160,7 +154,6 @@ func (sd *SchemaMetadata) UnmarshalJSON(b []byte) error { ID int `json:"id,omitempty"` Subject string `json:"subject,omitempty"` Version int `json:"version,omitempty"` - // SchemaFullyQualifiedName string `json:"schemaFullyQualifiedName,omitempty"` } err = json.Unmarshal(b, &tmp) @@ -171,7 +164,6 @@ func (sd *SchemaMetadata) UnmarshalJSON(b []byte) error { sd.ID = tmp.ID sd.Subject = tmp.Subject sd.Version = tmp.Version - // sd.SchemaFullyQualifiedName = tmp.SchemaFullyQualifiedName return err } @@ -368,7 +360,6 @@ func (c *client) GetByID(id int) (schema SchemaInfo, err error) { } } else { - // newInfo = subjIDPayload.(subjectOnlyIDPayload).SchemaInfo newInfo = subjIDPayload.(*SchemaInfo) } @@ -376,7 +367,7 @@ func (c *client) GetByID(id int) (schema SchemaInfo, err error) { return *newInfo, err } -// GetBySubjectAndID returns the schema identified by id +// GetBySubjectAndID returns the schema identified by subject and id, or by id alone when no subject is provided // Returns Schema object on success func (c *client) GetBySubjectAndID(subject string, id int) (schema SchemaInfo, err error) { cacheKey := subjectID{ @@ -402,11 +393,10 @@ func (c *client) GetBySubjectAndID(subject string, id int) (schema SchemaInfo, e err = c.restService.handleRequest(newRequest("GET", schemas, nil, id), &metadata) } if err == nil { - // newInfo = &SchemaInfo{ newInfo.Schema = metadata.Schema newInfo.SchemaType = metadata.SchemaType newInfo.References = metadata.References - //} + c.idToSchemaCache.Put(cacheKey, newInfo) } } else { @@ -430,7 +420,6 @@ func (c *client) GetID(subject string, schema SchemaInfo, normalize bool) (id in c.schemaToIdCacheLock.RLock() idValue, ok := c.schemaToIdCache.Get(cacheKey) - // log.Println("schemaregistry_client.go - GetID - idValue from cache: ", idValue) c.schemaToIdCacheLock.RUnlock() if ok { return idValue.(int), nil diff --git a/schemaregistry/serde/avro/avro_generic.go b/schemaregistry/serde/avro/avro_generic.go index f8fb6afe6..da5182e10 100644 --- a/schemaregistry/serde/avro/avro_generic.go +++ b/schemaregistry/serde/avro/avro_generic.go @@ -80,7 +80,7 @@ func (s *GenericSerializer) addFullyQualifiedNameToSchema(avroStr, msgFQN string return json.Marshal(data) } -// Serialize implements serialization of generic Avro data +// SerializeRecordName implements serialization of generic Avro data func (s *GenericSerializer) SerializeRecordName(msg interface{}, subject ...string) ([]byte, error) { if msg == nil { return nil, nil @@ -192,8 +192,6 @@ func (s
*GenericDeserializer) DeserializeRecordName(payload []byte) (interface{} namespace := data["namespace"].(string) fullyQualifiedName := fmt.Sprintf("%s.%s", namespace, name) - // fmt.Println("see the info schema: ", info.Schema) - writer, name, err := s.toType(info) if err != nil { return nil, err @@ -228,6 +226,7 @@ func (s *GenericDeserializer) DeserializeRecordName(payload []byte) (interface{} } +// DeserializeIntoRecordName implements deserialization of generic Avro data func (s *GenericDeserializer) DeserializeIntoRecordName(subjects map[string]interface{}, payload []byte) error { if payload == nil { return fmt.Errorf("Empty payload") diff --git a/schemaregistry/serde/avro/avro_specific.go b/schemaregistry/serde/avro/avro_specific.go index 5d2abf55a..ed9bd6fdf 100644 --- a/schemaregistry/serde/avro/avro_specific.go +++ b/schemaregistry/serde/avro/avro_specific.go @@ -146,7 +146,7 @@ func (s *SpecificSerializer) addFullyQualifiedNameToSchema(avroStr string, msg i return modifiedJSON, fullyQualifiedName, nil } -// Serialize implements serialization of generic Avro data +// SerializeRecordName implements serialization of specific Avro data func (s *SpecificSerializer) SerializeRecordName(msg interface{}, subject ...string) ([]byte, error) { if msg == nil { return nil, nil @@ -203,6 +203,7 @@ func NewSpecificDeserializer(client schemaregistry.Client, serdeType serde.Type, return s, nil } +// DeserializeRecordName implements deserialization of specific Avro data func (s *SpecificDeserializer) DeserializeRecordName(payload []byte) (interface{}, error) { if payload == nil { return nil, nil @@ -274,6 +275,7 @@ func (s *SpecificDeserializer) DeserializeRecordName(payload []byte) (interface{ return avroMsg, nil } +// DeserializeIntoRecordName implements deserialization of specific Avro data func (s *SpecificDeserializer) DeserializeIntoRecordName(subjects map[string]interface{}, payload []byte) error { if payload == nil { return nil diff --git a/schemaregistry/serde/config.go b/schemaregistry/serde/config.go index 172013e18..a1f6920cc 100644 --- a/schemaregistry/serde/config.go +++ b/schemaregistry/serde/config.go @@ -46,6 +46,7 @@ func NewSerializerConfig() *SerializerConfig { return c } +// NewSerializerConfigTopRecNameStrat sets the configuration for the TopicRecordNameStrategy func NewSerializerConfigTopRecNameStrat() *SerializerConfig { c := NewSerializerConfig()