From 48acc03b9ddd85b5c8c268b480d5a9a3d523b9ad Mon Sep 17 00:00:00 2001
From: Magnus Edenhill
Date: Mon, 15 Jul 2019 15:19:24 +0200
Subject: [PATCH] Bump librdkafka requirement to v1.1.0

---
 README.md                 |   2 +-
 kafka/00version.go        |  12 +-
 kafka/api.html            | 359 +++++++++++++++++++++++++++++++++++---
 kafka/generated_errors.go |  16 +-
 4 files changed, 352 insertions(+), 37 deletions(-)

diff --git a/README.md b/README.md
index 999ec0813..8549df7ac 100644
--- a/README.md
+++ b/README.md
@@ -127,7 +127,7 @@ Getting Started
 Installing librdkafka
 ---------------------
 
-This client for Go depends on librdkafka v1.0.0 or later, so you either need to install librdkafka
+This client for Go depends on librdkafka v1.1.0 or later, so you either need to install librdkafka
 through your OS/distributions package manager, or download and build it from source.
 
 - For Debian and Ubuntu based distros, install `librdkafka-dev` from the standard
diff --git a/kafka/00version.go b/kafka/00version.go
index 57b295409..e9ea11b45 100644
--- a/kafka/00version.go
+++ b/kafka/00version.go
@@ -1,7 +1,7 @@
 package kafka
 
 /**
- * Copyright 2016-2018 Confluent Inc.
+ * Copyright 2016-2019 Confluent Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -29,19 +29,19 @@ import (
 //defines and strings in sync.
 //
 
-#define MIN_RD_KAFKA_VERSION 0x01000000
+#define MIN_RD_KAFKA_VERSION 0x01010000
 
 #ifdef __APPLE__
-#define MIN_VER_ERRSTR "confluent-kafka-go requires librdkafka v1.0.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
+#define MIN_VER_ERRSTR "confluent-kafka-go requires librdkafka v1.1.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
 #else
-#define MIN_VER_ERRSTR "confluent-kafka-go requires librdkafka v1.0.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
+#define MIN_VER_ERRSTR "confluent-kafka-go requires librdkafka v1.1.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
 #endif
 
 #if RD_KAFKA_VERSION < MIN_RD_KAFKA_VERSION
 #ifdef __APPLE__
-#error "confluent-kafka-go requires librdkafka v1.0.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
+#error "confluent-kafka-go requires librdkafka v1.1.0 or later. Install the latest version of librdkafka from Homebrew by running `brew install librdkafka` or `brew upgrade librdkafka`"
 #else
-#error "confluent-kafka-go requires librdkafka v1.0.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
+#error "confluent-kafka-go requires librdkafka v1.1.0 or later. Install the latest version of librdkafka from the Confluent repositories, see http://docs.confluent.io/current/installation.html"
 #endif
 #endif
 */
diff --git a/kafka/api.html b/kafka/api.html
index a041b8d42..5ca5b90ce 100644
--- a/kafka/api.html
+++ b/kafka/api.html
@@ -219,6 +219,16 @@

 * `KafkaError` - client (error codes are prefixed with _) or broker error. These errors are normally just informational since the client will try its best to automatically recover (eventually).
+
+ * `OAuthBearerTokenRefresh` - retrieval of a new SASL/OAUTHBEARER token is required.
+This event only occurs with sasl.mechanism=OAUTHBEARER.
+Be sure to invoke SetOAuthBearerToken() on the Producer/Consumer/AdminClient
+instance when a successful token retrieval is completed, otherwise be sure to
+invoke SetOAuthBearerTokenFailure() to indicate that retrieval failed (or
+if setting the token failed, which could happen if an extension doesn't meet
+the required regular expression); invoking SetOAuthBearerTokenFailure() will
+schedule a new event for 10 seconds later so another retrieval can be attempted.
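A minimal sketch of an event loop reacting to this event might look as follows (it assumes a *Producer p created with "sasl.mechanism": "OAUTHBEARER"; fetchToken is a hypothetical, application-supplied helper that turns the event's Config string into an OAuthBearerToken):

    func handleEvents(p *kafka.Producer) {
        for ev := range p.Events() {
            switch e := ev.(type) {
            case kafka.OAuthBearerTokenRefresh:
                // e.Config carries the value of sasl.oauthbearer.config.
                token, err := fetchToken(e.Config) // hypothetical helper returning (kafka.OAuthBearerToken, error)
                if err != nil {
                    // Report failure; a new refresh event is scheduled ~10 seconds later.
                    p.SetOAuthBearerTokenFailure(err.Error())
                } else if err := p.SetOAuthBearerToken(token); err != nil {
                    p.SetOAuthBearerTokenFailure(err.Error())
                }
            case *kafka.Message:
                // Delivery reports etc. are handled as usual.
            }
        }
    }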

 Hint: If your application registers a signal notification
@@ -308,6 +318,16 @@

 func (a *AdminClient) GetMetadata(topic *string, allTopics bool, timeoutMs int) (*Metadata, error)
+
+func (a *AdminClient) SetOAuthBearerToken(oauthBearerToken OAuthBearerToken) error
+
+func (a *AdminClient) SetOAuthBearerTokenFailure(errstr string) error
+
 func (a *AdminClient) String() string
@@ -513,6 +533,11 @@

func (c *Consumer) GetMetadata(topic *string, allTopics bool, timeoutMs int) (*Metadata, error)

+
+func (c *Consumer) GetWatermarkOffsets(topic string, partition int32) (low, high int64, err error)
+
 func (c *Consumer) OffsetsForTimes(times []TopicPartition, timeoutMs int) (offsets []TopicPartition, err error)
@@ -548,6 +573,16 @@

func (c *Consumer) Seek(partition TopicPartition, timeoutMs int) error

+
+func (c *Consumer) SetOAuthBearerToken(oauthBearerToken OAuthBearerToken) error
+
+func (c *Consumer) SetOAuthBearerTokenFailure(errstr string) error
+
 func (c *Consumer) StoreOffsets(offsets []TopicPartition) (storedOffsets []TopicPartition, err error)
@@ -678,6 +713,21 @@

type Metadata

+
+type OAuthBearerToken
+
+type OAuthBearerTokenRefresh
+
+func (o OAuthBearerTokenRefresh) String() string
+
 type Offset
@@ -793,6 +843,16 @@

func (p *Producer) QueryWatermarkOffsets(topic string, partition int32, timeoutMs int) (low, high int64, err error)

+
+func (p *Producer) SetOAuthBearerToken(oauthBearerToken OAuthBearerToken) error
+
+func (p *Producer) SetOAuthBearerTokenFailure(errstr string) error
+
 func (p *Producer) String() string
@@ -1081,7 +1141,7 @@

const PartitionAny = int32(C.RD_KAFKA_PARTITION_UA)

func LibraryVersion

func NewAdminClient

func NewAdminClientFromConsumer

func NewAdminClientFromProducer

func (*AdminClient) Close

+func (*AdminClient) SetOAuthBearerToken

+
+    func (a *AdminClient) SetOAuthBearerToken(oauthBearerToken OAuthBearerToken) error
+
+SetOAuthBearerToken sets the data to be transmitted
+to a broker during SASL/OAUTHBEARER authentication. It will return nil
+on success, otherwise an error if:
+1) the token data is invalid (meaning an expiration time in the past
+or either a token value or an extension key or value that does not meet
+the regular expression requirements as per
+https://tools.ietf.org/html/rfc7628#section-3.1);
+2) SASL/OAUTHBEARER is not supported by the underlying librdkafka build;
+3) SASL/OAUTHBEARER is supported but is not configured as the client's
+authentication mechanism.

+
+func (*AdminClient) SetOAuthBearerTokenFailure
+
+    func (a *AdminClient) SetOAuthBearerTokenFailure(errstr string) error
+
+SetOAuthBearerTokenFailure sets the error message describing why token
+retrieval/setting failed; it also schedules a new token refresh event for 10
+seconds later so the attempt may be retried. It will return nil on
+success, otherwise an error if:
+1) SASL/OAUTHBEARER is not supported by the underlying librdkafka build;
+2) SASL/OAUTHBEARER is supported but is not configured as the client's
+authentication mechanism.
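By way of illustration, a hedged sketch of using the two calls together on an AdminClient a (tokenValue, the principal, the expiry and the extensions are placeholders for whatever the application's token source provides):

    token := kafka.OAuthBearerToken{
        TokenValue: tokenValue,                      // placeholder, e.g. a JWS compact serialization
        Expiration: time.Now().Add(5 * time.Minute), // must lie in the future
        Principal:  "admin",
        Extensions: map[string]string{"scope": "produce consume"}, // optional; the "auth" key is reserved
    }
    if err := a.SetOAuthBearerToken(token); err != nil {
        // Invalid token data, OAUTHBEARER not built in, or not the configured
        // mechanism: report it so another refresh event fires ~10 seconds later.
        a.SetOAuthBearerTokenFailure(err.Error())
    }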

func (*AdminClient)
@@ -1917,7 +2021,7 @@

func (*Consumer) Assignment

func (*Consumer) Committed

func (*Consumer) GetMetadata

+func (*Consumer) GetWatermarkOffsets
+
+    func (c *Consumer) GetWatermarkOffsets(topic string, partition int32) (low, high int64, err error)
+
+GetWatermarkOffsets returns the cached low and high offsets for the given topic
+and partition. The high offset is populated on every fetch response or via calling QueryWatermarkOffsets.
+The low offset is populated every statistics.interval.ms if that value is set.
+OffsetInvalid will be returned if there is no cached offset for either value.
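A short sketch of combining the cached lookup with a broker query (assumes an existing *Consumer c; topic, partition and timeout are illustrative):

    low, high, err := c.GetWatermarkOffsets("mytopic", 0)
    if err == nil && high == int64(kafka.OffsetInvalid) {
        // Nothing cached yet for this partition: ask the broker directly (5s timeout).
        low, high, err = c.QueryWatermarkOffsets("mytopic", 0, 5000)
    }
    if err == nil {
        fmt.Printf("partition 0: low=%d high=%d\n", low, high)
    }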

func (*Consumer) OffsetsForTimes

func (*Consumer) Pause

func (*Consumer) QueryWatermarkOffsets

    func (c *Consumer) QueryWatermarkOffsets(topic string, partition int32, timeoutMs int) (low, high int64, err error)

-QueryWatermarkOffsets returns the broker's low and high offsets for the given topic
-and partition.
+QueryWatermarkOffsets queries the broker for the low and high offsets for the given topic and partition.

func (*Consumer)
@@ -2152,7 +2271,7 @@

func (*Consumer) Resume

+func (*Consumer) SetOAuthBearerToken
+
+    func (c *Consumer) SetOAuthBearerToken(oauthBearerToken OAuthBearerToken) error
+
+SetOAuthBearerToken sets the data to be transmitted
+to a broker during SASL/OAUTHBEARER authentication. It will return nil
+on success, otherwise an error if:
+1) the token data is invalid (meaning an expiration time in the past
+or either a token value or an extension key or value that does not meet
+the regular expression requirements as per
+https://tools.ietf.org/html/rfc7628#section-3.1);
+2) SASL/OAUTHBEARER is not supported by the underlying librdkafka build;
+3) SASL/OAUTHBEARER is supported but is not configured as the client's
+authentication mechanism.
+
+func (*Consumer) SetOAuthBearerTokenFailure
+
+    func (c *Consumer) SetOAuthBearerTokenFailure(errstr string) error
+
+SetOAuthBearerTokenFailure sets the error message describing why token
+retrieval/setting failed; it also schedules a new token refresh event for 10
+seconds later so the attempt may be retried. It will return nil on
+success, otherwise an error if:
+1) SASL/OAUTHBEARER is not supported by the underlying librdkafka build;
+2) SASL/OAUTHBEARER is supported but is not configured as the client's
+authentication mechanism.

func (*Consumer)
@@ -2255,7 +2418,7 @@

func (*Consumer) Subscription

type ErrorCode

     ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_LISTENER_NOT_FOUND)
     // ErrTopicDeletionDisabled Topic deletion is disabled
     ErrTopicDeletionDisabled ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_TOPIC_DELETION_DISABLED)
+    // ErrFencedLeaderEpoch Leader epoch is older than broker epoch
+    ErrFencedLeaderEpoch ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_FENCED_LEADER_EPOCH)
+    // ErrUnknownLeaderEpoch Leader epoch is newer than broker epoch
+    ErrUnknownLeaderEpoch ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_UNKNOWN_LEADER_EPOCH)
     // ErrUnsupportedCompressionType Unsupported compression type
     ErrUnsupportedCompressionType ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_UNSUPPORTED_COMPRESSION_TYPE)
+    // ErrStaleBrokerEpoch Broker epoch has changed
+    ErrStaleBrokerEpoch ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_STALE_BROKER_EPOCH)
+    // ErrOffsetNotAvailable Leader high watermark is not caught up
+    ErrOffsetNotAvailable ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_OFFSET_NOT_AVAILABLE)
+    // ErrMemberIDRequired Group member needs a valid member ID
+    ErrMemberIDRequired ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_MEMBER_ID_REQUIRED)
+    // ErrPreferredLeaderNotAvailable Preferred leader was not available
+    ErrPreferredLeaderNotAvailable ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_PREFERRED_LEADER_NOT_AVAILABLE)
+    // ErrGroupMaxSizeReached Consumer group has reached maximum size
+    ErrGroupMaxSizeReached ErrorCode = ErrorCode(C.RD_KAFKA_RESP_ERR_GROUP_MAX_SIZE_REACHED)
 )
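The new codes are matched like any other ErrorCode; for instance, a hedged sketch of detecting the group-size limit on an error returned by a consumer call (err and the log package are assumed from the surrounding application):

    if err != nil {
        if kerr, ok := err.(kafka.Error); ok && kerr.Code() == kafka.ErrGroupMaxSizeReached {
            log.Printf("cannot join: consumer group has reached its maximum size: %v", kerr)
        }
    }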

func (ErrorCode) String

type Handle

OAuthBearerToken) error
+
+    // SetOAuthBearerTokenFailure sets the error message describing why token
+    // retrieval/setting failed; it also schedules a new token refresh event for 10
+    // seconds later so the attempt may be retried. It will return nil on
+    // success, otherwise an error if:
+    // 1) SASL/OAUTHBEARER is not supported by the underlying librdkafka build;
+    // 2) SASL/OAUTHBEARER is supported but is not configured as the client's
+    // authentication mechanism.
+    SetOAuthBearerTokenFailure(errstr string) error
     // contains filtered or unexported methods
 }

     OriginatingBroker BrokerMetadata
 }
+

+type OAuthBearerToken
+
+OAuthBearerToken represents the data to be transmitted
+to a broker during SASL/OAUTHBEARER authentication.
+
type OAuthBearerToken struct {
+    // Token value, often (but not necessarily) a JWS compact serialization
+    // as per https://tools.ietf.org/html/rfc7515#section-3.1; it must meet
+    // the regular expression for a SASL/OAUTHBEARER value defined at
+    // https://tools.ietf.org/html/rfc7628#section-3.1
+    TokenValue string
+    // Metadata about the token indicating when it expires (local time);
+    // it must represent a time in the future
+    Expiration time.Time
+    // Metadata about the token indicating the Kafka principal name
+    // to which it applies (for example, "admin")
+    Principal string
+    // SASL extensions, if any, to be communicated to the broker during
+    // authentication (all keys and values of which must meet the regular
+    // expressions defined at https://tools.ietf.org/html/rfc7628#section-3.1,
+    // and it must not contain the reserved "auth" key)
+    Extensions map[string]string
+}
+
+

+type OAuthBearerTokenRefresh
+
+OAuthBearerTokenRefresh indicates token refresh is required
+
type OAuthBearerTokenRefresh struct {
+    // Config is the value of the sasl.oauthbearer.config property
+    Config string
+}
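librdkafka hands the sasl.oauthbearer.config property to this event verbatim, so interpreting Config is left to the application; a sketch assuming the space-separated name=value convention used by the built-in unsecured token handler:

    func parseOAuthConfig(config string) map[string]string {
        props := map[string]string{}
        for _, kv := range strings.Fields(config) {
            if i := strings.IndexByte(kv, '='); i > 0 {
                props[kv[:i]] = kv[i+1:]
            }
        }
        return props
    }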
+
+

+func (OAuthBearerTokenRefresh) String
+
+    func (o OAuthBearerTokenRefresh) String() string

type
@@ -3116,7 +3372,7 @@

func (*Producer) GetFatalError

func (*Producer) GetMetadata

func (*Producer) OffsetsForTimes

func (*Producer) QueryWatermarkOffsets

+func (*Producer) SetOAuthBearerToken
+
+    func (p *Producer) SetOAuthBearerToken(oauthBearerToken OAuthBearerToken) error
+
+SetOAuthBearerToken sets the data to be transmitted
+to a broker during SASL/OAUTHBEARER authentication. It will return nil
+on success, otherwise an error if:
+1) the token data is invalid (meaning an expiration time in the past
+or either a token value or an extension key or value that does not meet
+the regular expression requirements as per
+https://tools.ietf.org/html/rfc7628#section-3.1);
+2) SASL/OAUTHBEARER is not supported by the underlying librdkafka build;
+3) SASL/OAUTHBEARER is supported but is not configured as the client's
+authentication mechanism.
+
+func (*Producer) SetOAuthBearerTokenFailure
+
+    func (p *Producer) SetOAuthBearerTokenFailure(errstr string) error
+
+SetOAuthBearerTokenFailure sets the error message describing why token
+retrieval/setting failed; it also schedules a new token refresh event for 10
+seconds later so the attempt may be retried. It will return nil on
+success, otherwise an error if:
+1) SASL/OAUTHBEARER is not supported by the underlying librdkafka build;
+2) SASL/OAUTHBEARER is supported but is not configured as the client's
+authentication mechanism.

func (*Producer)
@@ -3253,7 +3553,7 @@

func (*Producer) TestFatalError

type TopicPartition

     Topic     *string
     Partition int32
     Offset    Offset
+    Metadata  *string
     Error     error
 }

func (TopicPartition) String

TopicPartition) String() string

type TopicPartitions

TopicPartition

func (TopicPartitions) Len

TopicPartitions) Len() int

func (TopicPartitions) Less

TopicPartitions) Less(i, j int) bool

func (TopicPartitions) Swap