Commit 32744af (1 parent: 7fc77ad)

Fixes #39

Co-authored-by: Johan Wärlander <[email protected]>

Showing 16 changed files with 783 additions and 41 deletions.
# Kafka Connector Extension 1.2.0, released 2021-06-29

Code name: Fixed bug in consuming all offsets

## Summary

In this release, we fixed a bug that could cause an infinite waiting loop when the records polled from Kafka were empty.

## Bugfixes

* #39: Fixed bug related to consuming all offsets when already at end of partition
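The fix, conceptually: when the consumer is already at the end of a partition, a poll can legitimately return no records, and the import loop must treat that as a termination signal rather than waiting for more data. Below is a minimal sketch of such a guard; `consumeUntilEnd`, `maxEmptyPolls`, and the `process` callback are hypothetical names, and this illustrates the idea only, not the connector's actual implementation.

```scala
import java.time.Duration

import scala.jdk.CollectionConverters._

import org.apache.kafka.clients.consumer.{ConsumerRecord, KafkaConsumer}
import org.apache.kafka.common.TopicPartition

object EmptyPollGuard {

  // Drain one partition up to a known end offset. The guard: stop after a
  // bounded number of consecutive empty polls instead of looping forever
  // when the partition has nothing left to deliver. All names here are
  // illustrative, not taken from the connector's code.
  def consumeUntilEnd(
    consumer: KafkaConsumer[String, String],
    partition: TopicPartition,
    endOffset: Long,
    process: ConsumerRecord[String, String] => Unit,
    maxEmptyPolls: Int = 3
  ): Unit = {
    var emptyPolls = 0
    while (consumer.position(partition) < endOffset && emptyPolls < maxEmptyPolls) {
      val records = consumer.poll(Duration.ofSeconds(30)).records(partition).asScala
      if (records.isEmpty) {
        emptyPolls += 1 // an unguarded loop would keep waiting here indefinitely
      } else {
        emptyPolls = 0
        records.foreach(process)
      }
    }
  }
}
```

Bounding the number of consecutive empty polls keeps the importer from blocking the whole job when the end offset was already reached between the metadata lookup and the poll.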
### Runtime Dependency Updates

* Updated `io.confluent:kafka-avro-serializer:6.1.1` to `6.2.0`

### Test Dependency Updates

* Added `com.exasol:test-db-builder-java:3.2.0`
* Added `com.exasol:exasol-testcontainers:3.5.3`
* Added `com.exasol:hamcrest-resultset-matcher:1.4.0`
* Added `org.testcontainers:kafka:1.15.3`
* Updated `org.mockito:mockito-core:3.11.0` to `3.11.2`
* Updated `io.github.embeddedkafka:embedded-kafka-schema-registry:6.1.1` to `6.2.0`

### Plugin Updates

* Updated `org.scoverage:sbt-coveralls:1.2.7` to `1.3.1`
* Updated `net.bzzt:sbt-reproducible-builds:0.25` to `0.28`
`src/it/scala/com/exasol/cloudetl/kafka/docker/BaseDockerIntegrationTest.scala` (new file, 129 additions, 0 deletions)
```scala
package com.exasol.cloudetl.kafka

import java.io.File
import java.nio.file.Paths
import java.sql.Connection

import com.exasol.containers.ExasolContainer
import com.exasol.dbbuilder.dialects.Column
import com.exasol.dbbuilder.dialects.exasol.ExasolObjectFactory
import com.exasol.dbbuilder.dialects.exasol.ExasolSchema
import com.exasol.dbbuilder.dialects.exasol.udf.UdfScript

import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite

trait BaseDockerIntegrationTest extends AnyFunSuite with BeforeAndAfterAll {
  private[this] val JAR_DIRECTORY_PATTERN = "scala-"
  private[this] val JAR_NAME_PATTERN = "exasol-kafka-connector-extension-"
  private[this] val DEFAULT_EXASOL_DOCKER_IMAGE = "7.0.10"

  val network = DockerNamedNetwork("kafka-it-tests", true)
  val exasolContainer = {
    val c: ExasolContainer[_] = new ExasolContainer(getExasolDockerImageVersion())
    c.withNetwork(network)
    c.withReuse(true)
    c
  }
  var factory: ExasolObjectFactory = _
  var schema: ExasolSchema = _
  var connection: Connection = _
  val assembledJarName = getAssembledJarName()

  override def beforeAll(): Unit = {
    exasolContainer.start()
    connection = getConnection()
  }

  override def afterAll(): Unit = {
    connection.close()
    exasolContainer.stop()
  }

  def installKafkaConnector(schemaName: String): Unit = {
    executeStmt(s"DROP SCHEMA IF EXISTS $schemaName CASCADE;")
    factory = new ExasolObjectFactory(getConnection())
    schema = factory.createSchema(schemaName)
    createKafkaImportDeploymentScripts()
    uploadJarToBucket()
  }

  def executeStmt(sql: String): Unit = {
    connection.createStatement().execute(sql)
    ()
  }

  def executeQuery(sql: String): java.sql.ResultSet =
    connection.createStatement().executeQuery(sql)

  private[this] def getConnection(): java.sql.Connection =
    exasolContainer.createConnection("")

  private[this] def getAssembledJarName(): String = {
    val jarDir = findFileOrDirectory("target", JAR_DIRECTORY_PATTERN)
    findFileOrDirectory("target/" + jarDir, JAR_NAME_PATTERN)
  }

  private[this] def createKafkaImportDeploymentScripts(): Unit = {
    val jarPath = s"/buckets/bfsdefault/default/$assembledJarName"
    schema
      .createUdfBuilder("KAFKA_CONSUMER")
      .language(UdfScript.Language.JAVA)
      .inputType(UdfScript.InputType.SET)
      .emits()
      .bucketFsContent("com.exasol.cloudetl.kafka.KafkaConsumerQueryGenerator", jarPath)
      .build()
    schema
      .createUdfBuilder("KAFKA_IMPORT")
      .language(UdfScript.Language.JAVA)
      .inputType(UdfScript.InputType.SET)
      .emits()
      .bucketFsContent("com.exasol.cloudetl.kafka.KafkaTopicDataImporter", jarPath)
      .build()
    schema
      .createUdfBuilder("KAFKA_METADATA")
      .language(UdfScript.Language.JAVA)
      .inputType(UdfScript.InputType.SET)
      .parameter("params", "VARCHAR(2000)")
      .parameter("kafka_partition", "DECIMAL(18, 0)")
      .parameter("kafka_offset", "DECIMAL(36, 0)")
      .emits(
        new Column("partition_index", "DECIMAL(18, 0)"),
        new Column("max_offset", "DECIMAL(36, 0)")
      )
      .bucketFsContent("com.exasol.cloudetl.kafka.KafkaTopicMetadataReader", jarPath)
      .build()
    ()
  }

  private[this] def uploadJarToBucket(): Unit = {
    val jarDir = findFileOrDirectory("target", JAR_DIRECTORY_PATTERN)
    val jarPath = Paths.get("target", jarDir, assembledJarName)
    exasolContainer.getDefaultBucket.uploadFile(jarPath, assembledJarName)
  }

  private[this] def findFileOrDirectory(searchDirectory: String, name: String): String = {
    val files = listDirectoryFiles(searchDirectory)
    val jarFile = files.find(_.getName.contains(name))
    jarFile match {
      case Some(jarFilename) => jarFilename.getName
      case None =>
        throw new IllegalArgumentException(
          s"Cannot find a file or a directory with pattern '$name' in '$searchDirectory'"
        )
    }
  }

  private[this] def listDirectoryFiles(directoryName: String): List[File] = {
    val directory = new File(directoryName)
    if (directory.exists && directory.isDirectory) {
      directory.listFiles.toList
    } else {
      List.empty[File]
    }
  }

  private[this] def getExasolDockerImageVersion(): String =
    System.getProperty("EXASOL_DOCKER_VERSION", DEFAULT_EXASOL_DOCKER_IMAGE)

}
```
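For context, a suite built on this trait might look like the following sketch. The class name, schema name, and assertion are hypothetical and not part of this commit; only `installKafkaConnector` and `executeQuery` come from the trait above, and `EXA_ALL_SCRIPTS` is Exasol's system view of defined scripts.

```scala
// Hypothetical usage sketch, not part of the commit: a suite reusing the
// trait's container lifecycle and deployment helpers.
class KafkaConnectorDeploymentIT extends BaseDockerIntegrationTest {

  override def beforeAll(): Unit = {
    super.beforeAll()                        // starts the Exasol container
    installKafkaConnector("KAFKA_IT_SCHEMA") // creates schema and UDFs, uploads the jar
  }

  test("UDF deployment scripts exist in the schema") {
    val resultSet = executeQuery(
      "SELECT SCRIPT_NAME FROM EXA_ALL_SCRIPTS WHERE SCRIPT_SCHEMA = 'KAFKA_IT_SCHEMA'"
    )
    var names = Set.empty[String]
    while (resultSet.next()) {
      names += resultSet.getString("SCRIPT_NAME")
    }
    resultSet.close()
    assert(names == Set("KAFKA_CONSUMER", "KAFKA_IMPORT", "KAFKA_METADATA"))
  }
}
```

Because the trait enables `withReuse(true)` on the container, repeated local runs of such suites can share one Exasol instance instead of paying the startup cost each time.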