diff --git a/Dockerfile b/Dockerfile
index 568c4ab..de9d116 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,7 +9,7 @@ ENV GRADLE_BIN "${GRADLE_HOME}/bin"
 ENV CONFLUENT_VERSION "$CONFLUENT_VERSION"
 ENV CONFLUENT_HOME "/opt/confluent"
 ENV CONFLUENT_BIN "${CONFLUENT_HOME}/bin"
-ENV PATH "${PATH}:${CONFLUENT_BIN}:${GRADLE_BIN}:/root/.local/bin"
+ENV PATH "${PATH}:${CONFLUENT_BIN}:${GRADLE_BIN}"
 ENV COLORTERM "truecolor"
 ENV TERM "xterm-256color"

@@ -26,11 +26,7 @@ RUN apt update \
     postgresql-client \
     mysql-client \
     mosquitto-clients \
-    python-is-python3 \
-    python3 \
-    pipx \
-    && rm -rf /var/lib/apt/lists/* \
-    && pipx install kaskade
+    && rm -rf /var/lib/apt/lists/*

 RUN wget -q "http://packages.confluent.io/archive/$(echo "${CONFLUENT_VERSION}" | cut -c 1-3)/confluent-community-${CONFLUENT_VERSION}.zip" -O /tmp/confluent.zip \
     && unzip /tmp/confluent.zip -d /tmp \
@@ -41,3 +37,11 @@ RUN wget -q "https://services.gradle.org/distributions/gradle-8.8-bin.zip" -O /t
     && unzip /tmp/gradle.zip -d /tmp \
     && mv "/tmp/gradle-${GRADLE_VERSION}" "${GRADLE_HOME}" \
     && rm /tmp/gradle.zip
+
+RUN mkdir -p "${CONFLUENT_HOME}/logs" \
+    && chmod 777 "${CONFLUENT_HOME}/logs"
+
+RUN mkdir -p "/home/ubuntu/.gradle" \
+    && chmod 777 "/home/ubuntu/.gradle"
+
+USER ubuntu
diff --git a/docker-compose.yml b/docker-compose.yml
index ce9e91d..5afa5c5 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -206,7 +206,7 @@ services:
       CLUSTER_ID: ${CLUSTER_ID}
     volumes:
       - .:/kafka-sandbox
-      - gradle_cache:/root/.gradle
+      - gradle_cache:/home/ubuntu/.gradle
     entrypoint: /bin/bash
     tty: true

diff --git a/md/json-producer-and-consumer.md b/md/json-producer-and-consumer.md
index a39c92f..290159f 100644
--- a/md/json-producer-and-consumer.md
+++ b/md/json-producer-and-consumer.md
@@ -26,7 +26,7 @@ Producer:
 ```java
 if (useSchemaRegistry) {
     props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaJsonSchemaSerializer.class);
-    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://schema-registry:8081");
+    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);
 } else {
     props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaJsonSerializer.class);
 }
@@ -37,7 +37,7 @@ Consumer:
 ```java
 if (useSchemaRegistry) {
     props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaJsonSchemaDeserializer.class);
-    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://schema-registry:8081");
+    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);
 } else {
     props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaJsonDeserializer.class);
 }
@@ -63,30 +63,16 @@ kafka-topics --create --bootstrap-server kafka1:9092 \
   --topic client.schema.users
 ```

-### Produce
-
-Produce **without** Schema Registry:
+### Produce/Consume without Schema Registry

 ```bash
 gradle kafka-json-clients:run --args="produce client.users 100"
-```
-
-Produce **with** Schema Registry:
-
-```bash
-gradle kafka-json-clients:run --args="produce -s client.schema.users 100"
-```
-
-### Consume
-
-Consume **without** Schema Registry:
-
-```bash
 gradle kafka-json-clients:run --args="consume client.users"
 ```

-Consume **with** Schema Registry:
+### Produce/Consume with Schema Registry

 ```bash
+gradle kafka-json-clients:run --args="produce -s client.schema.users 100"
 gradle kafka-json-clients:run --args="consume -s client.schema.users"
-```
\ No newline at end of file
+```
diff --git a/md/kafka-connect-mqtt-example.md b/md/kafka-connect-mqtt-example.md
index 26fa935..ea1e995 100644
--- a/md/kafka-connect-mqtt-example.md
+++ b/md/kafka-connect-mqtt-example.md
@@ -11,14 +11,14 @@ docker compose --profile mqtt up -d
 In one terminal, subscribe to mqtt topics:

 ```bash
-mosquitto_sub -h mosquitto -t 'house/+/brightness'
+mosquitto_sub -h mosquitto -t "house/+/brightness"
 ```

 In another terminal, publish messages:

 ```bash
-mosquitto_pub -h mosquitto -t 'house/room/brightness' -m '800LM'
-mosquitto_pub -h mosquitto -t 'house/kitchen/brightness' -m '1000LM'
+mosquitto_pub -h mosquitto -t "house/room/brightness" -m "800LM"
+mosquitto_pub -h mosquitto -t "house/kitchen/brightness" -m "1000LM"
 ```

 ### Create Source Connector
@@ -47,8 +47,8 @@ kafka-console-consumer --from-beginning --group connect.mqtt \
 In another terminal, publish new messages to the MQTT broker:

 ```bash
-mosquitto_pub -h mosquitto -t 'house/room/brightness' -m '810LM'
-mosquitto_pub -h mosquitto -t 'house/kitchen/brightness' -m '1020LM'
+mosquitto_pub -h mosquitto -t "house/room/brightness" -m "810LM"
+mosquitto_pub -h mosquitto -t "house/kitchen/brightness" -m "1020LM"
 ```

 Deleting the connector:
diff --git a/md/kafka-mqtt-proxy.md b/md/kafka-mqtt-proxy.md
index f63922c..b6bc5a5 100644
--- a/md/kafka-mqtt-proxy.md
+++ b/md/kafka-mqtt-proxy.md
@@ -26,7 +26,7 @@ kafka-topics --create \
 Publish using mqtt proxy:

 ```bash
-mosquitto_pub -h kafka-mqtt -p 1884 -t 'house/room/temperature' -m '20C'
+mosquitto_pub -h kafka-mqtt -p 1884 -t "house/room/temperature" -m "20C"
 ```

 Check the data:
diff --git a/md/ksqldb-queries.md b/md/ksqldb-queries.md
index bd0e894..2c3dc88 100644
--- a/md/ksqldb-queries.md
+++ b/md/ksqldb-queries.md
@@ -38,3 +38,10 @@ ksql -f kafka-ksqldb/ksql/insert-orders.ksql http://ksqldb:8088
 ```bash
 ksql -e "PRINT 'ksqldb.order_sizes' FROM BEGINNING;" http://ksqldb:8088
 ```
+
+### Drop
+
+```bash
+ksql -e "DROP STREAM ORDERSIZES;" http://ksqldb:8088
+ksql -e "DROP STREAM ORDERS;" http://ksqldb:8088
+```
diff --git a/md/ksqldb-tests.md b/md/ksqldb-tests.md
index 037bc94..834dc29 100644
--- a/md/ksqldb-tests.md
+++ b/md/ksqldb-tests.md
@@ -10,7 +10,7 @@ One interesting feature that ksqlDB has is the test runner, it allows you to tes
 ksql-test-runner -e kafka-ksqldb-extensions/extensions/ \
   -s kafka-ksqldb/ksql/create-orders.ksql \
   -i kafka-ksqldb/tests/orders-input.json \
-  -o kafka-ksqldb/tests/orders-output.json | grep '>>>'
+  -o kafka-ksqldb/tests/orders-output.json | grep ">>>"
 ```

diff --git a/md/protobuf-producer-and-consumer.md b/md/protobuf-producer-and-consumer.md
index b9819e7..4fdccf6 100644
--- a/md/protobuf-producer-and-consumer.md
+++ b/md/protobuf-producer-and-consumer.md
@@ -25,7 +25,7 @@ Producer:
 ```java
 if (useSchemaRegistry) {
     props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaProtobufSerializer.class);
-    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://schema-registry:8081");
+    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);
 } else {
     // ProtobufSerializer is a custom class
     props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProtobufSerializer.class);
@@ -37,7 +37,7 @@ Consumer:
 ```java
 if (useSchemaRegistry) {
     props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaProtobufDeserializer.class);
-    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://schema-registry:8081");
+    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);
 } else {
     // ProtobufDeserializer is a custom class
     props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtobufDeserializer.class);
@@ -66,30 +66,16 @@ kafka-topics --create --bootstrap-server kafka1:9092 \
   --topic client.schema.invoices
 ```

-### Produce
-
-Produce **without** Schema Registry:
+### Produce/Consume without Schema Registry

 ```bash
 gradle kafka-protobuf-clients:run --args="produce client.invoices 100"
-```
-
-Produce **with** Schema Registry:
-
-```bash
-gradle kafka-protobuf-clients:run --args="produce -s client.schema.invoices 100"
-```
-
-### Consume
-
-Consume **without** Schema Registry:
-
-```bash
 gradle kafka-protobuf-clients:run --args="consume client.invoices"
 ```

-Consume **with** Schema Registry:
+### Produce/Consume with Schema Registry

 ```bash
+gradle kafka-protobuf-clients:run --args="produce -s client.schema.invoices 100"
 gradle kafka-protobuf-clients:run --args="consume -s client.schema.invoices"
 ```
\ No newline at end of file
diff --git a/md/sandbox-environment.md b/md/sandbox-environment.md
index 0347944..aa11a56 100644
--- a/md/sandbox-environment.md
+++ b/md/sandbox-environment.md
@@ -3,7 +3,7 @@
 For opening the sandbox environment just run:

 ```bash
-docker compose exec -u $(id -u):$(id -g) cli bash
+docker compose exec cli bash
 ```

 > [!IMPORTANT]