diff --git a/.github/actions/update-docs/action.yml b/.github/actions/update-docs/action.yml
index bc7877497..fc9d17c96 100644
--- a/.github/actions/update-docs/action.yml
+++ b/.github/actions/update-docs/action.yml
@@ -15,10 +15,10 @@ inputs:
required: true
version:
description: "Version name to be deployed by mike"
- required: true
+ required: true
release:
description: "Determines if the set version is a stable and latest version, otherwise it is a dev version. (Default false)"
- default: 'false'
+ default: "false"
required: false
runs:
@@ -35,14 +35,13 @@ runs:
run: |
poetry install --with docs
- - name: Update ${{ github.head_ref }} branch
+ - name: Update gh-pages branch
shell: bash
run: |
git config --local user.name ${{ inputs.username }}
git config --local user.email ${{ inputs.email }}
git config --local user.password ${{ inputs.token }}
-
- git pull
+ git fetch origin gh-pages
- name: Deploy ${{ inputs.version }} version of the documentation with mike
shell: bash
diff --git a/README.md b/README.md
index 9dd25fd9c..5241c662a 100644
--- a/README.md
+++ b/README.md
@@ -29,6 +29,10 @@ You can install it with [pip](https://github.com/pypa/pip):
pip install kpops
```
+## GitHub Actions
+
+Please refer to the [GitHub Actions section](https://bakdata.github.io/kpops/latest/user/references/ci-integration/github-actions) for the documentation.
+
## Contributing
We are happy if you want to contribute to this project.
diff --git a/actions/kpops-runner/action.yaml b/action.yaml
similarity index 74%
rename from actions/kpops-runner/action.yaml
rename to action.yaml
index ff2533251..eeb4346cd 100644
--- a/actions/kpops-runner/action.yaml
+++ b/action.yaml
@@ -11,17 +11,14 @@ inputs:
working-directory:
description: "The root directory containing the config.yaml, pipelines folder and defaults"
default: "."
- pipeline-base-dir:
- description: "Directory where relative pipeline variables are initialized from"
- required: false
defaults:
description: "Defaults folder path"
required: false
config:
- description: "config.yaml file path"
+ description: "Directory containing the config*.yaml file(s)"
required: false
- components:
- description: "Components package path"
+ environment:
+ description: "Environment to run KPOps in"
required: false
filter-type:
description: "Whether to include/exclude the steps defined in KPOPS_PIPELINE_STEPS (default is include)"
@@ -73,10 +70,15 @@ runs:
shell: bash
run: |
echo "::group::install kpops package"
- pip install -r "${{ steps.requirements.outputs.path }}"
+ # Check if kpops-version contains ".dev"
+ if [[ "${{ inputs.kpops-version }}" == *".dev"* ]]; then
+ pip install -r "${{ steps.requirements.outputs.path }}" -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/
+ else
+ pip install -r "${{ steps.requirements.outputs.path }}"
+ fi
echo "::endgroup::"
- name: ${{ inputs.command }} ${{ inputs.pipeline }} pipeline
shell: bash
working-directory: ${{inputs.working-directory}}
- run: kpops ${{ inputs.command }} ${{ inputs.pipeline }} ${{ inputs.components }} ${{ (inputs.defaults != '' && format('--defaults {0}', inputs.defaults)) || '' }} ${{ (inputs.config != '' && format('--config {0}', inputs.config)) || '' }} ${{ (inputs.pipeline-base-dir != '' && format('--pipeline-base-dir {0}', inputs.pipeline-base-dir)) || '' }} ${{ (inputs.filter-type != '' && format('--filter-type {0}', inputs.filter-type)) || '' }}
+ run: kpops ${{ inputs.command }} ${{ inputs.pipeline }} ${{ (inputs.defaults != '' && format('--defaults {0}', inputs.defaults)) || '' }} ${{ (inputs.config != '' && format('--config {0}', inputs.config)) || '' }} ${{ (inputs.environment != '' && format('--environment {0}', inputs.environment)) || '' }} ${{ (inputs.filter-type != '' && format('--filter-type {0}', inputs.filter-type)) || '' }}
diff --git a/actions/kpops-runner/README.md b/actions/kpops-runner/README.md
deleted file mode 100644
index 9ee35bfe1..000000000
--- a/actions/kpops-runner/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# KPOps runner
-
-Please refer to the [GitHub Actions section](https://bakdata.github.io/kpops/latest/user/references/ci-integration/github-actions) for the documentation.
diff --git a/config.yaml b/config.yaml
index 46d0cf8b3..7d0e97a54 100644
--- a/config.yaml
+++ b/config.yaml
@@ -1,5 +1,3 @@
-environment: development
-brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
-kafka_connect_host: "http://localhost:8083"
-kafka_rest_host: "http://localhost:8082"
-schema_registry_url: "http://localhost:8081"
+kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+components_module: tests.pipeline.test_components
+pipeline_base_dir: tests/pipeline
diff --git a/docs/docs/developer/auto-generation.md b/docs/docs/developer/auto-generation.md
index 249f52b77..a530e9f72 100644
--- a/docs/docs/developer/auto-generation.md
+++ b/docs/docs/developer/auto-generation.md
@@ -10,7 +10,7 @@ Auto generation happens mostly with [`pre-commit`](https://pre-commit.com/) hook
- `cli_env_vars.env` -- All CLI environment variables in a `dotenv` file.
- `cli_env_vars.md` -- All CLI environment variables in a table.
-- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`PipelineConfig`](https://github.com/bakdata/kpops/blob/main/kpops/cli/pipeline_config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it.
+- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`KpopsConfig`](https://github.com/bakdata/kpops/blob/main/kpops/cli/kpops_config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it.
- `config_env_vars.md` -- Almost all pipeline config environment variables in a table.
- `variable_substitution.yaml` -- A copy of `./tests/pipeline/resources/component-type-substitution/pipeline.yaml` used as an example of substitution.
diff --git a/docs/docs/resources/architecture/components-hierarchy.md b/docs/docs/resources/architecture/components-hierarchy.md
index fec927ddf..ce24acc46 100644
--- a/docs/docs/resources/architecture/components-hierarchy.md
+++ b/docs/docs/resources/architecture/components-hierarchy.md
@@ -1,20 +1,26 @@
```mermaid
flowchart BT
KubernetesApp --> PipelineComponent
- KafkaConnector --> PipelineComponent
- KafkaApp --> KubernetesApp
+ KafkaApp --> PipelineComponent
+ HelmApp --> KubernetesApp
+ StreamsBootstrap --> HelmApp
StreamsApp --> KafkaApp
+ StreamsApp --> StreamsBootstrap
ProducerApp --> KafkaApp
+ ProducerApp --> StreamsBootstrap
+ KafkaConnector --> PipelineComponent
KafkaSourceConnector --> KafkaConnector
KafkaSinkConnector --> KafkaConnector
-
- click KubernetesApp "../kubernetes-app"
- click KafkaApp "../kafka-app"
- click StreamsApp "../streams-app"
- click ProducerApp "../producer-app"
- click KafkaConnector "../kafka-connector"
- click KafkaSourceConnector "../kafka-source-connector"
- click KafkaSinkConnector "../kafka-sink-connector"
+
+ click KubernetesApp "/kpops/user/core-concepts/components/kubernetes-app"
+ click HelmApp "/kpops/user/core-concepts/components/helm-app"
+ click KafkaApp "/kpops/user/core-concepts/components/kafka-app"
+ click StreamsBootstrap "/kpops/user/core-concepts/components/streams-bootstrap"
+ click StreamsApp "/kpops/user/core-concepts/components/streams-app"
+ click ProducerApp "/kpops/user/core-concepts/components/producer-app"
+ click KafkaConnector "/kpops/user/core-concepts/components/kafka-connector"
+ click KafkaSourceConnector "/kpops/user/core-concepts/components/kafka-source-connector"
+ click KafkaSinkConnector "/kpops/user/core-concepts/components/kafka-sink-connector"
```
KPOps component hierarchy
diff --git a/docs/docs/resources/editor_integration/settings.json b/docs/docs/resources/editor_integration/settings.json
index bead179c9..1938692c3 100644
--- a/docs/docs/resources/editor_integration/settings.json
+++ b/docs/docs/resources/editor_integration/settings.json
@@ -1,6 +1,16 @@
{
"yaml.schemas": {
- "https://bakdata.github.io/kpops/2.0/schema/config.json": "config.yaml",
- "https://bakdata.github.io/kpops/2.0/schema/pipeline.json": "pipeline.yaml"
+ "https://bakdata.github.io/kpops/3.0/schema/pipeline.json": [
+ "pipeline.yaml",
+ "pipeline_*.yaml"
+ ],
+ "https://bakdata.github.io/kpops/3.0/schema/defaults.json": [
+ "defaults.yaml",
+ "defaults_*.yaml"
+ ],
+ "https://bakdata.github.io/kpops/3.0/schema/config.json": [
+ "config.yaml",
+ "config_*.yaml"
+ ]
}
}
diff --git a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml
index c431a71b9..a47ad6b50 100644
--- a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml
+++ b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml
@@ -1,14 +1,16 @@
+helm-app.yaml:
+- app-helm-app.yaml
+- repo_config-helm-app.yaml
kafka-app.yaml:
+- prefix.yaml
+- from_.yaml
+- to.yaml
- app-kafka-app.yaml
-- version-kafka-app.yaml
kafka-connector.yaml:
- prefix.yaml
- from_.yaml
- to.yaml
-- namespace.yaml
- app-kafka-connector.yaml
-- repo_config-kafka-connector.yaml
-- version-kafka-connector.yaml
- resetter_values.yaml
kafka-sink-connector.yaml: []
kafka-source-connector.yaml:
@@ -20,8 +22,6 @@ kubernetes-app.yaml:
- to.yaml
- namespace.yaml
- app-kubernetes-app.yaml
-- repo_config-kubernetes-app.yaml
-- version.yaml
producer-app.yaml:
- from_-producer-app.yaml
- app-producer-app.yaml
diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml
index 21af5971c..2ac8b59b2 100644
--- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml
+++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml
@@ -1,5 +1,5 @@
kpops_components_fields:
- kafka-app:
+ helm-app:
- name
- prefix
- from_
@@ -8,35 +8,35 @@ kpops_components_fields:
- app
- repo_config
- version
+ kafka-app:
+ - name
+ - prefix
+ - from_
+ - to
+ - app
kafka-connector:
- name
- prefix
- from_
- to
- - namespace
- app
- - repo_config
- - version
+ - resetter_namespace
- resetter_values
kafka-sink-connector:
- name
- prefix
- from_
- to
- - namespace
- app
- - repo_config
- - version
+ - resetter_namespace
- resetter_values
kafka-source-connector:
- name
- prefix
- from_
- to
- - namespace
- app
- - repo_config
- - version
+ - resetter_namespace
- resetter_values
- offset_topic
kubernetes-app:
@@ -46,8 +46,6 @@ kpops_components_fields:
- to
- namespace
- app
- - repo_config
- - version
pipeline-component:
- name
- prefix
@@ -71,12 +69,76 @@ kpops_components_fields:
- app
- repo_config
- version
+ streams-bootstrap:
+ - name
+ - prefix
+ - from_
+ - to
+ - namespace
+ - app
+ - repo_config
+ - version
kpops_components_inheritance_ref:
- kafka-app: kubernetes-app
- kafka-connector: pipeline-component
- kafka-sink-connector: kafka-connector
- kafka-source-connector: kafka-connector
- kubernetes-app: pipeline-component
- pipeline-component: base-defaults-component
- producer-app: kafka-app
- streams-app: kafka-app
+ helm-app:
+ bases:
+ - kubernetes-app
+ parents:
+ - kubernetes-app
+ - pipeline-component
+ kafka-app:
+ bases:
+ - pipeline-component
+ parents:
+ - pipeline-component
+ kafka-connector:
+ bases:
+ - pipeline-component
+ parents:
+ - pipeline-component
+ kafka-sink-connector:
+ bases:
+ - kafka-connector
+ parents:
+ - kafka-connector
+ - pipeline-component
+ kafka-source-connector:
+ bases:
+ - kafka-connector
+ parents:
+ - kafka-connector
+ - pipeline-component
+ kubernetes-app:
+ bases:
+ - pipeline-component
+ parents:
+ - pipeline-component
+ pipeline-component:
+ bases: []
+ parents: []
+ producer-app:
+ bases:
+ - kafka-app
+ - streams-bootstrap
+ parents:
+ - kafka-app
+ - streams-bootstrap
+ - helm-app
+ - kubernetes-app
+ - pipeline-component
+ streams-app:
+ bases:
+ - kafka-app
+ - streams-bootstrap
+ parents:
+ - kafka-app
+ - streams-bootstrap
+ - helm-app
+ - kubernetes-app
+ - pipeline-component
+ streams-bootstrap:
+ bases:
+ - helm-app
+ parents:
+ - helm-app
+ - kubernetes-app
+ - pipeline-component
diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml
index 485c3c253..c7d08112c 100644
--- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml
+++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml
@@ -1,37 +1,33 @@
-kafka-app.yaml:
+helm-app.yaml:
- prefix.yaml
- from_.yaml
- to.yaml
- namespace.yaml
+- app-helm-app.yaml
+- repo_config-helm-app.yaml
+- version.yaml
+kafka-app.yaml:
+- prefix.yaml
+- from_.yaml
+- to.yaml
- app-kafka-app.yaml
-- repo_config-kubernetes-app.yaml
-- version-kafka-app.yaml
kafka-connector.yaml:
- prefix.yaml
- from_.yaml
- to.yaml
-- namespace.yaml
- app-kafka-connector.yaml
-- repo_config-kafka-connector.yaml
-- version-kafka-connector.yaml
- resetter_values.yaml
kafka-sink-connector.yaml:
- prefix.yaml
- from_.yaml
- to.yaml
-- namespace.yaml
- app-kafka-connector.yaml
-- repo_config-kafka-connector.yaml
-- version-kafka-connector.yaml
- resetter_values.yaml
kafka-source-connector.yaml:
- prefix.yaml
- from_-kafka-source-connector.yaml
- to.yaml
-- namespace.yaml
- app-kafka-connector.yaml
-- repo_config-kafka-connector.yaml
-- version-kafka-connector.yaml
- resetter_values.yaml
- offset_topic-kafka-source-connector.yaml
kubernetes-app.yaml:
@@ -40,15 +36,13 @@ kubernetes-app.yaml:
- to.yaml
- namespace.yaml
- app-kubernetes-app.yaml
-- repo_config-kubernetes-app.yaml
-- version.yaml
producer-app.yaml:
- prefix.yaml
- from_-producer-app.yaml
- to.yaml
- namespace.yaml
- app-producer-app.yaml
-- repo_config-kubernetes-app.yaml
+- repo_config-helm-app.yaml
- version-kafka-app.yaml
streams-app.yaml:
- prefix.yaml
@@ -56,5 +50,5 @@ streams-app.yaml:
- to.yaml
- namespace.yaml
- app-streams-app.yaml
-- repo_config-kubernetes-app.yaml
+- repo_config-helm-app.yaml
- version-kafka-app.yaml
diff --git a/docs/docs/resources/pipeline-components/headers/helm-app.yaml b/docs/docs/resources/pipeline-components/headers/helm-app.yaml
new file mode 100644
index 000000000..2c8bf9fea
--- /dev/null
+++ b/docs/docs/resources/pipeline-components/headers/helm-app.yaml
@@ -0,0 +1,3 @@
+# Kubernetes app managed through Helm with an associated Helm chart
+- type: helm-app
+ name: helm-app # required
diff --git a/docs/docs/resources/pipeline-components/helm-app.yaml b/docs/docs/resources/pipeline-components/helm-app.yaml
new file mode 100644
index 000000000..1bd2ce3c0
--- /dev/null
+++ b/docs/docs/resources/pipeline-components/helm-app.yaml
@@ -0,0 +1,63 @@
+# Kubernetes app managed through Helm with an associated Helm chart
+- type: helm-app
+ name: helm-app # required
+ # Pipeline prefix that will prefix every component name. If you wish to not
+ # have any prefix you can specify an empty string.
+ prefix: ${pipeline.name}-
+ from: # Must not be null
+ topics: # read from topic
+ ${pipeline.name}-input-topic:
+ type: input # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra
+ ${pipeline.name}-input-pattern-topic:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ ${pipeline.name}-extra-pattern-topic:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ components: # read from specific component
+ account-producer:
+ type: output # Implied when role is NOT specified
+ other-producer:
+ role: some-role # Implies `type` to be extra
+ component-as-input-pattern:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ component-as-extra-pattern:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ # Topic(s) into which the component will write output
+ to:
+ topics:
+ ${pipeline.name}-output-topic:
+ type: output # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
+ ${pipeline.name}-error-topic:
+ type: error
+ # Currently KPOps supports Avro and JSON schemas.
+ key_schema: key-schema # must implement SchemaProvider to use
+ value_schema: value-schema
+ partitions_count: 1
+ replication_factor: 1
+ configs: # https://kafka.apache.org/documentation/#topicconfigs
+ cleanup.policy: compact
+ models: # SchemaProvider is initiated with the values given here
+ model: model
+ namespace: namespace # required
+ # `app` contains application-specific settings, hence it does not have a rigid
+ # structure. The fields below are just an example.
+ app: # required
+ image: exampleImage # Example
+ debug: false # Example
+ commandLine: {} # Example
+ # Helm repository configuration (optional)
+ # If not set the helm repo add will not be called. Useful when using local Helm charts
+ repo_config:
+ repository_name: bakdata-streams-bootstrap # required
+ url: https://bakdata.github.io/streams-bootstrap/ # required
+ repo_auth_flags:
+ username: user
+ password: pass
+ ca_file: /home/user/path/to/ca-file
+ insecure_skip_tls_verify: false
+ version: "1.0.0" # Helm chart version
diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml
index 6d8045ad5..60fbbfb13 100644
--- a/docs/docs/resources/pipeline-components/kafka-app.yaml
+++ b/docs/docs/resources/pipeline-components/kafka-app.yaml
@@ -4,16 +4,16 @@
name: kafka-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -29,11 +29,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -44,23 +44,11 @@
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` can contain application-specific settings, hence the user is free to
# add the key-value pairs they need.
app: # required
streams: # required
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
nameOverride: override-with-this-name # kafka-app-specific
imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app
- # Helm repository configuration (optional)
- # If not set the helm repo add will not be called. Useful when using local Helm charts
- repo_config:
- repository_name: bakdata-streams-bootstrap # required
- url: https://bakdata.github.io/streams-bootstrap/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "2.12.0" # Helm chart version
diff --git a/docs/docs/resources/pipeline-components/kafka-connector.yaml b/docs/docs/resources/pipeline-components/kafka-connector.yaml
index d44aa7bce..b231ae4cc 100644
--- a/docs/docs/resources/pipeline-components/kafka-connector.yaml
+++ b/docs/docs/resources/pipeline-components/kafka-connector.yaml
@@ -2,16 +2,16 @@
name: kafka-connector # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -27,11 +27,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -42,22 +42,11 @@
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` contains application-specific settings, hence it does not have a rigid
# structure. The fields below are just an example. Extensive documentation on
# connectors: https://kafka.apache.org/documentation/#connectconfigs
app: # required
tasks.max: 1
- # Helm repository configuration for resetter
- repo_config:
- repository_name: my-repo # required
- url: https://bakdata.github.io/kafka-connect-resetter/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.6" # Helm chart version
# Overriding Kafka Connect Resetter Helm values. E.g. to override the
# Image Tag etc.
resetter_values:
diff --git a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml
index 017511e5b..8e100d1b3 100644
--- a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml
+++ b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml
@@ -3,16 +3,16 @@
name: kafka-sink-connector # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -28,11 +28,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -43,22 +43,11 @@
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` contains application-specific settings, hence it does not have a rigid
# structure. The fields below are just an example. Extensive documentation on
# connectors: https://kafka.apache.org/documentation/#connectconfigs
app: # required
tasks.max: 1
- # Helm repository configuration for resetter
- repo_config:
- repository_name: my-repo # required
- url: https://bakdata.github.io/kafka-connect-resetter/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.6" # Helm chart version
# Overriding Kafka Connect Resetter Helm values. E.g. to override the
# Image Tag etc.
resetter_values:
diff --git a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml
index d4cbcb24c..fc1f4e8c4 100644
--- a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml
+++ b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml
@@ -3,17 +3,17 @@
name: kafka-source-connector # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
# The source connector has no `from` section
# from:
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -24,22 +24,11 @@
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` contains application-specific settings, hence it does not have a rigid
# structure. The fields below are just an example. Extensive documentation on
# connectors: https://kafka.apache.org/documentation/#connectconfigs
app: # required
tasks.max: 1
- # Helm repository configuration for resetter
- repo_config:
- repository_name: my-repo # required
- url: https://bakdata.github.io/kafka-connect-resetter/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.6" # Helm chart version
# Overriding Kafka Connect Resetter Helm values. E.g. to override the
# Image Tag etc.
resetter_values:
diff --git a/docs/docs/resources/pipeline-components/kubernetes-app.yaml b/docs/docs/resources/pipeline-components/kubernetes-app.yaml
index ffe41894c..66ed21bb2 100644
--- a/docs/docs/resources/pipeline-components/kubernetes-app.yaml
+++ b/docs/docs/resources/pipeline-components/kubernetes-app.yaml
@@ -3,16 +3,16 @@
name: kubernetes-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -28,11 +28,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -50,14 +50,3 @@
image: exampleImage # Example
debug: false # Example
commandLine: {} # Example
- # Helm repository configuration (optional)
- # If not set the helm repo add will not be called. Useful when using local Helm charts
- repo_config:
- repository_name: bakdata-streams-bootstrap # required
- url: https://bakdata.github.io/streams-bootstrap/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.0" # Helm chart version
diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml
index 27c5d45c1..9a3f93a9e 100644
--- a/docs/docs/resources/pipeline-components/pipeline.yaml
+++ b/docs/docs/resources/pipeline-components/pipeline.yaml
@@ -1,19 +1,18 @@
-# Base component for Kafka-based components.
-# Producer or streaming apps should inherit from this class.
-- type: kafka-app # required
- name: kafka-app # required
+# Kubernetes app managed through Helm with an associated Helm chart
+- type: helm-app
+ name: helm-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -29,11 +28,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -45,14 +44,12 @@
models: # SchemaProvider is initiated with the values given here
model: model
namespace: namespace # required
- # `app` can contain application-specific settings, hence the user is free to
- # add the key-value pairs they need.
+ # `app` contains application-specific settings, hence it does not have a rigid
+ # structure. The fields below are just an example.
app: # required
- streams: # required
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
- nameOverride: override-with-this-name # kafka-app-specific
- imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app
+ image: exampleImage # Example
+ debug: false # Example
+ commandLine: {} # Example
# Helm repository configuration (optional)
# If not set the helm repo add will not be called. Useful when using local Helm charts
repo_config:
@@ -63,22 +60,76 @@
password: pass
ca_file: /home/user/path/to/ca-file
insecure_skip_tls_verify: false
- version: "2.12.0" # Helm chart version
+ version: "1.0.0" # Helm chart version
+# Base component for Kafka-based components.
+# Producer or streaming apps should inherit from this class.
+- type: kafka-app # required
+ name: kafka-app # required
+ # Pipeline prefix that will prefix every component name. If you wish to not
+ # have any prefix you can specify an empty string.
+ prefix: ${pipeline.name}-
+ from: # Must not be null
+ topics: # read from topic
+ ${pipeline.name}-input-topic:
+ type: input # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra
+ ${pipeline.name}-input-pattern-topic:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ ${pipeline.name}-extra-pattern-topic:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ components: # read from specific component
+ account-producer:
+ type: output # Implied when role is NOT specified
+ other-producer:
+ role: some-role # Implies `type` to be extra
+ component-as-input-pattern:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ component-as-extra-pattern:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ # Topic(s) into which the component will write output
+ to:
+ topics:
+ ${pipeline.name}-output-topic:
+ type: output # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
+ ${pipeline.name}-error-topic:
+ type: error
+ # Currently KPOps supports Avro and JSON schemas.
+ key_schema: key-schema # must implement SchemaProvider to use
+ value_schema: value-schema
+ partitions_count: 1
+ replication_factor: 1
+ configs: # https://kafka.apache.org/documentation/#topicconfigs
+ cleanup.policy: compact
+ models: # SchemaProvider is initiated with the values given here
+ model: model
+ # `app` can contain application-specific settings, hence the user is free to
+ # add the key-value pairs they need.
+ app: # required
+ streams: # required
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
+ nameOverride: override-with-this-name # kafka-app-specific
+ imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app
# Kafka sink connector
- type: kafka-sink-connector
name: kafka-sink-connector # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -94,11 +145,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -109,22 +160,11 @@
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` contains application-specific settings, hence it does not have a rigid
# structure. The fields below are just an example. Extensive documentation on
# connectors: https://kafka.apache.org/documentation/#connectconfigs
app: # required
tasks.max: 1
- # Helm repository configuration for resetter
- repo_config:
- repository_name: my-repo # required
- url: https://bakdata.github.io/kafka-connect-resetter/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.6" # Helm chart version
# Overriding Kafka Connect Resetter Helm values. E.g. to override the
# Image Tag etc.
resetter_values:
@@ -134,17 +174,17 @@
name: kafka-source-connector # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
# The source connector has no `from` section
# from:
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -155,22 +195,11 @@
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` contains application-specific settings, hence it does not have a rigid
# structure. The fields below are just an example. Extensive documentation on
# connectors: https://kafka.apache.org/documentation/#connectconfigs
app: # required
tasks.max: 1
- # Helm repository configuration for resetter
- repo_config:
- repository_name: my-repo # required
- url: https://bakdata.github.io/kafka-connect-resetter/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.6" # Helm chart version
# Overriding Kafka Connect Resetter Helm values. E.g. to override the
# Image Tag etc.
resetter_values:
@@ -183,16 +212,16 @@
name: kubernetes-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -208,11 +237,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -230,17 +259,6 @@
image: exampleImage # Example
debug: false # Example
commandLine: {} # Example
- # Helm repository configuration (optional)
- # If not set the helm repo add will not be called. Useful when using local Helm charts
- repo_config:
- repository_name: bakdata-streams-bootstrap # required
- url: https://bakdata.github.io/streams-bootstrap/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.0" # Helm chart version
# Holds configuration to use as values for the streams bootstrap producer-app Helm
# chart.
# More documentation on ProducerApp:
@@ -249,17 +267,17 @@
name: producer-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
# from: # While the producer-app does inherit from kafka-app, it does not need a
# `from` section, hence it does not support it.
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -275,8 +293,8 @@
# https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app
app: # required
streams: # required, producer-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
outputTopic: output_topic
extraOutputTopics:
output_role1: output_topic1
@@ -299,16 +317,16 @@
name: streams-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -324,11 +342,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -346,8 +364,8 @@
app: # required
# Streams Bootstrap streams section
streams: # required, streams-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
inputTopics:
- topic1
- topic2
diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml
index 7a01ad24b..784873617 100644
--- a/docs/docs/resources/pipeline-components/producer-app.yaml
+++ b/docs/docs/resources/pipeline-components/producer-app.yaml
@@ -6,17 +6,17 @@
name: producer-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
# from: # While the producer-app does inherit from kafka-app, it does not need a
# `from` section, hence it does not support it.
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -32,8 +32,8 @@
# https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app
app: # required
streams: # required, producer-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
outputTopic: output_topic
extraOutputTopics:
output_role1: output_topic1
diff --git a/docs/docs/resources/pipeline-components/sections/app-helm-app.yaml b/docs/docs/resources/pipeline-components/sections/app-helm-app.yaml
new file mode 100644
index 000000000..e2b6cbae0
--- /dev/null
+++ b/docs/docs/resources/pipeline-components/sections/app-helm-app.yaml
@@ -0,0 +1,6 @@
+ # `app` contains application-specific settings, hence it does not have a rigid
+ # structure. The fields below are just an example.
+ app: # required
+ image: exampleImage # Example
+ debug: false # Example
+ commandLine: {} # Example
diff --git a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml
index 991e862e0..5ae8be6d6 100644
--- a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml
+++ b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml
@@ -2,7 +2,7 @@
# add the key-value pairs they need.
app: # required
streams: # required
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
nameOverride: override-with-this-name # kafka-app-specific
imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app
diff --git a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml
index 5cd9b000b..0fe6680cd 100644
--- a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml
+++ b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml
@@ -2,8 +2,8 @@
# https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app
app: # required
streams: # required, producer-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
outputTopic: output_topic
extraOutputTopics:
output_role1: output_topic1
diff --git a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml
index 44f6604aa..e3577aa5f 100644
--- a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml
+++ b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml
@@ -4,8 +4,8 @@
app: # required
# Streams Bootstrap streams section
streams: # required, streams-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
inputTopics:
- topic1
- topic2
diff --git a/docs/docs/resources/pipeline-components/sections/from_.yaml b/docs/docs/resources/pipeline-components/sections/from_.yaml
index 3f7f0dd22..777d10d0e 100644
--- a/docs/docs/resources/pipeline-components/sections/from_.yaml
+++ b/docs/docs/resources/pipeline-components/sections/from_.yaml
@@ -1,12 +1,12 @@
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
diff --git a/docs/docs/resources/pipeline-components/sections/prefix.yaml b/docs/docs/resources/pipeline-components/sections/prefix.yaml
index 91fbda223..b4d03f519 100644
--- a/docs/docs/resources/pipeline-components/sections/prefix.yaml
+++ b/docs/docs/resources/pipeline-components/sections/prefix.yaml
@@ -1,3 +1,3 @@
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
diff --git a/docs/docs/resources/pipeline-components/sections/repo_config-kubernetes-app.yaml b/docs/docs/resources/pipeline-components/sections/repo_config-helm-app.yaml
similarity index 100%
rename from docs/docs/resources/pipeline-components/sections/repo_config-kubernetes-app.yaml
rename to docs/docs/resources/pipeline-components/sections/repo_config-helm-app.yaml
diff --git a/docs/docs/resources/pipeline-components/sections/to.yaml b/docs/docs/resources/pipeline-components/sections/to.yaml
index dd81be9ef..7ebaf60df 100644
--- a/docs/docs/resources/pipeline-components/sections/to.yaml
+++ b/docs/docs/resources/pipeline-components/sections/to.yaml
@@ -1,11 +1,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml
index 0dde5be5c..1e79eaf0b 100644
--- a/docs/docs/resources/pipeline-components/streams-app.yaml
+++ b/docs/docs/resources/pipeline-components/streams-app.yaml
@@ -4,16 +4,16 @@
name: streams-app # required
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -29,11 +29,11 @@
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -51,8 +51,8 @@
app: # required
# Streams Bootstrap streams section
streams: # required, streams-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
inputTopics:
- topic1
- topic2
diff --git a/docs/docs/resources/pipeline-config/config.yaml b/docs/docs/resources/pipeline-config/config.yaml
index 3b08c0708..0707280c7 100644
--- a/docs/docs/resources/pipeline-config/config.yaml
+++ b/docs/docs/resources/pipeline-config/config.yaml
@@ -3,28 +3,35 @@
# The path to the folder containing the defaults.yaml file and the environment
# defaults files.
defaults_path: .
-# The environment you want to generate and deploy the pipeline to. Suffix your
-# environment files with this value (e.g. defaults_development.yaml and
-# pipeline_development.yaml for environment=development).
-# REQUIRED
-environment: development
+# Custom Python module defining project-specific KPOps components
+components_module: null
+# Base directory to the pipelines (default is current working directory)
+pipeline_base_dir: .
# The Kafka brokers address.
# REQUIRED
-brokers: "http://broker1:9092,http://broker2:9092"
+kafka_brokers: "http://broker1:9092,http://broker2:9092"
# The name of the defaults file and the prefix of the defaults environment file.
defaults_filename_prefix: defaults
-# Configures topic names.
+# Configure the topic name variables you can use in the pipeline definition.
topic_name_config:
# Configures the value for the variable ${output_topic_name}
- default_output_topic_name: ${pipeline_name}-${component_name}
+ default_output_topic_name: ${pipeline.name}-${component.name}
# Configures the value for the variable ${error_topic_name}
- default_error_topic_name: ${pipeline_name}-${component_name}-error
-# Address of the Schema Registry
-schema_registry_url: "http://localhost:8081"
-# Address of the Kafka REST Proxy.
-kafka_rest_host: "http://localhost:8082"
-# Address of Kafka Connect.
-kafka_connect_host: "http://localhost:8083"
+ default_error_topic_name: ${pipeline.name}-${component.name}-error
+# Configuration for Schema Registry.
+schema_registry:
+ # Whether the Schema Registry handler should be initialized.
+ enabled: false
+ # Address of the Schema Registry.
+ url: "http://localhost:8081"
+# Configuration for the Kafka REST Proxy.
+kafka_rest:
+ # Address of the Kafka REST Proxy.
+ url: "http://localhost:8082"
+# Configuration for Kafka Connect.
+kafka_connect:
+ # Address of Kafka Connect.
+ url: "http://localhost:8083"
# The timeout in seconds that specifies when actions like deletion or deploy
# timeout.
timeout: 300
@@ -33,14 +40,16 @@ timeout: 300
create_namespace: false
# Global flags for Helm.
helm_config:
- # Set the name of the kubeconfig context. (--kube-context)
+ # Name of kubeconfig context (`--kube-context`)
context: name
# Run Helm in Debug mode.
debug: false
+ # Kubernetes API version used for Capabilities.APIVersions
+ api_version: null
# Configure Helm Diff.
helm_diff_config:
# Set of keys that should not be checked.
- ignore:
+ ignore:
- name
- imageTag
# Whether to retain clean up jobs in the cluster or uninstall them after
diff --git a/docs/docs/resources/pipeline-defaults/defaults-helm-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-helm-app.yaml
new file mode 100644
index 000000000..d08200203
--- /dev/null
+++ b/docs/docs/resources/pipeline-defaults/defaults-helm-app.yaml
@@ -0,0 +1,21 @@
+# Kubernetes app managed through Helm with an associated Helm chart
+#
+# Parent of: KafkaApp
+# Child of: KubernetesApp
+helm-app:
+ # `app` contains application-specific settings, hence it does not have a rigid
+ # structure. The fields below are just an example.
+ app: # required
+ image: exampleImage # Example
+ debug: false # Example
+ commandLine: {} # Example
+ # Helm repository configuration (optional)
+ # If not set the helm repo add will not be called. Useful when using local Helm charts
+ repo_config:
+ repository_name: bakdata-streams-bootstrap # required
+ url: https://bakdata.github.io/streams-bootstrap/ # required
+ repo_auth_flags:
+ username: user
+ password: pass
+ ca_file: /home/user/path/to/ca-file
+ insecure_skip_tls_verify: false
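
To illustrate how these defaults are consumed, a hypothetical `helm-app` entry in a pipeline definition might look as follows; the name, namespace, and image are made up, and the defaults above would supply `repo_config` and the example `app` fields.

```yaml
# Hypothetical pipeline.yaml entry; defaults-helm-app.yaml fills in repo_config etc.
- type: helm-app
  name: example-helm-app        # placeholder
  namespace: example-namespace  # placeholder
  app:
    image: exampleImage
  version: "1.0.0"              # optional Helm chart version
```
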
diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml
index e0af3b7a7..a27bb38d1 100644
--- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml
+++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml
@@ -3,12 +3,53 @@
# Parent of: ProducerApp, StreamsApp
# Child of: KubernetesApp
kafka-app:
+ # Pipeline prefix that will prefix every component name. If you wish to not
+ # have any prefix you can specify an empty string.
+ prefix: ${pipeline.name}-
+ from: # Must not be null
+ topics: # read from topic
+ ${pipeline.name}-input-topic:
+ type: input # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra
+ ${pipeline.name}-input-pattern-topic:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ ${pipeline.name}-extra-pattern-topic:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ components: # read from specific component
+ account-producer:
+ type: output # Implied when role is NOT specified
+ other-producer:
+ role: some-role # Implies `type` to be extra
+ component-as-input-pattern:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ component-as-extra-pattern:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ # Topic(s) into which the component will write output
+ to:
+ topics:
+ ${pipeline.name}-output-topic:
+ type: output # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
+ ${pipeline.name}-error-topic:
+ type: error
+ # Currently KPOps supports Avro and JSON schemas.
+ key_schema: key-schema # must implement SchemaProvider to use
+ value_schema: value-schema
+ partitions_count: 1
+ replication_factor: 1
+ configs: # https://kafka.apache.org/documentation/#topicconfigs
+ cleanup.policy: compact
+ models: # SchemaProvider is initiated with the values given here
+ model: model
# `app` can contain application-specific settings, hence the user is free to
# add the key-value pairs they need.
app: # required
streams: # required
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
nameOverride: override-with-this-name # kafka-app-specific
imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app
- version: "2.12.0" # Helm chart version
diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml
index 8aa5e8ac2..40a8c117d 100644
--- a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml
+++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml
@@ -5,16 +5,16 @@
kafka-connector:
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -30,11 +30,11 @@ kafka-connector:
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -45,22 +45,11 @@ kafka-connector:
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` contains application-specific settings, hence it does not have a rigid
# structure. The fields below are just an example. Extensive documentation on
# connectors: https://kafka.apache.org/documentation/#connectconfigs
app: # required
tasks.max: 1
- # Helm repository configuration for resetter
- repo_config:
- repository_name: my-repo # required
- url: https://bakdata.github.io/kafka-connect-resetter/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.6" # Helm chart version
# Overriding Kafka Connect Resetter Helm values. E.g. to override the
# Image Tag etc.
resetter_values:
diff --git a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml
index d49764b8f..0780de384 100644
--- a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml
+++ b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml
@@ -1,20 +1,20 @@
# Base Kubernetes App
#
-# Parent of: KafkaApp
+# Parent of: HelmApp
# Child of: PipelineComponent
kubernetes-app:
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -30,11 +30,11 @@ kubernetes-app:
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -52,14 +52,3 @@ kubernetes-app:
image: exampleImage # Example
debug: false # Example
commandLine: {} # Example
- # Helm repository configuration (optional)
- # If not set the helm repo add will not be called. Useful when using local Helm charts
- repo_config:
- repository_name: bakdata-streams-bootstrap # required
- url: https://bakdata.github.io/streams-bootstrap/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.0" # Helm chart version
diff --git a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml
index 1d81f5ced..a5b4a0f6f 100644
--- a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml
+++ b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml
@@ -10,8 +10,8 @@ producer-app:
# https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app
app: # required
streams: # required, producer-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
outputTopic: output_topic
extraOutputTopics:
output_role1: output_topic1
diff --git a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml
index 83ff13f14..4db627950 100644
--- a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml
+++ b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml
@@ -9,8 +9,8 @@ streams-app:
app: # required
# Streams Bootstrap streams section
streams: # required, streams-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
inputTopics:
- topic1
- topic2
diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml
index e74272bdc..9711a8c6f 100644
--- a/docs/docs/resources/pipeline-defaults/defaults.yaml
+++ b/docs/docs/resources/pipeline-defaults/defaults.yaml
@@ -1,17 +1,79 @@
+# Kubernetes app managed through Helm with an associated Helm chart
+#
+# Parent of: KafkaApp
+# Child of: KubernetesApp
+helm-app:
+ # `app` contains application-specific settings, hence it does not have a rigid
+ # structure. The fields below are just an example.
+ app: # required
+ image: exampleImage # Example
+ debug: false # Example
+ commandLine: {} # Example
+ # Helm repository configuration (optional)
+  # If not set, `helm repo add` will not be called. Useful when using local Helm charts
+ repo_config:
+ repository_name: bakdata-streams-bootstrap # required
+ url: https://bakdata.github.io/streams-bootstrap/ # required
+ repo_auth_flags:
+ username: user
+ password: pass
+ ca_file: /home/user/path/to/ca-file
+ insecure_skip_tls_verify: false
# Base component for Kafka-based components.
#
# Parent of: ProducerApp, StreamsApp
# Child of: KubernetesApp
kafka-app:
+ # Pipeline prefix that will prefix every component name. If you wish to not
+ # have any prefix you can specify an empty string.
+ prefix: ${pipeline.name}-
+ from: # Must not be null
+ topics: # read from topic
+ ${pipeline.name}-input-topic:
+ type: input # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra
+ ${pipeline.name}-input-pattern-topic:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ ${pipeline.name}-extra-pattern-topic:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ components: # read from specific component
+ account-producer:
+ type: output # Implied when role is NOT specified
+ other-producer:
+ role: some-role # Implies `type` to be extra
+ component-as-input-pattern:
+ type: pattern # Implied to be an input pattern if `role` is undefined
+ component-as-extra-pattern:
+ type: pattern # Implied to be an extra pattern if `role` is defined
+ role: some-role
+ # Topic(s) into which the component will write output
+ to:
+ topics:
+ ${pipeline.name}-output-topic:
+ type: output # Implied when role is NOT specified
+ ${pipeline.name}-extra-topic:
+ role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
+ ${pipeline.name}-error-topic:
+ type: error
+ # Currently KPOps supports Avro and JSON schemas.
+ key_schema: key-schema # must implement SchemaProvider to use
+ value_schema: value-schema
+ partitions_count: 1
+ replication_factor: 1
+ configs: # https://kafka.apache.org/documentation/#topicconfigs
+ cleanup.policy: compact
+ models: # SchemaProvider is initiated with the values given here
+ model: model
# `app` can contain application-specific settings, hence the user is free to
# add the key-value pairs they need.
app: # required
streams: # required
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
nameOverride: override-with-this-name # kafka-app-specific
imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app
- version: "2.12.0" # Helm chart version
# Kafka connector
#
# Parent of: KafkaSinkConnector, KafkaSourceConnector
@@ -19,16 +81,16 @@ kafka-app:
kafka-connector:
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -44,11 +106,11 @@ kafka-connector:
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -59,22 +121,11 @@ kafka-connector:
cleanup.policy: compact
models: # SchemaProvider is initiated with the values given here
model: model
- namespace: namespace # required
# `app` contains application-specific settings, hence it does not have a rigid
# structure. The fields below are just an example. Extensive documentation on
# connectors: https://kafka.apache.org/documentation/#connectconfigs
app: # required
tasks.max: 1
- # Helm repository configuration for resetter
- repo_config:
- repository_name: my-repo # required
- url: https://bakdata.github.io/kafka-connect-resetter/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.6" # Helm chart version
# Overriding Kafka Connect Resetter Helm values. E.g. to override the
# Image Tag etc.
resetter_values:
@@ -95,21 +146,21 @@ kafka-source-connector:
offset_topic: offset_topic
# Base Kubernetes App
#
-# Parent of: KafkaApp
+# Parent of: HelmApp
# Child of: PipelineComponent
kubernetes-app:
# Pipeline prefix that will prefix every component name. If you wish to not
# have any prefix you can specify an empty string.
- prefix: ${pipeline_name}-
+ prefix: ${pipeline.name}-
from: # Must not be null
topics: # read from topic
- ${pipeline_name}-input-topic:
+ ${pipeline.name}-input-topic:
type: input # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra
- ${pipeline_name}-input-pattern-topic:
+ ${pipeline.name}-input-pattern-topic:
type: pattern # Implied to be an input pattern if `role` is undefined
- ${pipeline_name}-extra-pattern-topic:
+ ${pipeline.name}-extra-pattern-topic:
type: pattern # Implied to be an extra pattern if `role` is defined
role: some-role
components: # read from specific component
@@ -125,11 +176,11 @@ kubernetes-app:
# Topic(s) into which the component will write output
to:
topics:
- ${pipeline_name}-output-topic:
+ ${pipeline.name}-output-topic:
type: output # Implied when role is NOT specified
- ${pipeline_name}-extra-topic:
+ ${pipeline.name}-extra-topic:
role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined
- ${pipeline_name}-error-topic:
+ ${pipeline.name}-error-topic:
type: error
# Currently KPOps supports Avro and JSON schemas.
key_schema: key-schema # must implement SchemaProvider to use
@@ -147,17 +198,6 @@ kubernetes-app:
image: exampleImage # Example
debug: false # Example
commandLine: {} # Example
- # Helm repository configuration (optional)
- # If not set the helm repo add will not be called. Useful when using local Helm charts
- repo_config:
- repository_name: bakdata-streams-bootstrap # required
- url: https://bakdata.github.io/streams-bootstrap/ # required
- repo_auth_flags:
- username: user
- password: pass
- ca_file: /home/user/path/to/ca-file
- insecure_skip_tls_verify: false
- version: "1.0.0" # Helm chart version
# Holds configuration to use as values for the streams bootstrap producer-app Helm
# chart.
#
@@ -170,8 +210,8 @@ producer-app:
# https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app
app: # required
streams: # required, producer-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
outputTopic: output_topic
extraOutputTopics:
output_role1: output_topic1
@@ -188,8 +228,8 @@ streams-app:
app: # required
# Streams Bootstrap streams section
streams: # required, streams-app-specific
- brokers: ${brokers} # required
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers} # required
+ schemaRegistryUrl: ${config.schema_registry.url}
inputTopics:
- topic1
- topic2
diff --git a/docs/docs/resources/pipeline-defaults/headers/defaults-helm-app.yaml b/docs/docs/resources/pipeline-defaults/headers/defaults-helm-app.yaml
new file mode 100644
index 000000000..bbc3b5622
--- /dev/null
+++ b/docs/docs/resources/pipeline-defaults/headers/defaults-helm-app.yaml
@@ -0,0 +1,5 @@
+# Kubernetes app managed through Helm with an associated Helm chart
+#
+# Parent of: KafkaApp
+# Child of: KubernetesApp
+helm-app:
diff --git a/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml b/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml
index f99e42e6d..cc1175938 100644
--- a/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml
+++ b/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml
@@ -1,5 +1,5 @@
# Base Kubernetes App
#
-# Parent of: KafkaApp
+# Parent of: HelmApp
# Child of: PipelineComponent
kubernetes-app:
diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env
index dec1d8b3a..078f56f07 100644
--- a/docs/docs/resources/variables/cli_env_vars.env
+++ b/docs/docs/resources/variables/cli_env_vars.env
@@ -5,13 +5,17 @@
# corresponding flag does not have to be specified in commands.
# Variables marked as required can instead be set as flags.
#
-# Base directory to the pipelines (default is current working
-# directory)
-KPOPS_PIPELINE_BASE_DIR=.
-# Path to the config.yaml file
-KPOPS_CONFIG_PATH=config.yaml
+# Path to the dir containing config.yaml files
+KPOPS_CONFIG_PATH=.
# Path to defaults folder
KPOPS_DEFAULT_PATH # No default value, not required
+# Path to dotenv file. Multiple files can be provided. The files will
+# be loaded in order, with each file overriding the previous one.
+KPOPS_DOTENV_PATH # No default value, not required
+# The environment you want to generate and deploy the pipeline to.
+# Suffix your environment files with this value (e.g.
+# defaults_development.yaml for environment=development).
+KPOPS_ENVIRONMENT # No default value, not required
# Path to YAML with pipeline definition
KPOPS_PIPELINE_PATH # No default value, required
# Comma separated list of steps to apply the command on
diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md
index 763cb936e..cb459f113 100644
--- a/docs/docs/resources/variables/cli_env_vars.md
+++ b/docs/docs/resources/variables/cli_env_vars.md
@@ -1,9 +1,10 @@
These variables are a lower priority alternative to the commands' flags. If a variable is set, the corresponding flag does not have to be specified in commands. Variables marked as required can instead be set as flags.
-| Name |Default Value|Required| Description |
-|-----------------------|-------------|--------|----------------------------------------------------------------------|
-|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory)|
-|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file |
-|KPOPS_DEFAULT_PATH | |False |Path to defaults folder |
-|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition |
-|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on |
+| Name |Default Value|Required| Description |
+|--------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+|KPOPS_CONFIG_PATH |. |False |Path to the dir containing config.yaml files |
+|KPOPS_DEFAULT_PATH | |False |Path to defaults folder |
+|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. |
+|KPOPS_ENVIRONMENT | |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|
+|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition |
+|KPOPS_PIPELINE_STEPS| |False |Comma separated list of steps to apply the command on |
diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env
index 308fb6334..42d2dead8 100644
--- a/docs/docs/resources/variables/config_env_vars.env
+++ b/docs/docs/resources/variables/config_env_vars.env
@@ -1,30 +1,66 @@
-# Pipeline config environment variables
+# Global config environment variables
#
# The default setup is shown. These variables are a lower priority
# alternative to the settings in `config.yaml`. Variables marked as
-# required can instead be set in the pipeline config.
+# required can instead be set in the global config.
#
-# environment
-# The environment you want to generate and deploy the pipeline to.
-# Suffix your environment files with this value (e.g.
-# defaults_development.yaml for environment=development).
-KPOPS_ENVIRONMENT # No default value, required
-# brokers
+# defaults_path
+# The path to the folder containing the defaults.yaml file and the
+# environment defaults files. Paths can either be absolute or relative
+# to `config.yaml`
+KPOPS_DEFAULTS_PATH=.
+# components_module
+# Custom Python module defining project-specific KPOps components
+KPOPS_COMPONENTS_MODULE # No default value, not required
+# pipeline_base_dir
+# Base directory to the pipelines (default is current working
+# directory)
+KPOPS_PIPELINE_BASE_DIR=.
+# kafka_brokers
# The comma separated Kafka brokers address.
KPOPS_KAFKA_BROKERS # No default value, required
-# schema_registry_url
+# defaults_filename_prefix
+# The name of the defaults file and the prefix of the defaults
+# environment file.
+KPOPS_DEFAULTS_FILENAME_PREFIX=defaults
+# topic_name_config.default_output_topic_name
+# Configures the value for the variable ${output_topic_name}
+KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline.name}-${component.name}
+# topic_name_config.default_error_topic_name
+# Configures the value for the variable ${error_topic_name}
+KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline.name}-${component.name}-error
+# schema_registry.enabled
+# Whether the Schema Registry handler should be initialized.
+KPOPS_SCHEMA_REGISTRY__ENABLED=False
+# schema_registry.url
# Address of the Schema Registry.
-KPOPS_SCHEMA_REGISTRY_URL # No default value, not required
-# kafka_rest_host
+KPOPS_SCHEMA_REGISTRY__URL=http://localhost:8081/
+# kafka_rest.url
# Address of the Kafka REST Proxy.
-KPOPS_REST_PROXY_HOST # No default value, not required
-# kafka_connect_host
+KPOPS_KAFKA_REST__URL=http://localhost:8082/
+# kafka_connect.url
# Address of Kafka Connect.
-KPOPS_CONNECT_HOST # No default value, not required
+KPOPS_KAFKA_CONNECT__URL=http://localhost:8083/
# timeout
# The timeout in seconds that specifies when actions like deletion or
# deploy timeout.
KPOPS_TIMEOUT=300
+# create_namespace
+# Flag for `helm upgrade --install`. Create the release namespace if
+# not present.
+KPOPS_CREATE_NAMESPACE=False
+# helm_config.context
+# Name of kubeconfig context (`--kube-context`)
+KPOPS_HELM_CONFIG__CONTEXT # No default value, not required
+# helm_config.debug
+# Run Helm in Debug mode
+KPOPS_HELM_CONFIG__DEBUG=False
+# helm_config.api_version
+# Kubernetes API version used for Capabilities.APIVersions
+KPOPS_HELM_CONFIG__API_VERSION # No default value, not required
+# helm_diff_config.ignore
+# Set of keys that should not be checked.
+KPOPS_HELM_DIFF_CONFIG__IGNORE # No default value, required
# retain_clean_jobs
# Whether to retain cleanup jobs in the cluster or uninstall them
# after completion.
diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md
index 2928f2ccd..ef0a7726f 100644
--- a/docs/docs/resources/variables/config_env_vars.md
+++ b/docs/docs/resources/variables/config_env_vars.md
@@ -1,11 +1,22 @@
-These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config.
+These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the global config.
-| Name |Default Value|Required| Description | Setting name |
-|-------------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------|
-|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment |
-|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers |
-|KPOPS_SCHEMA_REGISTRY_URL| |False |Address of the Schema Registry. |schema_registry_url|
-|KPOPS_REST_PROXY_HOST | |False |Address of the Kafka REST Proxy. |kafka_rest_host |
-|KPOPS_CONNECT_HOST | |False |Address of Kafka Connect. |kafka_connect_host |
-|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout |
-|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. |retain_clean_jobs |
+| Name | Default Value |Required| Description | Setting name |
+|--------------------------------------------------|----------------------------------------|--------|------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------|
+|KPOPS_DEFAULTS_PATH |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`|defaults_path |
+|KPOPS_COMPONENTS_MODULE | |False |Custom Python module defining project-specific KPOps components |components_module |
+|KPOPS_PIPELINE_BASE_DIR |. |False |Base directory to the pipelines (default is current working directory) |pipeline_base_dir |
+|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers |
+|KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix |
+|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline.name}-${component.name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name|
+|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline.name}-${component.name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name |
+|KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. |schema_registry.enabled |
+|KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url |
+|KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url |
+|KPOPS_KAFKA_CONNECT__URL |http://localhost:8083/ |False |Address of Kafka Connect. |kafka_connect.url |
+|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout |
+|KPOPS_CREATE_NAMESPACE |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace |
+|KPOPS_HELM_CONFIG__CONTEXT | |False |Name of kubeconfig context (`--kube-context`) |helm_config.context |
+|KPOPS_HELM_CONFIG__DEBUG |False |False |Run Helm in Debug mode |helm_config.debug |
+|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for Capabilities.APIVersions |helm_config.api_version |
+|KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore |
+|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain cleanup jobs in the cluster or uninstall them after completion. |retain_clean_jobs |
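
The double underscore in these variable names denotes nesting and corresponds to the `Setting name` column. As an illustration (values are placeholders), the following `config.yaml` snippet configures the same settings as the corresponding variables:

```yaml
# config.yaml equivalents of a few of the environment variables above
schema_registry:              # KPOPS_SCHEMA_REGISTRY__ENABLED / KPOPS_SCHEMA_REGISTRY__URL
  enabled: true
  url: "http://localhost:8081/"
helm_config:                  # KPOPS_HELM_CONFIG__CONTEXT
  context: dev-storage
topic_name_config:            # KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME
  default_error_topic_name: ${pipeline.name}-${component.name}-error
```
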
diff --git a/docs/docs/resources/variables/variable_substitution.yaml b/docs/docs/resources/variables/variable_substitution.yaml
index 16e042586..8a4cf60ea 100644
--- a/docs/docs/resources/variables/variable_substitution.yaml
+++ b/docs/docs/resources/variables/variable_substitution.yaml
@@ -1,9 +1,9 @@
- type: scheduled-producer
app:
labels:
- app_type: "${component_type}"
- app_name: "${component_name}"
- app_schedule: "${component_app_schedule}"
+ app_type: "${component.type}"
+ app_name: "${component.name}"
+ app_schedule: "${component.app.schedule}"
commandLine:
FAKE_ARG: "fake-arg-value"
schedule: "30 3/8 * * *"
@@ -20,11 +20,11 @@
name: "filter-app"
app:
labels:
- app_type: "${component_type}"
- app_name: "${component_name}"
- app_resources_requests_memory: "${component_app_resources_requests_memory}"
- ${component_type}: "${component_app_labels_app_name}-${component_app_labels_app_type}"
- test_placeholder_in_placeholder: "${component_app_labels_${component_type}}"
+ app_type: "${component.type}"
+ app_name: "${component.name}"
+ app_resources_requests_memory: "${component.app.resources.requests.memory}"
+ ${component.type}: "${component.app.labels.app_name}-${component.app.labels.app_type}"
+ test_placeholder_in_placeholder: "${component.app.labels.${component.type}}"
commandLine:
TYPE: "nothing"
resources:
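
In short, the flat underscore variables become dot-separated paths into the model (`${component_name}` → `${component.name}`, `${component_app_schedule}` → `${component.app.schedule}`), and a placeholder may itself contain another placeholder, as in `${component.app.labels.${component.type}}`, where the inner part is presumably resolved first. A hypothetical, abbreviated sketch combining the renamed variables with the configured topic name variables:

```yaml
# Hypothetical, abbreviated component entry (other required fields omitted)
- type: scheduled-producer
  app:
    labels:
      app_name: "${component.name}"   # the component's name
      app_type: "${component.type}"   # resolves to "scheduled-producer"
  to:
    topics:
      ${output_topic_name}:           # from topic_name_config.default_output_topic_name
        type: output
```
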
diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json
index a2f18eb6b..98056fca0 100644
--- a/docs/docs/schema/config.json
+++ b/docs/docs/schema/config.json
@@ -1,19 +1,36 @@
{
- "$ref": "#/definitions/PipelineConfig",
- "definitions": {
+ "$defs": {
"HelmConfig": {
"description": "Global Helm configuration.",
"properties": {
"api_version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Kubernetes API version used for Capabilities.APIVersions",
- "title": "API version",
- "type": "string"
+ "title": "API version"
},
"context": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Name of kubeconfig context (`--kube-context`)",
- "example": "dev-storage",
- "title": "Context",
- "type": "string"
+ "examples": [
+ "dev-storage"
+ ],
+ "title": "Context"
},
"debug": {
"default": false,
@@ -29,7 +46,9 @@
"properties": {
"ignore": {
"description": "Set of keys that should not be checked.",
- "example": "- name\n- imageTag",
+ "examples": [
+ "- name\n- imageTag"
+ ],
"items": {
"type": "string"
},
@@ -41,184 +60,73 @@
"title": "HelmDiffConfig",
"type": "object"
},
- "PipelineConfig": {
+ "KafkaConnectConfig": {
"additionalProperties": false,
- "description": "Pipeline configuration unrelated to the components.",
+ "description": "Configuration for Kafka Connect.",
"properties": {
- "brokers": {
- "description": "The comma separated Kafka brokers address.",
- "env": "KPOPS_KAFKA_BROKERS",
- "env_names": [
- "kpops_kafka_brokers"
- ],
- "example": "broker1:9092,broker2:9092,broker3:9092",
- "title": "Brokers",
- "type": "string"
- },
- "create_namespace": {
- "default": false,
- "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.",
- "env_names": [
- "create_namespace"
- ],
- "title": "Create Namespace",
- "type": "boolean"
- },
- "defaults_filename_prefix": {
- "default": "defaults",
- "description": "The name of the defaults file and the prefix of the defaults environment file.",
- "env_names": [
- "defaults_filename_prefix"
- ],
- "title": "Defaults Filename Prefix",
- "type": "string"
- },
- "defaults_path": {
- "default": ".",
- "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`",
- "env_names": [
- "defaults_path"
- ],
- "example": "defaults",
- "format": "path",
- "title": "Defaults Path",
- "type": "string"
- },
- "environment": {
- "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).",
- "env": "KPOPS_ENVIRONMENT",
- "env_names": [
- "kpops_environment"
- ],
- "example": "development",
- "title": "Environment",
- "type": "string"
- },
- "helm_config": {
- "allOf": [
- {
- "$ref": "#/definitions/HelmConfig"
- }
- ],
- "default": {
- "api_version": null,
- "context": null,
- "debug": false
- },
- "description": "Global flags for Helm.",
- "env_names": [
- "helm_config"
- ],
- "title": "Helm Config"
- },
- "helm_diff_config": {
- "allOf": [
- {
- "$ref": "#/definitions/HelmDiffConfig"
- }
- ],
- "default": {
- "ignore": []
- },
- "description": "Configure Helm Diff.",
- "env_names": [
- "helm_diff_config"
- ],
- "title": "Helm Diff Config"
- },
- "kafka_connect_host": {
+ "url": {
+ "default": "http://localhost:8083/",
"description": "Address of Kafka Connect.",
- "env": "KPOPS_CONNECT_HOST",
- "env_names": [
- "kpops_connect_host"
- ],
- "example": "http://localhost:8083",
- "title": "Kafka Connect Host",
+ "format": "uri",
+ "minLength": 1,
+ "title": "Url",
"type": "string"
- },
- "kafka_rest_host": {
+ }
+ },
+ "title": "KafkaConnectConfig",
+ "type": "object"
+ },
+ "KafkaRestConfig": {
+ "additionalProperties": false,
+ "description": "Configuration for Kafka REST Proxy.",
+ "properties": {
+ "url": {
+ "default": "http://localhost:8082/",
"description": "Address of the Kafka REST Proxy.",
- "env": "KPOPS_REST_PROXY_HOST",
- "env_names": [
- "kpops_rest_proxy_host"
- ],
- "example": "http://localhost:8082",
- "title": "Kafka Rest Host",
+ "format": "uri",
+ "minLength": 1,
+ "title": "Url",
"type": "string"
- },
- "retain_clean_jobs": {
+ }
+ },
+ "title": "KafkaRestConfig",
+ "type": "object"
+ },
+ "SchemaRegistryConfig": {
+ "additionalProperties": false,
+ "description": "Configuration for Schema Registry.",
+ "properties": {
+ "enabled": {
"default": false,
- "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.",
- "env": "KPOPS_RETAIN_CLEAN_JOBS",
- "env_names": [
- "kpops_retain_clean_jobs"
- ],
- "title": "Retain Clean Jobs",
+ "description": "Whether the Schema Registry handler should be initialized.",
+ "title": "Enabled",
"type": "boolean"
},
- "schema_registry_url": {
+ "url": {
+ "default": "http://localhost:8081/",
"description": "Address of the Schema Registry.",
- "env": "KPOPS_SCHEMA_REGISTRY_URL",
- "env_names": [
- "kpops_schema_registry_url"
- ],
- "example": "http://localhost:8081",
- "title": "Schema Registry Url",
+ "format": "uri",
+ "minLength": 1,
+ "title": "Url",
"type": "string"
- },
- "timeout": {
- "default": 300,
- "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.",
- "env": "KPOPS_TIMEOUT",
- "env_names": [
- "kpops_timeout"
- ],
- "title": "Timeout",
- "type": "integer"
- },
- "topic_name_config": {
- "allOf": [
- {
- "$ref": "#/definitions/TopicNameConfig"
- }
- ],
- "default": {
- "default_error_topic_name": "${pipeline_name}-${component_name}-error",
- "default_output_topic_name": "${pipeline_name}-${component_name}"
- },
- "description": "Configure the topic name variables you can use in the pipeline definition.",
- "env_names": [
- "topic_name_config"
- ],
- "title": "Topic Name Config"
}
},
- "required": [
- "environment",
- "brokers"
- ],
- "title": "PipelineConfig",
+ "title": "SchemaRegistryConfig",
"type": "object"
},
"TopicNameConfig": {
"additionalProperties": false,
- "description": "Configures topic names.",
+ "description": "Configure the topic name variables you can use in the pipeline definition.",
"properties": {
"default_error_topic_name": {
- "default": "${pipeline_name}-${component_name}-error",
+ "default": "${pipeline.name}-${component.name}-error",
"description": "Configures the value for the variable ${error_topic_name}",
- "env_names": [
- "default_error_topic_name"
- ],
"title": "Default Error Topic Name",
"type": "string"
},
"default_output_topic_name": {
- "default": "${pipeline_name}-${component_name}",
+ "default": "${pipeline.name}-${component.name}",
"description": "Configures the value for the variable ${output_topic_name}",
- "env_names": [
- "default_output_topic_name"
- ],
"title": "Default Output Topic Name",
"type": "string"
}
@@ -227,5 +135,146 @@
"type": "object"
}
},
- "title": "KPOps config schema"
+ "additionalProperties": false,
+ "description": "Global configuration for KPOps project.",
+ "properties": {
+ "components_module": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Custom Python module defining project-specific KPOps components",
+ "title": "Components Module"
+ },
+ "create_namespace": {
+ "default": false,
+ "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.",
+ "title": "Create Namespace",
+ "type": "boolean"
+ },
+ "defaults_filename_prefix": {
+ "default": "defaults",
+ "description": "The name of the defaults file and the prefix of the defaults environment file.",
+ "title": "Defaults Filename Prefix",
+ "type": "string"
+ },
+ "defaults_path": {
+ "default": ".",
+ "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`",
+ "examples": [
+ "defaults",
+ "."
+ ],
+ "format": "path",
+ "title": "Defaults Path",
+ "type": "string"
+ },
+ "helm_config": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmConfig"
+ }
+ ],
+ "default": {
+ "api_version": null,
+ "context": null,
+ "debug": false
+ },
+ "description": "Global flags for Helm."
+ },
+ "helm_diff_config": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmDiffConfig"
+ }
+ ],
+ "default": {
+ "ignore": []
+ },
+ "description": "Configure Helm Diff."
+ },
+ "kafka_brokers": {
+ "description": "The comma separated Kafka brokers address.",
+ "examples": [
+ "broker1:9092,broker2:9092,broker3:9092"
+ ],
+ "title": "Kafka Brokers",
+ "type": "string"
+ },
+ "kafka_connect": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KafkaConnectConfig"
+ }
+ ],
+ "default": {
+ "url": "http://localhost:8083/"
+ },
+ "description": "Configuration for Kafka Connect."
+ },
+ "kafka_rest": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KafkaRestConfig"
+ }
+ ],
+ "default": {
+ "url": "http://localhost:8082/"
+ },
+ "description": "Configuration for Kafka REST Proxy."
+ },
+ "pipeline_base_dir": {
+ "default": ".",
+ "description": "Base directory to the pipelines (default is current working directory)",
+ "format": "path",
+ "title": "Pipeline Base Dir",
+ "type": "string"
+ },
+ "retain_clean_jobs": {
+ "default": false,
+      "description": "Whether to retain cleanup jobs in the cluster or uninstall them after completion.",
+ "title": "Retain Clean Jobs",
+ "type": "boolean"
+ },
+ "schema_registry": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/SchemaRegistryConfig"
+ }
+ ],
+ "default": {
+ "enabled": false,
+ "url": "http://localhost:8081/"
+ },
+ "description": "Configuration for Schema Registry."
+ },
+ "timeout": {
+ "default": 300,
+ "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.",
+ "title": "Timeout",
+ "type": "integer"
+ },
+ "topic_name_config": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/TopicNameConfig"
+ }
+ ],
+ "default": {
+ "default_error_topic_name": "${pipeline.name}-${component.name}-error",
+ "default_output_topic_name": "${pipeline.name}-${component.name}"
+ },
+ "description": "Configure the topic name variables you can use in the pipeline definition."
+ }
+ },
+ "required": [
+ "kafka_brokers"
+ ],
+ "title": "KpopsConfig",
+ "type": "object"
}
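
With the schema now using standard `$defs` references, it can also drive editor validation and autocompletion. A sketch, assuming an editor backed by the yaml-language-server and with the schema path adjusted to wherever `config.json` is available:

```yaml
# yaml-language-server: $schema=docs/docs/schema/config.json   # illustrative path
kafka_brokers: "broker1:9092,broker2:9092,broker3:9092"        # the only required property
```
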
diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json
new file mode 100644
index 000000000..aa5db63da
--- /dev/null
+++ b/docs/docs/schema/defaults.json
@@ -0,0 +1,1594 @@
+{
+ "$defs": {
+ "FromSection": {
+ "additionalProperties": false,
+ "description": "Holds multiple input topics.",
+ "properties": {
+ "components": {
+ "additionalProperties": {
+ "$ref": "#/$defs/FromTopic"
+ },
+ "default": {},
+ "description": "Components to read from",
+ "title": "Components",
+ "type": "object"
+ },
+ "topics": {
+ "additionalProperties": {
+ "$ref": "#/$defs/FromTopic"
+ },
+ "default": {},
+ "description": "Input topics",
+ "title": "Topics",
+ "type": "object"
+ }
+ },
+ "title": "FromSection",
+ "type": "object"
+ },
+ "FromTopic": {
+ "additionalProperties": false,
+ "description": "Input topic.",
+ "properties": {
+ "role": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`",
+ "title": "Role"
+ },
+ "type": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/InputTopicTypes"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic type"
+ }
+ },
+ "title": "FromTopic",
+ "type": "object"
+ },
+ "HelmApp": {
+ "additionalProperties": true,
+ "description": "Kubernetes app managed through Helm with an associated Helm chart.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmAppValues"
+ }
+ ],
+ "description": "Helm app values"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "namespace": {
+ "description": "Kubernetes namespace in which the component shall be deployed",
+ "title": "Namespace",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "repo_config": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/HelmRepoConfig"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Configuration of the Helm chart repo to be used for deploying the component"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ },
+ "type": {
+ "const": "helm-app",
+ "title": "Type"
+ },
+ "version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Helm chart version",
+ "title": "Version"
+ }
+ },
+ "required": [
+ "name",
+ "namespace",
+ "app",
+ "type"
+ ],
+ "title": "HelmApp",
+ "type": "object"
+ },
+ "HelmAppValues": {
+ "additionalProperties": true,
+ "description": "Helm app values.",
+ "properties": {
+ "nameOverride": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Override name with this value",
+ "title": "Nameoverride"
+ }
+ },
+ "title": "HelmAppValues",
+ "type": "object"
+ },
+ "HelmRepoConfig": {
+ "description": "Helm repository configuration.",
+ "properties": {
+ "repo_auth_flags": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/RepoAuthFlags"
+ }
+ ],
+ "default": {
+ "ca_file": null,
+ "cert_file": null,
+ "insecure_skip_tls_verify": false,
+ "password": null,
+ "username": null
+ },
+ "description": "Authorisation-related flags"
+ },
+ "repository_name": {
+ "description": "Name of the Helm repository",
+ "title": "Repository Name",
+ "type": "string"
+ },
+ "url": {
+ "description": "URL to the Helm repository",
+ "title": "Url",
+ "type": "string"
+ }
+ },
+ "required": [
+ "repository_name",
+ "url"
+ ],
+ "title": "HelmRepoConfig",
+ "type": "object"
+ },
+ "InputTopicTypes": {
+ "description": "Input topic types.\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)",
+ "enum": [
+ "input",
+ "pattern"
+ ],
+ "title": "InputTopicTypes",
+ "type": "string"
+ },
+ "KafkaApp": {
+ "additionalProperties": true,
+ "description": "Base component for Kafka-based components.\nProducer or streaming apps should inherit from this class.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KafkaAppValues"
+ }
+ ],
+ "description": "Application-specific settings"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ }
+ },
+ "required": [
+ "name",
+ "app"
+ ],
+ "title": "KafkaApp",
+ "type": "object"
+ },
+ "KafkaAppValues": {
+ "additionalProperties": true,
+ "description": "Settings specific to Kafka Apps.",
+ "properties": {
+ "nameOverride": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Override name with this value",
+ "title": "Nameoverride"
+ },
+ "streams": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KafkaStreamsConfig"
+ }
+ ],
+ "description": "Kafka streams config"
+ }
+ },
+ "required": [
+ "streams"
+ ],
+ "title": "KafkaAppValues",
+ "type": "object"
+ },
+ "KafkaConnector": {
+ "additionalProperties": true,
+ "description": "Base class for all Kafka connectors.\nShould only be used to set defaults",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KafkaConnectorConfig"
+ }
+ ],
+ "description": "Application-specific settings"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "resetter_namespace": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed",
+ "title": "Resetter Namespace"
+ },
+ "resetter_values": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmAppValues"
+ }
+ ],
+ "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc."
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ }
+ },
+ "required": [
+ "name",
+ "app"
+ ],
+ "title": "KafkaConnector",
+ "type": "object"
+ },
+ "KafkaConnectorConfig": {
+ "additionalProperties": true,
+ "additional_properties": {
+ "type": "string"
+ },
+ "description": "Settings specific to Kafka Connectors.",
+ "properties": {
+ "connector.class": {
+ "title": "Connector.Class",
+ "type": "string"
+ }
+ },
+ "required": [
+ "connector.class"
+ ],
+ "title": "KafkaConnectorConfig",
+ "type": "object"
+ },
+ "KafkaSinkConnector": {
+ "additionalProperties": true,
+ "description": "Kafka sink connector model.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KafkaConnectorConfig"
+ }
+ ],
+ "description": "Application-specific settings"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "resetter_namespace": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed",
+ "title": "Resetter Namespace"
+ },
+ "resetter_values": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmAppValues"
+ }
+ ],
+ "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc."
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ },
+ "type": {
+ "const": "kafka-sink-connector",
+ "title": "Type"
+ }
+ },
+ "required": [
+ "name",
+ "app",
+ "type"
+ ],
+ "title": "KafkaSinkConnector",
+ "type": "object"
+ },
+ "KafkaSourceConnector": {
+ "additionalProperties": true,
+ "description": "Kafka source connector model.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KafkaConnectorConfig"
+ }
+ ],
+ "description": "Application-specific settings"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "offset_topic": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running",
+ "title": "Offset Topic"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "resetter_namespace": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed",
+ "title": "Resetter Namespace"
+ },
+ "resetter_values": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmAppValues"
+ }
+ ],
+ "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc."
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ },
+ "type": {
+ "const": "kafka-source-connector",
+ "title": "Type"
+ }
+ },
+ "required": [
+ "name",
+ "app",
+ "type"
+ ],
+ "title": "KafkaSourceConnector",
+ "type": "object"
+ },
+ "KafkaStreamsConfig": {
+ "additionalProperties": true,
+ "description": "Kafka Streams config.",
+ "properties": {
+ "brokers": {
+ "description": "Brokers",
+ "title": "Brokers",
+ "type": "string"
+ },
+ "schemaRegistryUrl": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "URL of the schema registry",
+ "title": "Schemaregistryurl"
+ }
+ },
+ "required": [
+ "brokers"
+ ],
+ "title": "KafkaStreamsConfig",
+ "type": "object"
+ },
+ "KubernetesApp": {
+ "additionalProperties": true,
+ "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/KubernetesAppValues"
+ }
+ ],
+ "description": "Application-specific settings"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "namespace": {
+ "description": "Kubernetes namespace in which the component shall be deployed",
+ "title": "Namespace",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ }
+ },
+ "required": [
+ "name",
+ "namespace",
+ "app"
+ ],
+ "title": "KubernetesApp",
+ "type": "object"
+ },
+ "KubernetesAppValues": {
+ "additionalProperties": true,
+ "description": "Settings specific to Kubernetes apps.",
+ "properties": {},
+ "title": "KubernetesAppValues",
+ "type": "object"
+ },
+ "OutputTopicTypes": {
+ "description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)",
+ "enum": [
+ "output",
+ "error"
+ ],
+ "title": "OutputTopicTypes",
+ "type": "string"
+ },
+ "PipelineComponent": {
+ "additionalProperties": true,
+ "description": "Base class for all components.",
+ "properties": {
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ }
+ },
+ "required": [
+ "name"
+ ],
+ "title": "PipelineComponent",
+ "type": "object"
+ },
+ "ProducerApp": {
+ "additionalProperties": true,
+ "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. Note that the producer does not support error topics.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/ProducerAppValues"
+ }
+ ],
+ "description": "Application-specific settings"
+ },
+ "from": {
+ "default": null,
+ "description": "Producer doesn't support FromSection",
+ "title": "From",
+ "type": "null"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "namespace": {
+ "description": "Kubernetes namespace in which the component shall be deployed",
+ "title": "Namespace",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "repo_config": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmRepoConfig"
+ }
+ ],
+ "default": {
+ "repo_auth_flags": {
+ "ca_file": null,
+ "cert_file": null,
+ "insecure_skip_tls_verify": false,
+ "password": null,
+ "username": null
+ },
+ "repository_name": "bakdata-streams-bootstrap",
+ "url": "https://bakdata.github.io/streams-bootstrap/"
+ },
+ "description": "Configuration of the Helm chart repo to be used for deploying the component"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ },
+ "type": {
+ "const": "producer-app",
+ "title": "Type"
+ },
+ "version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": "2.9.0",
+ "description": "Helm chart version",
+ "title": "Version"
+ }
+ },
+ "required": [
+ "name",
+ "namespace",
+ "app",
+ "type"
+ ],
+ "title": "ProducerApp",
+ "type": "object"
+ },
+ "ProducerAppValues": {
+ "additionalProperties": true,
+ "description": "Settings specific to producers.",
+ "properties": {
+ "nameOverride": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Override name with this value",
+ "title": "Nameoverride"
+ },
+ "streams": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/ProducerStreamsConfig"
+ }
+ ],
+ "description": "Kafka Streams settings"
+ }
+ },
+ "required": [
+ "streams"
+ ],
+ "title": "ProducerAppValues",
+ "type": "object"
+ },
+ "ProducerStreamsConfig": {
+ "additionalProperties": true,
+ "description": "Kafka Streams settings specific to Producer.",
+ "properties": {
+ "brokers": {
+ "description": "Brokers",
+ "title": "Brokers",
+ "type": "string"
+ },
+ "extraOutputTopics": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "default": {},
+ "description": "Extra output topics",
+ "title": "Extraoutputtopics",
+ "type": "object"
+ },
+ "outputTopic": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Output topic",
+ "title": "Outputtopic"
+ },
+ "schemaRegistryUrl": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "URL of the schema registry",
+ "title": "Schemaregistryurl"
+ }
+ },
+ "required": [
+ "brokers"
+ ],
+ "title": "ProducerStreamsConfig",
+ "type": "object"
+ },
+ "RepoAuthFlags": {
+ "description": "Authorisation-related flags for `helm repo`.",
+ "properties": {
+ "ca_file": {
+ "anyOf": [
+ {
+ "format": "path",
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Path to CA bundle file to verify certificates of HTTPS-enabled servers",
+ "title": "Ca File"
+ },
+ "cert_file": {
+ "anyOf": [
+ {
+ "format": "path",
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Path to SSL certificate file to identify HTTPS client",
+ "title": "Cert File"
+ },
+ "insecure_skip_tls_verify": {
+ "default": false,
+ "description": "If true, Kubernetes API server's certificate will not be checked for validity",
+ "title": "Insecure Skip Tls Verify",
+ "type": "boolean"
+ },
+ "password": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Password",
+ "title": "Password"
+ },
+ "username": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Username",
+ "title": "Username"
+ }
+ },
+ "title": "RepoAuthFlags",
+ "type": "object"
+ },
+ "StreamsApp": {
+ "additionalProperties": true,
+ "description": "StreamsApp component that configures a streams-bootstrap app.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/StreamsAppValues"
+ }
+ ],
+ "description": "Application-specific settings"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "namespace": {
+ "description": "Kubernetes namespace in which the component shall be deployed",
+ "title": "Namespace",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "repo_config": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmRepoConfig"
+ }
+ ],
+ "default": {
+ "repo_auth_flags": {
+ "ca_file": null,
+ "cert_file": null,
+ "insecure_skip_tls_verify": false,
+ "password": null,
+ "username": null
+ },
+ "repository_name": "bakdata-streams-bootstrap",
+ "url": "https://bakdata.github.io/streams-bootstrap/"
+ },
+ "description": "Configuration of the Helm chart repo to be used for deploying the component"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ },
+ "type": {
+ "const": "streams-app",
+ "title": "Type"
+ },
+ "version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": "2.9.0",
+ "description": "Helm chart version",
+ "title": "Version"
+ }
+ },
+ "required": [
+ "name",
+ "namespace",
+ "app",
+ "type"
+ ],
+ "title": "StreamsApp",
+ "type": "object"
+ },
+ "StreamsAppAutoScaling": {
+ "additionalProperties": true,
+ "description": "Kubernetes Event-driven Autoscaling config.",
+ "properties": {
+ "consumerGroup": {
+ "description": "Name of the consumer group used for checking the offset on the topic and processing the related lag.",
+ "title": "Consumer group",
+ "type": "string"
+ },
+ "cooldownPeriod": {
+ "default": 300,
+ "description": "The period to wait after the last trigger reported active before scaling the resource back to 0. https://keda.sh/docs/2.9/concepts/scaling-deployments/#cooldownperiod",
+ "title": "Cooldown period",
+ "type": "integer"
+ },
+ "enabled": {
+ "default": false,
+ "description": "",
+ "title": "Enabled",
+ "type": "boolean"
+ },
+ "idleReplicas": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "If this property is set, KEDA will scale the resource down to this number of replicas. https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount",
+ "title": "Idle replica count"
+ },
+ "lagThreshold": {
+ "description": "Average target value to trigger scaling actions.",
+ "title": "Lag threshold",
+ "type": "integer"
+ },
+ "maxReplicas": {
+ "default": 1,
+ "description": "This setting is passed to the HPA definition that KEDA will create for a given resource and holds the maximum number of replicas of the target resouce. https://keda.sh/docs/2.9/concepts/scaling-deployments/#maxreplicacount",
+ "title": "Max replica count",
+ "type": "integer"
+ },
+ "minReplicas": {
+ "default": 0,
+ "description": "Minimum number of replicas KEDA will scale the resource down to. \"https://keda.sh/docs/2.9/concepts/scaling-deployments/#minreplicacount\"",
+ "title": "Min replica count",
+ "type": "integer"
+ },
+ "offsetResetPolicy": {
+ "default": "earliest",
+ "description": "The offset reset policy for the consumer if the consumer group is not yet subscribed to a partition.",
+ "title": "Offset reset policy",
+ "type": "string"
+ },
+ "pollingInterval": {
+ "default": 30,
+ "description": "This is the interval to check each trigger on. https://keda.sh/docs/2.9/concepts/scaling-deployments/#pollinginterval",
+ "title": "Polling interval",
+ "type": "integer"
+ },
+ "topics": {
+ "default": [],
+ "description": "List of auto-generated Kafka Streams topics used by the streams app.",
+ "items": {
+ "type": "string"
+ },
+ "title": "Topics",
+ "type": "array"
+ }
+ },
+ "required": [
+ "consumerGroup",
+ "lagThreshold"
+ ],
+ "title": "StreamsAppAutoScaling",
+ "type": "object"
+ },
+ "StreamsAppValues": {
+ "additionalProperties": true,
+ "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.",
+ "properties": {
+ "autoscaling": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/StreamsAppAutoScaling"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Kubernetes event-driven autoscaling config"
+ },
+ "nameOverride": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Override name with this value",
+ "title": "Nameoverride"
+ },
+ "streams": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/StreamsConfig"
+ }
+ ],
+ "description": "streams-bootstrap streams section"
+ }
+ },
+ "required": [
+ "streams"
+ ],
+ "title": "StreamsAppValues",
+ "type": "object"
+ },
+ "StreamsBootstrap": {
+ "additionalProperties": true,
+ "description": "Base for components with a streams-bootstrap Helm chart.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmAppValues"
+ }
+ ],
+ "description": "Helm app values"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "namespace": {
+ "description": "Kubernetes namespace in which the component shall be deployed",
+ "title": "Namespace",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "repo_config": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmRepoConfig"
+ }
+ ],
+ "default": {
+ "repo_auth_flags": {
+ "ca_file": null,
+ "cert_file": null,
+ "insecure_skip_tls_verify": false,
+ "password": null,
+ "username": null
+ },
+ "repository_name": "bakdata-streams-bootstrap",
+ "url": "https://bakdata.github.io/streams-bootstrap/"
+ },
+ "description": "Configuration of the Helm chart repo to be used for deploying the component"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ },
+ "version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": "2.9.0",
+ "description": "Helm chart version",
+ "title": "Version"
+ }
+ },
+ "required": [
+ "name",
+ "namespace",
+ "app"
+ ],
+ "title": "StreamsBootstrap",
+ "type": "object"
+ },
+ "StreamsConfig": {
+ "additionalProperties": true,
+ "description": "Streams Bootstrap streams section.",
+ "properties": {
+ "brokers": {
+ "description": "Brokers",
+ "title": "Brokers",
+ "type": "string"
+ },
+ "config": {
+ "default": {},
+ "description": "Configuration",
+ "title": "Config",
+ "type": "object"
+ },
+ "deleteOutput": {
+ "anyOf": [
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup",
+ "title": "Deleteoutput"
+ },
+ "errorTopic": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Error topic",
+ "title": "Errortopic"
+ },
+ "extraInputPatterns": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "default": {},
+ "description": "Extra input patterns",
+ "title": "Extrainputpatterns",
+ "type": "object"
+ },
+ "extraInputTopics": {
+ "additionalProperties": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ "default": {},
+ "description": "Extra input topics",
+ "title": "Extrainputtopics",
+ "type": "object"
+ },
+ "extraOutputTopics": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "default": {},
+ "description": "Extra output topics",
+ "title": "Extraoutputtopics",
+ "type": "object"
+ },
+ "inputPattern": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Input pattern",
+ "title": "Inputpattern"
+ },
+ "inputTopics": {
+ "default": [],
+ "description": "Input topics",
+ "items": {
+ "type": "string"
+ },
+ "title": "Inputtopics",
+ "type": "array"
+ },
+ "outputTopic": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Output topic",
+ "title": "Outputtopic"
+ },
+ "schemaRegistryUrl": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "URL of the schema registry",
+ "title": "Schemaregistryurl"
+ }
+ },
+ "required": [
+ "brokers"
+ ],
+ "title": "StreamsConfig",
+ "type": "object"
+ },
+ "ToSection": {
+ "additionalProperties": false,
+ "description": "Holds multiple output topics.",
+ "properties": {
+ "models": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "default": {},
+ "description": "Data models",
+ "title": "Models",
+ "type": "object"
+ },
+ "topics": {
+ "additionalProperties": {
+ "$ref": "#/$defs/TopicConfig"
+ },
+ "default": {},
+ "description": "Output topics",
+ "title": "Topics",
+ "type": "object"
+ }
+ },
+ "title": "ToSection",
+ "type": "object"
+ },
+ "TopicConfig": {
+ "additionalProperties": false,
+ "description": "Configure an output topic.",
+ "properties": {
+ "configs": {
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "integer"
+ }
+ ]
+ },
+ "default": {},
+ "description": "Topic configs",
+ "title": "Configs",
+ "type": "object"
+ },
+ "key_schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Key schema class name",
+ "title": "Key schema"
+ },
+ "partitions_count": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Number of partitions into which the topic is divided",
+ "title": "Partitions count"
+ },
+ "replication_factor": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Replication factor of the topic",
+ "title": "Replication factor"
+ },
+ "role": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`",
+ "title": "Role"
+ },
+ "type": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/OutputTopicTypes"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic type",
+ "title": "Topic type"
+ },
+ "value_schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Value schema class name",
+ "title": "Value schema"
+ }
+ },
+ "title": "TopicConfig",
+ "type": "object"
+ }
+ },
+ "properties": {
+ "helm-app": {
+ "$ref": "#/$defs/HelmApp"
+ },
+ "kafka-app": {
+ "$ref": "#/$defs/KafkaApp"
+ },
+ "kafka-connector": {
+ "$ref": "#/$defs/KafkaConnector"
+ },
+ "kafka-sink-connector": {
+ "$ref": "#/$defs/KafkaSinkConnector"
+ },
+ "kafka-source-connector": {
+ "$ref": "#/$defs/KafkaSourceConnector"
+ },
+ "kubernetes-app": {
+ "$ref": "#/$defs/KubernetesApp"
+ },
+ "pipeline-component": {
+ "$ref": "#/$defs/PipelineComponent"
+ },
+ "producer-app": {
+ "$ref": "#/$defs/ProducerApp"
+ },
+ "streams-app": {
+ "$ref": "#/$defs/StreamsApp"
+ },
+ "streams-bootstrap": {
+ "$ref": "#/$defs/StreamsBootstrap"
+ }
+ },
+ "required": [
+ "helm-app",
+ "kafka-app",
+ "kafka-connector",
+ "kafka-sink-connector",
+ "kafka-source-connector",
+ "kubernetes-app",
+ "pipeline-component",
+ "producer-app",
+ "streams-app",
+ "streams-bootstrap"
+ ],
+ "title": "DefaultsSchema",
+ "type": "object"
+}
diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json
index 7e77b0ddd..186863f62 100644
--- a/docs/docs/schema/pipeline.json
+++ b/docs/docs/schema/pipeline.json
@@ -1,12 +1,12 @@
{
- "definitions": {
+ "$defs": {
"FromSection": {
"additionalProperties": false,
"description": "Holds multiple input topics.",
"properties": {
"components": {
"additionalProperties": {
- "$ref": "#/definitions/FromTopic"
+ "$ref": "#/$defs/FromTopic"
},
"default": {},
"description": "Components to read from",
@@ -15,7 +15,7 @@
},
"topics": {
"additionalProperties": {
- "$ref": "#/definitions/FromTopic"
+ "$ref": "#/$defs/FromTopic"
},
"default": {},
"description": "Input topics",
@@ -31,29 +31,154 @@
"description": "Input topic.",
"properties": {
"role": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`",
- "title": "Role",
- "type": "string"
+ "title": "Role"
},
"type": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/InputTopicTypes"
+ },
{
- "$ref": "#/definitions/InputTopicTypes"
+ "type": "null"
}
],
+ "default": null,
"description": "Topic type"
}
},
"title": "FromTopic",
"type": "object"
},
+ "HelmApp": {
+ "additionalProperties": true,
+ "description": "Kubernetes app managed through Helm with an associated Helm chart.",
+ "properties": {
+ "app": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/HelmAppValues"
+ }
+ ],
+ "description": "Helm app values"
+ },
+ "from": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) and/or components from which the component will read input",
+ "title": "From"
+ },
+ "name": {
+ "description": "Component name",
+ "title": "Name",
+ "type": "string"
+ },
+ "namespace": {
+ "description": "Kubernetes namespace in which the component shall be deployed",
+ "title": "Namespace",
+ "type": "string"
+ },
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
+ "type": "string"
+ },
+ "repo_config": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/HelmRepoConfig"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Configuration of the Helm chart repo to be used for deploying the component"
+ },
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
+ },
+ "type": {
+ "const": "helm-app",
+ "title": "Type"
+ },
+ "version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Helm chart version",
+ "title": "Version"
+ }
+ },
+ "required": [
+ "name",
+ "namespace",
+ "app",
+ "type"
+ ],
+ "title": "HelmApp",
+ "type": "object"
+ },
+ "HelmAppValues": {
+ "additionalProperties": true,
+ "description": "Helm app values.",
+ "properties": {
+ "nameOverride": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Override name with this value",
+ "title": "Nameoverride"
+ }
+ },
+ "title": "HelmAppValues",
+ "type": "object"
+ },
"HelmRepoConfig": {
"description": "Helm repository configuration.",
"properties": {
"repo_auth_flags": {
"allOf": [
{
- "$ref": "#/definitions/RepoAuthFlags"
+ "$ref": "#/$defs/RepoAuthFlags"
}
],
"default": {
@@ -63,8 +188,7 @@
"password": null,
"username": null
},
- "description": "Authorisation-related flags",
- "title": "Repo Auth Flags"
+ "description": "Authorisation-related flags"
},
"repository_name": {
"description": "Name of the Helm repository",
@@ -94,7 +218,8 @@
"type": "string"
},
"KafkaConnectorConfig": {
- "additionalProperties": {
+ "additionalProperties": true,
+ "additional_properties": {
"type": "string"
},
"description": "Settings specific to Kafka Connectors.",
@@ -111,23 +236,27 @@
"type": "object"
},
"KafkaSinkConnector": {
+ "additionalProperties": true,
"description": "Kafka sink connector model.",
"properties": {
"app": {
"allOf": [
{
- "$ref": "#/definitions/KafkaConnectorConfig"
+ "$ref": "#/$defs/KafkaConnectorConfig"
}
],
- "description": "Application-specific settings",
- "title": "App"
+ "description": "Application-specific settings"
},
"from": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
{
- "$ref": "#/definitions/FromSection"
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -136,93 +265,80 @@
"title": "Name",
"type": "string"
},
- "namespace": {
- "description": "Namespace in which the component shall be deployed",
- "title": "Namespace",
- "type": "string"
- },
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
},
- "repo_config": {
- "allOf": [
+ "resetter_namespace": {
+ "anyOf": [
{
- "$ref": "#/definitions/HelmRepoConfig"
+ "type": "string"
+ },
+ {
+ "type": "null"
}
],
- "default": {
- "repo_auth_flags": {
- "ca_file": null,
- "cert_file": null,
- "insecure_skip_tls_verify": false,
- "password": null,
- "username": null
- },
- "repository_name": "bakdata-kafka-connect-resetter",
- "url": "https://bakdata.github.io/kafka-connect-resetter/"
- },
- "description": "Configuration of the Helm chart repo to be used for deploying the component",
- "title": "Repo Config"
+ "default": null,
+ "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed",
+ "title": "Resetter Namespace"
},
"resetter_values": {
- "description": "Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc.",
- "title": "Resetter Values",
- "type": "object"
- },
- "to": {
"allOf": [
{
- "$ref": "#/definitions/ToSection"
+ "$ref": "#/$defs/HelmAppValues"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc."
},
- "type": {
- "default": "kafka-sink-connector",
- "description": "Kafka sink connector model.",
- "enum": [
- "kafka-sink-connector"
+ "to": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
+ {
+ "type": "null"
+ }
],
- "title": "Component type",
- "type": "string"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
- "version": {
- "default": "1.0.4",
- "description": "Helm chart version",
- "title": "Version",
- "type": "string"
+ "type": {
+ "const": "kafka-sink-connector",
+ "title": "Type"
}
},
"required": [
"name",
- "namespace",
- "app"
+ "app",
+ "type"
],
"title": "KafkaSinkConnector",
"type": "object"
},
"KafkaSourceConnector": {
+ "additionalProperties": true,
"description": "Kafka source connector model.",
"properties": {
"app": {
"allOf": [
{
- "$ref": "#/definitions/KafkaConnectorConfig"
+ "$ref": "#/$defs/KafkaConnectorConfig"
}
],
- "description": "Application-specific settings",
- "title": "App"
+ "description": "Application-specific settings"
},
"from": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FromSection"
+ },
{
- "$ref": "#/definitions/FromSection"
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -231,162 +347,69 @@
"title": "Name",
"type": "string"
},
- "namespace": {
- "description": "Namespace in which the component shall be deployed",
- "title": "Namespace",
- "type": "string"
- },
"offset_topic": {
- "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running",
- "title": "Offset Topic",
- "type": "string"
- },
- "prefix": {
- "default": "${pipeline_name}-",
- "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
- "title": "Prefix",
- "type": "string"
- },
- "repo_config": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/HelmRepoConfig"
- }
- ],
- "default": {
- "repo_auth_flags": {
- "ca_file": null,
- "cert_file": null,
- "insecure_skip_tls_verify": false,
- "password": null,
- "username": null
+ "type": "string"
},
- "repository_name": "bakdata-kafka-connect-resetter",
- "url": "https://bakdata.github.io/kafka-connect-resetter/"
- },
- "description": "Configuration of the Helm chart repo to be used for deploying the component",
- "title": "Repo Config"
- },
- "resetter_values": {
- "description": "Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc.",
- "title": "Resetter Values",
- "type": "object"
- },
- "to": {
- "allOf": [
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running",
+ "title": "Offset Topic"
},
- "type": {
- "default": "kafka-source-connector",
- "description": "Kafka source connector model.",
- "enum": [
- "kafka-source-connector"
- ],
- "title": "Component type",
+ "prefix": {
+ "default": "${pipeline.name}-",
+ "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
+ "title": "Prefix",
"type": "string"
},
- "version": {
- "default": "1.0.4",
- "description": "Helm chart version",
- "title": "Version",
- "type": "string"
- }
- },
- "required": [
- "name",
- "namespace",
- "app"
- ],
- "title": "KafkaSourceConnector",
- "type": "object"
- },
- "KubernetesApp": {
- "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.",
- "properties": {
- "app": {
- "allOf": [
+ "resetter_namespace": {
+ "anyOf": [
{
- "$ref": "#/definitions/KubernetesAppConfig"
- }
- ],
- "description": "Application-specific settings",
- "title": "App"
- },
- "from": {
- "allOf": [
+ "type": "string"
+ },
{
- "$ref": "#/definitions/FromSection"
+ "type": "null"
}
],
- "description": "Topic(s) and/or components from which the component will read input",
- "title": "From"
+ "default": null,
+ "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed",
+ "title": "Resetter Namespace"
},
- "name": {
- "description": "Component name",
- "title": "Name",
- "type": "string"
- },
- "namespace": {
- "description": "Namespace in which the component shall be deployed",
- "title": "Namespace",
- "type": "string"
- },
- "prefix": {
- "default": "${pipeline_name}-",
- "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
- "title": "Prefix",
- "type": "string"
- },
- "repo_config": {
+ "resetter_values": {
"allOf": [
{
- "$ref": "#/definitions/HelmRepoConfig"
+ "$ref": "#/$defs/HelmAppValues"
}
],
- "description": "Configuration of the Helm chart repo to be used for deploying the component",
- "title": "Repo Config"
+ "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc."
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "kubernetes-app",
- "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.",
- "enum": [
- "kubernetes-app"
- ],
- "title": "Component type",
- "type": "string"
- },
- "version": {
- "description": "Helm chart version",
- "title": "Version",
- "type": "string"
+ "const": "kafka-source-connector",
+ "title": "Type"
}
},
"required": [
"name",
- "namespace",
- "app"
+ "app",
+ "type"
],
- "title": "KubernetesApp",
- "type": "object"
- },
- "KubernetesAppConfig": {
- "description": "Settings specific to Kubernetes Apps.",
- "properties": {},
- "title": "KubernetesAppConfig",
+ "title": "KafkaSourceConnector",
"type": "object"
},
"OutputTopicTypes": {
@@ -399,18 +422,19 @@
"type": "string"
},
"ProducerApp": {
- "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.",
+ "additionalProperties": true,
+ "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. Note that the producer does not support error topics.",
"properties": {
"app": {
"allOf": [
{
- "$ref": "#/definitions/ProducerValues"
+ "$ref": "#/$defs/ProducerAppValues"
}
],
- "description": "Application-specific settings",
- "title": "App"
+ "description": "Application-specific settings"
},
"from": {
+ "default": null,
"description": "Producer doesn't support FromSection",
"title": "From",
"type": "null"
@@ -421,12 +445,12 @@
"type": "string"
},
"namespace": {
- "description": "Namespace in which the component shall be deployed",
+ "description": "Kubernetes namespace in which the component shall be deployed",
"title": "Namespace",
"type": "string"
},
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
@@ -434,7 +458,7 @@
"repo_config": {
"allOf": [
{
- "$ref": "#/definitions/HelmRepoConfig"
+ "$ref": "#/$defs/HelmRepoConfig"
}
],
"default": {
@@ -448,43 +472,81 @@
"repository_name": "bakdata-streams-bootstrap",
"url": "https://bakdata.github.io/streams-bootstrap/"
},
- "description": "Configuration of the Helm chart repo to be used for deploying the component",
- "title": "Repo Config"
+ "description": "Configuration of the Helm chart repo to be used for deploying the component"
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "producer-app",
- "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.",
- "enum": [
- "producer-app"
- ],
- "title": "Component type",
- "type": "string"
+ "const": "producer-app",
+ "title": "Type"
},
"version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
"default": "2.9.0",
"description": "Helm chart version",
- "title": "Version",
- "type": "string"
+ "title": "Version"
}
},
"required": [
"name",
"namespace",
- "app"
+ "app",
+ "type"
],
"title": "ProducerApp",
"type": "object"
},
+ "ProducerAppValues": {
+ "additionalProperties": true,
+ "description": "Settings specific to producers.",
+ "properties": {
+ "nameOverride": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Override name with this value",
+ "title": "Nameoverride"
+ },
+ "streams": {
+ "allOf": [
+ {
+ "$ref": "#/$defs/ProducerStreamsConfig"
+ }
+ ],
+ "description": "Kafka Streams settings"
+ }
+ },
+ "required": [
+ "streams"
+ ],
+ "title": "ProducerAppValues",
+ "type": "object"
+ },
"ProducerStreamsConfig": {
+ "additionalProperties": true,
"description": "Kafka Streams settings specific to Producer.",
"properties": {
"brokers": {
@@ -502,60 +564,68 @@
"type": "object"
},
"outputTopic": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Output topic",
- "title": "Outputtopic",
- "type": "string"
+ "title": "Outputtopic"
},
"schemaRegistryUrl": {
- "description": "URL of the schema registry",
- "title": "Schemaregistryurl",
- "type": "string"
- }
- },
- "required": [
- "brokers"
- ],
- "title": "ProducerStreamsConfig",
- "type": "object"
- },
- "ProducerValues": {
- "description": "Settings specific to producers.",
- "properties": {
- "nameOverride": {
- "description": "Override name with this value",
- "title": "Nameoverride",
- "type": "string"
- },
- "streams": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/ProducerStreamsConfig"
+ "type": "string"
+ },
+ {
+ "type": "null"
}
],
- "description": "Kafka Streams settings",
- "title": "Streams"
+ "default": null,
+ "description": "URL of the schema registry",
+ "title": "Schemaregistryurl"
}
},
"required": [
- "streams"
+ "brokers"
],
- "title": "ProducerValues",
+ "title": "ProducerStreamsConfig",
"type": "object"
},
"RepoAuthFlags": {
"description": "Authorisation-related flags for `helm repo`.",
"properties": {
"ca_file": {
+ "anyOf": [
+ {
+ "format": "path",
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Path to CA bundle file to verify certificates of HTTPS-enabled servers",
- "format": "path",
- "title": "Ca File",
- "type": "string"
+ "title": "Ca File"
},
"cert_file": {
+ "anyOf": [
+ {
+ "format": "path",
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Path to SSL certificate file to identify HTTPS client",
- "format": "path",
- "title": "Cert File",
- "type": "string"
+ "title": "Cert File"
},
"insecure_skip_tls_verify": {
"default": false,
@@ -564,37 +634,57 @@
"type": "boolean"
},
"password": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Password",
- "title": "Password",
- "type": "string"
+ "title": "Password"
},
"username": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Username",
- "title": "Username",
- "type": "string"
+ "title": "Username"
}
},
"title": "RepoAuthFlags",
"type": "object"
},
"StreamsApp": {
- "description": "StreamsApp component that configures a streams bootstrap app.",
+ "additionalProperties": true,
+ "description": "StreamsApp component that configures a streams-bootstrap app.",
"properties": {
"app": {
"allOf": [
{
- "$ref": "#/definitions/StreamsAppConfig"
+ "$ref": "#/$defs/StreamsAppValues"
}
],
- "description": "Application-specific settings",
- "title": "App"
+ "description": "Application-specific settings"
},
"from": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/FromSection"
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -604,12 +694,12 @@
"type": "string"
},
"namespace": {
- "description": "Namespace in which the component shall be deployed",
+ "description": "Kubernetes namespace in which the component shall be deployed",
"title": "Namespace",
"type": "string"
},
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
@@ -617,7 +707,7 @@
"repo_config": {
"allOf": [
{
- "$ref": "#/definitions/HelmRepoConfig"
+ "$ref": "#/$defs/HelmRepoConfig"
}
],
"default": {
@@ -631,43 +721,49 @@
"repository_name": "bakdata-streams-bootstrap",
"url": "https://bakdata.github.io/streams-bootstrap/"
},
- "description": "Configuration of the Helm chart repo to be used for deploying the component",
- "title": "Repo Config"
+ "description": "Configuration of the Helm chart repo to be used for deploying the component"
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "streams-app",
- "description": "StreamsApp component that configures a streams bootstrap app.",
- "enum": [
- "streams-app"
- ],
- "title": "Component type",
- "type": "string"
+ "const": "streams-app",
+ "title": "Type"
},
"version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
"default": "2.9.0",
"description": "Helm chart version",
- "title": "Version",
- "type": "string"
+ "title": "Version"
}
},
"required": [
"name",
"namespace",
- "app"
+ "app",
+ "type"
],
"title": "StreamsApp",
"type": "object"
},
"StreamsAppAutoScaling": {
+ "additionalProperties": true,
"description": "Kubernetes Event-driven Autoscaling config.",
"properties": {
"consumerGroup": {
@@ -683,13 +779,22 @@
},
"enabled": {
"default": false,
+ "description": "",
"title": "Enabled",
"type": "boolean"
},
"idleReplicas": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "If this property is set, KEDA will scale the resource down to this number of replicas. https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount",
- "title": "Idle replica count",
- "type": "integer"
+ "title": "Idle replica count"
},
"lagThreshold": {
"description": "Average target value to trigger scaling actions.",
@@ -737,40 +842,52 @@
"title": "StreamsAppAutoScaling",
"type": "object"
},
- "StreamsAppConfig": {
- "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.",
+ "StreamsAppValues": {
+ "additionalProperties": true,
+ "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.",
"properties": {
"autoscaling": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/StreamsAppAutoScaling"
+ "$ref": "#/$defs/StreamsAppAutoScaling"
+ },
+ {
+ "type": "null"
}
],
- "description": "Kubernetes Event-driven Autoscaling config",
- "title": "Autoscaling"
+ "default": null,
+ "description": "Kubernetes event-driven autoscaling config"
},
"nameOverride": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Override name with this value",
- "title": "Nameoverride",
- "type": "string"
+ "title": "Nameoverride"
},
"streams": {
"allOf": [
{
- "$ref": "#/definitions/StreamsConfig"
+ "$ref": "#/$defs/StreamsConfig"
}
],
- "description": "Streams Bootstrap streams section",
- "title": "Streams"
+ "description": "streams-bootstrap streams section"
}
},
"required": [
"streams"
],
- "title": "StreamsAppConfig",
+ "title": "StreamsAppValues",
"type": "object"
},
"StreamsConfig": {
+ "additionalProperties": true,
"description": "Streams Bootstrap streams section.",
"properties": {
"brokers": {
@@ -779,18 +896,36 @@
"type": "string"
},
"config": {
- "additionalProperties": {
- "type": "string"
- },
"default": {},
"description": "Configuration",
"title": "Config",
"type": "object"
},
+ "deleteOutput": {
+ "anyOf": [
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup",
+ "title": "Deleteoutput"
+ },
"errorTopic": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Error topic",
- "title": "Errortopic",
- "type": "string"
+ "title": "Errortopic"
},
"extraInputPatterns": {
"additionalProperties": {
@@ -823,9 +958,17 @@
"type": "object"
},
"inputPattern": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Input pattern",
- "title": "Inputpattern",
- "type": "string"
+ "title": "Inputpattern"
},
"inputTopics": {
"default": [],
@@ -837,14 +980,30 @@
"type": "array"
},
"outputTopic": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Output topic",
- "title": "Outputtopic",
- "type": "string"
+ "title": "Outputtopic"
},
"schemaRegistryUrl": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "URL of the schema registry",
- "title": "Schemaregistryurl",
- "type": "string"
+ "title": "Schemaregistryurl"
}
},
"required": [
@@ -854,6 +1013,7 @@
"type": "object"
},
"ToSection": {
+ "additionalProperties": false,
"description": "Holds multiple output topics.",
"properties": {
"models": {
@@ -867,7 +1027,7 @@
},
"topics": {
"additionalProperties": {
- "$ref": "#/definitions/TopicConfig"
+ "$ref": "#/$defs/TopicConfig"
},
"default": {},
"description": "Output topics",
@@ -899,38 +1059,82 @@
"type": "object"
},
"key_schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Key schema class name",
- "title": "Key schema",
- "type": "string"
+ "title": "Key schema"
},
"partitions_count": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Number of partitions into which the topic is divided",
- "title": "Partitions count",
- "type": "integer"
+ "title": "Partitions count"
},
"replication_factor": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Replication factor of the topic",
- "title": "Replication factor",
- "type": "integer"
+ "title": "Replication factor"
},
"role": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`",
- "title": "Role",
- "type": "string"
+ "title": "Role"
},
"type": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/OutputTopicTypes"
+ "$ref": "#/$defs/OutputTopicTypes"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic type",
"title": "Topic type"
},
"value_schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Value schema class name",
- "title": "Value schema",
- "type": "string"
+ "title": "Value schema"
}
},
"title": "TopicConfig",
@@ -940,32 +1144,32 @@
"items": {
"discriminator": {
"mapping": {
- "kafka-sink-connector": "#/definitions/KafkaSinkConnector",
- "kafka-source-connector": "#/definitions/KafkaSourceConnector",
- "kubernetes-app": "#/definitions/KubernetesApp",
- "producer-app": "#/definitions/ProducerApp",
- "streams-app": "#/definitions/StreamsApp"
+ "helm-app": "#/$defs/HelmApp",
+ "kafka-sink-connector": "#/$defs/KafkaSinkConnector",
+ "kafka-source-connector": "#/$defs/KafkaSourceConnector",
+ "producer-app": "#/$defs/ProducerApp",
+ "streams-app": "#/$defs/StreamsApp"
},
"propertyName": "type"
},
"oneOf": [
{
- "$ref": "#/definitions/KafkaSinkConnector"
+ "$ref": "#/$defs/HelmApp"
},
{
- "$ref": "#/definitions/KafkaSourceConnector"
+ "$ref": "#/$defs/KafkaSinkConnector"
},
{
- "$ref": "#/definitions/KubernetesApp"
+ "$ref": "#/$defs/KafkaSourceConnector"
},
{
- "$ref": "#/definitions/ProducerApp"
+ "$ref": "#/$defs/ProducerApp"
},
{
- "$ref": "#/definitions/StreamsApp"
+ "$ref": "#/$defs/StreamsApp"
}
]
},
- "title": "KPOps pipeline schema",
+ "title": "PipelineSchema",
"type": "array"
}
diff --git a/docs/docs/user/core-concepts/components/helm-app.md b/docs/docs/user/core-concepts/components/helm-app.md
new file mode 100644
index 000000000..4a7af609b
--- /dev/null
+++ b/docs/docs/user/core-concepts/components/helm-app.md
@@ -0,0 +1,37 @@
+# HelmApp
+
+### Usage
+
+Can be used to deploy any app in Kubernetes using Helm, for example, a REST service that serves Kafka data.
+
+### Configuration
+
+
+
+??? example "`pipeline.yaml`"
+
+ ```yaml
+ --8<--
+ ./docs/resources/pipeline-components/helm-app.yaml
+ --8<--
+ ```
+
+
+
+### Operations
+
+#### deploy
+
+Deploy using Helm.
+
+#### destroy
+
+Uninstall Helm release.
+
+#### reset
+
+Do nothing.
+
+#### clean
+
+Do nothing.
diff --git a/docs/docs/user/core-concepts/components/kafka-app.md b/docs/docs/user/core-concepts/components/kafka-app.md
index e69153c5e..acbe86742 100644
--- a/docs/docs/user/core-concepts/components/kafka-app.md
+++ b/docs/docs/user/core-concepts/components/kafka-app.md
@@ -1,6 +1,6 @@
# KafkaApp
-Subclass of [_KubernetesApp_](kubernetes-app.md).
+Subclass of [_HelmApp_](helm-app.md).
### Usage
@@ -26,7 +26,7 @@ Subclass of [_KubernetesApp_](kubernetes-app.md).
#### deploy
-In addition to [KubernetesApp's `deploy`](kubernetes-app.md#deploy):
+In addition to [HelmApp's `deploy`](helm-app.md#deploy):
- Create topics if provided (optional)
- Submit Avro schemas to the registry if provided (optional)
diff --git a/docs/docs/user/core-concepts/components/kubernetes-app.md b/docs/docs/user/core-concepts/components/kubernetes-app.md
index 4a28dbe0e..80a0c3467 100644
--- a/docs/docs/user/core-concepts/components/kubernetes-app.md
+++ b/docs/docs/user/core-concepts/components/kubernetes-app.md
@@ -2,7 +2,7 @@
### Usage
-Can be used to deploy any app in Kubernetes using Helm, for example, a REST service that serves Kafka data.
+Can be used to create components for any Kubernetes app.
### Configuration
@@ -22,11 +22,11 @@ Can be used to deploy any app in Kubernetes using Helm, for example, a REST serv
#### deploy
-Deploy using Helm.
+Do nothing.
#### destroy
-Uninstall Helm release.
+Do nothing.
#### reset
diff --git a/docs/docs/user/core-concepts/components/producer-app.md b/docs/docs/user/core-concepts/components/producer-app.md
index 1f55fa6d9..bff598d53 100644
--- a/docs/docs/user/core-concepts/components/producer-app.md
+++ b/docs/docs/user/core-concepts/components/producer-app.md
@@ -1,6 +1,6 @@
# ProducerApp
-Subclass of [_KafkaApp_](kafka-app.md).
+Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md).
### Usage
diff --git a/docs/docs/user/core-concepts/components/streams-app.md b/docs/docs/user/core-concepts/components/streams-app.md
index ac881ade2..d34705062 100644
--- a/docs/docs/user/core-concepts/components/streams-app.md
+++ b/docs/docs/user/core-concepts/components/streams-app.md
@@ -1,6 +1,6 @@
# StreamsApp
-Subclass of [_KafkaApp_](kafka-app.md).
+Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md).
### Usage
diff --git a/docs/docs/user/core-concepts/components/streams-bootstrap.md b/docs/docs/user/core-concepts/components/streams-bootstrap.md
new file mode 100644
index 000000000..52bb5fa0e
--- /dev/null
+++ b/docs/docs/user/core-concepts/components/streams-bootstrap.md
@@ -0,0 +1,25 @@
+# StreamsBootstrap
+
+Subclass of [_HelmApp_](helm-app.md).
+
+### Usage
+
+Configures a Helm app with [streams-bootstrap Helm charts](https://github.com/bakdata/streams-bootstrap){target=_blank}.
+
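+For example, the Helm chart repository and version can be pinned in your `defaults.yaml`; the values below mirror the built-in defaults and are shown purely for illustration:
+
+```yaml
+streams-bootstrap:
+  repo_config:
+    repository_name: bakdata-streams-bootstrap
+    url: https://bakdata.github.io/streams-bootstrap/
+  version: "2.9.0"
+```
+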
+### Operations
+
+#### deploy
+
+Deploy using Helm.
+
+#### destroy
+
+Uninstall Helm release.
+
+#### reset
+
+Do nothing.
+
+#### clean
+
+Do nothing.
diff --git a/docs/docs/user/core-concepts/variables/environment_variables.md b/docs/docs/user/core-concepts/variables/environment_variables.md
index 2a57aabea..35ca235d7 100644
--- a/docs/docs/user/core-concepts/variables/environment_variables.md
+++ b/docs/docs/user/core-concepts/variables/environment_variables.md
@@ -6,10 +6,7 @@ Environment variables can be set by using the [export](https://www.unix.com/man-
!!! tip "dotenv files"
- Support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20),
- but not implemented in KPOps yet. One of the possible ways to still
- use one and export the contents manually is with the following command: `#!sh export $(xargs < .env)`.
- This would work in `bash` suppose there are no spaces inside the values.
+    KPOps currently supports `.env` files only for variables related to the [config](../config.md). Full support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20). One way to still use a `.env` file is to export its contents manually with the following command: `#!sh export $(xargs < .env)`. This works in `bash`, provided there are no spaces inside the values.
diff --git a/docs/docs/user/core-concepts/variables/substitution.md b/docs/docs/user/core-concepts/variables/substitution.md
index 4ef9b1b25..eb4076c79 100644
--- a/docs/docs/user/core-concepts/variables/substitution.md
+++ b/docs/docs/user/core-concepts/variables/substitution.md
@@ -6,7 +6,7 @@ KPOps supports the usage of placeholders and environment variables in [pipeline
These variables can be used in a component's definition to refer to any of its attributes, including ones that the user has defined in the defaults.
-All of them are prefixed with `component_` and follow the following form: `component_{attribute_name}`. If the attribute itself contains attributes, they can be referred to like this: `component_{attribute_name}_{subattribute_name}`.
+All of them are prefixed with `component.` and take the following form: `component.{attribute_name}`. If the attribute itself contains attributes, they can be referred to like this: `component.{attribute_name}.{subattribute_name}`.
@@ -23,11 +23,13 @@ All of them are prefixed with `component_` and follow the following form: `compo
These variables include all fields in the [config](../config.md) and refer to the pipeline configuration that is independent of the components.
+All such variables are prefixed with `config.` and are of the same form as the [component-specific variables](#component-specific-variables).
+
!!! info Aliases
- `error_topic_name` is an alias for `topic_name_config_default_error_topic_name`
- `output_topic_name` is an alias for `topic_name_config_default_output_topic_name`
+ `error_topic_name` is an alias for `config.topic_name_config.default_error_topic_name`
+ `output_topic_name` is an alias for `config.topic_name_config.default_output_topic_name`
@@ -41,7 +43,7 @@ Environment variables such as `$PATH` can be used in the pipeline definition and
These are special variables that refer to the name and path of a pipeline.
-- `${pipeline_name}`: Concatenated path of the parent directory where pipeline.yaml is defined in.
+- `${pipeline.name}`: Concatenated path of the parent directory in which pipeline.yaml is defined.
For instance, `./data/pipelines/v1/pipeline.yaml`, here the value for the variable would be `data-pipelines-v1`.
- `${pipeline_name_}`: Similar to the previous variable, each `` contains a part of the path to the `pipeline.yaml` file.
diff --git a/docs/docs/user/migration-guide/v1-v2.md b/docs/docs/user/migration-guide/v1-v2.md
index c5936cbe5..e3edf9453 100644
--- a/docs/docs/user/migration-guide/v1-v2.md
+++ b/docs/docs/user/migration-guide/v1-v2.md
@@ -18,7 +18,7 @@ Because of this new convention `producer` has been renamed to `producer-app`. Th
app:
streams:
outputTopic: output_topic
- extraOutputTopics:
+ extraOutputTopics:
output_role1: output_topic1
output_role2: output_topic2
```
@@ -41,10 +41,10 @@ In the `to` section these have changed:
role: "role-1"
...
${pipeline_name}-topic-2:
-- type: output
+- type: output
...
${pipeline_name}-topic-3:
- type: error
+ type: error
...
```
@@ -68,11 +68,11 @@ In the `from` section these have changed:
role: topic-role
...
${pipeline_name}-input-pattern-topic:
-- type: input-pattern
+- type: input-pattern
+ type: pattern
...
${pipeline_name}-extra-pattern-topic:
-- type: extra-pattern
+- type: extra-pattern
+ type: pattern
role: some-role
...
@@ -86,15 +86,15 @@ All the internal KPOps models are now snake_case, and only Helm/Kubernetes value
...
type: streams-app
name: streams-app
- namespace: namespace
- app:
+ namespace: namespace
+ app:
streams:
- brokers: ${brokers}
+ brokers: ${brokers}
schemaRegistryUrl: ${schema_registry_url}
autoscaling:
consumerGroup: consumer-group
lagThreshold: 0
- enabled: false
+ enabled: false
pollingInterval: 30
to:
@@ -117,8 +117,8 @@ type: streams-app
- repositoryName: bakdata-streams-bootstrap
+ repository_name: bakdata-streams-bootstrap
url: https://bakdata.github.io/streams-bootstrap/
-- repoAuthFlags:
-+ repo_auth_flags:
+- repoAuthFlags:
++ repo_auth_flags:
username: user
password: pass
ca_file: /home/user/path/to/ca-file
@@ -151,9 +151,9 @@ Since you can pass a comma separated string of broker address, the broker field
environment: development
- broker: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+ brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
- kafka_connect_host: "http://localhost:8083"
- kafka_rest_host: "http://localhost:8082"
- schema_registry_url: "http://localhost:8081"
+ kafka_connect_host: "http://localhost:8083"
+ kafka_rest_host: "http://localhost:8082"
+ schema_registry_url: "http://localhost:8081"
```
#### pipeline.yaml and default.yaml
diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
new file mode 100644
index 000000000..1556c0339
--- /dev/null
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -0,0 +1,341 @@
+# Migrate from V2 to V3
+
+[**Jump to the summary**](#summary)
+
+## [Use hash and trim long Helm release names instead of only trimming](https://github.com/bakdata/kpops/pull/390)
+
+KPOps now handles long (more than 53 characters) Helm release names differently. Helm will not find your (long) old release names anymore. Therefore, it is recommended to destroy your pipeline once with KPOps v2 to remove the old Helm release names. After a clean destroy, re-deploy your pipeline with KPOps v3.
+
+For example, if you have a component with the Helm release name `example-component-name-too-long-fake-fakefakefakefakefake`, its new release name is derived by shortening the original name to 53 characters and then replacing the last 6 characters of the trimmed name with a dash followed by the first 5 characters of SHA-1(helm_release_name).
+
+
+
+```console
+example-component-name-too-long-fake-fakefakef-0a7fc ----> 53 chars
+---------------------------------------------- -----
+ ^Shortened helm_release_name ^first 5 characters of SHA1(helm_release_name)
+```
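+
+As a rough sketch (this is not KPOps' actual implementation; the helper name and constant are assumptions for illustration), the derivation could look like this:
+
+```python
+import hashlib
+
+HELM_RELEASE_NAME_MAX_LEN = 53  # Helm's release name limit referenced above
+
+
+def shorten_release_name(name: str, max_len: int = HELM_RELEASE_NAME_MAX_LEN) -> str:
+    """Trim an overlong Helm release name and append a short SHA-1 suffix."""
+    if len(name) <= max_len:
+        return name  # names within the limit are kept as-is
+    digest = hashlib.sha1(name.encode()).hexdigest()[:5]
+    # keep the first (max_len - 6) characters, then append "-" plus 5 hash characters
+    return f"{name[: max_len - 6]}-{digest}"
+```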
+
+
+
+## [Create HelmApp component](https://github.com/bakdata/kpops/pull/370)
+
+All Helm-specific parts of the built-in [`KubernetesApp`](../core-concepts/components/kubernetes-app.md) have been extracted to a new child component that is more appropriately named [`HelmApp`](../core-concepts/components/helm-app.md). It has to be renamed in your existing pipeline definitions and custom components module.
+
+#### pipeline.yaml
+
+```diff
+-- type: kubernetes-app
++- type: helm-app
+ name: foo
+```
+
+#### custom_module.py
+
+```diff
+- from kpops.components import KubernetesApp
++ from kpops.components import HelmApp
+
+
+- class CustomHelmApp(KubernetesApp):
++ class CustomHelmApp(HelmApp):
+ ...
+```
+
+## [Create StreamsBootstrap component & refactor cleanup jobs as individual HelmApp](https://github.com/bakdata/kpops/pull/398)
+
+Previously, the default `KafkaApp` component configured the [streams-bootstrap](https://bakdata.github.io/streams-bootstrap/) Helm charts. Now, this component is no longer tied to Helm (or Kubernetes). Instead, there is a new `StreamsBootstrap` component that configures the Helm chart repository for the components that use it, e.g. `StreamsApp` and `ProducerApp`. If you are using non-default values for the Helm chart repository or version, they have to be updated as shown below.
+
+#### defaults.yaml
+
+```diff
+ kafka-app:
+ app:
+ streams: ...
+
++ streams-bootstrap:
+ repo_config: ...
+ version: ...
+```
+
+## [Refactor Kafka Connector resetter as individual HelmApp](https://github.com/bakdata/kpops/pull/400)
+
+Internally, the [Kafka Connector resetter](bakdata-kafka-connect-resetter/kafka-connect-resetter) is now its own standard `HelmApp`, removing a lot of the shared code.
+It is configured using the `resetter_namespace` (formerly `namespace`) and `resetter_values` attributes.
+
+#### defaults.yaml
+
+```diff
+ kafka-connector:
+- namespace: my-namespace
++ resetter_namespace: my-namespace
+```
+
+## [Make Kafka REST Proxy & Kafka Connect hosts default and improve Schema Registry config](https://github.com/bakdata/kpops/pull/354)
+
+The breaking changes target the `config.yaml` file:
+
+- The `schema_registry_url` is replaced with `schema_registry.url` (default `http://localhost:8081`) and `schema_registry.enabled` (default `false`).
+
+- `kafka_rest_host` is renamed to `kafka_rest.url` (default `http://localhost:8082`).
+
+- `kafka_connect_host` is replaced with `kafka_connect.url` (default `http://localhost:8083`).
+
+- `brokers` is renamed to `kafka_brokers`.
+
+The environment variable names of these config fields have changed accordingly. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md) for the new names.
+
+#### config.yaml
+
+```diff
+ environment: development
+- brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+- kafka_rest_host: "http://my-custom-rest.url:8082"
+- kafka_connect_host: "http://my-custom-connect.url:8083"
+- schema_registry_url: "http://my-custom-sr.url:8081"
++ kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
++ kafka_rest:
++ url: "http://my-custom-rest.url:8082"
++ kafka_connect:
++ url: "http://my-custom-connect.url:8083"
++ schema_registry:
++ enabled: true
++ url: "http://my-custom-sr.url:8081"
+```
+
+#### pipeline.yaml and defaults.yaml
+
+The variable is now called `kafka_brokers`.
+
+```diff
+...
+ app:
+ streams:
+- brokers: ${brokers}
++ brokers: ${kafka_brokers}
+ schemaRegistryUrl: ${schema_registry_url}
+ nameOverride: override-with-this-name
+ imageTag: "1.0.0"
+...
+```
+
+## [Define custom components module & pipeline base dir globally](https://github.com/bakdata/kpops/pull/387)
+
+
+
+!!! warning inline end
+ **The previous CLI parameters have been removed.**
+
+
+
+The options for a custom `components_module` and `pipeline_base_dir` are now global settings, defined in `config.yaml`.
+
+#### config.yaml
+
+```diff
+ kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+ environment: development
++ components_module: components
++ pipeline_base_dir: pipelines
+```
+
+## [Move GitHub action to repository root](https://github.com/bakdata/kpops/pull/356)
+
+The location of the GitHub action has changed, and it's now available directly as `bakdata/kpops`.
+
+You'll need to change it in your GitHub CI workflows.
+
+```diff
+steps:
+ - name: kpops deploy
+- uses: bakdata/kpops/actions/kpops-runner@main
++ uses: bakdata/kpops@main
+ with:
+ command: deploy --execute
+ # ...
+```
+
+## [Allow overriding config files](https://github.com/bakdata/kpops/pull/391)
+
+Specifying the environment is no longer mandatory. If not defined, only the global files will be used.
+
+`environment` is no longer specified in `config.yaml`. Instead, it can be set either via the CLI flag `--environment` or with the environment variable `KPOPS_ENVIRONMENT`.
+
+The `--config` flag in the CLI now points to the directory that contains the `config*.yaml` files. The files to be used are resolved based on the provided `environment`, if any.
+
+#### config.yaml
+
+```diff
+- environment: development
+ kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+ schema_registry:
+ enabled: true
+ url: "http://my-custom-sr.url:8081"
+```
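+
+If you want to reason about which files are picked up, the following is a minimal sketch of the resolution logic. It assumes that environment-specific config files follow a `config_<environment>.yaml` naming convention (mirroring `defaults_<environment>.yaml`) and is not KPOps' actual code:
+
+```python
+from pathlib import Path
+
+
+def resolve_config_files(config_dir: Path, environment: str | None) -> list[Path]:
+    """Return the config files to load; later files override earlier ones."""
+    files = [config_dir / "config.yaml"]  # global config, always considered
+    if environment is not None:
+        env_file = config_dir / f"config_{environment}.yaml"
+        if env_file.exists():
+            files.append(env_file)  # environment-specific overrides
+    return files
+
+
+# e.g. `kpops deploy ... --config . --environment development`
+print(resolve_config_files(Path("."), "development"))
+```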
+
+## [Change substitution variables separator to `.`](https://github.com/bakdata/kpops/pull/388)
+
+The delimiter in the substitution variables has changed to `.`.
+
+#### pipeline.yaml and defaults.yaml
+
+```diff
+steps:
+ - type: scheduled-producer
+ app:
+ labels:
+- app_type: "${component_type}"
+- app_name: "${component_name}"
+- app_schedule: "${component_app_schedule}"
++ app_type: "${component.type}"
++ app_name: "${component.name}"
++ app_schedule: "${component.app.schedule}"
+```
+
+#### config.yaml
+
+```diff
+topic_name_config:
+- default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic"
+- default_output_topic_name: "${pipeline_name}-${component_name}-topic"
++ default_error_topic_name: "${pipeline_name}-${component.name}-dead-letter-topic"
++ default_output_topic_name: "${pipeline_name}-${component.name}-topic"
+```
+
+## [Refactor generate template for Python API usage](https://github.com/bakdata/kpops/pull/380)
+
+The `template` method of every pipeline component has been renamed to `manifest`, as it is no longer strictly tied to Helm templating. Instead, it can be used to render the final resources of a component, such as Kubernetes manifests.
+
+There is also a new `kpops manifest` command replacing the existing `kpops generate --template` flag.
+
+If you're using this functionality in your custom components, it needs to be updated.
+
+```diff
+ from kpops.components.base_components.models.resource import Resource
+
+ @override
+- def template(self) -> None:
++ def manifest(self) -> Resource:
+ """Render final component resources, e.g. Kubernetes manifests."""
+ return [] # list of manifests
+```
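+
+For Python API users, the exported command functions mirror the CLI. The sketch below assumes they can be called directly with keyword arguments named after the CLI options shown in this release; it is an illustration, not documented API usage:
+
+```python
+from pathlib import Path
+
+import kpops
+
+# Render the final resources (e.g. Kubernetes manifests) of every pipeline step.
+resources = kpops.manifest(
+    pipeline_path=Path("pipelines/my-pipeline-file.yaml"),
+    config=Path("."),           # directory containing the config*.yaml files
+    environment="development",  # optional
+    output=False,               # return the resources instead of printing them
+)
+for resource in resources:
+    for manifest in resource:
+        ...  # process each rendered manifest
+```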
+
+## [Namespace substitution vars](https://github.com/bakdata/kpops/pull/408)
+
+The global configuration variables are now namespaced under the `config` key, e.g. `${config.kafka_brokers}`, `${config.schema_registry.url}`. The same applies to pipeline variables, e.g. `${pipeline_name} → ${pipeline.name}`.
+This makes them consistent with the existing `${component.*}` variables.
+
+#### pipeline.yaml
+
+```diff
+ name: kafka-app
+- prefix: ${pipeline_name}-
++ prefix: ${pipeline.name}-
+ app:
+ streams:
+- brokers: ${kafka_brokers}
+- schemaRegistryUrl: ${schema_registry.url}
++ brokers: ${config.kafka_brokers}
++ schemaRegistryUrl: ${config.schema_registry.url}
+```
+
+## Summary
+
+
+
+!!! warning
+
+ [**Helm will not find your (long) old release names anymore.**](#use-hash-and-trim-long-helm-release-names-instead-of-only-trimming)
+
+??? example "defaults.yaml"
+
+ ```diff
+ kafka-app:
+ app:
+ streams: ...
+
+ + streams-bootstrap:
+ repo_config: ...
+ version: ...
+ ```
+
+??? example "pipeline.yaml"
+
+ ```diff
+ - - type: kubernetes-app
+ + - type: helm-app
+ ...
+ - type: kafka-app
+ app:
+ - brokers: ${brokers}
+ + brokers: ${config.kafka_brokers}
+ labels:
+ - app_schedule: "${component_app_schedule}"
+ + app_schedule: "${component.app.schedule}"
+ ...
+ - type: kafka-connector
+ - namespace: my-namespace
+ + resetter_namespace: my-namespace
+ ...
+ ```
+
+??? example "config.yaml"
+
+ ```diff
+ - environment: development
+
+ + components_module: components
+
+ + pipeline_base_dir: pipelines
+
+ - brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+ + kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+
+ - kafka_rest_host: "http://my-custom-rest.url:8082"
+ + kafka_rest:
+ + url: "http://my-custom-rest.url:8082"
+
+ - kafka_connect_host: "http://my-custom-connect.url:8083"
+ + kafka_connect:
+ + url: "http://my-custom-connect.url:8083"
+
+ - schema_registry_url: "http://my-custom-sr.url:8081"
+ + schema_registry:
+ + enabled: true
+ + url: "http://my-custom-sr.url:8081"
+
+ topic_name_config:
+ - default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic"
+ + default_error_topic_name: "${pipeline.name}-${component.name}-dead-letter-topic"
+ ...
+ ```
+
+??? example "custom_module.py"
+
+ ```diff
+ - from kpops.components import KubernetesApp
+ + from kpops.components import HelmApp
+ + from kpops.components.base_components.models.resource import Resource
+
+ - class CustomHelmApp(KubernetesApp):
+ + class CustomHelmApp(HelmApp):
+
+ @override
+ - def template(self) -> None:
+ + def manifest(self) -> Resource:
+ """Render final component resources, e.g. Kubernetes manifests."""
+ return [] # list of manifests
+ ...
+ ```
+
+??? example "github_ci_workflow.yaml"
+
+ ```diff
+ steps:
+ - name: ...
+ - uses: bakdata/kpops/actions/kpops-runner@main
+ + uses: bakdata/kpops@main
+ ...
+ ```
+
+
diff --git a/docs/docs/user/references/ci-integration/github-actions.md b/docs/docs/user/references/ci-integration/github-actions.md
index 7284254cc..c1c726e18 100644
--- a/docs/docs/user/references/ci-integration/github-actions.md
+++ b/docs/docs/user/references/ci-integration/github-actions.md
@@ -1,8 +1,6 @@
# GitHub Actions integration
-We provided a GitHub composite action called
-[kpops-runner](https://github.com/bakdata/kpops/tree/main/actions/kpops-runner/action.yaml)
-that installs all the necessary dependencies and runs KPOps commands with the given parameters.
+We provide a GitHub composite action `bakdata/kpops` that installs and executes KPOps commands with the given parameters.
## Input Parameters
@@ -11,9 +9,9 @@ that installs all the necessary dependencies and runs KPOps commands with the gi
| command | ✅ | - | string | KPOps command to run. generate, deploy, destroy, reset, clean are possible values. Flags such as --dry-run and --execute need to be specified |
| pipeline | ✅ | - | string | Pipeline to run by KPOps |
| working-directory | ❌ | . | string | root directory used by KPOps to run pipelines |
-| pipeline-base-dir | ❌ | - | string | directory where relative pipeline variables are initialized from |
| defaults | ❌ | - | string | defaults folder path |
-| config | ❌ | - | string | config.yaml file path |
+| config | ❌ | - | string | Directory containing the config*.yaml file(s) |
+| environment | ❌ | - | string | Environment to run KPOps in |
| components | ❌ | - | string | components package path |
| filter-type | ❌ | - | string | Whether to include/exclude the steps defined in KPOPS_PIPELINE_STEPS |
| python-version | ❌ | "3.11.x" | string | Python version to install (Defaults to the latest stable version of Python 3.11) |
@@ -28,33 +26,20 @@ steps:
# ...
# This step is useful for debugging reasons
- name: Generate Kafka pipeline
- uses: bakdata/kpops/actions/kpops-runner@main
+ uses: bakdata/kpops@main
with:
command: generate
working-directory: home/my-kpops-root-dir
pipeline: pipelines/my-pipeline-file.yaml
kpops-version: 1.2.3
+ # It is possible to use a pre-release KPOps version from TestPyPI https://test.pypi.org/project/kpops/#history
- name: Deploy Kafka pipeline
- uses: bakdata/kpops/actions/kpops-runner@main
+ uses: bakdata/kpops@main
with:
command: deploy --execute
working-directory: home/my-kpops-root-dir
pipeline: pipelines/my-pipeline-file.yaml
- kpops-version: 1.2.3
+ kpops-version: 1.2.5.dev20230707132709
# ...
```
-
-It is possible to execute the KPOps runner on
-a dev version from the [test.pypi](https://test.pypi.org/project/kpops/#history).
-
-```yaml
-steps:
- - name: Deploy Kafka pipeline
- uses: bakdata/kpops/actions/kpops-runner@main
- with:
- command: deploy --execute
- working-directory: home/my-kpops-root-dir
- pipeline: pipelines/my-pipeline-file.yaml
- kpops-version: 1.2.5.dev20230707132709 -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/
-```
diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md
index 952ae5a35..0a7617224 100644
--- a/docs/docs/user/references/cli-commands.md
+++ b/docs/docs/user/references/cli-commands.md
@@ -18,9 +18,10 @@ $ kpops [OPTIONS] COMMAND [ARGS]...
* `clean`: Clean pipeline steps
* `deploy`: Deploy pipeline steps
* `destroy`: Destroy pipeline steps
-* `generate`: Enriches pipelines steps with defaults.
+* `generate`: Generate enriched pipeline representation
+* `manifest`: Render final resource representation
* `reset`: Reset pipeline steps
-* `schema`: Generate json schema.
+* `schema`: Generate JSON schema.
## `kpops clean`
@@ -29,21 +30,21 @@ Clean pipeline steps
**Usage**:
```console
-$ kpops clean [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE]
+$ kpops clean [OPTIONS] PIPELINE_PATH
```
**Arguments**:
* `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required]
-* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components
**Options**:
-* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .]
+* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH]
* `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH]
-* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml]
+* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .]
* `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS]
* `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include]
+* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT]
* `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run]
* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose]
* `--help`: Show this message and exit.
@@ -55,21 +56,21 @@ Deploy pipeline steps
**Usage**:
```console
-$ kpops deploy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE]
+$ kpops deploy [OPTIONS] PIPELINE_PATH
```
**Arguments**:
* `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required]
-* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components
**Options**:
-* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .]
+* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH]
* `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH]
-* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml]
+* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .]
* `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS]
* `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include]
+* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT]
* `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run]
* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose]
* `--help`: Show this message and exit.
@@ -81,48 +82,72 @@ Destroy pipeline steps
**Usage**:
```console
-$ kpops destroy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE]
+$ kpops destroy [OPTIONS] PIPELINE_PATH
```
**Arguments**:
* `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required]
-* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components
**Options**:
-* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .]
+* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH]
* `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH]
-* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml]
+* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .]
* `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS]
* `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include]
+* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT]
* `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run]
* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose]
* `--help`: Show this message and exit.
## `kpops generate`
-Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... commands.
+Enrich pipeline steps with defaults. The enriched pipeline is used for all KPOps operations (deploy, destroy, ...).
**Usage**:
```console
-$ kpops generate [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE]
+$ kpops generate [OPTIONS] PIPELINE_PATH
```
**Arguments**:
* `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required]
-* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components
**Options**:
-* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .]
+* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH]
* `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH]
-* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml]
-* `--template / --no-template`: Run Helm template [default: no-template]
+* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .]
+* `--output / --no-output`: Enable output printing [default: output]
+* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT]
+* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose]
+* `--help`: Show this message and exit.
+
+## `kpops manifest`
+
+In addition to generate, render final resource representation for each pipeline step, e.g. Kubernetes manifests.
+
+**Usage**:
+
+```console
+$ kpops manifest [OPTIONS] PIPELINE_PATH
+```
+
+**Arguments**:
+
+* `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required]
+
+**Options**:
+
+* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH]
+* `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH]
+* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .]
+* `--output / --no-output`: Enable output printing [default: output]
* `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS]
* `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include]
+* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT]
* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose]
* `--help`: Show this message and exit.
@@ -133,54 +158,54 @@ Reset pipeline steps
**Usage**:
```console
-$ kpops reset [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE]
+$ kpops reset [OPTIONS] PIPELINE_PATH
```
**Arguments**:
* `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required]
-* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components
**Options**:
-* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .]
+* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH]
* `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH]
-* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml]
+* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .]
* `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS]
* `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include]
+* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT]
* `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run]
* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose]
* `--help`: Show this message and exit.
## `kpops schema`
-Generate json schema.
+Generate JSON schema.
-The schemas can be used to enable support for kpops files in a text editor.
+The schemas can be used to enable support for KPOps files in a text editor.
**Usage**:
```console
-$ kpops schema [OPTIONS] SCOPE:{pipeline|config} [COMPONENTS_MODULE]
+$ kpops schema [OPTIONS] SCOPE:{pipeline|defaults|config}
```
**Arguments**:
-* `SCOPE:{pipeline|config}`:
+* `SCOPE:{pipeline|defaults|config}`:
Scope of the generated schema
- pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide [COMPONENTS_MODULES].
+ pipeline: Schema of PipelineComponents. Includes the built-in KPOps components by default. To include custom components, provide components module in config.
- config: Schema of PipelineConfig. [required]
-* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components
+ config: Schema of KpopsConfig. [required]
**Options**:
+* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .]
* `--include-stock-components / --no-include-stock-components`: Include the built-in KPOps components. [default: include-stock-components]
* `--help`: Show this message and exit.
diff --git a/docs/docs/user/references/editor-integration.md b/docs/docs/user/references/editor-integration.md
index 86b7e93d0..a5c91890b 100644
--- a/docs/docs/user/references/editor-integration.md
+++ b/docs/docs/user/references/editor-integration.md
@@ -1,10 +1,11 @@
# Editor integration
-KPOps provides JSON schemas that enable autocompletion and validation for some of the files that the user must work with.
+KPOps provides JSON schemas that enable autocompletion and validation for all YAML files that the user must work with.
## Supported files
- [`pipeline.yaml`](../../resources/pipeline-components/pipeline.md)
+- [`defaults.yaml`](../core-concepts/defaults.md)
- [`config.yaml`](../core-concepts/config.md)
## Usage
@@ -23,6 +24,6 @@ KPOps provides JSON schemas that enable autocompletion and validation for some o
```
!!! tip "Advanced usage"
- It is possible to generate schemas with the [`kpops schema`](./cli-commands.md#kpops-schema) command. Useful when using custom components or when using a pre-release version of KPOps.
+ It is possible to generate schemas with the [`kpops schema`](./cli-commands.md#kpops-schema) command. Useful for including custom components or when using a pre-release version of KPOps.
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 2d6ac97d0..d436c94a5 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -59,7 +59,7 @@ markdown_extensions:
- pymdownx.keys
- pymdownx.details
- pymdownx.snippets:
- base_path: 'docs/'
+ base_path: "docs/"
url_download: true
url_timeout: false
- pymdownx.caret
@@ -82,38 +82,41 @@ nav:
- Home:
KPOps Documentation: index.md
- User Guide:
- - What is KPOps: user/what-is-kpops.md
- - Changelog: user/changelog.md
- - Getting Started:
- - Setup: user/getting-started/setup.md
- - Quick start: user/getting-started/quick-start.md
- - Teardown: user/getting-started/teardown.md
- - Examples:
- - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md
- - Core Concepts:
- - Components:
- - Overview: user/core-concepts/components/overview.md
- - KubernetesApp: user/core-concepts/components/kubernetes-app.md
- - KafkaApp: user/core-concepts/components/kafka-app.md
- - StreamsApp: user/core-concepts/components/streams-app.md
- - ProducerApp: user/core-concepts/components/producer-app.md
- - KafkaConnector: user/core-concepts/components/kafka-connector.md
- - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md
- - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md
- - Config: user/core-concepts/config.md
- - Defaults: user/core-concepts/defaults.md
- - Variables:
- - Environment variables: user/core-concepts/variables/environment_variables.md
- - Substitution: user/core-concepts/variables/substitution.md
- - References:
- - Migration guide:
- - Migrate from v1 to v2: user/migration-guide/v1-v2.md
- - CLI usage: user/references/cli-commands.md
- - Editor integration: user/references/editor-integration.md
- - CI integration:
- - GitHub Actions: user/references/ci-integration/github-actions.md
+ - What is KPOps: user/what-is-kpops.md
+ - Changelog: user/changelog.md
+ - Getting Started:
+ - Setup: user/getting-started/setup.md
+ - Quick start: user/getting-started/quick-start.md
+ - Teardown: user/getting-started/teardown.md
+ - Examples:
+ - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md
+ - Core Concepts:
+ - Components:
+ - Overview: user/core-concepts/components/overview.md
+ - KubernetesApp: user/core-concepts/components/kubernetes-app.md
+ - HelmApp: user/core-concepts/components/helm-app.md
+ - StreamsBootstrap: user/core-concepts/components/streams-bootstrap.md
+ - KafkaApp: user/core-concepts/components/kafka-app.md
+ - StreamsApp: user/core-concepts/components/streams-app.md
+ - ProducerApp: user/core-concepts/components/producer-app.md
+ - KafkaConnector: user/core-concepts/components/kafka-connector.md
+ - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md
+ - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md
+ - Config: user/core-concepts/config.md
+ - Defaults: user/core-concepts/defaults.md
+ - Variables:
+ - Environment variables: user/core-concepts/variables/environment_variables.md
+ - Substitution: user/core-concepts/variables/substitution.md
+ - References:
+ - Migration guide:
+ - Migrate from v1 to v2: user/migration-guide/v1-v2.md
+ - Migrate from v2 to v3: user/migration-guide/v2-v3.md
+ - CLI usage: user/references/cli-commands.md
+ - Editor integration: user/references/editor-integration.md
+ - CI integration:
+ - GitHub Actions: user/references/ci-integration/github-actions.md
- Developer Guide:
- - Getting Started: developer/getting-started.md
- - Contributing: developer/contributing.md
- - Code base:
- - Auto generation: developer/auto-generation.md
+ - Getting Started: developer/getting-started.md
+ - Contributing: developer/contributing.md
+ - Code base:
+ - Auto generation: developer/auto-generation.md
diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml
index d429723d6..c20493eb7 100644
--- a/examples/bakdata/atm-fraud-detection/config.yaml
+++ b/examples/bakdata/atm-fraud-detection/config.yaml
@@ -1,15 +1,19 @@
-environment: development
-
topic_name_config:
- default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic"
- default_output_topic_name: "${pipeline_name}-${component_name}-topic"
+ default_error_topic_name: "${pipeline.name}-${component.name}-dead-letter-topic"
+ default_output_topic_name: "${pipeline.name}-${component.name}-topic"
-brokers: "http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092"
+kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
-schema_registry_url: "http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081"
+schema_registry:
+ enabled: true
+ url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081"
-kafka_rest_host: "http://localhost:8082"
+kafka_rest:
+ url: "http://localhost:8082"
-kafka_connect_host: "http://localhost:8083"
+kafka_connect:
+ url: "http://localhost:8083"
defaults_path: .
+
+pipeline_base_dir: examples
diff --git a/examples/bakdata/atm-fraud-detection/defaults.yaml b/examples/bakdata/atm-fraud-detection/defaults.yaml
index 609933f13..2183f91d6 100644
--- a/examples/bakdata/atm-fraud-detection/defaults.yaml
+++ b/examples/bakdata/atm-fraud-detection/defaults.yaml
@@ -4,14 +4,11 @@ pipeline-component:
kubernetes-app:
namespace: ${NAMESPACE}
-kafka-connector:
- namespace: ${NAMESPACE}
-
kafka-app:
app:
streams:
- brokers: ${brokers}
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers}
+ schemaRegistryUrl: ${config.schema_registry.url}
optimizeLeaveGroupBehavior: false
producer-app:
@@ -23,7 +20,7 @@ producer-app:
streams-app:
app:
labels:
- pipeline: ${pipeline_name}
+ pipeline: ${pipeline.name}
streams:
optimizeLeaveGroupBehavior: false
to:
diff --git a/examples/bakdata/atm-fraud-detection/pipeline.yaml b/examples/bakdata/atm-fraud-detection/pipeline.yaml
index 9982aa0a7..d166a21f4 100644
--- a/examples/bakdata/atm-fraud-detection/pipeline.yaml
+++ b/examples/bakdata/atm-fraud-detection/pipeline.yaml
@@ -83,7 +83,7 @@
app:
connector.class: io.confluent.connect.jdbc.JdbcSinkConnector
tasks.max: 1
- topics: ${pipeline_name}-account-linker-topic
+ topics: ${pipeline.name}-account-linker-topic
connection.url: jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db
connection.user: app1
connection.password: AppPassword
diff --git a/hooks/gen_docs/gen_docs_cli_usage.py b/hooks/gen_docs/gen_docs_cli_usage.py
index 25f7ecd8c..84476e69f 100644
--- a/hooks/gen_docs/gen_docs_cli_usage.py
+++ b/hooks/gen_docs/gen_docs_cli_usage.py
@@ -7,7 +7,7 @@
PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py"
PATH_CLI_COMMANDS_DOC = ROOT / "docs/docs/user/references/cli-commands.md"
-# TODO(@sujuka99): try to use typer_cli.main.docs here instead
+# TODO(Ivan Yordanov): try to use typer_cli.main.docs here instead
# https://github.com/bakdata/kpops/issues/297
if __name__ == "__main__":
diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py
index 6fb78f767..58edfcf34 100644
--- a/hooks/gen_docs/gen_docs_components.py
+++ b/hooks/gen_docs/gen_docs_components.py
@@ -11,7 +11,8 @@
from kpops.cli.registry import _find_classes
from kpops.components import KafkaConnector, PipelineComponent
from kpops.utils.colorify import redify, yellowify
-from kpops.utils.yaml_loading import load_yaml_file
+from kpops.utils.pydantic import issubclass_patched
+from kpops.utils.yaml import load_yaml_file
PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py"
PATH_CLI_COMMANDS_DOC = ROOT / "docs/docs/user/references/cli-commands.md"
@@ -33,21 +34,35 @@
)
KPOPS_COMPONENTS = tuple(_find_classes("kpops.components", PipelineComponent))
-KPOPS_COMPONENTS_INHERITANCE_REF = {
- component.type: cast(
- type[PipelineComponent],
- component.__base__,
- ).type
- for component in KPOPS_COMPONENTS
-}
KPOPS_COMPONENTS_SECTIONS = {
component.type: [
field_name
- for field_name, model in component.__fields__.items()
- if not model.field_info.exclude
+ for field_name, field_info in component.model_fields.items()
+ if not field_info.exclude
]
for component in KPOPS_COMPONENTS
}
+KPOPS_COMPONENTS_INHERITANCE_REF = {
+ component.type: {
+ "bases": [
+ cast(
+ type[PipelineComponent],
+ base,
+ ).type
+ for base in component.__bases__
+ if issubclass_patched(base, PipelineComponent)
+ ],
+ "parents": [
+ cast(
+ type[PipelineComponent],
+ parent,
+ ).type
+ for parent in component.parents
+ ],
+ }
+ for component in KPOPS_COMPONENTS
+}
+
# Dependency files should not be changed manually
DANGEROUS_FILES_TO_CHANGE = {
PATH_DOCS_COMPONENTS_DEPENDENCIES,
@@ -91,14 +106,13 @@ def filter_sections(
if section := filter_section(component_name, sections, target_section):
component_sections.append(section)
elif include_inherited:
- temp_component_name = component_name
- while (
- temp_component_name := KPOPS_COMPONENTS_INHERITANCE_REF[
- temp_component_name
- ]
- ) != PipelineComponent.type:
+ for component in KPOPS_COMPONENTS_INHERITANCE_REF[component_name][
+ "parents"
+ ]:
+ if component == PipelineComponent.type:
+ break
if section := filter_section(
- temp_component_name,
+ component,
sections,
target_section,
):
@@ -122,11 +136,12 @@ def filter_section(
section = target_section + "-" + component_name + ".yaml"
if section in sections:
return section
- if KPOPS_COMPONENTS_INHERITANCE_REF[component_name] == PipelineComponent.type:
+ if KPOPS_COMPONENTS_INHERITANCE_REF[component_name]["bases"] == [
+ PipelineComponent.type
+ ]:
section = target_section + ".yaml"
if section in sections:
return section
- return None
return None
diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py
index d347ed8b5..aea4b6af2 100644
--- a/hooks/gen_docs/gen_docs_env_vars.py
+++ b/hooks/gen_docs/gen_docs_env_vars.py
@@ -2,17 +2,20 @@
import csv
import shutil
-from collections.abc import Callable, Iterator
+from collections.abc import Callable
+from contextlib import suppress
from dataclasses import dataclass
from pathlib import Path
from textwrap import fill
from typing import Any
-from pydantic import BaseSettings
-from pydantic.fields import ModelField
+from pydantic import BaseModel
+from pydantic_core import PydanticUndefined
from pytablewriter import MarkdownTableWriter
from typer.models import ArgumentInfo, OptionInfo
+from kpops.utils.dict_ops import generate_substitution
+
try:
from typing import Self
except ImportError:
@@ -21,7 +24,8 @@
from hooks import ROOT
from hooks.gen_docs import IterableStrEnum
from kpops.cli import main
-from kpops.cli.pipeline_config import PipelineConfig
+from kpops.config import KpopsConfig
+from kpops.utils.pydantic import issubclass_patched
PATH_DOCS_RESOURCES = ROOT / "docs/docs/resources"
PATH_DOCS_VARIABLES = PATH_DOCS_RESOURCES / "variables"
@@ -29,10 +33,10 @@
PATH_CONFIG_ENV_VARS_DOTENV_FILE = PATH_DOCS_VARIABLES / "config_env_vars.env"
PATH_CONFIG_ENV_VARS_MD_FILE = PATH_DOCS_VARIABLES / "config_env_vars.md"
PATH_CONFIG_ENV_VARS_CSV_FILE = PATH_DOCS_VARIABLES / "temp_config_env_vars.csv"
-TITLE_CONFIG_ENV_VARS = "Pipeline config environment variables"
+TITLE_CONFIG_ENV_VARS = "Global config environment variables"
DESCRIPTION_CONFIG_ENV_VARS = (
"These variables are a lower priority alternative to the settings in `config.yaml`. "
- "Variables marked as required can instead be set in the pipeline config."
+ "Variables marked as required can instead be set in the global config."
)
PATH_CLI_ENV_VARS_DOTFILES_FILE = PATH_DOCS_VARIABLES / "cli_env_vars.env"
@@ -127,7 +131,7 @@ def csv_append_env_var(
width=68,
)
required = False
- if default_value == Ellipsis:
+ if default_value in [Ellipsis, PydanticUndefined]:
required = True
default_value = ""
elif default_value is None:
@@ -246,43 +250,48 @@ def write_csv_to_md_file(
def fill_csv_pipeline_config(target: Path) -> None:
- """Append all ``PipelineConfig``-related env vars to a ``.csv`` file.
+ """Append all ``KpopsConfig``-related env vars to a ``.csv`` file.
- Finds all ``PipelineConfig``-related env vars and appends them to
+ Finds all ``KpopsConfig``-related env vars and appends them to
a ``.csv`` file.
:param target: The path to the `.csv` file. Note that it must already
contain the column names
"""
- for field in collect_fields(PipelineConfig):
- field_info = PipelineConfig.Config.get_field_info(field.name)
+ for (field_name, field_value), env_var_name in zip(
+ generate_substitution(collect_fields(KpopsConfig), separator=".").items(),
+ generate_substitution(collect_fields(KpopsConfig), separator="__").keys(),
+ strict=True,
+ ):
+ with suppress(KeyError): # In case the prefix is ever removed from KpopsConfig
+ env_var_name = KpopsConfig.model_config["env_prefix"] + env_var_name
field_description: str = (
- field.field_info.description
+ field_value.description
or "No description available, please refer to the pipeline config documentation."
)
- field_default = field.field_info.default
- if config_env_var := field_info.get(
- "env",
- ) or field.field_info.extra.get("env"):
- csv_append_env_var(
- target,
- config_env_var,
- field_default,
- field_description,
- field.name,
- )
+ field_default = field_value.default
+ csv_append_env_var(
+ target,
+ env_var_name.upper(),
+ field_default,
+ field_description,
+ field_name,
+ )
-def collect_fields(settings: type[BaseSettings]) -> Iterator[ModelField]:
- """Collect and yield all fields in a settings class.
+def collect_fields(model: type[BaseModel]) -> dict[str, Any]:
+ """Collect and return a ``dict`` of all fields in a settings class.
:param model: settings class
- :yield: all settings including nested ones in settings classes
+ :return: ``dict`` of all fields in a settings class
"""
- for field in settings.__fields__.values():
- if issubclass(field_type := field.type_, BaseSettings):
- yield from collect_fields(field_type)
- yield field
+ seen_fields = {}
+ for field_name, field_value in model.model_fields.items():
+ if field_value.annotation and issubclass_patched(field_value.annotation):
+ seen_fields[field_name] = collect_fields(field_value.annotation)
+ else:
+ seen_fields[field_name] = field_value
+ return seen_fields
def fill_csv_cli(target: Path) -> None:
diff --git a/hooks/gen_schema.py b/hooks/gen_schema.py
index 726da823f..3e3cdb0e8 100644
--- a/hooks/gen_schema.py
+++ b/hooks/gen_schema.py
@@ -4,7 +4,12 @@
from pathlib import Path
from hooks import ROOT
-from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema
+from kpops.utils.gen_schema import (
+ SchemaScope,
+ gen_config_schema,
+ gen_defaults_schema,
+ gen_pipeline_schema,
+)
PATH_TO_SCHEMA = ROOT / "docs/docs/schema"
@@ -20,6 +25,8 @@ def gen_schema(scope: SchemaScope):
match scope:
case SchemaScope.PIPELINE:
gen_pipeline_schema()
+ case SchemaScope.DEFAULTS:
+ gen_defaults_schema()
case SchemaScope.CONFIG:
gen_config_schema()
Path(PATH_TO_SCHEMA / f"{scope.value}.json").write_text(f.getvalue())
@@ -27,4 +34,5 @@ def gen_schema(scope: SchemaScope):
if __name__ == "__main__":
gen_schema(SchemaScope.PIPELINE)
+ gen_schema(SchemaScope.DEFAULTS)
gen_schema(SchemaScope.CONFIG)
diff --git a/kpops/__init__.py b/kpops/__init__.py
index 8fea6bcdf..4d23bd364 100644
--- a/kpops/__init__.py
+++ b/kpops/__init__.py
@@ -1,10 +1,11 @@
__version__ = "2.0.11"
# export public API functions
-from kpops.cli.main import clean, deploy, destroy, generate, reset
+from kpops.cli.main import clean, deploy, destroy, generate, manifest, reset
__all__ = (
"generate",
+ "manifest",
"deploy",
"destroy",
"reset",
diff --git a/kpops/cli/main.py b/kpops/cli/main.py
index b989cc01e..4c342cdac 100644
--- a/kpops/cli/main.py
+++ b/kpops/cli/main.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import logging
+from collections.abc import Iterator
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING, Optional
@@ -10,7 +11,6 @@
from kpops import __version__
from kpops.cli.custom_formatter import CustomFormatter
-from kpops.cli.pipeline_config import ENV_PREFIX, PipelineConfig
from kpops.cli.registry import Registry
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.kafka_connect.kafka_connect_handler import (
@@ -19,25 +19,36 @@
from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler
from kpops.component_handlers.topic.handler import TopicHandler
from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper
-from kpops.pipeline_generator.pipeline import Pipeline
-from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema
+from kpops.components.base_components.models.resource import Resource
+from kpops.config import ENV_PREFIX, KpopsConfig
+from kpops.pipeline import Pipeline, PipelineGenerator
+from kpops.utils.gen_schema import (
+ SchemaScope,
+ gen_config_schema,
+ gen_defaults_schema,
+ gen_pipeline_schema,
+)
+from kpops.utils.pydantic import YamlConfigSettingsSource
+from kpops.utils.yaml import print_yaml
if TYPE_CHECKING:
- from collections.abc import Iterator
-
from kpops.components.base_components import PipelineComponent
+
LOG_DIVIDER = "#" * 100
app = dtyper.Typer(pretty_exceptions_enable=False)
-BASE_DIR_PATH_OPTION: Path = typer.Option(
- default=Path(),
+DOTENV_PATH_OPTION: Optional[list[Path]] = typer.Option(
+ default=None,
exists=True,
- dir_okay=True,
- file_okay=False,
- envvar=f"{ENV_PREFIX}PIPELINE_BASE_DIR",
- help="Base directory to the pipelines (default is current working directory)",
+ dir_okay=False,
+ file_okay=True,
+ envvar=f"{ENV_PREFIX}DOTENV_PATH",
+ help=(
+ "Path to dotenv file. Multiple files can be provided. "
+ "The files will be loaded in order, with each file overriding the previous one."
+ ),
)
DEFAULT_PATH_OPTION: Optional[Path] = typer.Option(
@@ -50,13 +61,13 @@
)
CONFIG_PATH_OPTION: Path = typer.Option(
- default=Path("config.yaml"),
- exists=False,
- dir_okay=False,
- file_okay=True,
+ default=Path(),
+ exists=True,
+ dir_okay=True,
+ file_okay=False,
readable=True,
envvar=f"{ENV_PREFIX}CONFIG_PATH",
- help="Path to the config.yaml file",
+ help="Path to the dir containing config.yaml files",
)
PIPELINE_PATH_ARG: Path = typer.Argument(
@@ -93,11 +104,16 @@ class FilterType(str, Enum):
help="Whether the --steps option should include/exclude the steps",
)
+OUTPUT_OPTION = typer.Option(True, help="Enable output printing")
VERBOSE_OPTION = typer.Option(False, help="Enable verbose printing")
-COMPONENTS_MODULES: str | None = typer.Argument(
+ENVIRONMENT: str | None = typer.Option(
default=None,
- help="Custom Python module containing your project-specific components",
+ envvar=f"{ENV_PREFIX}ENVIRONMENT",
+ help=(
+ "The environment you want to generate and deploy the pipeline to. "
+ "Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). "
+ ),
)
logger = logging.getLogger()
@@ -110,28 +126,24 @@ class FilterType(str, Enum):
def setup_pipeline(
- pipeline_base_dir: Path,
pipeline_path: Path,
- components_module: str | None,
- pipeline_config: PipelineConfig,
+ kpops_config: KpopsConfig,
+ environment: str | None,
) -> Pipeline:
registry = Registry()
- if components_module:
- registry.find_components(components_module)
+ if kpops_config.components_module:
+ registry.find_components(kpops_config.components_module)
registry.find_components("kpops.components")
- handlers = setup_handlers(components_module, pipeline_config)
- return Pipeline.load_from_yaml(
- pipeline_base_dir, pipeline_path, registry, pipeline_config, handlers
- )
+ handlers = setup_handlers(kpops_config)
+ parser = PipelineGenerator(kpops_config, registry, handlers)
+ return parser.load_yaml(pipeline_path, environment)
-def setup_handlers(
- components_module: str | None, config: PipelineConfig
-) -> ComponentHandlers:
- schema_handler = SchemaHandler.load_schema_handler(components_module, config)
- connector_handler = KafkaConnectHandler.from_pipeline_config(config)
- proxy_wrapper = ProxyWrapper(config)
+def setup_handlers(config: KpopsConfig) -> ComponentHandlers:
+ schema_handler = SchemaHandler.load_schema_handler(config)
+ connector_handler = KafkaConnectHandler.from_kpops_config(config)
+ proxy_wrapper = ProxyWrapper(config.kafka_rest)
topic_handler = TopicHandler(proxy_wrapper)
return ComponentHandlers(schema_handler, connector_handler, topic_handler)
@@ -193,24 +205,31 @@ def log_action(action: str, pipeline_component: PipelineComponent):
log.info("\n")
-def create_pipeline_config(
- config: Path, defaults: Optional[Path], verbose: bool
-) -> PipelineConfig:
+def create_kpops_config(
+ config: Path,
+ defaults: Path | None = None,
+ dotenv: list[Path] | None = None,
+ environment: str | None = None,
+ verbose: bool = False,
+) -> KpopsConfig:
setup_logging_level(verbose)
- PipelineConfig.Config.config_path = config
+ YamlConfigSettingsSource.config_dir = config
+ YamlConfigSettingsSource.environment = environment
+ kpops_config = KpopsConfig(
+ _env_file=dotenv # pyright: ignore[reportGeneralTypeIssues]
+ )
if defaults:
- pipeline_config = PipelineConfig(defaults_path=defaults)
+ kpops_config.defaults_path = defaults
else:
- pipeline_config = PipelineConfig()
- pipeline_config.defaults_path = config.parent / pipeline_config.defaults_path
- return pipeline_config
+ kpops_config.defaults_path = config / kpops_config.defaults_path
+ return kpops_config
@app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8
help="""
- Generate json schema.
+ Generate JSON schema.
- The schemas can be used to enable support for kpops files in a text editor.
+ The schemas can be used to enable support for KPOps files in a text editor.
"""
)
def schema(
@@ -220,73 +239,111 @@ def schema(
help="""
Scope of the generated schema
\n\n\n
- pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide [COMPONENTS_MODULES].
+ pipeline: Schema of PipelineComponents. Includes the built-in KPOps components by default. To include custom components, provide components module in config.
\n\n\n
- config: Schema of PipelineConfig.""",
+ config: Schema of KpopsConfig.""",
),
- components_module: Optional[str] = COMPONENTS_MODULES,
+ config: Path = CONFIG_PATH_OPTION,
include_stock_components: bool = typer.Option(
default=True, help="Include the built-in KPOps components."
),
) -> None:
match scope:
case SchemaScope.PIPELINE:
- gen_pipeline_schema(components_module, include_stock_components)
+ kpops_config = create_kpops_config(config)
+ gen_pipeline_schema(
+ kpops_config.components_module, include_stock_components
+ )
+ case SchemaScope.DEFAULTS:
+ kpops_config = create_kpops_config(config)
+ gen_defaults_schema(
+ kpops_config.components_module, include_stock_components
+ )
case SchemaScope.CONFIG:
gen_config_schema()
@app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8
- help="Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... commands."
+ short_help="Generate enriched pipeline representation",
+ help="Enrich pipeline steps with defaults. The enriched pipeline is used for all KPOps operations (deploy, destroy, ...).",
)
def generate(
pipeline_path: Path = PIPELINE_PATH_ARG,
- components_module: Optional[str] = COMPONENTS_MODULES,
- pipeline_base_dir: Path = BASE_DIR_PATH_OPTION,
+ dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION,
defaults: Optional[Path] = DEFAULT_PATH_OPTION,
config: Path = CONFIG_PATH_OPTION,
- template: bool = typer.Option(False, help="Run Helm template"),
- steps: Optional[str] = PIPELINE_STEPS,
- filter_type: FilterType = FILTER_TYPE,
+ output: bool = OUTPUT_OPTION,
+ environment: Optional[str] = ENVIRONMENT,
verbose: bool = VERBOSE_OPTION,
) -> Pipeline:
- pipeline_config = create_pipeline_config(config, defaults, verbose)
- pipeline = setup_pipeline(
- pipeline_base_dir, pipeline_path, components_module, pipeline_config
+ kpops_config = create_kpops_config(
+ config,
+ defaults,
+ dotenv,
+ environment,
+ verbose,
)
+ pipeline = setup_pipeline(pipeline_path, kpops_config, environment)
+ if output:
+ print_yaml(pipeline.to_yaml())
+ return pipeline
- if not template:
- pipeline.print_yaml()
-
- if template:
- steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type)
- for component in steps_to_apply:
- component.template()
- elif steps:
- log.warning(
- "The following flags are considered only when `--template` is set: \n \
- '--steps'"
- )
- return pipeline
+@app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8
+ short_help="Render final resource representation",
+ help="In addition to generate, render final resource representation for each pipeline step, e.g. Kubernetes manifests.",
+)
+def manifest(
+ pipeline_path: Path = PIPELINE_PATH_ARG,
+ dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION,
+ defaults: Optional[Path] = DEFAULT_PATH_OPTION,
+ config: Path = CONFIG_PATH_OPTION,
+ output: bool = OUTPUT_OPTION,
+ steps: Optional[str] = PIPELINE_STEPS,
+ filter_type: FilterType = FILTER_TYPE,
+ environment: Optional[str] = ENVIRONMENT,
+ verbose: bool = VERBOSE_OPTION,
+) -> list[Resource]:
+ pipeline = generate(
+ pipeline_path=pipeline_path,
+ dotenv=dotenv,
+ defaults=defaults,
+ config=config,
+ output=False,
+ environment=environment,
+ verbose=verbose,
+ )
+ steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type)
+ resources: list[Resource] = []
+ for component in steps_to_apply:
+ resource = component.manifest()
+ resources.append(resource)
+ if output:
+ for manifest in resource:
+ print_yaml(manifest)
+ return resources
@app.command(help="Deploy pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8
def deploy(
pipeline_path: Path = PIPELINE_PATH_ARG,
- components_module: Optional[str] = COMPONENTS_MODULES,
- pipeline_base_dir: Path = BASE_DIR_PATH_OPTION,
+ dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION,
defaults: Optional[Path] = DEFAULT_PATH_OPTION,
config: Path = CONFIG_PATH_OPTION,
steps: Optional[str] = PIPELINE_STEPS,
filter_type: FilterType = FILTER_TYPE,
+ environment: Optional[str] = ENVIRONMENT,
dry_run: bool = DRY_RUN,
verbose: bool = VERBOSE_OPTION,
):
- pipeline_config = create_pipeline_config(config, defaults, verbose)
- pipeline = setup_pipeline(
- pipeline_base_dir, pipeline_path, components_module, pipeline_config
+ kpops_config = create_kpops_config(
+ config,
+ defaults,
+ dotenv,
+ environment,
+ verbose,
)
+ pipeline = setup_pipeline(pipeline_path, kpops_config, environment)
steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type)
for component in steps_to_apply:
@@ -297,19 +354,23 @@ def deploy(
@app.command(help="Destroy pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8
def destroy(
pipeline_path: Path = PIPELINE_PATH_ARG,
- components_module: Optional[str] = COMPONENTS_MODULES,
- pipeline_base_dir: Path = BASE_DIR_PATH_OPTION,
+ dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION,
defaults: Optional[Path] = DEFAULT_PATH_OPTION,
config: Path = CONFIG_PATH_OPTION,
steps: Optional[str] = PIPELINE_STEPS,
filter_type: FilterType = FILTER_TYPE,
+ environment: Optional[str] = ENVIRONMENT,
dry_run: bool = DRY_RUN,
verbose: bool = VERBOSE_OPTION,
):
- pipeline_config = create_pipeline_config(config, defaults, verbose)
- pipeline = setup_pipeline(
- pipeline_base_dir, pipeline_path, components_module, pipeline_config
+ kpops_config = create_kpops_config(
+ config,
+ defaults,
+ dotenv,
+ environment,
+ verbose,
)
+ pipeline = setup_pipeline(pipeline_path, kpops_config, environment)
pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type)
for component in pipeline_steps:
log_action("Destroy", component)
@@ -319,19 +380,23 @@ def destroy(
@app.command(help="Reset pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8
def reset(
pipeline_path: Path = PIPELINE_PATH_ARG,
- components_module: Optional[str] = COMPONENTS_MODULES,
- pipeline_base_dir: Path = BASE_DIR_PATH_OPTION,
+ dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION,
defaults: Optional[Path] = DEFAULT_PATH_OPTION,
config: Path = CONFIG_PATH_OPTION,
steps: Optional[str] = PIPELINE_STEPS,
filter_type: FilterType = FILTER_TYPE,
+ environment: Optional[str] = ENVIRONMENT,
dry_run: bool = DRY_RUN,
verbose: bool = VERBOSE_OPTION,
):
- pipeline_config = create_pipeline_config(config, defaults, verbose)
- pipeline = setup_pipeline(
- pipeline_base_dir, pipeline_path, components_module, pipeline_config
+ kpops_config = create_kpops_config(
+ config,
+ defaults,
+ dotenv,
+ environment,
+ verbose,
)
+ pipeline = setup_pipeline(pipeline_path, kpops_config, environment)
pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type)
for component in pipeline_steps:
log_action("Reset", component)
@@ -342,19 +407,23 @@ def reset(
@app.command(help="Clean pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8
def clean(
pipeline_path: Path = PIPELINE_PATH_ARG,
- components_module: Optional[str] = COMPONENTS_MODULES,
- pipeline_base_dir: Path = BASE_DIR_PATH_OPTION,
+ dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION,
defaults: Optional[Path] = DEFAULT_PATH_OPTION,
config: Path = CONFIG_PATH_OPTION,
steps: Optional[str] = PIPELINE_STEPS,
filter_type: FilterType = FILTER_TYPE,
+ environment: Optional[str] = ENVIRONMENT,
dry_run: bool = DRY_RUN,
verbose: bool = VERBOSE_OPTION,
):
- pipeline_config = create_pipeline_config(config, defaults, verbose)
- pipeline = setup_pipeline(
- pipeline_base_dir, pipeline_path, components_module, pipeline_config
+ kpops_config = create_kpops_config(
+ config,
+ defaults,
+ dotenv,
+ environment,
+ verbose,
)
+ pipeline = setup_pipeline(pipeline_path, kpops_config, environment)
pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type)
for component in pipeline_steps:
log_action("Clean", component)
diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py
deleted file mode 100644
index 8ed19dfd8..000000000
--- a/kpops/cli/pipeline_config.py
+++ /dev/null
@@ -1,129 +0,0 @@
-from __future__ import annotations
-
-from collections.abc import Callable
-from pathlib import Path
-from typing import Any
-
-from pydantic import BaseConfig, BaseSettings, Field
-from pydantic.env_settings import SettingsSourceCallable
-
-from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig
-from kpops.utils.yaml_loading import load_yaml_file
-
-ENV_PREFIX = "KPOPS_"
-
-
-class TopicNameConfig(BaseSettings):
- """Configures topic names."""
-
- default_output_topic_name: str = Field(
- default="${pipeline_name}-${component_name}",
- description="Configures the value for the variable ${output_topic_name}",
- )
- default_error_topic_name: str = Field(
- default="${pipeline_name}-${component_name}-error",
- description="Configures the value for the variable ${error_topic_name}",
- )
-
-
-class PipelineConfig(BaseSettings):
- """Pipeline configuration unrelated to the components."""
-
- defaults_path: Path = Field(
- default=Path(),
- example="defaults",
- description="The path to the folder containing the defaults.yaml file and the environment defaults files. "
- "Paths can either be absolute or relative to `config.yaml`",
- )
- environment: str = Field(
- default=...,
- env=f"{ENV_PREFIX}ENVIRONMENT",
- example="development",
- description="The environment you want to generate and deploy the pipeline to. "
- "Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).",
- )
- brokers: str = Field(
- default=...,
- env=f"{ENV_PREFIX}KAFKA_BROKERS",
- description="The comma separated Kafka brokers address.",
- example="broker1:9092,broker2:9092,broker3:9092",
- )
- defaults_filename_prefix: str = Field(
- default="defaults",
- description="The name of the defaults file and the prefix of the defaults environment file.",
- )
- topic_name_config: TopicNameConfig = Field(
- default=TopicNameConfig(),
- description="Configure the topic name variables you can use in the pipeline definition.",
- )
- schema_registry_url: str | None = Field(
- default=None,
- example="http://localhost:8081",
- env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL",
- description="Address of the Schema Registry.",
- )
- kafka_rest_host: str | None = Field(
- default=None,
- env=f"{ENV_PREFIX}REST_PROXY_HOST",
- example="http://localhost:8082",
- description="Address of the Kafka REST Proxy.",
- )
- kafka_connect_host: str | None = Field(
- default=None,
- env=f"{ENV_PREFIX}CONNECT_HOST",
- example="http://localhost:8083",
- description="Address of Kafka Connect.",
- )
- timeout: int = Field(
- default=300,
- env=f"{ENV_PREFIX}TIMEOUT",
- description="The timeout in seconds that specifies when actions like deletion or deploy timeout.",
- )
- create_namespace: bool = Field(
- default=False,
- description="Flag for `helm upgrade --install`. Create the release namespace if not present.",
- )
- helm_config: HelmConfig = Field(
- default=HelmConfig(),
- description="Global flags for Helm.",
- )
- helm_diff_config: HelmDiffConfig = Field(
- default=HelmDiffConfig(),
- description="Configure Helm Diff.",
- )
- retain_clean_jobs: bool = Field(
- default=False,
- env=f"{ENV_PREFIX}RETAIN_CLEAN_JOBS",
- description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.",
- )
-
- class Config(BaseConfig):
- config_path = Path("config.yaml")
- env_file = ".env"
- env_file_encoding = "utf-8"
-
- @classmethod
- def customise_sources(
- cls,
- init_settings: SettingsSourceCallable,
- env_settings: SettingsSourceCallable,
- file_secret_settings: SettingsSourceCallable,
- ) -> tuple[
- SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], ...
- ]:
- return (
- env_settings,
- init_settings,
- yaml_config_settings_source,
- file_secret_settings,
- )
-
-
-def yaml_config_settings_source(settings: PipelineConfig) -> dict[str, Any]:
- path_to_config = settings.Config.config_path
- if path_to_config.exists():
- if isinstance(source := load_yaml_file(path_to_config), dict):
- return source
- err_msg = f"{path_to_config} must be a mapping."
- raise TypeError(err_msg)
- return {}
diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py
index 5e4d758db..8499504ba 100644
--- a/kpops/component_handlers/helm_wrapper/helm.py
+++ b/kpops/component_handlers/helm_wrapper/helm.py
@@ -4,6 +4,7 @@
import re
import subprocess
import tempfile
+from pathlib import Path
from typing import TYPE_CHECKING
import yaml
@@ -18,6 +19,8 @@
RepoAuthFlags,
Version,
)
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
+from kpops.components.base_components.models.resource import Resource
if TYPE_CHECKING:
from collections.abc import Iterable, Iterator
@@ -132,8 +135,8 @@ def template(
namespace: str,
values: dict,
flags: HelmTemplateFlags | None = None,
- ) -> str:
- """From HELM: Render chart templates locally and display the output.
+ ) -> Resource:
+ """From Helm: Render chart templates locally and display the output.
Any values that would normally be looked up or retrieved in-cluster will
be faked locally. Additionally, none of the server-side testing of chart
@@ -144,11 +147,11 @@ def template(
:param namespace: The Kubernetes namespace the command should execute in
:param values: `values.yaml` to be used
:param flags: the flags to be set for `helm template`, defaults to HelmTemplateFlags()
- :return: the output of `helm template`
+ :return: the rendered resource (list of Kubernetes manifests)
"""
if flags is None:
flags = HelmTemplateFlags()
- with tempfile.NamedTemporaryFile("w") as values_file:
+ with tempfile.NamedTemporaryFile(mode="w", delete=False) as values_file:
yaml.safe_dump(values, values_file)
command = [
"helm",
@@ -161,7 +164,9 @@ def template(
values_file.name,
]
command.extend(flags.to_command())
- return self.__execute(command)
+ output = self.__execute(command)
+ manifests = KubernetesManifest.from_yaml(output)
+ return list(manifests)
def get_manifest(self, release_name: str, namespace: str) -> Iterable[HelmTemplate]:
command = [
@@ -198,7 +203,11 @@ def load_manifest(yaml_contents: str) -> Iterator[HelmTemplate]:
if line.startswith("---"):
is_beginning = True
if template_name and current_yaml_doc:
- yield HelmTemplate.load(template_name, "\n".join(current_yaml_doc))
+ manifests = KubernetesManifest.from_yaml(
+ "\n".join(current_yaml_doc)
+ )
+ manifest = next(manifests) # only 1 manifest
+ yield HelmTemplate(Path(template_name), manifest)
template_name = None
current_yaml_doc.clear()
elif is_beginning:
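
Note: the template and load_manifest changes above replace raw string handling with parsed Kubernetes manifests, keyed by the template path from the "# Source:" header. A rough standalone sketch of that idea, using plain PyYAML and a made-up Helm output string (not the repo's implementation):

from pathlib import Path

import yaml  # PyYAML

# Toy `helm template` output; the real code shells out to Helm instead.
helm_output = """\
---
# Source: my-chart/templates/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: demo
---
# Source: my-chart/templates/service.yaml
apiVersion: v1
kind: Service
metadata:
  name: demo
"""

# yaml.load_all splits the stream on `---`, mirroring KubernetesManifest.from_yaml
manifests = list(yaml.load_all(helm_output, yaml.Loader))
sources = [
    line.removeprefix("# Source: ")
    for line in helm_output.splitlines()
    if line.startswith("# Source: ")
]
templates = list(zip(map(Path, sources), manifests))
print(templates[0][0], templates[0][1]["kind"])  # my-chart/templates/deployment.yaml Deployment
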
diff --git a/kpops/component_handlers/helm_wrapper/helm_diff.py b/kpops/component_handlers/helm_wrapper/helm_diff.py
index 26de5613a..7827fe453 100644
--- a/kpops/component_handlers/helm_wrapper/helm_diff.py
+++ b/kpops/component_handlers/helm_wrapper/helm_diff.py
@@ -2,6 +2,7 @@
from collections.abc import Iterable, Iterator
from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
from kpops.utils.dict_differ import Change, render_diff
log = logging.getLogger("HelmDiff")
@@ -15,7 +16,7 @@ def __init__(self, config: HelmDiffConfig) -> None:
def calculate_changes(
current_release: Iterable[HelmTemplate],
new_release: Iterable[HelmTemplate],
- ) -> Iterator[Change[dict]]:
+ ) -> Iterator[Change[KubernetesManifest]]:
"""Compare 2 releases and generate a Change object for each difference.
:param current_release: Iterable containing HelmTemplate objects for the current release
@@ -31,13 +32,13 @@ def calculate_changes(
# get corresponding dry-run release
new_resource = new_release_index.pop(current_resource.filepath, None)
yield Change(
- current_resource.template,
- new_resource.template if new_resource else {},
+ current_resource.manifest,
+ new_resource.manifest if new_resource else KubernetesManifest(),
)
# collect added files
for new_resource in new_release_index.values():
- yield Change({}, new_resource.template)
+ yield Change(KubernetesManifest(), new_resource.manifest)
def log_helm_diff(
self,
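
Note: calculate_changes now pairs the current and dry-run releases by manifest filepath and substitutes empty KubernetesManifest objects for missing resources. A self-contained sketch of that pairing logic, using plain dicts and a stand-in Change dataclass instead of the repo's types:

from dataclasses import dataclass
from pathlib import Path

@dataclass
class Change:  # stand-in for kpops.utils.dict_differ.Change
    old: dict
    new: dict

current = {Path("templates/deployment.yaml"): {"kind": "Deployment", "spec": {"replicas": 1}}}
new = {
    Path("templates/deployment.yaml"): {"kind": "Deployment", "spec": {"replicas": 2}},
    Path("templates/service.yaml"): {"kind": "Service"},
}

changes = []
for filepath, old_manifest in current.items():
    # changed or removed resources: pair with the dry-run release, or an empty manifest
    changes.append(Change(old_manifest, new.pop(filepath, {})))
for added_manifest in new.values():
    # resources that only exist in the new release
    changes.append(Change({}, added_manifest))
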
diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py
index af21abb3f..8a635983f 100644
--- a/kpops/component_handlers/helm_wrapper/model.py
+++ b/kpops/component_handlers/helm_wrapper/model.py
@@ -2,24 +2,24 @@
from dataclasses import dataclass
from pathlib import Path
-import yaml
-from pydantic import BaseConfig, BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
from typing_extensions import override
from kpops.component_handlers.helm_wrapper.exception import ParseError
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
from kpops.utils.docstring import describe_attr
-from kpops.utils.pydantic import DescConfig
+from kpops.utils.pydantic import DescConfigModel
class HelmDiffConfig(BaseModel):
ignore: set[str] = Field(
default_factory=set,
description="Set of keys that should not be checked.",
- example="- name\n- imageTag",
+ examples=["- name\n- imageTag"],
)
-class RepoAuthFlags(BaseModel):
+class RepoAuthFlags(DescConfigModel):
"""Authorisation-related flags for `helm repo`.
:param username: Username, defaults to None
@@ -46,9 +46,6 @@ class RepoAuthFlags(BaseModel):
default=False, description=describe_attr("insecure_skip_tls_verify", __doc__)
)
- class Config(DescConfig):
- pass
-
def to_command(self) -> list[str]:
command: list[str] = []
if self.username:
@@ -64,7 +61,7 @@ def to_command(self) -> list[str]:
return command
-class HelmRepoConfig(BaseModel):
+class HelmRepoConfig(DescConfigModel):
"""Helm repository configuration.
:param repository_name: Name of the Helm repository
@@ -80,11 +77,8 @@ class HelmRepoConfig(BaseModel):
default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__)
)
- class Config(DescConfig):
- pass
-
-class HelmConfig(BaseModel):
+class HelmConfig(DescConfigModel):
"""Global Helm configuration.
:param context: Name of kubeconfig context (`--kube-context`)
@@ -95,7 +89,7 @@ class HelmConfig(BaseModel):
context: str | None = Field(
default=None,
description=describe_attr("context", __doc__),
- example="dev-storage",
+ examples=["dev-storage"],
)
debug: bool = Field(
default=False,
@@ -107,9 +101,6 @@ class HelmConfig(BaseModel):
description=describe_attr("api_version", __doc__),
)
- class Config(DescConfig):
- pass
-
class HelmFlags(RepoAuthFlags):
set_file: dict[str, Path] = Field(default_factory=dict)
@@ -120,8 +111,9 @@ class HelmFlags(RepoAuthFlags):
wait: bool = True
wait_for_jobs: bool = False
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
@override
def to_command(self) -> list[str]:
@@ -168,8 +160,8 @@ def to_command(self) -> list[str]:
@dataclass
class HelmTemplate:
- filepath: str
- template: dict
+ filepath: Path
+ manifest: KubernetesManifest
@staticmethod
def parse_source(source: str) -> str:
@@ -184,11 +176,6 @@ def parse_source(source: str) -> str:
raise ParseError(msg)
return source.removeprefix(HELM_SOURCE_PREFIX).strip()
- @classmethod
- def load(cls, filepath: str, content: str):
- template = yaml.load(content, yaml.Loader)
- return cls(filepath, template)
-
# Indicates the beginning of `NOTES:` section in the output of `helm install` or
# `helm upgrade`
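
Note: the model changes above follow the Pydantic v1-to-v2 migration applied throughout this diff: nested `class Config` blocks become a `model_config = ConfigDict(...)` attribute, `Extra.allow` becomes the string "allow", and `example` becomes `examples`. A minimal sketch with a hypothetical model (not one of the repo's classes), assuming Pydantic v2:

from pydantic import BaseModel, ConfigDict, Field

class DemoFlags(BaseModel):  # hypothetical model, for illustration only
    model_config = ConfigDict(extra="allow")

    context: str | None = Field(default=None, examples=["dev-storage"])

flags = DemoFlags(context="dev-storage", debug=True)  # extra="allow" keeps unknown fields
print(flags.model_dump())  # {'context': 'dev-storage', 'debug': True}
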
diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py
index 7ad76b93a..4b892996f 100644
--- a/kpops/component_handlers/helm_wrapper/utils.py
+++ b/kpops/component_handlers/helm_wrapper/utils.py
@@ -1,22 +1,31 @@
+import hashlib
import logging
log = logging.getLogger("HelmUtils")
+ENCODING = "utf-8"
+RELEASE_NAME_MAX_LEN = 53
-RELEASE_NAME_MAX_LEN = 52
+def create_helm_release_name(name: str, suffix: str = "") -> str:
+ """Shortens the long Helm release name.
-def trim_release_name(name: str, suffix: str = "") -> str:
- """Trim Helm release name while preserving suffix.
+ Helm has a limit of 53 characters for release names.
+ If the name exceeds the character limit:
+ 1. trim the string and fetch the first RELEASE_NAME_MAX_LEN - len(suffix) characters.
+ 2. replace the last 6 characters with a dash followed by the first 5 characters of the name's SHA-1 hash to avoid collisions
+ 3. append the suffix if given
- :param name: The release name including optional suffix
+ :param name: The Helm release name to be shortened.
:param suffix: The release suffix to preserve
- :return: Truncated release name.
+ :return: The trimmed and hashed release name if it exceeds the Helm character limit, otherwise the original release name
"""
if len(name) > RELEASE_NAME_MAX_LEN:
- new_name = name[: (RELEASE_NAME_MAX_LEN - len(suffix))] + suffix
+ exact_name = name[: RELEASE_NAME_MAX_LEN - len(suffix)]
+ hash_name = hashlib.sha1(name.encode(ENCODING)).hexdigest()
+ new_name = exact_name[:-6] + "-" + hash_name[:5] + suffix
log.critical(
- f"Invalid Helm release name '{name}'. Truncating to {RELEASE_NAME_MAX_LEN} characters: \n {name} --> {new_name}"
+ f"Invalid Helm release name '{name}'. Truncating and hashing the release name: \n {name} --> {new_name}"
)
- name = new_name
+ return new_name
return name
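
Note: for illustration, a simplified standalone re-implementation of the trim-and-hash scheme described in the docstring above; the example release name is made up:

import hashlib

RELEASE_NAME_MAX_LEN = 53

def shorten_release_name(name: str, suffix: str = "") -> str:
    """Simplified sketch of the trim-and-hash scheme."""
    if len(name) <= RELEASE_NAME_MAX_LEN:
        return name
    exact_name = name[: RELEASE_NAME_MAX_LEN - len(suffix)]
    digest = hashlib.sha1(name.encode("utf-8")).hexdigest()
    # drop the last 6 characters, splice in "-" + 5 hash characters, re-append the suffix
    return exact_name[:-6] + "-" + digest[:5] + suffix

long_name = "my-fancy-pipeline-with-an-extremely-long-component-name-clean"
print(len(shorten_release_name(long_name, "-clean")))  # 53
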
diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py
index 13f02a80d..4d92bad03 100644
--- a/kpops/component_handlers/kafka_connect/connect_wrapper.py
+++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
import logging
import time
from time import sleep
+from typing import TYPE_CHECKING
import httpx
@@ -14,6 +17,11 @@
KafkaConnectResponse,
)
+if TYPE_CHECKING:
+ from pydantic import AnyHttpUrl
+
+ from kpops.config import KafkaConnectConfig
+
HEADERS = {"Accept": "application/json", "Content-Type": "application/json"}
log = logging.getLogger("KafkaConnectAPI")
@@ -22,18 +30,12 @@
class ConnectWrapper:
"""Wraps Kafka Connect APIs."""
- def __init__(self, host: str | None):
- if not host:
- error_message = (
- "The Kafka Connect host is not set. Please set the host in the config."
- )
- log.error(error_message)
- raise RuntimeError(error_message)
- self._host: str = host
+ def __init__(self, config: KafkaConnectConfig) -> None:
+ self._config: KafkaConnectConfig = config
@property
- def host(self) -> str:
- return self._host
+ def url(self) -> AnyHttpUrl:
+ return self._config.url
def create_connector(
self, connector_config: KafkaConnectorConfig
@@ -44,10 +46,10 @@ def create_connector(
:param connector_config: The config of the connector
:return: The current connector info if successful.
"""
- config_json = connector_config.dict()
+ config_json = connector_config.model_dump()
connect_data = {"name": connector_config.name, "config": config_json}
response = httpx.post(
- url=f"{self._host}/connectors", headers=HEADERS, json=connect_data
+ url=f"{self.url}connectors", headers=HEADERS, json=connect_data
)
if response.status_code == httpx.codes.CREATED:
log.info(f"Connector {connector_config.name} created.")
@@ -61,17 +63,18 @@ def create_connector(
self.create_connector(connector_config)
raise KafkaConnectError(response)
- def get_connector(self, connector_name: str) -> KafkaConnectResponse:
+ def get_connector(self, connector_name: str | None) -> KafkaConnectResponse:
"""Get information about the connector.
- API Reference:
- https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name)
-
+ API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name)
:param connector_name: Name of the created connector
:return: Information about the connector.
"""
+ if connector_name is None:
+ msg = "Connector name not set"
+ raise Exception(msg)
response = httpx.get(
- url=f"{self._host}/connectors/{connector_name}", headers=HEADERS
+ url=f"{self.url}connectors/{connector_name}", headers=HEADERS
)
if response.status_code == httpx.codes.OK:
log.info(f"Connector {connector_name} exists.")
@@ -100,9 +103,9 @@ def update_connector_config(
:return: Information about the connector after the change has been made.
"""
connector_name = connector_config.name
- config_json = connector_config.dict()
+ config_json = connector_config.model_dump()
response = httpx.put(
- url=f"{self._host}/connectors/{connector_name}/config",
+ url=f"{self.url}connectors/{connector_name}/config",
headers=HEADERS,
json=config_json,
)
@@ -133,9 +136,9 @@ def validate_connector_config(
:return: List of all found errors
"""
response = httpx.put(
- url=f"{self._host}/connector-plugins/{connector_config.class_name}/config/validate",
+ url=f"{self.url}connector-plugins/{connector_config.class_name}/config/validate",
headers=HEADERS,
- json=connector_config.dict(),
+ json=connector_config.model_dump(),
)
if response.status_code == httpx.codes.OK:
@@ -163,7 +166,7 @@ def delete_connector(self, connector_name: str) -> None:
:raises ConnectorNotFoundException: Connector not found
"""
response = httpx.delete(
- url=f"{self._host}/connectors/{connector_name}", headers=HEADERS
+ url=f"{self.url}connectors/{connector_name}", headers=HEADERS
)
if response.status_code == httpx.codes.NO_CONTENT:
log.info(f"Connector {connector_name} deleted.")
diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py
index c810a9c36..fb644dd7a 100644
--- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py
+++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py
@@ -18,8 +18,8 @@
except ImportError:
from typing_extensions import Self
- from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig
+ from kpops.config import KpopsConfig
log = logging.getLogger("KafkaConnectHandler")
@@ -97,19 +97,19 @@ def __dry_run_connector_creation(
connector = self._connect_wrapper.get_connector(connector_name)
log.info(f"Connector Creation: connector {connector_name} already exists.")
- if diff := render_diff(connector.config, connector_config.dict()):
+ if diff := render_diff(connector.config, connector_config.model_dump()):
log.info(f"Updating config:\n{diff}")
- log.debug(connector_config.dict())
+ log.debug(connector_config.model_dump())
log.debug(f"PUT /connectors/{connector_name}/config HTTP/1.1")
- log.debug(f"HOST: {self._connect_wrapper.host}")
+ log.debug(f"HOST: {self._connect_wrapper.url}")
except ConnectorNotFoundException:
- diff = render_diff({}, connector_config.dict())
+ diff = render_diff({}, connector_config.model_dump())
log.info(
f"Connector Creation: connector {connector_name} does not exist. Creating connector with config:\n{diff}"
)
log.debug("POST /connectors HTTP/1.1")
- log.debug(f"HOST: {self._connect_wrapper.host}")
+ log.debug(f"HOST: {self._connect_wrapper.url}")
errors = self._connect_wrapper.validate_connector_config(connector_config)
if len(errors) > 0:
@@ -130,15 +130,15 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None:
)
)
log.debug(f"DELETE /connectors/{connector_name} HTTP/1.1")
- log.debug(f"HOST: {self._connect_wrapper.host}")
+ log.debug(f"HOST: {self._connect_wrapper.url}")
except ConnectorNotFoundException:
log.warning(
f"Connector Destruction: connector {connector_name} does not exist and cannot be deleted. Skipping."
)
@classmethod
- def from_pipeline_config(cls, pipeline_config: PipelineConfig) -> Self:
+ def from_kpops_config(cls, config: KpopsConfig) -> Self:
return cls(
- connect_wrapper=ConnectWrapper(host=pipeline_config.kafka_connect_host),
- timeout=pipeline_config.timeout,
+ connect_wrapper=ConnectWrapper(config.kafka_connect),
+ timeout=config.timeout,
)
diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py
index e83e33e5d..59cdba7b9 100644
--- a/kpops/component_handlers/kafka_connect/model.py
+++ b/kpops/component_handlers/kafka_connect/model.py
@@ -1,10 +1,24 @@
from enum import Enum
from typing import Any, Literal
-from pydantic import BaseConfig, BaseModel, Extra, Field, validator
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ SerializationInfo,
+ field_validator,
+ model_serializer,
+)
+from pydantic.json_schema import SkipJsonSchema
from typing_extensions import override
-from kpops.utils.pydantic import CamelCaseConfig, DescConfig, to_dot
+from kpops.components.base_components.helm_app import HelmAppValues
+from kpops.utils.pydantic import (
+ CamelCaseConfigModel,
+ DescConfigModel,
+ by_alias,
+ exclude_by_value,
+ to_dot,
+)
class KafkaConnectorType(str, Enum):
@@ -12,23 +26,27 @@ class KafkaConnectorType(str, Enum):
SOURCE = "source"
-class KafkaConnectorConfig(BaseModel):
+class KafkaConnectorConfig(DescConfigModel):
"""Settings specific to Kafka Connectors."""
connector_class: str
- name: str = Field(default=..., hidden_from_schema=True)
+ name: SkipJsonSchema[str]
- class Config(DescConfig):
- extra = Extra.allow
- alias_generator = to_dot
-
- @override
- @classmethod
- def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None:
- super().schema_extra(schema, model)
- schema["additionalProperties"] = {"type": "string"}
-
- @validator("connector_class")
+ @override
+ @staticmethod
+ def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None:
+ super(KafkaConnectorConfig, KafkaConnectorConfig).json_schema_extra(
+ schema, model
+ )
+ schema["additional_properties"] = {"type": "string"}
+
+ model_config = ConfigDict(
+ extra="allow",
+ alias_generator=to_dot,
+ json_schema_extra=json_schema_extra,
+ )
+
+ @field_validator("connector_class")
def connector_class_must_contain_dot(cls, connector_class: str) -> str:
if "." not in connector_class:
msg = f"Invalid connector class {connector_class}"
@@ -39,9 +57,11 @@ def connector_class_must_contain_dot(cls, connector_class: str) -> str:
def class_name(self) -> str:
return self.connector_class.split(".")[-1]
- @override
- def dict(self, **_) -> dict[str, Any]:
- return super().dict(by_alias=True, exclude_none=True)
+ # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution
+ @model_serializer(mode="wrap", when_used="always")
+ def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]:
+ result = exclude_by_value(handler(self), None)
+ return {by_alias(self, name): value for name, value in result.items()}
class ConnectorTask(BaseModel):
@@ -53,10 +73,9 @@ class KafkaConnectResponse(BaseModel):
name: str
config: dict[str, str]
tasks: list[ConnectorTask]
- type: str | None
+ type: str | None = None
- class Config(BaseConfig):
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class KafkaConnectConfigError(BaseModel):
@@ -74,24 +93,13 @@ class KafkaConnectConfigErrorResponse(BaseModel):
configs: list[KafkaConnectConfigDescription]
-class KafkaConnectResetterConfig(BaseModel):
+class KafkaConnectorResetterConfig(CamelCaseConfigModel):
brokers: str
connector: str
delete_consumer_group: bool | None = None
offset_topic: str | None = None
- class Config(CamelCaseConfig):
- pass
-
-class KafkaConnectResetterValues(BaseModel):
+class KafkaConnectorResetterValues(HelmAppValues):
connector_type: Literal["source", "sink"]
- config: KafkaConnectResetterConfig
- name_override: str
-
- class Config(CamelCaseConfig):
- pass
-
- @override
- def dict(self, **_) -> dict[str, Any]:
- return super().dict(by_alias=True, exclude_none=True)
+ config: KafkaConnectorResetterConfig
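
Note: KafkaConnectorConfig replaces the overridden .dict() with a wrap-mode @model_serializer: the default serializer runs via handler(self) and its result is post-processed. A self-contained sketch of the pattern with a hypothetical model, where the repo's by_alias/exclude_by_value helpers are replaced by an inline None filter:

from typing import Any
from pydantic import BaseModel, SerializationInfo, model_serializer

class ConnectorSketch(BaseModel):  # hypothetical, not the repo's model
    connector_class: str
    tasks_max: int | None = None

    @model_serializer(mode="wrap", when_used="always")
    def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]:
        # run the default serializer first, then drop None entries
        return {k: v for k, v in handler(self).items() if v is not None}

print(ConnectorSketch(connector_class="com.example.DemoSink").model_dump())
# {'connector_class': 'com.example.DemoSink'}
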
diff --git a/kpops/pipeline_generator/__init__.py b/kpops/component_handlers/kubernetes/__init__.py
similarity index 100%
rename from kpops/pipeline_generator/__init__.py
rename to kpops/component_handlers/kubernetes/__init__.py
diff --git a/kpops/component_handlers/kubernetes/model.py b/kpops/component_handlers/kubernetes/model.py
new file mode 100644
index 000000000..bc9ff2730
--- /dev/null
+++ b/kpops/component_handlers/kubernetes/model.py
@@ -0,0 +1,27 @@
+import json
+from collections import UserDict
+from collections.abc import Iterator
+
+import yaml
+
+from kpops.utils.types import JsonType
+
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
+
+
+class KubernetesManifest(UserDict[str, JsonType]):
+ """Representation of a Kubernetes API object as YAML/JSON mapping."""
+
+ @classmethod
+ def from_yaml(cls, /, content: str) -> Iterator[Self]:
+ manifests: Iterator[dict[str, JsonType]] = yaml.load_all(content, yaml.Loader)
+ for manifest in manifests:
+ yield cls(manifest)
+
+ @classmethod
+ def from_json(cls, /, content: str) -> Self:
+ manifest: dict[str, JsonType] = json.loads(content)
+ return cls(manifest)
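
Note: a short usage sketch of the new KubernetesManifest class, assuming KPOps from this changeset is installed; the YAML content is made up:

from kpops.component_handlers.kubernetes.model import KubernetesManifest

content = """\
apiVersion: v1
kind: ConfigMap
metadata:
  name: demo
---
apiVersion: v1
kind: Service
metadata:
  name: demo
"""

manifests = list(KubernetesManifest.from_yaml(content))
print([manifest["kind"] for manifest in manifests])  # ['ConfigMap', 'Service']
print(KubernetesManifest.from_json('{"kind": "Pod"}')["kind"])  # Pod
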
diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py
index 63d88b726..1afb1626b 100644
--- a/kpops/component_handlers/schema_handler/schema_handler.py
+++ b/kpops/component_handlers/schema_handler/schema_handler.py
@@ -17,16 +17,18 @@
from kpops.utils.colorify import greenify, magentaify
if TYPE_CHECKING:
- from kpops.cli.pipeline_config import PipelineConfig
from kpops.components.base_components.models.to_section import ToSection
+ from kpops.config import KpopsConfig
log = logging.getLogger("SchemaHandler")
class SchemaHandler:
- def __init__(self, url: str, components_module: str | None):
- self.schema_registry_client = SchemaRegistryClient(url)
- self.components_module = components_module
+ def __init__(self, kpops_config: KpopsConfig) -> None:
+ self.schema_registry_client = SchemaRegistryClient(
+ str(kpops_config.schema_registry.url)
+ )
+ self.components_module = kpops_config.components_module
@cached_property
def schema_provider(self) -> SchemaProvider:
@@ -41,16 +43,10 @@ def schema_provider(self) -> SchemaProvider:
raise ValueError(msg) from e
@classmethod
- def load_schema_handler(
- cls, components_module: str | None, config: PipelineConfig
- ) -> SchemaHandler | None:
- if not config.schema_registry_url:
- return None
-
- return cls(
- url=config.schema_registry_url,
- components_module=components_module,
- )
+ def load_schema_handler(cls, config: KpopsConfig) -> SchemaHandler | None:
+ if config.schema_registry.enabled:
+ return cls(config)
+ return None
def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None:
for topic_name, config in to_section.topics.items():
diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py
index dae606108..9a08e5512 100644
--- a/kpops/component_handlers/topic/handler.py
+++ b/kpops/component_handlers/topic/handler.py
@@ -129,9 +129,9 @@ def __dry_run_topic_creation(
)
)
log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1")
- log.debug(f"Host: {self.proxy_wrapper.host}")
+ log.debug(f"Host: {self.proxy_wrapper.url}")
log.debug(HEADERS)
- log.debug(topic_spec.dict())
+ log.debug(topic_spec.model_dump())
@staticmethod
def __check_partition_count(
@@ -185,7 +185,7 @@ def __dry_run_topic_deletion(self, topic_name: str) -> None:
log.warning(
f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping."
)
- log.debug(f"Host: {self.proxy_wrapper.host}")
+ log.debug(f"Host: {self.proxy_wrapper.url}")
log.debug(HEADERS)
log.debug("HTTP/1.1 404 Not Found")
log.debug(HEADERS)
@@ -203,7 +203,7 @@ def __prepare_body(cls, topic_name: str, topic_config: TopicConfig) -> TopicSpec
:param topic_config: The topic config
:return: Topic specification
"""
- topic_spec_json: dict = topic_config.dict(
+ topic_spec_json: dict = topic_config.model_dump(
include={
"partitions_count": True,
"replication_factor": True,
diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py
index b58445f81..5c0cf024d 100644
--- a/kpops/component_handlers/topic/model.py
+++ b/kpops/component_handlers/topic/model.py
@@ -1,13 +1,14 @@
from enum import Enum
+from typing import Any
-from pydantic import BaseConfig, BaseModel, Extra
+from pydantic import BaseModel, ConfigDict
class TopicSpec(BaseModel):
topic_name: str
- partitions_count: int | None
- replication_factor: int | None
- configs: list[dict[str, str]] | None
+ partitions_count: int | None = None
+ replication_factor: int | None = None
+ configs: list[dict[str, Any]] | None = None
class TopicResponse(BaseModel):
@@ -43,8 +44,9 @@ class KafkaTopicConfigSynonyms(BaseModel):
value: str
source: KafkaTopicConfigSource
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
class KafkaTopicConfig(BaseModel):
@@ -53,15 +55,17 @@ class KafkaTopicConfig(BaseModel):
value: str
name: str
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
class TopicConfigResponse(BaseModel):
data: list[KafkaTopicConfig]
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
class KafkaBrokerConfigSource(str, Enum):
@@ -75,8 +79,9 @@ class KafkaBrokerConfigSynonyms(BaseModel):
value: str | None
source: KafkaBrokerConfigSource
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
class KafkaBrokerConfig(BaseModel):
@@ -85,12 +90,14 @@ class KafkaBrokerConfig(BaseModel):
value: str | None
name: str
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
class BrokerConfigResponse(BaseModel):
data: list[KafkaBrokerConfig]
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py
index 4edc3633c..aa1db6283 100644
--- a/kpops/component_handlers/topic/proxy_wrapper.py
+++ b/kpops/component_handlers/topic/proxy_wrapper.py
@@ -1,9 +1,11 @@
+from __future__ import annotations
+
import logging
from functools import cached_property
+from typing import TYPE_CHECKING
import httpx
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers.topic.exception import (
KafkaRestProxyError,
TopicNotFoundException,
@@ -15,6 +17,11 @@
TopicSpec,
)
+if TYPE_CHECKING:
+ from pydantic import AnyHttpUrl
+
+ from kpops.config import KafkaRestConfig
+
log = logging.getLogger("KafkaRestProxy")
HEADERS = {"Content-Type": "application/json"}
@@ -23,12 +30,8 @@
class ProxyWrapper:
"""Wraps Kafka REST Proxy APIs."""
- def __init__(self, pipeline_config: PipelineConfig) -> None:
- if not pipeline_config.kafka_rest_host:
- msg = "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST."
- raise ValueError(msg)
-
- self._host = pipeline_config.kafka_rest_host
+ def __init__(self, config: KafkaRestConfig) -> None:
+ self._config: KafkaRestConfig = config
@cached_property
def cluster_id(self) -> str:
@@ -43,7 +46,7 @@ def cluster_id(self) -> str:
:raises KafkaRestProxyError: Kafka REST proxy error
:return: The Kafka cluster ID.
"""
- response = httpx.get(url=f"{self._host}/v3/clusters")
+ response = httpx.get(url=f"{self._config.url!s}v3/clusters")
if response.status_code == httpx.codes.OK:
cluster_information = response.json()
return cluster_information["data"][0]["cluster_id"]
@@ -51,8 +54,8 @@ def cluster_id(self) -> str:
raise KafkaRestProxyError(response)
@property
- def host(self) -> str:
- return self._host
+ def url(self) -> AnyHttpUrl:
+ return self._config.url
def create_topic(self, topic_spec: TopicSpec) -> None:
"""Create a topic.
@@ -64,9 +67,9 @@ def create_topic(self, topic_spec: TopicSpec) -> None:
:raises KafkaRestProxyError: Kafka REST proxy error
"""
response = httpx.post(
- url=f"{self._host}/v3/clusters/{self.cluster_id}/topics",
+ url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics",
headers=HEADERS,
- json=topic_spec.dict(exclude_none=True),
+ json=topic_spec.model_dump(exclude_none=True),
)
if response.status_code == httpx.codes.CREATED:
log.info(f"Topic {topic_spec.topic_name} created.")
@@ -85,7 +88,7 @@ def delete_topic(self, topic_name: str) -> None:
:raises KafkaRestProxyError: Kafka REST proxy error
"""
response = httpx.delete(
- url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}",
+ url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}",
headers=HEADERS,
)
if response.status_code == httpx.codes.NO_CONTENT:
@@ -106,7 +109,7 @@ def get_topic(self, topic_name: str) -> TopicResponse:
:return: Response of the get topic API.
"""
response = httpx.get(
- url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}",
+ url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}",
headers=HEADERS,
)
if response.status_code == httpx.codes.OK:
@@ -136,7 +139,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse:
:return: The topic configuration.
"""
response = httpx.get(
- url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs",
+ url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs",
headers=HEADERS,
)
@@ -166,7 +169,7 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No
:raises KafkaRestProxyError: Kafka REST proxy error
"""
response = httpx.post(
- url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter",
+ url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter",
headers=HEADERS,
json={"data": json_body},
)
@@ -186,7 +189,7 @@ def get_broker_config(self) -> BrokerConfigResponse:
:return: The broker configuration.
"""
response = httpx.get(
- url=f"{self.host}/v3/clusters/{self.cluster_id}/brokers/-/configs",
+ url=f"{self.url!s}v3/clusters/{self.cluster_id}/brokers/-/configs",
headers=HEADERS,
)
diff --git a/kpops/components/__init__.py b/kpops/components/__init__.py
index 77b8c69fb..dc5fcee9c 100644
--- a/kpops/components/__init__.py
+++ b/kpops/components/__init__.py
@@ -1,4 +1,5 @@
from kpops.components.base_components import (
+ HelmApp,
KafkaApp,
KafkaSinkConnector,
KafkaSourceConnector,
@@ -6,14 +7,18 @@
PipelineComponent,
)
from kpops.components.base_components.kafka_connector import KafkaConnector
-from kpops.components.streams_bootstrap import ProducerApp, StreamsApp
+from kpops.components.streams_bootstrap import StreamsBootstrap
+from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp
+from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp
__all__ = (
+ "HelmApp",
"KafkaApp",
"KafkaConnector",
"KafkaSinkConnector",
"KafkaSourceConnector",
"KubernetesApp",
+ "StreamsBootstrap",
"ProducerApp",
"StreamsApp",
"PipelineComponent",
diff --git a/kpops/components/base_components/__init__.py b/kpops/components/base_components/__init__.py
index 37aca3c70..cde4c13bb 100644
--- a/kpops/components/base_components/__init__.py
+++ b/kpops/components/base_components/__init__.py
@@ -1,3 +1,4 @@
+from kpops.components.base_components.helm_app import HelmApp
from kpops.components.base_components.kafka_app import KafkaApp
from kpops.components.base_components.kafka_connector import (
KafkaSinkConnector,
@@ -7,6 +8,7 @@
from kpops.components.base_components.pipeline_component import PipelineComponent
__all__ = (
+ "HelmApp",
"KafkaApp",
"KafkaSinkConnector",
"KafkaSourceConnector",
diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py
index d9100bd25..37c5851c9 100644
--- a/kpops/components/base_components/base_defaults_component.py
+++ b/kpops/components/base_components/base_defaults_component.py
@@ -1,22 +1,31 @@
import inspect
import logging
+from abc import ABC
from collections import deque
from collections.abc import Sequence
+from dataclasses import asdict, is_dataclass
from functools import cached_property
from pathlib import Path
-from typing import TypeVar
+from typing import Any, TypeVar
+import pydantic
import typer
-from pydantic import BaseModel, Field
+from pydantic import (
+ AliasChoices,
+ ConfigDict,
+ Field,
+ computed_field,
+)
+from pydantic.json_schema import SkipJsonSchema
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers import ComponentHandlers
+from kpops.config import KpopsConfig
from kpops.utils import cached_classproperty
-from kpops.utils.dict_ops import update_nested
+from kpops.utils.dict_ops import update_nested, update_nested_pair
from kpops.utils.docstring import describe_attr
from kpops.utils.environment import ENV
-from kpops.utils.pydantic import DescConfig, to_dash
-from kpops.utils.yaml_loading import load_yaml_file
+from kpops.utils.pydantic import DescConfigModel, to_dash
+from kpops.utils.yaml import load_yaml_file
try:
from typing import Self
@@ -26,7 +35,7 @@
log = logging.getLogger("BaseDefaultsComponent")
-class BaseDefaultsComponent(BaseModel):
+class BaseDefaultsComponent(DescConfigModel, ABC):
"""Base for all components, handles defaults.
Component defaults are usually provided in a yaml file called
@@ -34,41 +43,38 @@ class BaseDefaultsComponent(BaseModel):
correctly to the component.
:param enrich: Whether to enrich component with defaults, defaults to False
- :param config: Pipeline configuration to be accessed by this component
+ :param config: KPOps configuration to be accessed by this component
:param handlers: Component handlers to be accessed by this component
:param validate: Whether to run custom validation on the component, defaults to True
"""
- enrich: bool = Field(
+ model_config = ConfigDict(
+ arbitrary_types_allowed=True,
+ ignored_types=(cached_property, cached_classproperty),
+ )
+
+ enrich: SkipJsonSchema[bool] = Field(
default=False,
description=describe_attr("enrich", __doc__),
exclude=True,
- hidden_from_schema=True,
)
- config: PipelineConfig = Field(
+ config: SkipJsonSchema[KpopsConfig] = Field(
default=...,
description=describe_attr("config", __doc__),
exclude=True,
- hidden_from_schema=True,
)
- handlers: ComponentHandlers = Field(
+ handlers: SkipJsonSchema[ComponentHandlers] = Field(
default=...,
description=describe_attr("handlers", __doc__),
exclude=True,
- hidden_from_schema=True,
)
- validate_: bool = Field(
- alias="validate",
+ validate_: SkipJsonSchema[bool] = Field(
+ validation_alias=AliasChoices("validate", "validate_"),
default=True,
description=describe_attr("validate", __doc__),
exclude=True,
- hidden_from_schema=True,
)
- class Config(DescConfig):
- arbitrary_types_allowed = True
- keep_untouched = (cached_property, cached_classproperty)
-
def __init__(self, **kwargs) -> None:
if kwargs.get("enrich", True):
kwargs = self.extend_with_defaults(**kwargs)
@@ -76,6 +82,7 @@ def __init__(self, **kwargs) -> None:
if kwargs.get("validate", True):
self._validate_custom(**kwargs)
+ @computed_field
@cached_classproperty
def type(cls: type[Self]) -> str: # pyright: ignore[reportGeneralTypeIssues]
"""Return calling component's type.
@@ -84,28 +91,35 @@ def type(cls: type[Self]) -> str: # pyright: ignore[reportGeneralTypeIssues]
"""
return to_dash(cls.__name__)
- def extend_with_defaults(self, **kwargs) -> dict:
+ @classmethod
+ def extend_with_defaults(cls, **kwargs: Any) -> dict[str, Any]:
"""Merge parent components' defaults with own.
:param kwargs: The init kwargs for pydantic
:returns: Enriched kwargs with inherited defaults
"""
- config: PipelineConfig = kwargs["config"]
+ config = kwargs["config"]
+ assert isinstance(config, KpopsConfig)
+
+ for k, v in kwargs.items():
+ if isinstance(v, pydantic.BaseModel):
+ kwargs[k] = v.model_dump(exclude_unset=True)
+ elif is_dataclass(v):
+ kwargs[k] = asdict(v)
+
log.debug(
typer.style(
"Enriching component of type ", fg=typer.colors.GREEN, bold=False
)
- + typer.style(
- kwargs.get("type"), fg=typer.colors.GREEN, bold=True, underline=True
- )
+ + typer.style(cls.type, fg=typer.colors.GREEN, bold=True, underline=True)
)
main_default_file_path, environment_default_file_path = get_defaults_file_paths(
- config
+ config, ENV.get("environment")
)
defaults = load_defaults(
- self.__class__, main_default_file_path, environment_default_file_path
+ cls, main_default_file_path, environment_default_file_path
)
- return update_nested(kwargs, defaults)
+ return update_nested_pair(kwargs, defaults)
def _validate_custom(self, **kwargs) -> None:
"""Run custom validation on component.
@@ -176,14 +190,17 @@ def defaults_from_yaml(path: Path, key: str) -> dict:
return value
-def get_defaults_file_paths(config: PipelineConfig) -> tuple[Path, Path]:
+def get_defaults_file_paths(
+ config: KpopsConfig, environment: str | None
+) -> tuple[Path, Path]:
"""Return the paths to the main and the environment defaults-files.
The files need not exist, this function will only check if the dir set in
`config.defaults_path` exists and return paths to the defaults files
calculated from it. It is up to the caller to handle any false paths.
- :param config: Pipeline configuration
+ :param config: KPOps configuration
+ :param environment: Environment
:returns: The defaults files paths
"""
defaults_dir = Path(config.defaults_path).resolve()
@@ -191,17 +208,20 @@ def get_defaults_file_paths(config: PipelineConfig) -> tuple[Path, Path]:
config.defaults_filename_prefix
).with_suffix(".yaml")
- environment_default_file_path = defaults_dir / Path(
- f"{config.defaults_filename_prefix}_{config.environment}"
- ).with_suffix(".yaml")
+ environment_default_file_path = (
+ defaults_dir
+ / Path(f"{config.defaults_filename_prefix}_{environment}").with_suffix(".yaml")
+ if environment is not None
+ else main_default_file_path
+ )
return main_default_file_path, environment_default_file_path
-T = TypeVar("T")
+_T = TypeVar("_T")
-def deduplicate(seq: Sequence[T]) -> list[T]:
+def deduplicate(seq: Sequence[_T]) -> list[_T]:
"""Deduplicate items of a sequence while preserving its order.
:param seq: Sequence to be 'cleaned'
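
Note: get_defaults_file_paths now receives the environment explicitly (taken from ENV) and falls back to the main defaults file when no environment is set. A standalone sketch of the resulting path arithmetic, with assumed values for the defaults directory, filename prefix and environment:

from pathlib import Path

# Assumed values; in KPOps they come from KpopsConfig and the environment option.
defaults_path = Path("defaults")
prefix = "defaults"
environment = "development"  # or None

defaults_dir = defaults_path.resolve()
main_default_file_path = defaults_dir / Path(prefix).with_suffix(".yaml")
environment_default_file_path = (
    defaults_dir / Path(f"{prefix}_{environment}").with_suffix(".yaml")
    if environment is not None
    else main_default_file_path
)
print(main_default_file_path.name, environment_default_file_path.name)
# defaults.yaml defaults_development.yaml
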
diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py
new file mode 100644
index 000000000..b8978c5af
--- /dev/null
+++ b/kpops/components/base_components/helm_app.py
@@ -0,0 +1,199 @@
+from __future__ import annotations
+
+import logging
+from functools import cached_property
+from typing import Any
+
+from pydantic import Field, SerializationInfo, model_serializer
+from typing_extensions import override
+
+from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler
+from kpops.component_handlers.helm_wrapper.helm import Helm
+from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff
+from kpops.component_handlers.helm_wrapper.model import (
+ HelmFlags,
+ HelmRepoConfig,
+ HelmTemplateFlags,
+ HelmUpgradeInstallFlags,
+)
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
+from kpops.components.base_components.kubernetes_app import (
+ KubernetesApp,
+ KubernetesAppValues,
+)
+from kpops.components.base_components.models.resource import Resource
+from kpops.utils.colorify import magentaify
+from kpops.utils.docstring import describe_attr
+from kpops.utils.pydantic import exclude_by_name
+
+log = logging.getLogger("HelmApp")
+
+
+class HelmAppValues(KubernetesAppValues):
+ """Helm app values.
+
+ :param name_override: Override name with this value
+ """
+
+ name_override: str | None = Field(
+ default=None,
+ title="Nameoverride",
+ description=describe_attr("name_override", __doc__),
+ )
+
+ # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer`
+ # BEWARE! All default values are enforced, hard to replicate without
+ # access to ``model_dump``
+ @override
+ def model_dump(self, **_) -> dict[str, Any]:
+ return super().model_dump(
+ by_alias=True, exclude_none=True, exclude_defaults=True
+ )
+
+
+class HelmApp(KubernetesApp):
+ """Kubernetes app managed through Helm with an associated Helm chart.
+
+ :param repo_config: Configuration of the Helm chart repo to be used for
+ deploying the component, defaults to None. If None, `helm repo add` is not called and Helm
+ expects a path to a local Helm chart.
+ :param version: Helm chart version, defaults to None
+ :param app: Helm app values
+ """
+
+ repo_config: HelmRepoConfig | None = Field(
+ default=None,
+ description=describe_attr("repo_config", __doc__),
+ )
+ version: str | None = Field(
+ default=None,
+ description=describe_attr("version", __doc__),
+ )
+ app: HelmAppValues = Field(
+ default=...,
+ description=describe_attr("app", __doc__),
+ )
+
+ @cached_property
+ def helm(self) -> Helm:
+ """Helm object that contains component-specific config such as repo."""
+ helm = Helm(self.config.helm_config)
+ if self.repo_config is not None:
+ helm.add_repo(
+ self.repo_config.repository_name,
+ self.repo_config.url,
+ self.repo_config.repo_auth_flags,
+ )
+ return helm
+
+ @cached_property
+ def helm_diff(self) -> HelmDiff:
+ """Helm diff object of last and current release of this component."""
+ return HelmDiff(self.config.helm_diff_config)
+
+ @cached_property
+ def dry_run_handler(self) -> DryRunHandler:
+ helm_diff = HelmDiff(self.config.helm_diff_config)
+ return DryRunHandler(self.helm, helm_diff, self.namespace)
+
+ @property
+ def helm_release_name(self) -> str:
+ """The name for the Helm release. Can be overridden."""
+ return create_helm_release_name(self.full_name)
+
+ @property
+ def helm_chart(self) -> str:
+ """Return component's Helm chart."""
+ msg = (
+ f"Please implement the helm_chart property of the {self.__module__} module."
+ )
+ raise NotImplementedError(msg)
+
+ @property
+ def helm_flags(self) -> HelmFlags:
+ """Return shared flags for Helm commands."""
+ auth_flags = (
+ self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {}
+ )
+ return HelmFlags(
+ **auth_flags,
+ version=self.version,
+ create_namespace=self.config.create_namespace,
+ )
+
+ @property
+ def template_flags(self) -> HelmTemplateFlags:
+ """Return flags for Helm template command."""
+ return HelmTemplateFlags(
+ **self.helm_flags.model_dump(),
+ api_version=self.config.helm_config.api_version,
+ )
+
+ @override
+ def manifest(self) -> Resource:
+ return self.helm.template(
+ self.helm_release_name,
+ self.helm_chart,
+ self.namespace,
+ self.to_helm_values(),
+ self.template_flags,
+ )
+
+ @property
+ def deploy_flags(self) -> HelmUpgradeInstallFlags:
+ """Return flags for Helm upgrade install command."""
+ return HelmUpgradeInstallFlags(**self.helm_flags.model_dump())
+
+ @override
+ def deploy(self, dry_run: bool) -> None:
+ stdout = self.helm.upgrade_install(
+ self.helm_release_name,
+ self.helm_chart,
+ dry_run,
+ self.namespace,
+ self.to_helm_values(),
+ self.deploy_flags,
+ )
+ if dry_run:
+ self.dry_run_handler.print_helm_diff(stdout, self.helm_release_name, log)
+
+ @override
+ def destroy(self, dry_run: bool) -> None:
+ stdout = self.helm.uninstall(
+ self.namespace,
+ self.helm_release_name,
+ dry_run,
+ )
+
+ if stdout:
+ log.info(magentaify(stdout))
+
+ def to_helm_values(self) -> dict:
+ """Generate a dictionary of values readable by Helm from `self.app`.
+
+ :returns: The values to be used by Helm
+ """
+ if self.app.name_override is None:
+ self.app.name_override = self.full_name
+ return self.app.model_dump()
+
+ def print_helm_diff(self, stdout: str) -> None:
+ """Print the diff of the last and current release of this component.
+
+ :param stdout: The output of a Helm command that installs or upgrades the release
+ """
+ current_release = list(
+ self.helm.get_manifest(self.helm_release_name, self.namespace)
+ )
+ if current_release:
+ log.info(f"Helm release {self.helm_release_name} already exists")
+ else:
+ log.info(f"Helm release {self.helm_release_name} does not exist")
+ new_release = Helm.load_manifest(stdout)
+ self.helm_diff.log_helm_diff(log, current_release, new_release)
+
+ # HACK: workaround for Pydantic to exclude cached properties during model export
+ # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution
+ @model_serializer(mode="wrap", when_used="always")
+ def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]:
+ return exclude_by_name(handler(self), "helm", "helm_diff")
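
Note: concrete components hook into the new HelmApp base class mainly by overriding helm_chart (and, where needed, helm_release_name or helm_flags). A sketch of a hypothetical subclass, assuming KPOps from this changeset is installed; the chart name is made up, and repo_config, version, namespace and app would be supplied through the pipeline definition and defaults as for any other component:

from kpops.components.base_components.helm_app import HelmApp

class DemoHelmApp(HelmApp):  # hypothetical component, not part of KPOps
    @property
    def helm_chart(self) -> str:
        # the chart name below is an assumption for illustration only
        assert self.repo_config is not None, "set repo_config in defaults.yaml"
        return f"{self.repo_config.repository_name}/demo-chart"
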
diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py
index a13dc7a7d..7ee67b09c 100644
--- a/kpops/components/base_components/kafka_app.py
+++ b/kpops/components/base_components/kafka_app.py
@@ -3,25 +3,23 @@
import logging
from abc import ABC
-from pydantic import BaseModel, Extra, Field
+from pydantic import ConfigDict, Field
from typing_extensions import override
from kpops.component_handlers.helm_wrapper.model import (
- HelmRepoConfig,
- HelmUpgradeInstallFlags,
-)
-from kpops.component_handlers.helm_wrapper.utils import trim_release_name
-from kpops.components.base_components.kubernetes_app import (
- KubernetesApp,
- KubernetesAppConfig,
+ HelmFlags,
)
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
+from kpops.components.base_components.helm_app import HelmAppValues
+from kpops.components.base_components.pipeline_component import PipelineComponent
+from kpops.components.streams_bootstrap import StreamsBootstrap
from kpops.utils.docstring import describe_attr
-from kpops.utils.pydantic import CamelCaseConfig, DescConfig
+from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel
log = logging.getLogger("KafkaApp")
-class KafkaStreamsConfig(BaseModel):
+class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel):
"""Kafka Streams config.
:param brokers: Brokers
@@ -33,57 +31,75 @@ class KafkaStreamsConfig(BaseModel):
default=None, description=describe_attr("schema_registry_url", __doc__)
)
- class Config(CamelCaseConfig, DescConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
-class KafkaAppConfig(KubernetesAppConfig):
+class KafkaAppValues(HelmAppValues):
"""Settings specific to Kafka Apps.
:param streams: Kafka streams config
- :param name_override: Override name with this value, defaults to None
"""
streams: KafkaStreamsConfig = Field(
default=..., description=describe_attr("streams", __doc__)
)
- name_override: str | None = Field(
- default=None, description=describe_attr("name_override", __doc__)
- )
-class KafkaApp(KubernetesApp, ABC):
+class KafkaAppCleaner(StreamsBootstrap):
+ """Helm app for resetting and cleaning a streams-bootstrap app."""
+
+ @property
+ @override
+ def helm_chart(self) -> str:
+ raise NotImplementedError
+
+ @property
+ @override
+ def helm_release_name(self) -> str:
+ suffix = "-clean"
+ return create_helm_release_name(self.full_name + suffix, suffix)
+
+ @property
+ @override
+ def helm_flags(self) -> HelmFlags:
+ return HelmFlags(
+ create_namespace=self.config.create_namespace,
+ version=self.version,
+ wait=True,
+ wait_for_jobs=True,
+ )
+
+ @override
+ def clean(self, dry_run: bool) -> None:
+ """Clean an app using a cleanup job.
+
+ :param dry_run: Dry run command
+ """
+ log.info(f"Uninstall old cleanup job for {self.helm_release_name}")
+ self.destroy(dry_run)
+
+ log.info(f"Init cleanup job for {self.helm_release_name}")
+ self.deploy(dry_run)
+
+ if not self.config.retain_clean_jobs:
+ log.info(f"Uninstall cleanup job for {self.helm_release_name}")
+ self.destroy(dry_run)
+
+
+class KafkaApp(PipelineComponent, ABC):
"""Base component for Kafka-based components.
Producer or streaming apps should inherit from this class.
:param app: Application-specific settings
- :param repo_config: Configuration of the Helm chart repo to be used for
- deploying the component,
- defaults to HelmRepoConfig(repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/")
- :param version: Helm chart version, defaults to "2.9.0"
"""
- app: KafkaAppConfig = Field(
+ app: KafkaAppValues = Field(
default=...,
description=describe_attr("app", __doc__),
)
- repo_config: HelmRepoConfig = Field(
- default=HelmRepoConfig(
- repository_name="bakdata-streams-bootstrap",
- url="https://bakdata.github.io/streams-bootstrap/",
- ),
- description=describe_attr("repo_config", __doc__),
- )
- version: str | None = Field(
- default="2.9.0",
- description=describe_attr("version", __doc__),
- )
-
- @property
- def clean_up_helm_chart(self) -> str:
- """Helm chart used to destroy and clean this component."""
- raise NotImplementedError
@override
def deploy(self, dry_run: bool) -> None:
@@ -97,75 +113,3 @@ def deploy(self, dry_run: bool) -> None:
to_section=self.to, dry_run=dry_run
)
super().deploy(dry_run)
-
- def _run_clean_up_job(
- self,
- values: dict,
- dry_run: bool,
- retain_clean_jobs: bool = False,
- ) -> None:
- """Clean an app using the respective cleanup job.
-
- :param values: The value YAML for the chart
- :param dry_run: Dry run command
- :param retain_clean_jobs: Whether to retain the cleanup job, defaults to False
- :return:
- """
- suffix = "-clean"
- clean_up_release_name = trim_release_name(
- self.helm_release_name + suffix, suffix
- )
- log.info(f"Uninstall old cleanup job for {clean_up_release_name}")
-
- self.__uninstall_clean_up_job(clean_up_release_name, dry_run)
-
- log.info(f"Init cleanup job for {clean_up_release_name}")
-
- stdout = self.__install_clean_up_job(
- clean_up_release_name, suffix, values, dry_run
- )
-
- if dry_run:
- self.dry_run_handler.print_helm_diff(stdout, clean_up_release_name, log)
-
- if not retain_clean_jobs:
- log.info(f"Uninstall cleanup job for {clean_up_release_name}")
- self.__uninstall_clean_up_job(clean_up_release_name, dry_run)
-
- def __uninstall_clean_up_job(self, release_name: str, dry_run: bool) -> None:
- """Uninstall clean up job.
-
- :param release_name: Name of the Helm release
- :param dry_run: Whether to do a dry run of the command
- """
- self.helm.uninstall(self.namespace, release_name, dry_run)
-
- def __install_clean_up_job(
- self,
- release_name: str,
- suffix: str,
- values: dict,
- dry_run: bool,
- ) -> str:
- """Install clean up job.
-
- :param release_name: Name of the Helm release
- :param suffix: Suffix to add to the release name, e.g. "-clean"
- :param values: The Helm values for the chart
- :param dry_run: Whether to do a dry run of the command
- :return: Install clean up job with helm, return the output of the installation
- """
- clean_up_release_name = trim_release_name(release_name, suffix)
- return self.helm.upgrade_install(
- clean_up_release_name,
- self.clean_up_helm_chart,
- dry_run,
- self.namespace,
- values,
- HelmUpgradeInstallFlags(
- create_namespace=self.config.create_namespace,
- version=self.version,
- wait=True,
- wait_for_jobs=True,
- ),
- )
diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py
index fa6e83f1c..38f490458 100644
--- a/kpops/components/base_components/kafka_connector.py
+++ b/kpops/components/base_components/kafka_connector.py
@@ -5,26 +5,22 @@
from functools import cached_property
from typing import Any, NoReturn
-from pydantic import Field, validator
+from pydantic import Field, PrivateAttr, ValidationInfo, computed_field, field_validator
from typing_extensions import override
-from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler
-from kpops.component_handlers.helm_wrapper.helm import Helm
-from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff
from kpops.component_handlers.helm_wrapper.model import (
HelmFlags,
HelmRepoConfig,
- HelmTemplateFlags,
- HelmUpgradeInstallFlags,
)
-from kpops.component_handlers.helm_wrapper.utils import trim_release_name
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
from kpops.component_handlers.kafka_connect.model import (
KafkaConnectorConfig,
+ KafkaConnectorResetterConfig,
+ KafkaConnectorResetterValues,
KafkaConnectorType,
- KafkaConnectResetterConfig,
- KafkaConnectResetterValues,
)
from kpops.components.base_components.base_defaults_component import deduplicate
+from kpops.components.base_components.helm_app import HelmApp, HelmAppValues
from kpops.components.base_components.models.from_section import FromTopic
from kpops.components.base_components.pipeline_component import PipelineComponent
from kpops.utils.colorify import magentaify
@@ -33,105 +29,146 @@
log = logging.getLogger("KafkaConnector")
-class KafkaConnector(PipelineComponent, ABC):
- """Base class for all Kafka connectors.
+class KafkaConnectorResetter(HelmApp):
+ """Helm app for resetting and cleaning a Kafka Connector.
- Should only be used to set defaults
-
- :param app: Application-specific settings
:param repo_config: Configuration of the Helm chart repo to be used for
- deploying the component,
- defaults to HelmRepoConfig(repository_name="bakdata-kafka-connect-resetter", url="https://bakdata.github.io/kafka-connect-resetter/")
- :param namespace: Namespace in which the component shall be deployed
+ deploying the component, defaults to kafka-connect-resetter Helm repo
:param version: Helm chart version, defaults to "1.0.4"
- :param resetter_values: Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc.,
- defaults to dict
- :param _connector_type: Defines the type of the connector (Source or Sink)
"""
- namespace: str = Field(
- default=...,
- description=describe_attr("namespace", __doc__),
- )
- app: KafkaConnectorConfig = Field(
- default=...,
- description=describe_attr("app", __doc__),
- )
+ app: KafkaConnectorResetterValues
repo_config: HelmRepoConfig = Field(
default=HelmRepoConfig(
repository_name="bakdata-kafka-connect-resetter",
url="https://bakdata.github.io/kafka-connect-resetter/",
- ),
- description=describe_attr("repo_config", __doc__),
+ )
)
version: str | None = Field(
default="1.0.4", description=describe_attr("version", __doc__)
)
- resetter_values: dict = Field(
- default_factory=dict,
+ suffix: str = "-clean"
+
+ @property
+ @override
+ def full_name(self) -> str:
+ return super().full_name + self.suffix
+
+ @property
+ @override
+ def helm_chart(self) -> str:
+ return f"{self.repo_config.repository_name}/kafka-connect-resetter"
+
+ @property
+ @override
+ def helm_release_name(self) -> str:
+ return create_helm_release_name(self.full_name, self.suffix)
+
+ @property
+ @override
+ def helm_flags(self) -> HelmFlags:
+ return HelmFlags(
+ create_namespace=self.config.create_namespace,
+ version=self.version,
+ wait_for_jobs=True,
+ wait=True,
+ )
+
+ @override
+ def reset(self, dry_run: bool) -> None:
+ """Reset connector.
+
+ At first, it deletes the previous cleanup job (connector resetter)
+ to make sure that no clean job is running in the cluster. Then it deploys a new cleanup job.
+ If the retain_clean_jobs config option is set to false, the cleanup job is deleted afterwards.
+
+ :param dry_run: If the cleanup should be run in dry run mode or not
+ """
+ log.info(
+ magentaify(
+ f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.app.config.connector}"
+ )
+ )
+ self.destroy(dry_run)
+
+ log.info(
+ magentaify(
+ f"Connector Cleanup: deploy Connect {self.app.connector_type} resetter for {self.app.config.connector}"
+ )
+ )
+ self.deploy(dry_run)
+
+ if not self.config.retain_clean_jobs:
+ log.info(magentaify("Connector Cleanup: uninstall Kafka Resetter."))
+ self.destroy(dry_run)
+
+ @override
+ def clean(self, dry_run: bool) -> None:
+ self.reset(dry_run)
+
+
+class KafkaConnector(PipelineComponent, ABC):
+ """Base class for all Kafka connectors.
+
+ Should only be used to set defaults
+
+ :param app: Application-specific settings
+ :param resetter_namespace: Kubernetes namespace in which the Kafka Connect resetter shall be deployed
+ :param resetter_values: Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc.,
+ defaults to empty HelmAppValues
+ """
+
+ app: KafkaConnectorConfig = Field(
+ default=...,
+ description=describe_attr("app", __doc__),
+ )
+ resetter_namespace: str | None = Field(
+ default=None, description=describe_attr("resetter_namespace", __doc__)
+ )
+ resetter_values: HelmAppValues = Field(
+ default_factory=HelmAppValues,
description=describe_attr("resetter_values", __doc__),
)
+ _connector_type: KafkaConnectorType = PrivateAttr()
- _connector_type: KafkaConnectorType = Field(default=..., hidden_from_schema=True)
-
- @validator("app", pre=True)
+ @field_validator("app", mode="before")
+ @classmethod
def connector_config_should_have_component_name(
cls,
app: KafkaConnectorConfig | dict[str, str],
- values: dict[str, Any],
- ) -> dict[str, str]:
+ info: ValidationInfo,
+ ) -> KafkaConnectorConfig:
if isinstance(app, KafkaConnectorConfig):
- app = app.dict()
- component_name = values["prefix"] + values["name"]
+ app = app.model_dump()
+ component_name: str = info.data["prefix"] + info.data["name"]
connector_name: str | None = app.get("name")
if connector_name is not None and connector_name != component_name:
msg = f"Connector name '{connector_name}' should be the same as component name '{component_name}'"
raise ValueError(msg)
app["name"] = component_name
- return app
+ return KafkaConnectorConfig(**app)
+ @computed_field
@cached_property
- def helm(self) -> Helm:
- """Helm object that contains component-specific config such as repo."""
- helm_repo_config = self.repo_config
- helm = Helm(self.config.helm_config)
- helm.add_repo(
- helm_repo_config.repository_name,
- helm_repo_config.url,
- helm_repo_config.repo_auth_flags,
- )
- return helm
-
- @property
- def _resetter_release_name(self) -> str:
- suffix = "-clean"
- clean_up_release_name = self.full_name + suffix
- return trim_release_name(clean_up_release_name, suffix)
-
- @property
- def _resetter_helm_chart(self) -> str:
- return f"{self.repo_config.repository_name}/kafka-connect-resetter"
-
- @cached_property
- def dry_run_handler(self) -> DryRunHandler:
- helm_diff = HelmDiff(self.config.helm_diff_config)
- return DryRunHandler(self.helm, helm_diff, self.namespace)
-
- @property
- def helm_flags(self) -> HelmFlags:
- """Return shared flags for Helm commands."""
- return HelmFlags(
- **self.repo_config.repo_auth_flags.dict(),
- version=self.version,
- create_namespace=self.config.create_namespace,
- )
-
- @property
- def template_flags(self) -> HelmTemplateFlags:
- """Return flags for Helm template command."""
- return HelmTemplateFlags(
- **self.helm_flags.dict(),
- api_version=self.config.helm_config.api_version,
+ def _resetter(self) -> KafkaConnectorResetter:
+ kwargs: dict[str, Any] = {}
+ if self.resetter_namespace:
+ kwargs["namespace"] = self.resetter_namespace
+ return KafkaConnectorResetter(
+ config=self.config,
+ handlers=self.handlers,
+ **kwargs,
+ **self.model_dump(
+ exclude={"_resetter", "resetter_values", "resetter_namespace", "app"}
+ ),
+ app=KafkaConnectorResetterValues(
+ connector_type=self._connector_type.value,
+ config=KafkaConnectorResetterConfig(
+ connector=self.full_name,
+ brokers=self.config.kafka_brokers,
+ ),
+ **self.resetter_values.model_dump(),
+ ),
)
@override
@@ -163,105 +200,6 @@ def clean(self, dry_run: bool) -> None:
)
self.handlers.topic_handler.delete_topics(self.to, dry_run=dry_run)
- def _run_connect_resetter(
- self,
- dry_run: bool,
- retain_clean_jobs: bool,
- **kwargs,
- ) -> None:
- """Clean the connector from the cluster.
-
- At first, it deletes the previous cleanup job (connector resetter)
- to make sure that there is no running clean job in the cluster. Then it releases a cleanup job.
- If the retain_clean_jobs flag is set to false the cleanup job will be deleted.
-
- :param dry_run: If the cleanup should be run in dry run mode or not
- :param retain_clean_jobs: If the cleanup job should be kept
- :param kwargs: Other values for the KafkaConnectResetter
- """
- log.info(
- magentaify(
- f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.full_name}"
- )
- )
- self.__uninstall_connect_resetter(self._resetter_release_name, dry_run)
-
- log.info(
- magentaify(
- f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}"
- )
- )
-
- stdout = self.__install_connect_resetter(dry_run, **kwargs)
-
- if dry_run:
- self.dry_run_handler.print_helm_diff(
- stdout, self._resetter_release_name, log
- )
-
- if not retain_clean_jobs:
- log.info(magentaify("Connector Cleanup: uninstall Kafka Resetter."))
- self.__uninstall_connect_resetter(self._resetter_release_name, dry_run)
-
- def __install_connect_resetter(
- self,
- dry_run: bool,
- **kwargs,
- ) -> str:
- """Install connector resetter.
-
- :param dry_run: Whether to dry run the command
- :return: The output of `helm upgrade --install`
- """
- return self.helm.upgrade_install(
- release_name=self._resetter_release_name,
- namespace=self.namespace,
- chart=self._resetter_helm_chart,
- dry_run=dry_run,
- flags=HelmUpgradeInstallFlags(
- create_namespace=self.config.create_namespace,
- version=self.version,
- wait_for_jobs=True,
- wait=True,
- ),
- values=self._get_kafka_connect_resetter_values(
- **kwargs,
- ),
- )
-
- def _get_kafka_connect_resetter_values(
- self,
- **kwargs,
- ) -> dict:
- """Get connector resetter helm chart values.
-
- :return: The Helm chart values of the connector resetter
- """
- return {
- **KafkaConnectResetterValues(
- config=KafkaConnectResetterConfig(
- connector=self.full_name,
- brokers=self.config.brokers,
- **kwargs,
- ),
- connector_type=self._connector_type.value,
- name_override=self.full_name,
- ).dict(),
- **self.resetter_values,
- }
-
- def __uninstall_connect_resetter(self, release_name: str, dry_run: bool) -> None:
- """Uninstall connector resetter.
-
- :param release_name: Name of the release to be uninstalled
- :param dry_run: Whether to do a dry run of the command
- """
- self.helm.uninstall(
- namespace=self.namespace,
- release_name=release_name,
- dry_run=dry_run,
- )
-
class KafkaSourceConnector(KafkaConnector):
"""Kafka source connector model.
@@ -276,52 +214,29 @@ class KafkaSourceConnector(KafkaConnector):
description=describe_attr("offset_topic", __doc__),
)
- _connector_type = KafkaConnectorType.SOURCE
+ _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SOURCE)
@override
def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn:
msg = "Kafka source connector doesn't support FromSection"
raise NotImplementedError(msg)
- @override
- def template(self) -> None:
- values = self._get_kafka_connect_resetter_values(
- offset_topic=self.offset_topic,
- )
- stdout = self.helm.template(
- self._resetter_release_name,
- self._resetter_helm_chart,
- self.namespace,
- values,
- self.template_flags,
- )
- print(stdout)
-
@override
def reset(self, dry_run: bool) -> None:
- self.__run_kafka_connect_resetter(dry_run)
+ self._resetter.app.config.offset_topic = self.offset_topic
+ self._resetter.reset(dry_run)
@override
def clean(self, dry_run: bool) -> None:
super().clean(dry_run)
- self.__run_kafka_connect_resetter(dry_run)
-
- def __run_kafka_connect_resetter(self, dry_run: bool) -> None:
- """Run the connector resetter.
-
- :param dry_run: Whether to do a dry run of the command
- """
- self._run_connect_resetter(
- dry_run=dry_run,
- retain_clean_jobs=self.config.retain_clean_jobs,
- offset_topic=self.offset_topic,
- )
+ self._resetter.app.config.offset_topic = self.offset_topic
+ self._resetter.clean(dry_run)
class KafkaSinkConnector(KafkaConnector):
"""Kafka sink connector model."""
- _connector_type = KafkaConnectorType.SINK
+ _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SINK)
@override
def add_input_topics(self, topics: list[str]) -> None:
@@ -330,18 +245,6 @@ def add_input_topics(self, topics: list[str]) -> None:
topics = deduplicate(topics)
setattr(self.app, "topics", ",".join(topics))
- @override
- def template(self) -> None:
- values = self._get_kafka_connect_resetter_values()
- stdout = self.helm.template(
- self._resetter_release_name,
- self._resetter_helm_chart,
- self.namespace,
- values,
- self.template_flags,
- )
- print(stdout)
-
@override
def set_input_pattern(self, name: str) -> None:
setattr(self.app, "topics.regex", name)
@@ -352,23 +255,11 @@ def set_error_topic(self, topic_name: str) -> None:
@override
def reset(self, dry_run: bool) -> None:
- self.__run_kafka_connect_resetter(dry_run, delete_consumer_group=False)
+ self._resetter.app.config.delete_consumer_group = False
+ self._resetter.reset(dry_run)
@override
def clean(self, dry_run: bool) -> None:
super().clean(dry_run)
- self.__run_kafka_connect_resetter(dry_run, delete_consumer_group=True)
-
- def __run_kafka_connect_resetter(
- self, dry_run: bool, delete_consumer_group: bool
- ) -> None:
- """Run the connector resetter.
-
- :param dry_run: Whether to do a dry run of the command
- :param delete_consumer_group: Whether the consumer group should be deleted or not
- """
- self._run_connect_resetter(
- dry_run=dry_run,
- retain_clean_jobs=self.config.retain_clean_jobs,
- delete_consumer_group=delete_consumer_group,
- )
+ self._resetter.app.config.delete_consumer_group = True
+ self._resetter.clean(dry_run)
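# Illustrative, self-contained sketch (not part of this patch) of the Pydantic v2
# `@field_validator(..., mode="before")` pattern adopted above. `ExampleComponent`
# is a made-up stand-in, not a KPOps class; it only shows how `ValidationInfo.data`
# replaces Pydantic v1's `values` dict for reading previously validated fields.
from pydantic import BaseModel, ValidationInfo, field_validator


class ExampleComponent(BaseModel):
    prefix: str = "pipeline-"
    name: str
    connector_name: str | None = None

    @field_validator("connector_name", mode="before")
    @classmethod
    def default_connector_name(cls, value: str | None, info: ValidationInfo) -> str:
        # fields validate in declaration order, so prefix and name are already present
        return value or info.data["prefix"] + info.data["name"]


# ExampleComponent(name="sink").connector_name == "pipeline-sink"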
diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py
index ff35459c3..5f3c3e67d 100644
--- a/kpops/components/base_components/kubernetes_app.py
+++ b/kpops/components/base_components/kubernetes_app.py
@@ -2,184 +2,47 @@
import logging
import re
-from functools import cached_property
-from typing import Any
+from abc import ABC
-from pydantic import BaseModel, Extra, Field
+from pydantic import ConfigDict, Field
from typing_extensions import override
-from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler
-from kpops.component_handlers.helm_wrapper.helm import Helm
-from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff
-from kpops.component_handlers.helm_wrapper.model import (
- HelmFlags,
- HelmRepoConfig,
- HelmTemplateFlags,
- HelmUpgradeInstallFlags,
-)
from kpops.components.base_components.pipeline_component import PipelineComponent
-from kpops.utils.colorify import magentaify
from kpops.utils.docstring import describe_attr
-from kpops.utils.pydantic import CamelCaseConfig, DescConfig
+from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel
-log = logging.getLogger("KubernetesAppComponent")
+log = logging.getLogger("KubernetesApp")
KUBERNETES_NAME_CHECK_PATTERN = re.compile(
r"^(?![0-9]+$)(?!.*-$)(?!-)[a-z0-9-.]{1,253}(? Helm:
- """Helm object that contains component-specific config such as repo."""
- helm = Helm(self.config.helm_config)
- if self.repo_config is not None:
- helm.add_repo(
- self.repo_config.repository_name,
- self.repo_config.url,
- self.repo_config.repo_auth_flags,
- )
- return helm
-
- @cached_property
- def helm_diff(self) -> HelmDiff:
- """Helm diff object of last and current release of this component."""
- return HelmDiff(self.config.helm_diff_config)
-
- @cached_property
- def dry_run_handler(self) -> DryRunHandler:
- helm_diff = HelmDiff(self.config.helm_diff_config)
- return DryRunHandler(self.helm, helm_diff, self.namespace)
-
- @property
- def helm_release_name(self) -> str:
- """The name for the Helm release. Can be overridden."""
- return self.full_name
-
- @property
- def helm_chart(self) -> str:
- """Return component's Helm chart."""
- msg = (
- f"Please implement the helm_chart property of the {self.__module__} module."
- )
- raise NotImplementedError(msg)
-
- @property
- def helm_flags(self) -> HelmFlags:
- """Return shared flags for Helm commands."""
- auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {}
- return HelmFlags(
- **auth_flags,
- version=self.version,
- create_namespace=self.config.create_namespace,
- )
-
- @property
- def template_flags(self) -> HelmTemplateFlags:
- """Return flags for Helm template command."""
- return HelmTemplateFlags(
- **self.helm_flags.dict(),
- api_version=self.config.helm_config.api_version,
- )
-
- @override
- def template(self) -> None:
- stdout = self.helm.template(
- self.helm_release_name,
- self.helm_chart,
- self.namespace,
- self.to_helm_values(),
- self.template_flags,
- )
- print(stdout)
-
- @property
- def deploy_flags(self) -> HelmUpgradeInstallFlags:
- """Return flags for Helm upgrade install command."""
- return HelmUpgradeInstallFlags(**self.helm_flags.dict())
-
- @override
- def deploy(self, dry_run: bool) -> None:
- stdout = self.helm.upgrade_install(
- self.helm_release_name,
- self.helm_chart,
- dry_run,
- self.namespace,
- self.to_helm_values(),
- self.deploy_flags,
- )
- if dry_run:
- self.dry_run_handler.print_helm_diff(stdout, self.helm_release_name, log)
-
- @override
- def destroy(self, dry_run: bool) -> None:
- stdout = self.helm.uninstall(
- self.namespace,
- self.helm_release_name,
- dry_run,
- )
-
- if stdout:
- log.info(magentaify(stdout))
-
- def to_helm_values(self) -> dict:
- """Generate a dictionary of values readable by Helm from `self.app`.
-
- :returns: Thte values to be used by Helm
- """
- return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True)
-
- def print_helm_diff(self, stdout: str) -> None:
- """Print the diff of the last and current release of this component.
-
- :param stdout: The output of a Helm command that installs or upgrades the release
- """
- current_release = list(
- self.helm.get_manifest(self.helm_release_name, self.namespace)
- )
- if current_release:
- log.info(f"Helm release {self.helm_release_name} already exists")
- else:
- log.info(f"Helm release {self.helm_release_name} does not exist")
- new_release = Helm.load_manifest(stdout)
- self.helm_diff.log_helm_diff(log, current_release, new_release)
@override
def _validate_custom(self, **kwargs) -> None:
@@ -196,12 +59,3 @@ def validate_kubernetes_name(name: str) -> None:
if not bool(KUBERNETES_NAME_CHECK_PATTERN.match(name)):
msg = f"The component name {name} is invalid for Kubernetes."
raise ValueError(msg)
-
- @override
- def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]:
- # HACK: workaround for Pydantic to exclude cached properties during model export
- if exclude is None:
- exclude = set()
- exclude.add("helm")
- exclude.add("helm_diff")
- return super().dict(exclude=exclude, **kwargs)
diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py
index 153133639..5f1dae193 100644
--- a/kpops/components/base_components/models/from_section.py
+++ b/kpops/components/base_components/models/from_section.py
@@ -1,11 +1,11 @@
from enum import Enum
from typing import Any, NewType
-from pydantic import BaseModel, Extra, Field, root_validator
+from pydantic import ConfigDict, Field, model_validator
from kpops.components.base_components.models import TopicName
from kpops.utils.docstring import describe_attr
-from kpops.utils.pydantic import DescConfig
+from kpops.utils.pydantic import DescConfigModel
class InputTopicTypes(str, Enum):
@@ -18,7 +18,7 @@ class InputTopicTypes(str, Enum):
PATTERN = "pattern"
-class FromTopic(BaseModel):
+class FromTopic(DescConfigModel):
"""Input topic.
:param type: Topic type, defaults to None
@@ -31,23 +31,24 @@ class FromTopic(BaseModel):
)
role: str | None = Field(default=None, description=describe_attr("role", __doc__))
- class Config(DescConfig):
- extra = Extra.forbid
- use_enum_values = True
+ model_config = ConfigDict(
+ extra="forbid",
+ use_enum_values=True,
+ )
- @root_validator
- def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]:
+ @model_validator(mode="after")
+ def extra_topic_role(self) -> Any:
"""Ensure that cls.role is used correctly, assign type if needed."""
- if values["type"] == InputTopicTypes.INPUT and values["role"]:
+ if self.type == InputTopicTypes.INPUT and self.role:
msg = "Define role only if `type` is `pattern` or `None`"
raise ValueError(msg)
- return values
+ return self
ComponentName = NewType("ComponentName", str)
-class FromSection(BaseModel):
+class FromSection(DescConfigModel):
"""Holds multiple input topics.
:param topics: Input topics
@@ -63,5 +64,6 @@ class FromSection(BaseModel):
description=describe_attr("components", __doc__),
)
- class Config(DescConfig):
- extra = Extra.forbid
+ model_config = ConfigDict(
+ extra="forbid",
+ )
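# Minimal sketch (not part of this patch) of the `@model_validator(mode="after")`
# style used above: the validator now runs on the constructed instance (`self`)
# instead of a raw `values` dict. `ExampleTopic` is a made-up stand-in for a KPOps
# topic model.
from pydantic import BaseModel, ConfigDict, model_validator


class ExampleTopic(BaseModel):
    model_config = ConfigDict(extra="forbid")

    type: str | None = None
    role: str | None = None

    @model_validator(mode="after")
    def check_role(self) -> "ExampleTopic":
        # mirrors the rule above: a role must not accompany an explicit input type
        if self.type == "input" and self.role:
            msg = "Define role only if `type` is `pattern` or `None`"
            raise ValueError(msg)
        return self


# ExampleTopic(type="pattern", role="account-producer") is valid;
# ExampleTopic(type="input", role="account-producer") raises a ValidationError.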
diff --git a/kpops/components/base_components/models/resource.py b/kpops/components/base_components/models/resource.py
new file mode 100644
index 000000000..08c01f344
--- /dev/null
+++ b/kpops/components/base_components/models/resource.py
@@ -0,0 +1,5 @@
+from collections.abc import Mapping, Sequence
+from typing import Any, TypeAlias
+
+# representation of final resource for component, e.g. a list of Kubernetes manifests
+Resource: TypeAlias = Sequence[Mapping[str, Any]]
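# Tiny illustration (not part of this patch) of the `Resource` alias introduced
# above: any sequence of mapping-like manifests satisfies it, e.g. a single
# Kubernetes Deployment stub.
from collections.abc import Mapping, Sequence
from typing import Any, TypeAlias

Resource: TypeAlias = Sequence[Mapping[str, Any]]

deployment_stub: Resource = [
    {"apiVersion": "apps/v1", "kind": "Deployment", "metadata": {"name": "demo"}}
]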
diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py
index 03f1d7141..56da461c8 100644
--- a/kpops/components/base_components/models/to_section.py
+++ b/kpops/components/base_components/models/to_section.py
@@ -1,11 +1,11 @@
from enum import Enum
from typing import Any
-from pydantic import BaseModel, Extra, Field, root_validator
+from pydantic import ConfigDict, Field, model_validator
from kpops.components.base_components.models import ModelName, ModelVersion, TopicName
from kpops.utils.docstring import describe_attr
-from kpops.utils.pydantic import DescConfig
+from kpops.utils.pydantic import DescConfigModel
class OutputTopicTypes(str, Enum):
@@ -18,7 +18,7 @@ class OutputTopicTypes(str, Enum):
ERROR = "error"
-class TopicConfig(BaseModel):
+class TopicConfig(DescConfigModel):
"""Configure an output topic.
:param type: Topic type
@@ -58,21 +58,22 @@ class TopicConfig(BaseModel):
)
role: str | None = Field(default=None, description=describe_attr("role", __doc__))
- class Config(DescConfig):
- extra = Extra.forbid
- allow_population_by_field_name = True
- use_enum_values = True
+ model_config = ConfigDict(
+ extra="forbid",
+ use_enum_values=True,
+ populate_by_name=True,
+ )
- @root_validator
- def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]:
+ @model_validator(mode="after")
+ def extra_topic_role(self) -> Any:
"""Ensure that cls.role is used correctly, assign type if needed."""
- if values["type"] and values["role"]:
+ if self.type and self.role:
msg = "Define `role` only if `type` is undefined"
raise ValueError(msg)
- return values
+ return self
-class ToSection(BaseModel):
+class ToSection(DescConfigModel):
"""Holds multiple output topics.
:param topics: Output topics
@@ -86,5 +87,6 @@ class ToSection(BaseModel):
default={}, description=describe_attr("models", __doc__)
)
- class Config(DescConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="forbid",
+ )
diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py
index d05d4d4c1..4b09b35de 100644
--- a/kpops/components/base_components/pipeline_component.py
+++ b/kpops/components/base_components/pipeline_component.py
@@ -2,7 +2,7 @@
from abc import ABC
-from pydantic import Extra, Field
+from pydantic import AliasChoices, ConfigDict, Field
from kpops.components.base_components.base_defaults_component import (
BaseDefaultsComponent,
@@ -12,13 +12,20 @@
FromTopic,
InputTopicTypes,
)
+from kpops.components.base_components.models.resource import Resource
from kpops.components.base_components.models.to_section import (
OutputTopicTypes,
TopicConfig,
ToSection,
)
+from kpops.utils import cached_classproperty
from kpops.utils.docstring import describe_attr
-from kpops.utils.pydantic import DescConfig
+from kpops.utils.pydantic import issubclass_patched
+
+try:
+ from typing import Self
+except ImportError:
+ from typing_extensions import Self
class PipelineComponent(BaseDefaultsComponent, ABC):
@@ -27,7 +34,7 @@ class PipelineComponent(BaseDefaultsComponent, ABC):
:param name: Component name
:param prefix: Pipeline prefix that will prefix every component name.
If you wish to not have any prefix you can specify an empty string.,
- defaults to "${pipeline_name}-"
+ defaults to "${pipeline.name}-"
:param from_: Topic(s) and/or components from which the component will read
input, defaults to None
:param to: Topic(s) into which the component will write output,
@@ -36,12 +43,13 @@ class PipelineComponent(BaseDefaultsComponent, ABC):
name: str = Field(default=..., description=describe_attr("name", __doc__))
prefix: str = Field(
- default="${pipeline_name}-",
+ default="${pipeline.name}-",
description=describe_attr("prefix", __doc__),
)
from_: FromSection | None = Field(
default=None,
- alias="from",
+ serialization_alias="from",
+ validation_alias=AliasChoices("from", "from_"),
title="From",
description=describe_attr("from_", __doc__),
)
@@ -50,8 +58,9 @@ class PipelineComponent(BaseDefaultsComponent, ABC):
description=describe_attr("to", __doc__),
)
- class Config(DescConfig):
- extra = Extra.allow
+ model_config = ConfigDict(
+ extra="allow",
+ )
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
@@ -62,6 +71,22 @@ def __init__(self, **kwargs) -> None:
def full_name(self) -> str:
return self.prefix + self.name
+ @cached_classproperty
+ def parents(cls: type[Self]) -> tuple[type[PipelineComponent], ...]: # pyright: ignore[reportGeneralTypeIssues]
+ """Get parent components.
+
+ :return: All ancestor KPOps components
+ """
+
+ def gen_parents():
+ for base in cls.mro():
+ # skip class itself and non-component ancestors
+ if base is cls or not issubclass_patched(base, PipelineComponent):
+ continue
+ yield base
+
+ return tuple(gen_parents())
+
def add_input_topics(self, topics: list[str]) -> None:
"""Add given topics to the list of input topics.
@@ -177,7 +202,7 @@ def weave_from_topics(
self.apply_from_inputs(input_topic, from_topic)
def inflate(self) -> list[PipelineComponent]:
- """Inflate a component.
+ """Inflate component.
This is helpful if one component should result in multiple components.
To support this, override this method and return a list of components
@@ -186,35 +211,30 @@ def inflate(self) -> list[PipelineComponent]:
"""
return [self]
- def template(self) -> None:
- """Run `helm template`.
-
- From HELM: Render chart templates locally and display the output.
- Any values that would normally be looked up or retrieved in-cluster will
- be faked locally. Additionally, none of the server-side testing of chart
- validity (e.g. whether an API is supported) is done.
- """
+ def manifest(self) -> Resource:
+ """Render final component resources, e.g. Kubernetes manifests."""
+ return []
def deploy(self, dry_run: bool) -> None:
- """Deploy the component (self) to the k8s cluster.
+ """Deploy component, e.g. to Kubernetes cluster.
:param dry_run: Whether to do a dry run of the command
"""
def destroy(self, dry_run: bool) -> None:
- """Uninstall the component (self) from the k8s cluster.
+ """Uninstall component, e.g. from Kubernetes cluster.
:param dry_run: Whether to do a dry run of the command
"""
def reset(self, dry_run: bool) -> None:
- """Reset component (self) state.
+ """Reset component state.
:param dry_run: Whether to do a dry run of the command
"""
def clean(self, dry_run: bool) -> None:
- """Remove component (self) and any trace of it.
+ """Destroy component including related states.
:param dry_run: Whether to do a dry run of the command
"""
diff --git a/kpops/components/streams_bootstrap/__init__.py b/kpops/components/streams_bootstrap/__init__.py
index 097d85b13..1b02b091b 100644
--- a/kpops/components/streams_bootstrap/__init__.py
+++ b/kpops/components/streams_bootstrap/__init__.py
@@ -1,7 +1,31 @@
-from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp
-from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp
+from abc import ABC
-__all__ = [
- "ProducerApp",
- "StreamsApp",
-]
+from pydantic import Field
+
+from kpops.component_handlers.helm_wrapper.model import HelmRepoConfig
+from kpops.components.base_components.helm_app import HelmApp
+from kpops.utils.docstring import describe_attr
+
+STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig(
+ repository_name="bakdata-streams-bootstrap",
+ url="https://bakdata.github.io/streams-bootstrap/",
+)
+STREAMS_BOOTSTRAP_VERSION = "2.9.0"
+
+
+class StreamsBootstrap(HelmApp, ABC):
+ """Base for components with a streams-bootstrap Helm chart.
+
+ :param repo_config: Configuration of the Helm chart repo to be used for
+ deploying the component, defaults to streams-bootstrap Helm repo
+ :param version: Helm chart version, defaults to "2.9.0"
+ """
+
+ repo_config: HelmRepoConfig = Field(
+ default=STREAMS_BOOTSTRAP_HELM_REPO,
+ description=describe_attr("repo_config", __doc__),
+ )
+ version: str | None = Field(
+ default=STREAMS_BOOTSTRAP_VERSION,
+ description=describe_attr("version", __doc__),
+ )
diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py
index 8af1a68c6..53db5af67 100644
--- a/kpops/components/streams_bootstrap/producer/model.py
+++ b/kpops/components/streams_bootstrap/producer/model.py
@@ -1,7 +1,7 @@
-from pydantic import BaseConfig, Extra, Field
+from pydantic import ConfigDict, Field
from kpops.components.base_components.kafka_app import (
- KafkaAppConfig,
+ KafkaAppValues,
KafkaStreamsConfig,
)
from kpops.utils.docstring import describe_attr
@@ -22,7 +22,7 @@ class ProducerStreamsConfig(KafkaStreamsConfig):
)
-class ProducerValues(KafkaAppConfig):
+class ProducerAppValues(KafkaAppValues):
"""Settings specific to producers.
:param streams: Kafka Streams settings
@@ -32,5 +32,4 @@ class ProducerValues(KafkaAppConfig):
default=..., description=describe_attr("streams", __doc__)
)
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(extra="allow")
diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py
index 6091cdd77..2d6a586b2 100644
--- a/kpops/components/streams_bootstrap/producer/producer_app.py
+++ b/kpops/components/streams_bootstrap/producer/producer_app.py
@@ -1,23 +1,38 @@
-# from __future__ import annotations
+from functools import cached_property
from pydantic import Field
from typing_extensions import override
-from kpops.components.base_components.kafka_app import KafkaApp
+from kpops.components.base_components.kafka_app import (
+ KafkaApp,
+ KafkaAppCleaner,
+)
from kpops.components.base_components.models.to_section import (
OutputTopicTypes,
TopicConfig,
)
+from kpops.components.streams_bootstrap import StreamsBootstrap
from kpops.components.streams_bootstrap.app_type import AppType
-from kpops.components.streams_bootstrap.producer.model import ProducerValues
+from kpops.components.streams_bootstrap.producer.model import ProducerAppValues
from kpops.utils.docstring import describe_attr
-class ProducerApp(KafkaApp):
+class ProducerAppCleaner(KafkaAppCleaner):
+ app: ProducerAppValues
+
+ @property
+ @override
+ def helm_chart(self) -> str:
+ return (
+ f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}"
+ )
+
+
+class ProducerApp(KafkaApp, StreamsBootstrap):
"""Producer component.
- This producer holds configuration to use as values for the streams bootstrap
- producer helm chart.
+ This producer holds configuration to use as values for the streams-bootstrap
+ producer Helm chart.
Note that the producer does not support error topics.
@@ -25,7 +40,7 @@ class ProducerApp(KafkaApp):
:param from_: Producer doesn't support FromSection, defaults to None
"""
- app: ProducerValues = Field(
+ app: ProducerAppValues = Field(
default=...,
description=describe_attr("app", __doc__),
)
@@ -36,6 +51,14 @@ class ProducerApp(KafkaApp):
description=describe_attr("from_", __doc__),
)
+ @cached_property
+ def _cleaner(self) -> ProducerAppCleaner:
+ return ProducerAppCleaner(
+ config=self.config,
+ handlers=self.handlers,
+ **self.model_dump(),
+ )
+
@override
def apply_to_outputs(self, name: str, topic: TopicConfig) -> None:
match topic.type:
@@ -58,17 +81,6 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None:
def helm_chart(self) -> str:
return f"{self.repo_config.repository_name}/{AppType.PRODUCER_APP.value}"
- @property
- @override
- def clean_up_helm_chart(self) -> str:
- return (
- f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}"
- )
-
@override
def clean(self, dry_run: bool) -> None:
- self._run_clean_up_job(
- values=self.to_helm_values(),
- dry_run=dry_run,
- retain_clean_jobs=self.config.retain_clean_jobs,
- )
+ self._cleaner.clean(dry_run)
diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py
index ca2db77ae..95100b966 100644
--- a/kpops/components/streams_bootstrap/streams/model.py
+++ b/kpops/components/streams_bootstrap/streams/model.py
@@ -1,16 +1,20 @@
-from collections.abc import Mapping, Set
+from collections.abc import Callable
from typing import Any
-from pydantic import BaseConfig, BaseModel, Extra, Field
-from typing_extensions import override
+from pydantic import ConfigDict, Field, SerializationInfo, model_serializer
from kpops.components.base_components.base_defaults_component import deduplicate
from kpops.components.base_components.kafka_app import (
- KafkaAppConfig,
+ KafkaAppValues,
KafkaStreamsConfig,
)
from kpops.utils.docstring import describe_attr
-from kpops.utils.pydantic import CamelCaseConfig, DescConfig
+from kpops.utils.pydantic import (
+ CamelCaseConfigModel,
+ DescConfigModel,
+ exclude_by_value,
+ exclude_defaults,
+)
class StreamsConfig(KafkaStreamsConfig):
@@ -24,6 +28,7 @@ class StreamsConfig(KafkaStreamsConfig):
:param output_topic: Output topic, defaults to None
:param error_topic: Error topic, defaults to None
:param config: Configuration, defaults to {}
+ :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to None
"""
input_topics: list[str] = Field(
@@ -47,9 +52,12 @@ class StreamsConfig(KafkaStreamsConfig):
error_topic: str | None = Field(
default=None, description=describe_attr("error_topic", __doc__)
)
- config: dict[str, str] = Field(
+ config: dict[str, Any] = Field(
default={}, description=describe_attr("config", __doc__)
)
+ delete_output: bool | None = Field(
+ default=None, description=describe_attr("delete_output", __doc__)
+ )
def add_input_topics(self, topics: list[str]) -> None:
"""Add given topics to the list of input topics.
@@ -72,40 +80,15 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None:
self.extra_input_topics.get(role, []) + topics
)
- @override
- def dict(
- self,
- *,
- include: None | Set[int | str] | Mapping[int | str, Any] = None,
- exclude: None | Set[int | str] | Mapping[int | str, Any] = None,
- by_alias: bool = False,
- skip_defaults: bool | None = None,
- exclude_unset: bool = False,
- **kwargs,
- ) -> dict:
- """Generate a dictionary representation of the model.
-
- Optionally, specify which fields to include or exclude.
-
- :param include: Fields to include
- :param include: Fields to exclude
- :param by_alias: Use the fields' aliases in the dictionary
- :param skip_defaults: Whether to skip defaults
- :param exclude_unset: Whether to exclude unset fields
- """
- return super().dict(
- include=include,
- exclude=exclude,
- by_alias=by_alias,
- skip_defaults=skip_defaults,
- exclude_unset=exclude_unset,
- # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values
- exclude_defaults=True,
- exclude_none=True,
- )
+ # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution
+ @model_serializer(mode="wrap", when_used="always")
+ def serialize_model(
+ self, handler: Callable, info: SerializationInfo
+ ) -> dict[str, Any]:
+ return exclude_defaults(self, exclude_by_value(handler(self), None))
-class StreamsAppAutoScaling(BaseModel):
+class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel):
"""Kubernetes Event-driven Autoscaling config.
:param enabled: Whether to enable auto-scaling using KEDA., defaults to False
@@ -184,18 +167,16 @@ class StreamsAppAutoScaling(BaseModel):
default=[],
description=describe_attr("topics", __doc__),
)
-
- class Config(CamelCaseConfig, DescConfig):
- extra = Extra.allow
+ model_config = ConfigDict(extra="allow")
-class StreamsAppConfig(KafkaAppConfig):
- """StreamsBoostrap app configurations.
+class StreamsAppValues(KafkaAppValues):
+ """streams-bootstrap app configurations.
The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.
- :param streams: Streams Bootstrap streams section
- :param autoscaling: Kubernetes Event-driven Autoscaling config, defaults to None
+ :param streams: streams-bootstrap streams section
+ :param autoscaling: Kubernetes event-driven autoscaling config, defaults to None
"""
streams: StreamsConfig = Field(
@@ -206,6 +187,4 @@ class StreamsAppConfig(KafkaAppConfig):
default=None,
description=describe_attr("autoscaling", __doc__),
)
-
- class Config(BaseConfig):
- extra = Extra.allow
+ model_config = ConfigDict(extra="allow")
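# Self-contained sketch (not part of this patch) of the wrap-mode serializer pattern
# used above. The real code delegates to the KPOps helpers `exclude_defaults` and
# `exclude_by_value`, which are not reproduced here; this stand-in only drops None
# entries so Helm can fall back to the chart's own default values.
from collections.abc import Callable
from typing import Any

from pydantic import BaseModel, SerializationInfo, model_serializer


class ExampleStreamsConfig(BaseModel):
    brokers: str
    output_topic: str | None = None

    @model_serializer(mode="wrap", when_used="always")
    def serialize_model(
        self, handler: Callable, info: SerializationInfo
    ) -> dict[str, Any]:
        return {key: value for key, value in handler(self).items() if value is not None}


# ExampleStreamsConfig(brokers="broker:9092").model_dump() == {"brokers": "broker:9092"}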
diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py
index a466b4eba..2c632e882 100644
--- a/kpops/components/streams_bootstrap/streams/streams_app.py
+++ b/kpops/components/streams_bootstrap/streams/streams_app.py
@@ -1,23 +1,46 @@
+from functools import cached_property
+
from pydantic import Field
from typing_extensions import override
-from kpops.components.base_components.kafka_app import KafkaApp
+from kpops.components.base_components.kafka_app import (
+ KafkaApp,
+ KafkaAppCleaner,
+)
+from kpops.components.streams_bootstrap import StreamsBootstrap
from kpops.components.streams_bootstrap.app_type import AppType
-from kpops.components.streams_bootstrap.streams.model import StreamsAppConfig
+from kpops.components.streams_bootstrap.streams.model import StreamsAppValues
from kpops.utils.docstring import describe_attr
-class StreamsApp(KafkaApp):
- """StreamsApp component that configures a streams bootstrap app.
+class StreamsAppCleaner(KafkaAppCleaner):
+ app: StreamsAppValues
+
+ @property
+ @override
+ def helm_chart(self) -> str:
+ return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}"
+
+
+class StreamsApp(KafkaApp, StreamsBootstrap):
+ """StreamsApp component that configures a streams-bootstrap app.
:param app: Application-specific settings
"""
- app: StreamsAppConfig = Field(
+ app: StreamsAppValues = Field(
default=...,
description=describe_attr("app", __doc__),
)
+ @cached_property
+ def _cleaner(self) -> StreamsAppCleaner:
+ return StreamsAppCleaner(
+ config=self.config,
+ handlers=self.handlers,
+ **self.model_dump(),
+ )
+
@override
def add_input_topics(self, topics: list[str]) -> None:
self.app.streams.add_input_topics(topics)
@@ -51,29 +74,12 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None:
def helm_chart(self) -> str:
return f"{self.repo_config.repository_name}/{AppType.STREAMS_APP.value}"
- @property
- @override
- def clean_up_helm_chart(self) -> str:
- return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}"
-
@override
def reset(self, dry_run: bool) -> None:
- self.__run_streams_clean_up_job(dry_run, delete_output=False)
+ self._cleaner.app.streams.delete_output = False
+ self._cleaner.clean(dry_run)
@override
def clean(self, dry_run: bool) -> None:
- self.__run_streams_clean_up_job(dry_run, delete_output=True)
-
- def __run_streams_clean_up_job(self, dry_run: bool, delete_output: bool) -> None:
- """Run clean job for this Streams app.
-
- :param dry_run: Whether to do a dry run of the command
- :param delete_output: Whether to delete the output of the app that is being cleaned
- """
- values = self.to_helm_values()
- values["streams"]["deleteOutput"] = delete_output
- self._run_clean_up_job(
- values=values,
- dry_run=dry_run,
- retain_clean_jobs=self.config.retain_clean_jobs,
- )
+ self._cleaner.app.streams.delete_output = True
+ self._cleaner.clean(dry_run)
diff --git a/kpops/config.py b/kpops/config.py
new file mode 100644
index 000000000..9bb57e104
--- /dev/null
+++ b/kpops/config.py
@@ -0,0 +1,147 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from pydantic import AnyHttpUrl, Field, TypeAdapter
+from pydantic_settings import (
+ BaseSettings,
+ PydanticBaseSettingsSource,
+ SettingsConfigDict,
+)
+from typing_extensions import override
+
+from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig
+from kpops.utils.docstring import describe_object
+from kpops.utils.pydantic import YamlConfigSettingsSource
+
+ENV_PREFIX = "KPOPS_"
+
+
+class TopicNameConfig(BaseSettings):
+ """Configure the topic name variables you can use in the pipeline definition."""
+
+ default_output_topic_name: str = Field(
+ default="${pipeline.name}-${component.name}",
+ description="Configures the value for the variable ${output_topic_name}",
+ )
+ default_error_topic_name: str = Field(
+ default="${pipeline.name}-${component.name}-error",
+ description="Configures the value for the variable ${error_topic_name}",
+ )
+
+
+class SchemaRegistryConfig(BaseSettings):
+ """Configuration for Schema Registry."""
+
+ enabled: bool = Field(
+ default=False,
+ description="Whether the Schema Registry handler should be initialized.",
+ )
+ url: AnyHttpUrl = Field(
+ default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8081"),
+ description="Address of the Schema Registry.",
+ )
+
+
+class KafkaRestConfig(BaseSettings):
+ """Configuration for Kafka REST Proxy."""
+
+ url: AnyHttpUrl = Field(
+ default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8082"),
+ description="Address of the Kafka REST Proxy.",
+ )
+
+
+class KafkaConnectConfig(BaseSettings):
+ """Configuration for Kafka Connect."""
+
+ url: AnyHttpUrl = Field(
+ default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8083"),
+ description="Address of Kafka Connect.",
+ )
+
+
+class KpopsConfig(BaseSettings):
+ """Global configuration for KPOps project."""
+
+ defaults_path: Path = Field(
+ default=Path(),
+ examples=["defaults", "."],
+ description="The path to the folder containing the defaults.yaml file and the environment defaults files. "
+ "Paths can either be absolute or relative to `config.yaml`",
+ )
+ components_module: str | None = Field(
+ default=None,
+ description="Custom Python module defining project-specific KPOps components",
+ )
+ pipeline_base_dir: Path = Field(
+ default=Path(),
+ description="Base directory to the pipelines (default is current working directory)",
+ )
+ kafka_brokers: str = Field(
+ default=...,
+ examples=[
+ "broker1:9092,broker2:9092,broker3:9092",
+ ],
+ description="The comma separated Kafka brokers address.",
+ )
+ defaults_filename_prefix: str = Field(
+ default="defaults",
+ description="The name of the defaults file and the prefix of the defaults environment file.",
+ )
+ topic_name_config: TopicNameConfig = Field(
+ default=TopicNameConfig(),
+ description=describe_object(TopicNameConfig.__doc__),
+ )
+ schema_registry: SchemaRegistryConfig = Field(
+ default=SchemaRegistryConfig(),
+ description=describe_object(SchemaRegistryConfig.__doc__),
+ )
+ kafka_rest: KafkaRestConfig = Field(
+ default=KafkaRestConfig(),
+ description=describe_object(KafkaRestConfig.__doc__),
+ )
+ kafka_connect: KafkaConnectConfig = Field(
+ default=KafkaConnectConfig(),
+ description=describe_object(KafkaConnectConfig.__doc__),
+ )
+ timeout: int = Field(
+ default=300,
+ description="The timeout in seconds that specifies when actions like deletion or deploy timeout.",
+ )
+ create_namespace: bool = Field(
+ default=False,
+ description="Flag for `helm upgrade --install`. Create the release namespace if not present.",
+ )
+ helm_config: HelmConfig = Field(
+ default=HelmConfig(),
+ description="Global flags for Helm.",
+ )
+ helm_diff_config: HelmDiffConfig = Field(
+ default=HelmDiffConfig(),
+ description="Configure Helm Diff.",
+ )
+ retain_clean_jobs: bool = Field(
+ default=False,
+ description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.",
+ )
+
+ model_config = SettingsConfigDict(env_prefix=ENV_PREFIX, env_nested_delimiter="__")
+
+ @override
+ @classmethod
+ def settings_customise_sources(
+ cls,
+ settings_cls: type[BaseSettings],
+ init_settings: PydanticBaseSettingsSource,
+ env_settings: PydanticBaseSettingsSource,
+ dotenv_settings: PydanticBaseSettingsSource,
+ file_secret_settings: PydanticBaseSettingsSource,
+ ):
+ return (
+ env_settings,
+ init_settings,
+ YamlConfigSettingsSource(settings_cls),
+ dotenv_settings,
+ file_secret_settings,
+ )
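# Minimal sketch (not part of this patch) of how the `env_prefix` and
# `env_nested_delimiter` settings above resolve environment variables.
# `DemoConfig` and `DemoSchemaRegistry` are made-up stand-ins for `KpopsConfig`
# and its nested sections; the mechanics are standard pydantic-settings behaviour.
import os

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class DemoSchemaRegistry(BaseModel):
    url: str = "http://localhost:8081"


class DemoConfig(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__")

    kafka_brokers: str = "localhost:9092"
    schema_registry: DemoSchemaRegistry = DemoSchemaRegistry()


os.environ["KPOPS_KAFKA_BROKERS"] = "broker1:9092,broker2:9092"
os.environ["KPOPS_SCHEMA_REGISTRY__URL"] = "http://schema-registry:8081"

config = DemoConfig()
assert config.kafka_brokers == "broker1:9092,broker2:9092"
assert config.schema_registry.url == "http://schema-registry:8081"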
diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline.py
similarity index 65%
rename from kpops/pipeline_generator/pipeline.py
rename to kpops/pipeline.py
index 920eec202..aff9ca475 100644
--- a/kpops/pipeline_generator/pipeline.py
+++ b/kpops/pipeline.py
@@ -3,26 +3,25 @@
import json
import logging
from collections import Counter
-from contextlib import suppress
+from dataclasses import dataclass, field
from typing import TYPE_CHECKING
import yaml
-from pydantic import BaseModel
-from rich.console import Console
-from rich.syntax import Syntax
+from pydantic import Field, RootModel, SerializeAsAny
from kpops.components.base_components.pipeline_component import PipelineComponent
from kpops.utils.dict_ops import generate_substitution, update_nested_pair
from kpops.utils.environment import ENV
-from kpops.utils.yaml_loading import load_yaml_file, substitute, substitute_nested
+from kpops.utils.types import JsonType
+from kpops.utils.yaml import load_yaml_file, substitute_nested
if TYPE_CHECKING:
from collections.abc import Iterator
from pathlib import Path
- from kpops.cli.pipeline_config import PipelineConfig
from kpops.cli.registry import Registry
from kpops.component_handlers import ComponentHandlers
+ from kpops.config import KpopsConfig
log = logging.getLogger("PipelineGenerator")
@@ -35,50 +34,49 @@ class ValidationError(Exception):
pass
-class PipelineComponents(BaseModel):
- """Stores the pipeline components."""
+class Pipeline(RootModel):
+ """Pipeline representation."""
- components: list[PipelineComponent] = []
+ root: list[SerializeAsAny[PipelineComponent]] = Field(
+ default=[], title="Components"
+ )
@property
def last(self) -> PipelineComponent:
- return self.components[-1]
+ return self.root[-1]
def find(self, component_name: str) -> PipelineComponent:
- for component in self.components:
+ for component in self.root:
if component_name == component.name:
return component
msg = f"Component {component_name} not found"
raise ValueError(msg)
def add(self, component: PipelineComponent) -> None:
- self._populate_component_name(component)
- self.components.append(component)
+ self.root.append(component)
def __bool__(self) -> bool:
- return bool(self.components)
+ return bool(self.root)
def __iter__(self) -> Iterator[PipelineComponent]:
- return iter(self.components)
+ return iter(self.root)
def __len__(self) -> int:
- return len(self.components)
+ return len(self.root)
+
+ def to_yaml(self) -> str:
+ return yaml.dump(self.model_dump(mode="json", by_alias=True, exclude_none=True))
+
+ def validate(self) -> None:
+ self.validate_unique_names()
def validate_unique_names(self) -> None:
- step_names = [component.full_name for component in self.components]
+ step_names = [component.full_name for component in self.root]
duplicates = [name for name, count in Counter(step_names).items() if count > 1]
if duplicates:
msg = f"step names should be unique. duplicate step names: {', '.join(duplicates)}"
raise ValidationError(msg)
- @staticmethod
- def _populate_component_name(component: PipelineComponent) -> None: # TODO: remove
- with suppress(
- AttributeError # Some components like Kafka Connect do not have a name_override attribute
- ):
- if (app := getattr(component, "app")) and app.name_override is None:
- app.name_override = component.full_name
-
def create_env_components_index(
environment_components: list[dict],
@@ -97,69 +95,74 @@ def create_env_components_index(
return index
-class Pipeline:
- def __init__(
+@dataclass
+class PipelineGenerator:
+ config: KpopsConfig
+ registry: Registry
+ handlers: ComponentHandlers
+ pipeline: Pipeline = field(init=False, default_factory=Pipeline)
+
+ def parse(
self,
- component_list: list[dict],
+ components: list[dict],
environment_components: list[dict],
- registry: Registry,
- config: PipelineConfig,
- handlers: ComponentHandlers,
- ) -> None:
- self.components: PipelineComponents = PipelineComponents()
- self.handlers = handlers
- self.config = config
- self.registry = registry
- self.env_components_index = create_env_components_index(environment_components)
- self.parse_components(component_list)
- self.validate()
-
- @classmethod
- def load_from_yaml(
- cls,
- base_dir: Path,
- path: Path,
- registry: Registry,
- config: PipelineConfig,
- handlers: ComponentHandlers,
) -> Pipeline:
+ """Parse pipeline from sequence of component dictionaries.
+
+ :param components: List of components
+ :param environment_components: List of environment-specific components
+ :returns: Initialized pipeline object
+ """
+ self.env_components_index = create_env_components_index(environment_components)
+ self.parse_components(components)
+ self.pipeline.validate()
+ return self.pipeline
+
+ def load_yaml(self, path: Path, environment: str | None) -> Pipeline:
"""Load pipeline definition from yaml.
The file is often named ``pipeline.yaml``
- :param base_dir: Base directory to the pipelines (default is current working directory)
:param path: Path to pipeline definition yaml file
- :param registry: Pipeline components registry
- :param config: Pipeline config
- :param handlers: Component handlers
+ :param environment: Environment name
:raises TypeError: The pipeline definition should contain a list of components
:raises TypeError: The env-specific pipeline definition should contain a list of components
:returns: Initialized pipeline object
"""
- Pipeline.set_pipeline_name_env_vars(base_dir, path)
+ PipelineGenerator.set_pipeline_name_env_vars(
+ self.config.pipeline_base_dir, path
+ )
+ PipelineGenerator.set_environment_name(environment)
main_content = load_yaml_file(path, substitution=ENV)
if not isinstance(main_content, list):
msg = f"The pipeline definition {path} should contain a list of components"
raise TypeError(msg)
env_content = []
- if (env_file := Pipeline.pipeline_filename_environment(path, config)).exists():
+ if (
+ environment
+ and (
+ env_file := PipelineGenerator.pipeline_filename_environment(
+ path, environment
+ )
+ ).exists()
+ ):
env_content = load_yaml_file(env_file, substitution=ENV)
if not isinstance(env_content, list):
msg = f"The pipeline definition {env_file} should contain a list of components"
raise TypeError(msg)
- return cls(main_content, env_content, registry, config, handlers)
+ return self.parse(main_content, env_content)
- def parse_components(self, component_list: list[dict]) -> None:
+ def parse_components(self, components: list[dict]) -> None:
"""Instantiate, enrich and inflate a list of components.
- :param component_list: List of components
+ :param components: List of components
:raises ValueError: Every component must have a type defined
:raises ParsingException: Error enriching component
:raises ParsingException: All undefined exceptions
"""
- for component_data in component_list:
+ for component_data in components:
try:
try:
component_type: str = component_data["type"]
@@ -192,7 +195,6 @@ def apply_component(
**component_data,
)
component = self.enrich_component(component)
-
# inflate & enrich components
for inflated_component in component.inflate(): # TODO: recursively
enriched_component = self.enrich_component(inflated_component)
@@ -202,21 +204,21 @@ def apply_component(
original_from_component_name,
from_topic,
) in enriched_component.from_.components.items():
- original_from_component = self.components.find(
+ original_from_component = self.pipeline.find(
original_from_component_name
)
inflated_from_component = original_from_component.inflate()[-1]
- resolved_from_component = self.components.find(
+ resolved_from_component = self.pipeline.find(
inflated_from_component.name
)
enriched_component.weave_from_topics(
resolved_from_component.to, from_topic
)
- elif self.components:
+ elif self.pipeline:
# read from previous component
- prev_component = self.components.last
+ prev_component = self.pipeline.last
enriched_component.weave_from_topics(prev_component.to)
- self.components.add(enriched_component)
+ self.pipeline.add(enriched_component)
def enrich_component(
self,
@@ -230,11 +232,8 @@ def enrich_component(
component.validate_ = True
env_component_as_dict = update_nested_pair(
self.env_components_index.get(component.name, {}),
- # HACK: Pydantic .dict() doesn't create jsonable dict
- json.loads(component.json(by_alias=True)),
+ component.model_dump(mode="json", by_alias=True),
)
- # HACK: make sure component type is set for inflated components, because property is not serialized by Pydantic
- env_component_as_dict["type"] = component.type
component_data = self.substitute_in_component(env_component_as_dict)
@@ -246,34 +245,6 @@ def enrich_component(
**component_data,
)
- def print_yaml(self, substitution: dict | None = None) -> None:
- """Print the generated pipeline definition.
-
- :param substitution: Substitution dictionary, defaults to None
- """
- syntax = Syntax(
- substitute(str(self), substitution),
- "yaml",
- background_color="default",
- theme="ansi_dark",
- )
- Console(
- width=1000 # HACK: overwrite console width to avoid truncating output
- ).print(syntax)
-
- def __iter__(self) -> Iterator[PipelineComponent]:
- return iter(self.components)
-
- def __str__(self) -> str:
- return yaml.dump(
- json.loads( # HACK: serialize types on Pydantic model export, which are not serialized by .dict(); e.g. pathlib.Path
- self.components.json(exclude_none=True, by_alias=True)
- )
- )
-
- def __len__(self) -> int:
- return len(self.components)
-
def substitute_in_component(self, component_as_dict: dict) -> dict:
"""Substitute all $-placeholders in a component in dict representation.
@@ -283,8 +254,8 @@ def substitute_in_component(self, component_as_dict: dict) -> dict:
config = self.config
# Leftover variables that were previously introduced in the component by the substitution
# functions, still hardcoded, because of their names.
- # TODO: Get rid of them
- substitution_hardcoded = {
+ # TODO(Ivan Yordanov): Get rid of them
+ substitution_hardcoded: dict[str, JsonType] = {
"error_topic_name": config.topic_name_config.default_error_topic_name,
"output_topic_name": config.topic_name_config.default_output_topic_name,
}
@@ -292,9 +263,13 @@ def substitute_in_component(self, component_as_dict: dict) -> dict:
component_as_dict,
"component",
substitution_hardcoded,
+ separator=".",
)
substitution = generate_substitution(
- json.loads(config.json()), existing_substitution=component_substitution
+ config.model_dump(mode="json"),
+ "config",
+ existing_substitution=component_substitution,
+ separator=".",
)
return json.loads(
@@ -304,18 +279,15 @@ def substitute_in_component(self, component_as_dict: dict) -> dict:
)
)
- def validate(self) -> None:
- self.components.validate_unique_names()
-
@staticmethod
- def pipeline_filename_environment(path: Path, config: PipelineConfig) -> Path:
- """Add the environment name from the PipelineConfig to the pipeline.yaml path.
+ def pipeline_filename_environment(pipeline_path: Path, environment: str) -> Path:
+ """Add the environment name from the KpopsConfig to the pipeline.yaml path.
- :param path: Path to pipeline.yaml file
- :param config: The PipelineConfig
+ :param pipeline_path: Path to pipeline.yaml file
+ :param environment: Environment name
        :returns: An absolute path to the pipeline_<environment>.yaml
"""
- return path.with_stem(f"{path.stem}_{config.environment}")
+ return pipeline_path.with_stem(f"{pipeline_path.stem}_{environment}")
@staticmethod
def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None:
@@ -325,9 +297,9 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None:
For example, for a given path ./data/v1/dev/pipeline.yaml the pipeline_name would be
set to data-v1-dev. Then the sub environment variables are set:
- pipeline_name_0 = data
- pipeline_name_1 = v1
- pipeline_name_2 = dev
+ pipeline.name_0 = data
+ pipeline.name_1 = v1
+ pipeline.name_2 = dev
:param base_dir: Base directory to the pipeline files
:param path: Path to pipeline.yaml file
@@ -337,6 +309,18 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None:
msg = "The pipeline-base-dir should not equal the pipeline-path"
raise ValueError(msg)
pipeline_name = "-".join(path_without_file)
- ENV["pipeline_name"] = pipeline_name
+ ENV["pipeline.name"] = pipeline_name
for level, parent in enumerate(path_without_file):
- ENV[f"pipeline_name_{level}"] = parent
+ ENV[f"pipeline.name_{level}"] = parent
+
+ @staticmethod
+ def set_environment_name(environment: str | None) -> None:
+ """Set the environment name.
+
+ It will be used to find environment-specific pipeline definitions,
+ defaults and configs.
+
+ :param environment: Environment name
+ """
+ if environment is not None:
+ ENV["environment"] = environment
diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py
index 934924e21..1c3dbdeb7 100644
--- a/kpops/utils/dict_differ.py
+++ b/kpops/utils/dict_differ.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from collections.abc import Mapping
from dataclasses import dataclass
from difflib import Differ
from enum import Enum
@@ -78,12 +79,12 @@ def __find_changed_key(key_1: list[str] | str, key_2: str = "") -> str:
return f"{key_1}.{key_2}"
-def render_diff(d1: dict, d2: dict, ignore: set[str] | None = None) -> str | None:
+def render_diff(d1: Mapping, d2: Mapping, ignore: set[str] | None = None) -> str | None:
differences = list(diff(d1, d2, ignore=ignore))
if not differences:
return None
- d2_filtered: dict = patch(differences, d1)
+ d2_filtered: Mapping = patch(differences, d1)
return "".join(
colorize_diff(
differ.compare(
@@ -109,5 +110,5 @@ def colorize_line(line: str) -> str:
return line
-def to_yaml(data: dict) -> Sequence[str]:
+def to_yaml(data: Mapping) -> Sequence[str]:
return yaml.dump(data, sort_keys=True).splitlines(keepends=True)
diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py
index 14cc849e3..fa5f73997 100644
--- a/kpops/utils/dict_ops.py
+++ b/kpops/utils/dict_ops.py
@@ -1,8 +1,17 @@
+import re
+from collections import ChainMap as _ChainMap
from collections.abc import Mapping
-from typing import Any
+from string import Template
+from typing import Any, TypeVar
+from typing_extensions import override
-def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict:
+_V = TypeVar("_V", bound=object)
+
+
+def update_nested_pair(
+ original_dict: dict[str, _V], other_dict: Mapping[str, _V]
+) -> dict[str, _V]:
"""Nested update for 2 dictionaries.
Adds all new fields in ``other_dict`` to ``original_dict``.
@@ -25,7 +34,7 @@ def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict:
return original_dict
-def update_nested(*argv: dict) -> dict:
+def update_nested(*argv: dict[str, _V]) -> dict[str, _V]:
"""Merge multiple configuration dicts.
The dicts have multiple layers. These layers will be merged recursively.
@@ -41,13 +50,15 @@ def update_nested(*argv: dict) -> dict:
if len(argv) == 1:
return argv[0]
if len(argv) == 2:
- return update_nested_pair(argv[0], argv[1])
+ return update_nested_pair(*argv)
return update_nested(update_nested_pair(argv[0], argv[1]), *argv[2:])
def flatten_mapping(
- nested_mapping: Mapping[str, Any], prefix: str | None = None, separator: str = "_"
-) -> dict[str, Any]:
+ nested_mapping: Mapping[str, _V],
+ prefix: str | None = None,
+ separator: str = "_",
+) -> dict[str, _V]:
"""Flattens a Mapping.
:param nested_mapping: Nested mapping that is to be flattened
@@ -66,7 +77,7 @@ def flatten_mapping(
if prefix:
key = prefix + separator + key
if isinstance(value, Mapping):
- nested_mapping = flatten_mapping(value, key)
+ nested_mapping = flatten_mapping(value, key, separator)
top = update_nested_pair(top, nested_mapping)
else:
top[key] = value
@@ -74,10 +85,11 @@ def flatten_mapping(
def generate_substitution(
- input: dict,
+ input: dict[str, _V],
prefix: str | None = None,
- existing_substitution: dict | None = None,
-) -> dict:
+ existing_substitution: dict[str, _V] | None = None,
+ separator: str | None = None,
+) -> dict[str, _V]:
"""Generate a complete substitution dict from a given dict.
Finds all attributes that belong to a model and expands them to create
@@ -88,4 +100,44 @@ def generate_substitution(
:param substitution: existing substitution to include
:returns: Substitution dict of all variables related to the model.
"""
- return update_nested(existing_substitution or {}, flatten_mapping(input, prefix))
+ existing_substitution = existing_substitution or {}
+ if separator is None:
+ return update_nested_pair(existing_substitution, flatten_mapping(input, prefix))
+ return update_nested_pair(
+        existing_substitution, flatten_mapping(input, prefix, separator)
+ )
+
+
+_sentinel_dict = {}
+
+
+class ImprovedTemplate(Template):
+ """Introduces the dot as an allowed character in placeholders."""
+
+ idpattern = r"(?a:[_a-z][_.a-z0-9]*)"
+
+ @override
+ def safe_substitute(self, mapping=_sentinel_dict, /, **kws) -> str:
+ if mapping is _sentinel_dict:
+ mapping = kws
+ elif kws:
+ mapping = _ChainMap(kws, mapping)
+
+ # Helper function for .sub()
+ def convert(mo: re.Match):
+ named = mo.group("named") or mo.group("braced")
+ if named is not None:
+ try:
+ if "." not in named:
+ return str(mapping[named])
+ return str(mapping[named.replace(".", "__")])
+ except KeyError:
+ return mo.group()
+ if mo.group("escaped") is not None:
+ return self.delimiter
+ if mo.group("invalid") is not None:
+ return mo.group()
+ msg = "Unrecognized named group in pattern"
+ raise ValueError(msg, self.pattern)
+
+ return self.pattern.sub(convert, self.template)
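Together, ``generate_substitution`` (with the new ``separator`` argument) and ``ImprovedTemplate`` (which admits dots in placeholder names) are what make ``${pipeline.name}``-style variables work: dotted keys are stored flattened with ``__`` and looked up by replacing the dot on access. A small sketch using only the utilities shown above (the sample values are invented):

# Editor's sketch: resolving dotted placeholders via ImprovedTemplate.
from kpops.utils.dict_ops import ImprovedTemplate, generate_substitution

model = {"pipeline": {"name": "word-count"}, "kafka": {"brokers": "localhost:9092"}}

# Flatten with "__" so the keys stay valid identifiers:
# {"pipeline__name": "word-count", "kafka__brokers": "localhost:9092"}
substitution = generate_substitution(model, separator="__")

template = ImprovedTemplate("topic: ${pipeline.name}-output on ${kafka.brokers}")
print(template.safe_substitute(**substitution))
# topic: word-count-output on localhost:9092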
diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py
index 7cad9422d..034787ed9 100644
--- a/kpops/utils/gen_schema.py
+++ b/kpops/utils/gen_schema.py
@@ -1,44 +1,55 @@
import inspect
+import json
import logging
from abc import ABC
from collections.abc import Sequence
from enum import Enum
from typing import Annotated, Any, Literal, Union
-from pydantic import BaseConfig, Field, schema, schema_json_of
-from pydantic.fields import FieldInfo, ModelField
-from pydantic.schema import SkipField
+from pydantic import (
+ BaseModel,
+ Field,
+ RootModel,
+ create_model,
+)
+from pydantic.fields import FieldInfo
+from pydantic.json_schema import GenerateJsonSchema, model_json_schema
+from pydantic_core.core_schema import (
+ DefinitionsSchema,
+ LiteralSchema,
+ ModelField,
+ ModelFieldsSchema,
+)
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.cli.registry import _find_classes
-from kpops.components.base_components.pipeline_component import PipelineComponent
-from kpops.utils.docstring import describe_object
+from kpops.components import (
+ PipelineComponent,
+)
+from kpops.config import KpopsConfig
class SchemaScope(str, Enum):
PIPELINE = "pipeline"
+ DEFAULTS = "defaults"
CONFIG = "config"
-original_field_schema = schema.field_schema
+class MultiComponentGenerateJsonSchema(GenerateJsonSchema):
+ ...
-# adapted from https://github.com/tiangolo/fastapi/issues/1378#issuecomment-764966955
-def field_schema(field: ModelField, **kwargs: Any) -> Any:
- if field.field_info.extra.get("hidden_from_schema"):
- msg = f"{field.name} field is being hidden"
- raise SkipField(msg)
- else:
- return original_field_schema(field, **kwargs)
-
+log = logging.getLogger("")
-schema.field_schema = field_schema
-log = logging.getLogger("")
+def print_schema(model: type[BaseModel]) -> None:
+ schema = model_json_schema(model, by_alias=True)
+ print(json.dumps(schema, indent=4, sort_keys=True))
def _is_valid_component(
- defined_component_types: set[str], component: type[PipelineComponent]
+ defined_component_types: set[str],
+ component: type[PipelineComponent],
+ allow_abstract: bool,
) -> bool:
"""Check whether a PipelineComponent subclass has a valid definition for the schema generation.
@@ -46,7 +57,9 @@ def _is_valid_component(
:param component: component type to be validated
:return: Whether component is valid for schema generation
"""
- if inspect.isabstract(component) or ABC in component.__bases__:
+ if not allow_abstract and (
+ inspect.isabstract(component) or ABC in component.__bases__
+ ):
log.warning(f"SKIPPED {component.__name__}, component is abstract.")
return False
if component.type in defined_component_types:
@@ -57,8 +70,10 @@ def _is_valid_component(
def _add_components(
- components_module: str, components: tuple[type[PipelineComponent]] | None = None
-) -> tuple[type[PipelineComponent]]:
+ components_module: str,
+ allow_abstract: bool,
+ components: tuple[type[PipelineComponent], ...] | None = None,
+) -> tuple[type[PipelineComponent], ...]:
"""Add components to a components tuple.
If an empty tuple is provided or it is not provided at all, the components
@@ -70,18 +85,39 @@ def _add_components(
:return: Extended tuple
"""
if components is None:
- components = tuple() # noqa: C408
+ components = ()
# Set of existing types, against which to check the new ones
defined_component_types = {component.type for component in components}
custom_components = (
component
for component in _find_classes(components_module, PipelineComponent)
- if _is_valid_component(defined_component_types, component)
+ if _is_valid_component(defined_component_types, component, allow_abstract)
)
components += tuple(custom_components)
return components
+def find_components(
+ components_module: str | None,
+ include_stock_components: bool,
+ include_abstract: bool = False,
+) -> tuple[type[PipelineComponent], ...]:
+ if not (include_stock_components or components_module):
+ msg = "No components are provided, no schema is generated."
+ raise RuntimeError(msg)
+ # Add stock components if enabled
+ components: tuple[type[PipelineComponent], ...] = ()
+ if include_stock_components:
+ components = _add_components("kpops.components", include_abstract)
+ # Add custom components if provided
+ if components_module:
+ components = _add_components(components_module, include_abstract, components)
+ if not components:
+ msg = "No valid components found."
+ raise RuntimeError(msg)
+ return components
+
+
def gen_pipeline_schema(
components_module: str | None = None, include_stock_components: bool = True
) -> None:
@@ -92,55 +128,48 @@ def gen_pipeline_schema(
:param include_stock_components: Whether to include the stock components,
defaults to True
"""
- if not (include_stock_components or components_module):
- log.warning("No components are provided, no schema is generated.")
- return
- # Add stock components if enabled
- components: tuple[type[PipelineComponent]] = tuple() # noqa: C408
- if include_stock_components:
- components = _add_components("kpops.components")
- # Add custom components if provided
- if components_module:
- components = _add_components(components_module, components)
- if not components:
- msg = "No valid components found."
- raise RuntimeError(msg)
- # Create a type union that will hold the union of all component types
- PipelineComponents = Union[components] # type: ignore[valid-type]
+ components = find_components(components_module, include_stock_components)
# re-assign component type as Literal to work as discriminator
for component in components:
- component.__fields__["type"] = ModelField(
- name="type",
- type_=Literal[component.type], # type: ignore[reportGeneralTypeIssues]
- required=False,
+ component.model_fields["type"] = FieldInfo(
+ annotation=Literal[component.type], # type:ignore[valid-type]
default=component.type,
- final=True,
- field_info=FieldInfo(
- title="Component type",
- description=describe_object(component.__doc__),
+ )
+ core_schema: DefinitionsSchema = component.__pydantic_core_schema__ # pyright:ignore[reportGeneralTypeIssues]
+ model_schema: ModelFieldsSchema = core_schema["schema"]["schema"] # pyright:ignore[reportGeneralTypeIssues,reportTypedDictNotRequiredAccess]
+ model_schema["fields"]["type"] = ModelField(
+ type="model-field",
+ schema=LiteralSchema(
+ type="literal",
+ expected=[component.type],
),
- model_config=BaseConfig,
- class_validators=None,
)
+ PipelineComponents = Union[components] # type: ignore[valid-type]
AnnotatedPipelineComponents = Annotated[
PipelineComponents, Field(discriminator="type")
]
- schema = schema_json_of(
- Sequence[AnnotatedPipelineComponents],
- title="KPOps pipeline schema",
- by_alias=True,
- indent=4,
- sort_keys=True,
- )
- print(schema)
+ class PipelineSchema(RootModel):
+ root: Sequence[
+ AnnotatedPipelineComponents # pyright:ignore[reportGeneralTypeIssues]
+ ]
+
+ print_schema(PipelineSchema)
+
+
+def gen_defaults_schema(
+ components_module: str | None = None, include_stock_components: bool = True
+) -> None:
+ components = find_components(components_module, include_stock_components, True)
+ components_mapping: dict[str, Any] = {
+ component.type: (component, ...) for component in components
+ }
+ DefaultsSchema = create_model("DefaultsSchema", **components_mapping)
+ print_schema(DefaultsSchema)
def gen_config_schema() -> None:
- """Generate a json schema from the model of pipeline config."""
- schema = schema_json_of(
- PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True
- )
- print(schema)
+ """Generate JSON schema from the model."""
+ print_schema(KpopsConfig)
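Schema generation now leans on pydantic v2: each component's ``type`` is forced to a ``Literal`` so it can act as the discriminator, a ``RootModel`` wraps the component union for the pipeline schema, and ``create_model`` assembles the defaults schema. A condensed, standalone sketch of the discriminated-union part (the two components below are toy stand-ins, not real KPOps components):

# Editor's sketch: a discriminated union of components rendered as a JSON schema.
import json
from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, RootModel


class StreamsApp(BaseModel):
    type: Literal["streams-app"] = "streams-app"
    name: str


class ProducerApp(BaseModel):
    type: Literal["producer-app"] = "producer-app"
    name: str


PipelineComponents = Annotated[Union[StreamsApp, ProducerApp], Field(discriminator="type")]


class PipelineSchema(RootModel):
    root: list[PipelineComponents]


print(json.dumps(PipelineSchema.model_json_schema(by_alias=True), indent=4, sort_keys=True))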
diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py
index 2eb0fa641..10c4b9415 100644
--- a/kpops/utils/pydantic.py
+++ b/kpops/utils/pydantic.py
@@ -1,9 +1,17 @@
+import logging
+from pathlib import Path
from typing import Any
import humps
-from pydantic import BaseConfig, BaseModel
+from pydantic import BaseModel, ConfigDict, Field
+from pydantic.alias_generators import to_snake
+from pydantic.fields import FieldInfo
+from pydantic_settings import PydanticBaseSettingsSource
+from typing_extensions import TypeVar, override
+from kpops.utils.dict_ops import update_nested_pair
from kpops.utils.docstring import describe_object
+from kpops.utils.yaml import load_yaml_file
def to_camel(s: str) -> str:
@@ -21,12 +29,171 @@ def to_dot(s: str) -> str:
return s.replace("_", ".")
-class CamelCaseConfig(BaseConfig):
- alias_generator = to_camel
- allow_population_by_field_name = True
+def by_alias(model: BaseModel, field_name: str) -> str:
+ """Return field alias if exists else field name.
+ :param field_name: Name of the field to get alias of
+ :param model: Model that owns the field
+ """
+ return model.model_fields.get(field_name, Field()).alias or field_name
-class DescConfig(BaseConfig):
- @classmethod
- def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None:
+
+_V = TypeVar("_V")
+
+
+def exclude_by_value(
+ dumped_model: dict[str, _V], *excluded_values: Any
+) -> dict[str, _V]:
+ """Strip all key-value pairs with certain values.
+
+ :param dumped_model: Dumped model
+ :param excluded_values: Excluded field values
+ :return: Dumped model without excluded fields
+ """
+ return {
+ field_name: field_value
+ for field_name, field_value in dumped_model.items()
+ if field_value not in excluded_values
+ }
+
+
+def exclude_by_name(
+ dumped_model: dict[str, _V], *excluded_fields: str
+) -> dict[str, _V]:
+ """Strip all key-value pairs with certain field names.
+
+ :param dumped_model: Dumped model
+ :param excluded_fields: Excluded field names
+ :return: Dumped model without excluded fields
+ """
+ return {
+ field_name: field_value
+ for field_name, field_value in dumped_model.items()
+ if field_name not in excluded_fields
+ }
+
+
+def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> dict[str, _V]:
+ """Strip all key-value pairs with default values.
+
+ :param model: Model
+ :param dumped_model: Dumped model
+ :return: Dumped model without defaults
+ """
+ default_fields = {
+ field_name: field_info.default
+ for field_name, field_info in model.model_fields.items()
+ }
+ return {
+ field_name: field_value
+ for field_name, field_value in dumped_model.items()
+ if field_value
+ not in (
+ default_fields.get(field_name),
+ default_fields.get(to_snake(field_name)),
+ )
+ }
+
+
+def issubclass_patched(
+ __cls: type, __class_or_tuple: type | tuple[type, ...] = BaseModel
+) -> bool:
+ """Pydantic breaks ``issubclass``.
+
+ ``issubclass(set[str], set) # True``
+ ``issubclass(BaseSettings, BaseModel) # True``
+ ``issubclass(set[str], BaseModel) # raises exception``
+
+    :param __cls: Class to check
+    :param __class_or_tuple: Class(es) to check against, defaults to ``BaseModel``
+    :return: Whether ``__cls`` is derived from ``__class_or_tuple`` or is the same class.
+ """
+ try:
+ return issubclass(__cls, __class_or_tuple)
+ except TypeError as e:
+ if str(e) == "issubclass() arg 1 must be a class":
+ return False
+ raise
+
+
+class CamelCaseConfigModel(BaseModel):
+ model_config = ConfigDict(
+ alias_generator=to_camel,
+ populate_by_name=True,
+ )
+
+
+class DescConfigModel(BaseModel):
+ @staticmethod
+ def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None:
schema["description"] = describe_object(model.__doc__)
+
+ model_config = ConfigDict(json_schema_extra=json_schema_extra)
+
+
+class YamlConfigSettingsSource(PydanticBaseSettingsSource):
+ """Loads variables from a YAML file at the project's root."""
+
+ log = logging.getLogger()
+
+ config_dir = Path()
+ config_file_base_name = "config"
+ environment: str | None = None
+
+ def __init__(self, settings_cls) -> None:
+ super().__init__(settings_cls)
+ default_config = self.load_config(
+ self.config_dir / f"{self.config_file_base_name}.yaml"
+ )
+ env_config = (
+ self.load_config(
+ self.config_dir
+ / f"{self.config_file_base_name}_{self.environment}.yaml"
+ )
+ if self.environment
+ else {}
+ )
+ self.config = update_nested_pair(env_config, default_config)
+
+ @staticmethod
+ def load_config(file: Path) -> dict:
+ """Load yaml file if it exists.
+
+ :param file: Path to a ``config*.yaml``
+ :return: Dict containing the config or empty dict if file doesn't exist
+ """
+ if file.exists() and isinstance((loaded_file := load_yaml_file(file)), dict):
+ return loaded_file
+ return {}
+
+ @override
+ def get_field_value(
+ self,
+ field: FieldInfo,
+ field_name: str,
+ ) -> tuple[Any, str, bool]:
+ return self.config.get(field_name), field_name, False
+
+ @override
+ def prepare_field_value(
+ self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool
+ ) -> Any:
+ return value
+
+ @override
+ def __call__(self) -> dict[str, Any]:
+ d: dict[str, Any] = {}
+ for field_name, field in self.settings_cls.model_fields.items():
+ field_value, field_key, value_is_complex = self.get_field_value(
+ field,
+ field_name,
+ )
+ field_value = self.prepare_field_value(
+ field_name,
+ field,
+ field_value,
+ value_is_complex,
+ )
+ if field_value is not None:
+ d[field_key] = field_value
+ return d
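``YamlConfigSettingsSource`` hooks into pydantic-settings' source mechanism; a settings class opts in by overriding ``settings_customise_sources`` and placing the source at the desired precedence. A minimal registration sketch (``ExampleSettings`` is illustrative, not the actual ``KpopsConfig``):

# Editor's sketch: wiring the YAML source into a pydantic-settings class.
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource

from kpops.utils.pydantic import YamlConfigSettingsSource


class ExampleSettings(BaseSettings):
    kafka_brokers: str = "http://127.0.0.1:9092"

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        # Explicit kwargs win, then environment variables, then config*.yaml.
        return init_settings, env_settings, YamlConfigSettingsSource(settings_cls)


print(ExampleSettings().kafka_brokers)  # value from config.yaml if present, else the default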
diff --git a/kpops/utils/types.py b/kpops/utils/types.py
new file mode 100644
index 000000000..d41225e35
--- /dev/null
+++ b/kpops/utils/types.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+from collections.abc import Mapping, Sequence
+from typing import TypeAlias
+
+# JSON values
+JsonType: TypeAlias = (
+ Mapping[str, "JsonType"] | Sequence["JsonType"] | str | int | float | bool | None
+)
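``JsonType`` is a recursive alias covering everything ``json.loads`` can return; it is meant for annotating JSON-shaped values without falling back to ``Any``. A brief sketch:

# Editor's sketch: annotating JSON-shaped data with JsonType.
import json

from kpops.utils.types import JsonType


def count_keys(value: JsonType) -> int:
    """Count mapping keys at every nesting level of a JSON-like value."""
    if isinstance(value, dict):
        return len(value) + sum(count_keys(v) for v in value.values())
    if isinstance(value, list):
        return sum(count_keys(v) for v in value)
    return 0


print(count_keys(json.loads('{"a": {"b": 1}, "c": [{"d": 2}]}')))  # 4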
diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml.py
similarity index 70%
rename from kpops/utils/yaml_loading.py
rename to kpops/utils/yaml.py
index fb810c193..be554cf6c 100644
--- a/kpops/utils/yaml_loading.py
+++ b/kpops/utils/yaml.py
@@ -1,11 +1,14 @@
from collections.abc import Mapping
from pathlib import Path
-from string import Template
from typing import Any
import yaml
from cachetools import cached
from cachetools.keys import hashkey
+from rich.console import Console
+from rich.syntax import Syntax
+
+from kpops.utils.dict_ops import ImprovedTemplate
def generate_hashkey(
@@ -33,7 +36,12 @@ def substitute(input: str, substitution: Mapping[str, Any] | None = None) -> str
"""
if not substitution:
return input
- return Template(input).safe_substitute(**substitution)
+
+ def prepare_substitution(substitution: Mapping[str, Any]) -> dict[str, Any]:
+ """Replace dots with underscores in the substitution keys."""
+ return {k.replace(".", "__"): v for k, v in substitution.items()}
+
+ return ImprovedTemplate(input).safe_substitute(**prepare_substitution(substitution))
def substitute_nested(input: str, **kwargs) -> str:
@@ -73,3 +81,22 @@ def substitute_nested(input: str, **kwargs) -> str:
msg = "An infinite loop condition detected. Check substitution variables."
raise ValueError(msg)
return old_str
+
+
+def print_yaml(data: Mapping | str, *, substitution: dict | None = None) -> None:
+ """Print YAML object with syntax highlighting.
+
+ :param data: YAML document
+ :param substitution: Substitution dictionary, defaults to None
+ """
+ if not isinstance(data, str):
+ data = yaml.safe_dump(dict(data))
+ syntax = Syntax(
+ substitute(data, substitution),
+ "yaml",
+ background_color="default",
+ theme="ansi_dark",
+ )
+ Console(
+ width=1000 # HACK: overwrite console width to avoid truncating output
+ ).print(syntax)
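``substitute`` now rewrites dotted keys to ``__`` before delegating to ``ImprovedTemplate``, and the new ``print_yaml`` renders documents with rich syntax highlighting. A short usage sketch (the sample document is made up):

# Editor's sketch: variable substitution plus highlighted YAML output.
from kpops.utils.yaml import print_yaml, substitute

doc = "name: ${pipeline.name}-sink\nbrokers: ${kafka_brokers}\n"
print(substitute(doc, {"pipeline.name": "word-count", "kafka_brokers": "localhost:9092"}))
# name: word-count-sink
# brokers: localhost:9092

print_yaml({"name": "${pipeline.name}-sink"}, substitution={"pipeline.name": "word-count"})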
diff --git a/poetry.lock b/poetry.lock
index 05591986a..b9593a550 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
[[package]]
name = "aiofiles"
@@ -11,6 +11,17 @@ files = [
{file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"},
]
+[[package]]
+name = "annotated-types"
+version = "0.5.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"},
+ {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"},
+]
+
[[package]]
name = "anyio"
version = "3.6.2"
@@ -816,56 +827,154 @@ virtualenv = ">=20.0.8"
[[package]]
name = "pydantic"
-version = "1.10.8"
-description = "Data validation and settings management using python type hints"
+version = "2.5.2"
+description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"},
- {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"},
- {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"},
- {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"},
- {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"},
- {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"},
- {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"},
- {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"},
- {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"},
- {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"},
- {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"},
- {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"},
- {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"},
- {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"},
- {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"},
- {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"},
- {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"},
- {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"},
- {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"},
- {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"},
- {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"},
- {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"},
- {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"},
- {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"},
- {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"},
- {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"},
- {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"},
- {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"},
- {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"},
- {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"},
- {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"},
- {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"},
- {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"},
- {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"},
- {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"},
- {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"},
+ {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"},
+ {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"},
]
[package.dependencies]
-python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""}
-typing-extensions = ">=4.2.0"
+annotated-types = ">=0.4.0"
+pydantic-core = "2.14.5"
+typing-extensions = ">=4.6.1"
[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
+email = ["email-validator (>=2.0.0)"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.14.5"
+description = ""
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"},
+ {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"},
+ {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"},
+ {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"},
+ {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"},
+ {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"},
+ {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"},
+ {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"},
+ {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"},
+ {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"},
+ {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"},
+ {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"},
+ {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"},
+ {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"},
+ {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"},
+ {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"},
+ {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"},
+ {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"},
+ {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"},
+ {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"},
+ {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"},
+ {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"},
+ {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"},
+ {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"},
+ {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"},
+ {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"},
+ {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"},
+ {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"},
+ {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"},
+ {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"},
+ {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+
+[[package]]
+name = "pydantic-settings"
+version = "2.0.3"
+description = "Settings management using Pydantic"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic_settings-2.0.3-py3-none-any.whl", hash = "sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625"},
+ {file = "pydantic_settings-2.0.3.tar.gz", hash = "sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945"},
+]
+
+[package.dependencies]
+pydantic = ">=2.0.1"
+python-dotenv = ">=0.21.0"
[[package]]
name = "pygments"
@@ -1377,28 +1486,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
[[package]]
name = "ruff"
-version = "0.1.3"
-description = "An extremely fast Python linter, written in Rust."
+version = "0.1.7"
+description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.1.3-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b46d43d51f7061652eeadb426a9e3caa1e0002470229ab2fc19de8a7b0766901"},
- {file = "ruff-0.1.3-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b8afeb9abd26b4029c72adc9921b8363374f4e7edb78385ffaa80278313a15f9"},
- {file = "ruff-0.1.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca3cf365bf32e9ba7e6db3f48a4d3e2c446cd19ebee04f05338bc3910114528b"},
- {file = "ruff-0.1.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4874c165f96c14a00590dcc727a04dca0cfd110334c24b039458c06cf78a672e"},
- {file = "ruff-0.1.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eec2dd31eed114e48ea42dbffc443e9b7221976554a504767ceaee3dd38edeb8"},
- {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dc3ec4edb3b73f21b4aa51337e16674c752f1d76a4a543af56d7d04e97769613"},
- {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e3de9ed2e39160800281848ff4670e1698037ca039bda7b9274f849258d26ce"},
- {file = "ruff-0.1.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c595193881922cc0556a90f3af99b1c5681f0c552e7a2a189956141d8666fe8"},
- {file = "ruff-0.1.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f75e670d529aa2288cd00fc0e9b9287603d95e1536d7a7e0cafe00f75e0dd9d"},
- {file = "ruff-0.1.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76dd49f6cd945d82d9d4a9a6622c54a994689d8d7b22fa1322983389b4892e20"},
- {file = "ruff-0.1.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:918b454bc4f8874a616f0d725590277c42949431ceb303950e87fef7a7d94cb3"},
- {file = "ruff-0.1.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d8859605e729cd5e53aa38275568dbbdb4fe882d2ea2714c5453b678dca83784"},
- {file = "ruff-0.1.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0b6c55f5ef8d9dd05b230bb6ab80bc4381ecb60ae56db0330f660ea240cb0d4a"},
- {file = "ruff-0.1.3-py3-none-win32.whl", hash = "sha256:3e7afcbdcfbe3399c34e0f6370c30f6e529193c731b885316c5a09c9e4317eef"},
- {file = "ruff-0.1.3-py3-none-win_amd64.whl", hash = "sha256:7a18df6638cec4a5bd75350639b2bb2a2366e01222825562c7346674bdceb7ea"},
- {file = "ruff-0.1.3-py3-none-win_arm64.whl", hash = "sha256:12fd53696c83a194a2db7f9a46337ce06445fb9aa7d25ea6f293cf75b21aca9f"},
- {file = "ruff-0.1.3.tar.gz", hash = "sha256:3ba6145369a151401d5db79f0a47d50e470384d0d89d0d6f7fab0b589ad07c34"},
+ {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7f80496854fdc65b6659c271d2c26e90d4d401e6a4a31908e7e334fab4645aac"},
+ {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1ea109bdb23c2a4413f397ebd8ac32cb498bee234d4191ae1a310af760e5d287"},
+ {file = "ruff-0.1.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0c2de9dd9daf5e07624c24add25c3a490dbf74b0e9bca4145c632457b3b42a"},
+ {file = "ruff-0.1.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:69a4bed13bc1d5dabf3902522b5a2aadfebe28226c6269694283c3b0cecb45fd"},
+ {file = "ruff-0.1.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de02ca331f2143195a712983a57137c5ec0f10acc4aa81f7c1f86519e52b92a1"},
+ {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45b38c3f8788a65e6a2cab02e0f7adfa88872696839d9882c13b7e2f35d64c5f"},
+ {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c64cb67b2025b1ac6d58e5ffca8f7b3f7fd921f35e78198411237e4f0db8e73"},
+ {file = "ruff-0.1.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dcc6bb2f4df59cb5b4b40ff14be7d57012179d69c6565c1da0d1f013d29951b"},
+ {file = "ruff-0.1.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2bb4bb6bbe921f6b4f5b6fdd8d8468c940731cb9406f274ae8c5ed7a78c478"},
+ {file = "ruff-0.1.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:276a89bcb149b3d8c1b11d91aa81898fe698900ed553a08129b38d9d6570e717"},
+ {file = "ruff-0.1.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:90c958fe950735041f1c80d21b42184f1072cc3975d05e736e8d66fc377119ea"},
+ {file = "ruff-0.1.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b05e3b123f93bb4146a761b7a7d57af8cb7384ccb2502d29d736eaade0db519"},
+ {file = "ruff-0.1.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:290ecab680dce94affebefe0bbca2322a6277e83d4f29234627e0f8f6b4fa9ce"},
+ {file = "ruff-0.1.7-py3-none-win32.whl", hash = "sha256:416dfd0bd45d1a2baa3b1b07b1b9758e7d993c256d3e51dc6e03a5e7901c7d80"},
+ {file = "ruff-0.1.7-py3-none-win_amd64.whl", hash = "sha256:4af95fd1d3b001fc41325064336db36e3d27d2004cdb6d21fd617d45a172dd96"},
+ {file = "ruff-0.1.7-py3-none-win_arm64.whl", hash = "sha256:0683b7bfbb95e6df3c7c04fe9d78f631f8e8ba4868dfc932d43d690698057e2e"},
+ {file = "ruff-0.1.7.tar.gz", hash = "sha256:dffd699d07abf54833e5f6cc50b85a6ff043715da8788c4a79bcd4ab4734d306"},
]
[[package]]
@@ -1613,13 +1722,13 @@ typer = ">=0.4.0,<=0.7.0"
[[package]]
name = "typing-extensions"
-version = "4.4.0"
+version = "4.7.1"
description = "Backported and Experimental Type Hints for Python 3.7+"
optional = false
python-versions = ">=3.7"
files = [
- {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
- {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+ {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"},
+ {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
]
[[package]]
@@ -1761,4 +1870,4 @@ watchmedo = ["PyYAML (>=3.10)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "cfa6447fe58b9cffa8247c4d2e09fa988bbb7484a2a728e432a63c05872e3bd8"
+content-hash = "2dac8180567353aea454a8d6f9dc5f6fcddce9d6c6ec9026c23fe31627385635"
diff --git a/pyproject.toml b/pyproject.toml
index 8e749efe0..2f929d944 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,7 +28,8 @@ kpops = "kpops.cli.main:app"
[tool.poetry.dependencies]
python = "^3.10"
-pydantic = { extras = ["dotenv"], version = "^1.10.8" }
+pydantic = "^2.5.2"
+pydantic-settings = "^2.0.3"
rich = "^12.4.4"
PyYAML = "^6.0"
typer = { extras = ["all"], version = "^0.6.1" }
@@ -45,7 +46,7 @@ pytest-mock = "^3.10.0"
pytest-timeout = "^2.1.0"
snapshottest = "^0.6.0"
pre-commit = "^2.19.0"
-ruff = "^0.1.3"
+ruff = "^0.1.7"
typer-cli = "^0.0.13"
pyright = "^1.1.314"
pytest-rerunfailures = "^11.1.2"
@@ -142,7 +143,6 @@ select = [
"ICN", # flake8-import-conventions
"INP", # flake8-no-pep420
"PIE", # flake8-pie
- "PT", # flake8-pytest-style
"Q", # flake8-quotes
"RSE", # flake8-raise
"RET", # flake8-return
diff --git a/tests/cli/resources/config.yaml b/tests/cli/resources/config.yaml
new file mode 100644
index 000000000..046c98d2a
--- /dev/null
+++ b/tests/cli/resources/config.yaml
@@ -0,0 +1,2 @@
+kafka_brokers: http://127.0.0.1:9092
+components_module: tests.cli.test_schema_generation
diff --git a/tests/cli/resources/module.py b/tests/cli/resources/custom_module/__init__.py
similarity index 100%
rename from tests/cli/resources/module.py
rename to tests/cli/resources/custom_module/__init__.py
diff --git a/tests/cli/resources/empty_module.py b/tests/cli/resources/empty_module/__init__.py
similarity index 100%
rename from tests/cli/resources/empty_module.py
rename to tests/cli/resources/empty_module/__init__.py
diff --git a/tests/cli/resources/empty_module/config.yaml b/tests/cli/resources/empty_module/config.yaml
new file mode 100644
index 000000000..735b3904a
--- /dev/null
+++ b/tests/cli/resources/empty_module/config.yaml
@@ -0,0 +1,2 @@
+kafka_brokers: http://127.0.0.1:9092
+components_module: tests.cli.resources.empty_module
diff --git a/tests/cli/resources/no_module/config.yaml b/tests/cli/resources/no_module/config.yaml
new file mode 100644
index 000000000..79261856b
--- /dev/null
+++ b/tests/cli/resources/no_module/config.yaml
@@ -0,0 +1 @@
+kafka_brokers: http://127.0.0.1:9092
diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py
index 2dd92b512..f23e77422 100644
--- a/tests/cli/snapshots/snap_test_schema_generation.py
+++ b/tests/cli/snapshots/snap_test_schema_generation.py
@@ -8,16 +8,21 @@
snapshots = Snapshot()
snapshots['TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation'] = '''{
- "definitions": {
+ "$defs": {
"EmptyPipelineComponent": {
+ "additionalProperties": true,
"description": "",
"properties": {
"from": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/FromSection"
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -27,31 +32,31 @@
"type": "string"
},
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "empty-pipeline-component",
- "enum": [
- "empty-pipeline-component"
- ],
- "title": "Component type",
- "type": "string"
+ "const": "empty-pipeline-component",
+ "title": "Type"
}
},
"required": [
- "name"
+ "name",
+ "type"
],
"title": "EmptyPipelineComponent",
"type": "object"
@@ -62,7 +67,7 @@
"properties": {
"components": {
"additionalProperties": {
- "$ref": "#/definitions/FromTopic"
+ "$ref": "#/$defs/FromTopic"
},
"default": {},
"description": "Components to read from",
@@ -71,7 +76,7 @@
},
"topics": {
"additionalProperties": {
- "$ref": "#/definitions/FromTopic"
+ "$ref": "#/$defs/FromTopic"
},
"default": {},
"description": "Input topics",
@@ -87,16 +92,28 @@
"description": "Input topic.",
"properties": {
"role": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`",
- "title": "Role",
- "type": "string"
+ "title": "Role"
},
"type": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/InputTopicTypes"
+ },
{
- "$ref": "#/definitions/InputTopicTypes"
+ "type": "null"
}
],
+ "default": null,
"description": "Topic type"
}
},
@@ -122,14 +139,19 @@
"type": "string"
},
"SubPipelineComponent": {
+ "additionalProperties": true,
"description": "",
"properties": {
"from": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/FromSection"
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -139,44 +161,49 @@
"type": "string"
},
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "sub-pipeline-component",
- "enum": [
- "sub-pipeline-component"
- ],
- "title": "Component type",
- "type": "string"
+ "const": "sub-pipeline-component",
+ "title": "Type"
}
},
"required": [
- "name"
+ "name",
+ "type"
],
"title": "SubPipelineComponent",
"type": "object"
},
"SubPipelineComponentCorrect": {
+ "additionalProperties": true,
"description": "",
"properties": {
"from": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/FromSection"
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -186,36 +213,37 @@
"type": "string"
},
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "sub-pipeline-component-correct",
- "enum": [
- "sub-pipeline-component-correct"
- ],
- "title": "Component type",
- "type": "string"
+ "const": "sub-pipeline-component-correct",
+ "title": "Type"
}
},
"required": [
- "name"
+ "name",
+ "type"
],
"title": "SubPipelineComponentCorrect",
"type": "object"
},
"SubPipelineComponentCorrectDocstr": {
+ "additionalProperties": true,
"description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma",
"properties": {
"example_attr": {
@@ -224,11 +252,15 @@
"type": "string"
},
"from": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/FromSection"
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -238,46 +270,50 @@
"type": "string"
},
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "sub-pipeline-component-correct-docstr",
- "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma",
- "enum": [
- "sub-pipeline-component-correct-docstr"
- ],
- "title": "Component type",
- "type": "string"
+ "const": "sub-pipeline-component-correct-docstr",
+ "title": "Type"
}
},
"required": [
"name",
- "example_attr"
+ "example_attr",
+ "type"
],
"title": "SubPipelineComponentCorrectDocstr",
"type": "object"
},
"SubPipelineComponentNoSchemaTypeNoType": {
+ "additionalProperties": true,
"description": "",
"properties": {
"from": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/FromSection"
+ "$ref": "#/$defs/FromSection"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic(s) and/or components from which the component will read input",
"title": "From"
},
@@ -287,36 +323,37 @@
"type": "string"
},
"prefix": {
- "default": "${pipeline_name}-",
+ "default": "${pipeline.name}-",
"description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.",
"title": "Prefix",
"type": "string"
},
"to": {
- "allOf": [
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ToSection"
+ },
{
- "$ref": "#/definitions/ToSection"
+ "type": "null"
}
],
- "description": "Topic(s) into which the component will write output",
- "title": "To"
+ "default": null,
+ "description": "Topic(s) into which the component will write output"
},
"type": {
- "default": "sub-pipeline-component-no-schema-type-no-type",
- "enum": [
- "sub-pipeline-component-no-schema-type-no-type"
- ],
- "title": "Component type",
- "type": "string"
+ "const": "sub-pipeline-component-no-schema-type-no-type",
+ "title": "Type"
}
},
"required": [
- "name"
+ "name",
+ "type"
],
"title": "SubPipelineComponentNoSchemaTypeNoType",
"type": "object"
},
"ToSection": {
+ "additionalProperties": false,
"description": "Holds multiple output topics.",
"properties": {
"models": {
@@ -330,7 +367,7 @@
},
"topics": {
"additionalProperties": {
- "$ref": "#/definitions/TopicConfig"
+ "$ref": "#/$defs/TopicConfig"
},
"default": {},
"description": "Output topics",
@@ -362,38 +399,82 @@
"type": "object"
},
"key_schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Key schema class name",
- "title": "Key schema",
- "type": "string"
+ "title": "Key schema"
},
"partitions_count": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Number of partitions into which the topic is divided",
- "title": "Partitions count",
- "type": "integer"
+ "title": "Partitions count"
},
"replication_factor": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Replication factor of the topic",
- "title": "Replication factor",
- "type": "integer"
+ "title": "Replication factor"
},
"role": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`",
- "title": "Role",
- "type": "string"
+ "title": "Role"
},
"type": {
- "allOf": [
+ "anyOf": [
{
- "$ref": "#/definitions/OutputTopicTypes"
+ "$ref": "#/$defs/OutputTopicTypes"
+ },
+ {
+ "type": "null"
}
],
+ "default": null,
"description": "Topic type",
"title": "Topic type"
},
"value_schema": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
"description": "Value schema class name",
- "title": "Value schema",
- "type": "string"
+ "title": "Value schema"
}
},
"title": "TopicConfig",
@@ -403,33 +484,33 @@
"items": {
"discriminator": {
"mapping": {
- "empty-pipeline-component": "#/definitions/EmptyPipelineComponent",
- "sub-pipeline-component": "#/definitions/SubPipelineComponent",
- "sub-pipeline-component-correct": "#/definitions/SubPipelineComponentCorrect",
- "sub-pipeline-component-correct-docstr": "#/definitions/SubPipelineComponentCorrectDocstr",
- "sub-pipeline-component-no-schema-type-no-type": "#/definitions/SubPipelineComponentNoSchemaTypeNoType"
+ "empty-pipeline-component": "#/$defs/EmptyPipelineComponent",
+ "sub-pipeline-component": "#/$defs/SubPipelineComponent",
+ "sub-pipeline-component-correct": "#/$defs/SubPipelineComponentCorrect",
+ "sub-pipeline-component-correct-docstr": "#/$defs/SubPipelineComponentCorrectDocstr",
+ "sub-pipeline-component-no-schema-type-no-type": "#/$defs/SubPipelineComponentNoSchemaTypeNoType"
},
"propertyName": "type"
},
"oneOf": [
{
- "$ref": "#/definitions/EmptyPipelineComponent"
+ "$ref": "#/$defs/EmptyPipelineComponent"
},
{
- "$ref": "#/definitions/SubPipelineComponent"
+ "$ref": "#/$defs/SubPipelineComponent"
},
{
- "$ref": "#/definitions/SubPipelineComponentCorrect"
+ "$ref": "#/$defs/SubPipelineComponentCorrect"
},
{
- "$ref": "#/definitions/SubPipelineComponentCorrectDocstr"
+ "$ref": "#/$defs/SubPipelineComponentCorrectDocstr"
},
{
- "$ref": "#/definitions/SubPipelineComponentNoSchemaTypeNoType"
+ "$ref": "#/$defs/SubPipelineComponentNoSchemaTypeNoType"
}
]
},
- "title": "KPOps pipeline schema",
+ "title": "PipelineSchema",
"type": "array"
}
'''
diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py
index 509c5e0cc..7732ed61f 100644
--- a/tests/cli/test_handlers.py
+++ b/tests/cli/test_handlers.py
@@ -3,28 +3,27 @@
from pytest_mock import MockerFixture
from kpops.cli.main import setup_handlers
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.kafka_connect.kafka_connect_handler import (
KafkaConnectHandler,
)
from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler
from kpops.component_handlers.topic.handler import TopicHandler
-from tests.cli.resources.module import CustomSchemaProvider
+from kpops.config import KpopsConfig, SchemaRegistryConfig
+from tests.cli.resources.custom_module import CustomSchemaProvider
MODULE = CustomSchemaProvider.__module__
def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture):
- config = PipelineConfig(
+ config = KpopsConfig(
defaults_path=Path("fake"),
- environment="development",
- kafka_rest_host="https://testhost:8082",
- schema_registry_url=None,
+ kafka_brokers="broker:9092",
+ components_module=MODULE,
)
connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler")
- connector_handler = KafkaConnectHandler.from_pipeline_config(pipeline_config=config)
- connector_handler_mock.from_pipeline_config.return_value = connector_handler
+ connector_handler = KafkaConnectHandler.from_kpops_config(config)
+ connector_handler_mock.from_kpops_config.return_value = connector_handler
topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler")
wrapper = mocker.patch("kpops.cli.main.ProxyWrapper")
@@ -37,9 +36,9 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture):
topic_handler=topic_handler,
)
- actual_handlers = setup_handlers(MODULE, config)
+ actual_handlers = setup_handlers(config)
- connector_handler_mock.from_pipeline_config.assert_called_once_with(config)
+ connector_handler_mock.from_kpops_config.assert_called_once_with(config)
assert actual_handlers.schema_handler == expected.schema_handler
assert actual_handlers.connector_handler == expected.connector_handler
@@ -51,19 +50,18 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture):
def test_set_up_handlers_with_schema_handler(mocker: MockerFixture):
- config = PipelineConfig(
+ config = KpopsConfig(
defaults_path=Path("fake"),
- environment="development",
- kafka_rest_host="https://testhost:8082",
- schema_registry_url="https://testhost:8081",
+ schema_registry=SchemaRegistryConfig(enabled=True),
+ kafka_brokers="broker:9092",
)
schema_handler_mock = mocker.patch("kpops.cli.main.SchemaHandler")
- schema_handler = SchemaHandler.load_schema_handler(MODULE, config)
+ schema_handler = SchemaHandler.load_schema_handler(config)
schema_handler_mock.load_schema_handler.return_value = schema_handler
connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler")
- connector_handler = KafkaConnectHandler.from_pipeline_config(pipeline_config=config)
- connector_handler_mock.from_pipeline_config.return_value = connector_handler
+ connector_handler = KafkaConnectHandler.from_kpops_config(config)
+ connector_handler_mock.from_kpops_config.return_value = connector_handler
topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler")
wrapper = mocker.patch("kpops.cli.main.ProxyWrapper")
@@ -76,11 +74,11 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture):
topic_handler=topic_handler,
)
- actual_handlers = setup_handlers(MODULE, config)
+ actual_handlers = setup_handlers(config)
- schema_handler_mock.load_schema_handler.assert_called_once_with(MODULE, config)
+ schema_handler_mock.load_schema_handler.assert_called_once_with(config)
- connector_handler_mock.from_pipeline_config.assert_called_once_with(config)
+ connector_handler_mock.from_kpops_config.assert_called_once_with(config)
assert actual_handlers.schema_handler == expected.schema_handler
assert actual_handlers.connector_handler == expected.connector_handler
diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py
new file mode 100644
index 000000000..5c9655ca3
--- /dev/null
+++ b/tests/cli/test_kpops_config.py
@@ -0,0 +1,64 @@
+from pathlib import Path
+
+import pytest
+from pydantic import AnyHttpUrl, TypeAdapter, ValidationError
+
+from kpops.config import (
+ KafkaConnectConfig,
+ KafkaRestConfig,
+ KpopsConfig,
+ SchemaRegistryConfig,
+)
+
+
+def test_kpops_config_with_default_values():
+ default_config = KpopsConfig(kafka_brokers="http://broker:9092")
+
+ assert default_config.defaults_path == Path()
+ assert default_config.defaults_filename_prefix == "defaults"
+ assert (
+ default_config.topic_name_config.default_output_topic_name
+ == "${pipeline.name}-${component.name}"
+ )
+ assert (
+ default_config.topic_name_config.default_error_topic_name
+ == "${pipeline.name}-${component.name}-error"
+ )
+ assert default_config.schema_registry.enabled is False
+ assert default_config.schema_registry.url == AnyHttpUrl("http://localhost:8081")
+ assert default_config.kafka_rest.url == AnyHttpUrl("http://localhost:8082")
+ assert default_config.kafka_connect.url == AnyHttpUrl("http://localhost:8083")
+ assert default_config.timeout == 300
+ assert default_config.create_namespace is False
+ assert default_config.helm_config.context is None
+ assert default_config.helm_config.debug is False
+ assert default_config.helm_config.api_version is None
+ assert default_config.helm_diff_config.ignore == set()
+ assert default_config.retain_clean_jobs is False
+
+
+def test_kpops_config_with_different_invalid_urls():
+ with pytest.raises(ValidationError):
+ KpopsConfig(
+ kafka_brokers="http://broker:9092",
+ kafka_connect=KafkaConnectConfig(
+ url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host")
+ ),
+ )
+
+ with pytest.raises(ValidationError):
+ KpopsConfig(
+ kafka_brokers="http://broker:9092",
+ kafka_rest=KafkaRestConfig(
+ url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host")
+ ),
+ )
+
+ with pytest.raises(ValidationError):
+ KpopsConfig(
+ kafka_brokers="http://broker:9092",
+ schema_registry=SchemaRegistryConfig(
+ enabled=True,
+ url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host"),
+ ),
+ )
diff --git a/tests/cli/test_pipeline_steps.py b/tests/cli/test_pipeline_steps.py
index a09d7b064..f9a345ae7 100644
--- a/tests/cli/test_pipeline_steps.py
+++ b/tests/cli/test_pipeline_steps.py
@@ -6,7 +6,8 @@
from pytest_mock import MockerFixture
from kpops.cli.main import FilterType, get_steps_to_apply
-from kpops.pipeline_generator.pipeline import Pipeline
+from kpops.components import PipelineComponent
+from kpops.pipeline import Pipeline
PREFIX = "example-prefix-"
@@ -25,17 +26,11 @@ class TestComponent:
@pytest.fixture(autouse=True)
def pipeline() -> Pipeline:
- class TestPipeline:
- components = [
- test_component_1,
- test_component_2,
- test_component_3,
- ]
-
- def __iter__(self):
- return iter(self.components)
-
- return cast(Pipeline, TestPipeline())
+ pipeline = Pipeline()
+ pipeline.add(cast(PipelineComponent, test_component_1))
+ pipeline.add(cast(PipelineComponent, test_component_2))
+ pipeline.add(cast(PipelineComponent, test_component_3))
+ return pipeline
@pytest.fixture(autouse=True)
diff --git a/tests/cli/test_registry.py b/tests/cli/test_registry.py
index 0e6722eeb..473c340c4 100644
--- a/tests/cli/test_registry.py
+++ b/tests/cli/test_registry.py
@@ -5,7 +5,7 @@
from kpops.cli.registry import ClassNotFoundError, Registry, _find_classes, find_class
from kpops.component_handlers.schema_handler.schema_provider import SchemaProvider
from kpops.components.base_components.pipeline_component import PipelineComponent
-from tests.cli.resources.module import CustomSchemaProvider
+from tests.cli.resources.custom_module import CustomSchemaProvider
class SubComponent(PipelineComponent):
@@ -36,8 +36,9 @@ def test_find_builtin_classes():
class_.__name__
for class_ in _find_classes("kpops.components", PipelineComponent)
]
- assert len(components) == 8
+ assert len(components) == 10
assert components == [
+ "HelmApp",
"KafkaApp",
"KafkaConnector",
"KafkaSinkConnector",
@@ -46,6 +47,7 @@ def test_find_builtin_classes():
"PipelineComponent",
"ProducerApp",
"StreamsApp",
+ "StreamsBootstrap",
]
diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py
index cbb855d14..ddc4977dd 100644
--- a/tests/cli/test_schema_generation.py
+++ b/tests/cli/test_schema_generation.py
@@ -1,18 +1,18 @@
from __future__ import annotations
-import logging
+import json
from abc import ABC, abstractmethod
from pathlib import Path
from typing import TYPE_CHECKING
import pytest
-from pydantic import Field
+from pydantic import ConfigDict, Field
from typer.testing import CliRunner
from kpops.cli.main import app
+from kpops.cli.registry import Registry
from kpops.components.base_components import PipelineComponent
from kpops.utils.docstring import describe_attr
-from tests.cli.resources import empty_module
if TYPE_CHECKING:
from snapshottest.module import SnapshotTest
@@ -25,8 +25,7 @@
# type is inherited from PipelineComponent
class EmptyPipelineComponent(PipelineComponent):
- class Config:
- anystr_strip_whitespace = True
+ model_config = ConfigDict(str_strip_whitespace=True)
# abstract component inheriting from ABC should be excluded
@@ -82,31 +81,31 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent):
)
-MODULE = EmptyPipelineComponent.__module__
-
-
@pytest.mark.filterwarnings(
"ignore:handlers", "ignore:config", "ignore:enrich", "ignore:validate"
)
class TestGenSchema:
- def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture):
- result = runner.invoke(
- app,
- [
- "schema",
- "pipeline",
- "--no-include-stock-components",
- ],
- catch_exceptions=False,
- )
- assert caplog.record_tuples == [
- (
- "root",
- logging.WARNING,
- "No components are provided, no schema is generated.",
+ @pytest.fixture
+ def stock_components(self) -> list[type[PipelineComponent]]:
+ registry = Registry()
+ registry.find_components("kpops.components")
+ return list(registry._classes.values())
+
+ def test_gen_pipeline_schema_no_modules(self):
+ with pytest.raises(
+ RuntimeError, match="^No components are provided, no schema is generated.$"
+ ):
+ runner.invoke(
+ app,
+ [
+ "schema",
+ "pipeline",
+ "--no-include-stock-components",
+ "--config",
+ str(RESOURCE_PATH / "no_module"),
+ ],
+ catch_exceptions=False,
)
- ]
- assert result.exit_code == 0
def test_gen_pipeline_schema_no_components(self):
with pytest.raises(RuntimeError, match="^No valid components found.$"):
@@ -116,7 +115,8 @@ def test_gen_pipeline_schema_no_components(self):
"schema",
"pipeline",
"--no-include-stock-components",
- empty_module.__name__,
+ "--config",
+ str(RESOURCE_PATH / "empty_module"),
],
catch_exceptions=False,
)
@@ -131,7 +131,7 @@ def test_gen_pipeline_schema_only_stock_module(self):
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
assert result.stdout
result = runner.invoke(
@@ -144,24 +144,32 @@ def test_gen_pipeline_schema_only_stock_module(self):
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
assert result.stdout
- def test_gen_pipeline_schema_only_custom_module(self, snapshot: SnapshotTest):
+ def test_gen_pipeline_schema_only_custom_module(
+ self, snapshot: SnapshotTest, stock_components: list[type[PipelineComponent]]
+ ):
result = runner.invoke(
app,
[
"schema",
"pipeline",
- MODULE,
"--no-include-stock-components",
+ "--config",
+ str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
snapshot.assert_match(result.stdout, "test-schema-generation")
+ schema = json.loads(result.stdout)
+ assert schema["title"] == "PipelineSchema"
+ assert set(schema["items"]["discriminator"]["mapping"].keys()).isdisjoint(
+ {component.type for component in stock_components}
+ )
def test_gen_pipeline_schema_stock_and_custom_module(self):
result = runner.invoke(
@@ -169,20 +177,40 @@ def test_gen_pipeline_schema_stock_and_custom_module(self):
[
"schema",
"pipeline",
- MODULE,
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
+ assert result.stdout
+
+ def test_gen_defaults_schema(self, stock_components: list[type[PipelineComponent]]):
+ result = runner.invoke(
+ app,
+ [
+ "schema",
+ "defaults",
+ "--config",
+ str(RESOURCE_PATH / "no_module"),
+ ],
+ catch_exceptions=False,
+ )
+
+ assert result.exit_code == 0, result.stdout
assert result.stdout
+ schema = json.loads(result.stdout)
+ assert schema["title"] == "DefaultsSchema"
+ assert schema["required"] == [component.type for component in stock_components]
def test_gen_config_schema(self):
result = runner.invoke(
app,
- ["schema", "config"],
+ [
+ "schema",
+ "config",
+ ],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
assert result.stdout
diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py
index f0a1b1b1e..99a228cfe 100644
--- a/tests/compiler/test_pipeline_name.py
+++ b/tests/compiler/test_pipeline_name.py
@@ -2,8 +2,7 @@
import pytest
-from kpops.cli.pipeline_config import PipelineConfig
-from kpops.pipeline_generator.pipeline import Pipeline
+from kpops.pipeline import PipelineGenerator
from kpops.utils.environment import ENV
DEFAULTS_PATH = Path(__file__).parent / "resources"
@@ -12,54 +11,56 @@
def test_should_set_pipeline_name_with_default_base_dir():
- Pipeline.set_pipeline_name_env_vars(DEFAULT_BASE_DIR, PIPELINE_PATH)
+ PipelineGenerator.set_pipeline_name_env_vars(DEFAULT_BASE_DIR, PIPELINE_PATH)
- assert ENV["pipeline_name"] == "some-random-path-for-testing"
- assert ENV["pipeline_name_0"] == "some"
- assert ENV["pipeline_name_1"] == "random"
- assert ENV["pipeline_name_2"] == "path"
- assert ENV["pipeline_name_3"] == "for"
- assert ENV["pipeline_name_4"] == "testing"
+ assert ENV["pipeline.name"] == "some-random-path-for-testing"
+ assert ENV["pipeline.name_0"] == "some"
+ assert ENV["pipeline.name_1"] == "random"
+ assert ENV["pipeline.name_2"] == "path"
+ assert ENV["pipeline.name_3"] == "for"
+ assert ENV["pipeline.name_4"] == "testing"
def test_should_set_pipeline_name_with_specific_relative_base_dir():
- Pipeline.set_pipeline_name_env_vars(Path("./some/random/path"), PIPELINE_PATH)
+ PipelineGenerator.set_pipeline_name_env_vars(
+ Path("./some/random/path"), PIPELINE_PATH
+ )
- assert ENV["pipeline_name"] == "for-testing"
- assert ENV["pipeline_name_0"] == "for"
- assert ENV["pipeline_name_1"] == "testing"
+ assert ENV["pipeline.name"] == "for-testing"
+ assert ENV["pipeline.name_0"] == "for"
+ assert ENV["pipeline.name_1"] == "testing"
def test_should_set_pipeline_name_with_specific_absolute_base_dir():
- Pipeline.set_pipeline_name_env_vars(Path("some/random/path"), PIPELINE_PATH)
+ PipelineGenerator.set_pipeline_name_env_vars(
+ Path("some/random/path"), PIPELINE_PATH
+ )
- assert ENV["pipeline_name"] == "for-testing"
- assert ENV["pipeline_name_0"] == "for"
- assert ENV["pipeline_name_1"] == "testing"
+ assert ENV["pipeline.name"] == "for-testing"
+ assert ENV["pipeline.name_0"] == "for"
+ assert ENV["pipeline.name_1"] == "testing"
def test_should_set_pipeline_name_with_absolute_base_dir():
- Pipeline.set_pipeline_name_env_vars(Path.cwd(), PIPELINE_PATH)
+ PipelineGenerator.set_pipeline_name_env_vars(Path.cwd(), PIPELINE_PATH)
- assert ENV["pipeline_name"] == "some-random-path-for-testing"
- assert ENV["pipeline_name_0"] == "some"
- assert ENV["pipeline_name_1"] == "random"
- assert ENV["pipeline_name_2"] == "path"
- assert ENV["pipeline_name_3"] == "for"
- assert ENV["pipeline_name_4"] == "testing"
+ assert ENV["pipeline.name"] == "some-random-path-for-testing"
+ assert ENV["pipeline.name_0"] == "some"
+ assert ENV["pipeline.name_1"] == "random"
+ assert ENV["pipeline.name_2"] == "path"
+ assert ENV["pipeline.name_3"] == "for"
+ assert ENV["pipeline.name_4"] == "testing"
def test_should_not_set_pipeline_name_with_the_same_base_dir():
with pytest.raises(
ValueError, match="The pipeline-base-dir should not equal the pipeline-path"
):
- Pipeline.set_pipeline_name_env_vars(PIPELINE_PATH, PIPELINE_PATH)
+ PipelineGenerator.set_pipeline_name_env_vars(PIPELINE_PATH, PIPELINE_PATH)
def test_pipeline_file_name_environment():
- config = PipelineConfig(
- defaults_path=DEFAULTS_PATH,
- environment="some_environment",
+ environment = PipelineGenerator.pipeline_filename_environment(
+ PIPELINE_PATH, "some_environment"
)
- environment = Pipeline.pipeline_filename_environment(PIPELINE_PATH, config)
assert environment.name == "pipeline_some_environment.yaml"
diff --git a/tests/compiler/test_yaml_loading.py b/tests/compiler/test_yaml_loading.py
index c4bdcc3cf..47db51bfd 100644
--- a/tests/compiler/test_yaml_loading.py
+++ b/tests/compiler/test_yaml_loading.py
@@ -4,7 +4,7 @@
import pytest
import yaml
-from kpops.utils.yaml_loading import load_yaml_file
+from kpops.utils.yaml import load_yaml_file
RESOURCE_PATH = Path(__file__).parent / "resources"
diff --git a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py
index bad4f2aa8..584becd32 100644
--- a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py
+++ b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py
@@ -1,4 +1,5 @@
from logging import Logger
+from pathlib import Path
from unittest.mock import MagicMock
import pytest
@@ -7,6 +8,7 @@
from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler
from kpops.component_handlers.helm_wrapper.model import HelmTemplate
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
log = Logger("TestLogger")
@@ -34,7 +36,9 @@ def test_should_print_helm_diff_when_release_is_new(
helm_mock.get_manifest.return_value = iter(())
mock_load_manifest = mocker.patch(
"kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest",
- return_value=iter([HelmTemplate("path.yaml", {"a": 1})]),
+ return_value=iter(
+ [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))]
+ ),
)
log.addHandler(caplog.handler)
@@ -55,11 +59,13 @@ def test_should_print_helm_diff_when_release_exists(
caplog: LogCaptureFixture,
):
helm_mock.get_manifest.return_value = iter(
- [HelmTemplate("path.yaml", {"a": 1})]
+ [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))]
)
mock_load_manifest = mocker.patch(
"kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest",
- return_value=iter([HelmTemplate("path.yaml", {"a": 1})]),
+ return_value=iter(
+ [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))]
+ ),
)
log.addHandler(caplog.handler)
diff --git a/tests/component_handlers/helm_wrapper/test_helm_diff.py b/tests/component_handlers/helm_wrapper/test_helm_diff.py
index 15a58a023..edb64363e 100644
--- a/tests/component_handlers/helm_wrapper/test_helm_diff.py
+++ b/tests/component_handlers/helm_wrapper/test_helm_diff.py
@@ -1,11 +1,14 @@
+from pathlib import Path
+
from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff
from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
from kpops.utils.dict_differ import Change
def test_diff():
helm_diff = HelmDiff(HelmDiffConfig())
- templates = [HelmTemplate("a.yaml", {})]
+ templates = [HelmTemplate(Path("a.yaml"), KubernetesManifest())]
assert list(helm_diff.calculate_changes(templates, templates)) == [
Change(
old_value={},
@@ -17,12 +20,12 @@ def test_diff():
assert list(
helm_diff.calculate_changes(
[
- HelmTemplate("a.yaml", {"a": 1}),
- HelmTemplate("b.yaml", {"b": 1}),
+ HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 1})),
+ HelmTemplate(Path("b.yaml"), KubernetesManifest({"b": 1})),
],
[
- HelmTemplate("a.yaml", {"a": 2}),
- HelmTemplate("c.yaml", {"c": 1}),
+ HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 2})),
+ HelmTemplate(Path("c.yaml"), KubernetesManifest({"c": 1})),
],
)
) == [
@@ -42,7 +45,9 @@ def test_diff():
# test no current release
assert list(
- helm_diff.calculate_changes((), [HelmTemplate("a.yaml", {"a": 1})])
+ helm_diff.calculate_changes(
+ (), [HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 1}))]
+ )
) == [
Change(
old_value={},
diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py
index ce6fae709..cdc7e9d9d 100644
--- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py
+++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py
@@ -13,6 +13,7 @@
HelmConfig,
HelmTemplateFlags,
HelmUpgradeInstallFlags,
+ KubernetesManifest,
ParseError,
RepoAuthFlags,
Version,
@@ -30,8 +31,10 @@ def temp_file_mock(self, mocker: MockerFixture) -> MagicMock:
return temp_file_mock
@pytest.fixture()
- def run_command(self, mocker: MockerFixture) -> MagicMock:
- return mocker.patch.object(Helm, "_Helm__execute")
+ def mock_execute(self, mocker: MockerFixture) -> MagicMock:
+ mock_execute = mocker.patch.object(Helm, "_Helm__execute")
+ mock_execute.return_value = ""
+ return mock_execute
@pytest.fixture()
def log_warning_mock(self, mocker: MockerFixture) -> MagicMock:
@@ -43,12 +46,14 @@ def mock_get_version(self, mocker: MockerFixture) -> MagicMock:
mock_get_version.return_value = Version(major=3, minor=12, patch=0)
return mock_get_version
+ @pytest.fixture()
+ def helm(self, mock_get_version: MagicMock) -> Helm:
+ return Helm(helm_config=HelmConfig())
+
def test_should_call_run_command_method_when_helm_install_with_defaults(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm_wrapper = Helm(helm_config=HelmConfig())
-
- helm_wrapper.upgrade_install(
+ helm.upgrade_install(
release_name="test-release",
chart=f"bakdata-streams-bootstrap/{AppType.STREAMS_APP.value}",
dry_run=False,
@@ -56,7 +61,7 @@ def test_should_call_run_command_method_when_helm_install_with_defaults(
values={"commandLine": "test"},
flags=HelmUpgradeInstallFlags(),
)
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
[
"helm",
"upgrade",
@@ -74,7 +79,7 @@ def test_should_call_run_command_method_when_helm_install_with_defaults(
)
def test_should_include_configured_tls_parameters_on_add_when_version_is_old(
- self, run_command: MagicMock, mocker: MockerFixture
+ self, mock_execute: MagicMock, mocker: MockerFixture
):
mock_get_version = mocker.patch.object(Helm, "get_version")
mock_get_version.return_value = Version(major=3, minor=6, patch=0)
@@ -85,7 +90,7 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_old(
"fake",
RepoAuthFlags(ca_file=Path("a_file.ca"), insecure_skip_tls_verify=True),
)
- assert run_command.mock_calls == [
+ assert mock_execute.mock_calls == [
mock.call(
[
"helm",
@@ -104,16 +109,14 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_old(
]
def test_should_include_configured_tls_parameters_on_add_when_version_is_new(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm = Helm(HelmConfig())
-
helm.add_repo(
"test-repository",
"fake",
RepoAuthFlags(ca_file=Path("a_file.ca"), insecure_skip_tls_verify=True),
)
- assert run_command.mock_calls == [
+ assert mock_execute.mock_calls == [
mock.call(
[
"helm",
@@ -132,10 +135,9 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_new(
]
def test_should_include_configured_tls_parameters_on_update(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm_wrapper = Helm(helm_config=HelmConfig())
- helm_wrapper.upgrade_install(
+ helm.upgrade_install(
release_name="test-release",
chart="test-repository/test-chart",
dry_run=False,
@@ -147,7 +149,7 @@ def test_should_include_configured_tls_parameters_on_update(
),
)
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
[
"helm",
"upgrade",
@@ -168,10 +170,9 @@ def test_should_include_configured_tls_parameters_on_update(
)
def test_should_call_run_command_method_when_helm_install_with_non_defaults(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm_wrapper = Helm(helm_config=HelmConfig())
- helm_wrapper.upgrade_install(
+ helm.upgrade_install(
release_name="test-release",
chart="test-repository/streams-app",
namespace="test-namespace",
@@ -187,7 +188,7 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults(
version="2.4.2",
),
)
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
[
"helm",
"upgrade",
@@ -213,27 +214,25 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults(
)
def test_should_call_run_command_method_when_uninstalling_streams_app(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm_wrapper = Helm(helm_config=HelmConfig())
- helm_wrapper.uninstall(
+ helm.uninstall(
namespace="test-namespace",
release_name="test-release",
dry_run=False,
)
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
["helm", "uninstall", "test-release", "--namespace", "test-namespace"],
)
def test_should_log_warning_when_release_not_found(
self,
- run_command: MagicMock,
+ helm: Helm,
+ mock_execute: MagicMock,
log_warning_mock: MagicMock,
- mock_get_version: MagicMock,
):
- helm_wrapper = Helm(helm_config=HelmConfig())
- run_command.side_effect = ReleaseNotFoundException()
- helm_wrapper.uninstall(
+ mock_execute.side_effect = ReleaseNotFoundException()
+ helm.uninstall(
namespace="test-namespace",
release_name="test-release",
dry_run=False,
@@ -244,16 +243,14 @@ def test_should_log_warning_when_release_not_found(
)
def test_should_call_run_command_method_when_installing_streams_app__with_dry_run(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm_wrapper = Helm(helm_config=HelmConfig())
-
- helm_wrapper.uninstall(
+ helm.uninstall(
namespace="test-namespace",
release_name="test-release",
dry_run=True,
)
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
[
"helm",
"uninstall",
@@ -284,26 +281,18 @@ def test_validate_console_output(self):
f"validate_console_output() raised ReleaseNotFoundException unexpectedly!\nError message: {ReleaseNotFoundException}"
)
- def test_helm_template_load(self):
- stdout = dedent(
- """
- ---
- # Source: chart/templates/test2.yaml
- apiVersion: v1
- kind: ServiceAccount
- metadata:
- labels:
- foo: bar
- """
+ def test_helm_template(self):
+ path = Path("test2.yaml")
+ manifest = KubernetesManifest(
+ {
+ "apiVersion": "v1",
+ "kind": "ServiceAccount",
+ "metadata": {"labels": {"foo": "bar"}},
+ }
)
-
- helm_template = HelmTemplate.load("test2.yaml", stdout)
- assert helm_template.filepath == "test2.yaml"
- assert helm_template.template == {
- "apiVersion": "v1",
- "kind": "ServiceAccount",
- "metadata": {"labels": {"foo": "bar"}},
- }
+ helm_template = HelmTemplate(path, manifest)
+ assert helm_template.filepath == path
+ assert helm_template.manifest == manifest
def test_load_manifest_with_no_notes(self):
stdout = dedent(
@@ -324,10 +313,12 @@ def test_load_manifest_with_no_notes(self):
assert all(
isinstance(helm_template, HelmTemplate) for helm_template in helm_templates
)
- assert helm_templates[0].filepath == "chart/templates/test3a.yaml"
- assert helm_templates[0].template == {"data": [{"a": 1}, {"b": 2}]}
- assert helm_templates[1].filepath == "chart/templates/test3b.yaml"
- assert helm_templates[1].template == {"foo": "bar"}
+ assert helm_templates[0].filepath == Path("chart/templates/test3a.yaml")
+ assert helm_templates[0].manifest == KubernetesManifest(
+ {"data": [{"a": 1}, {"b": 2}]}
+ )
+ assert helm_templates[1].filepath == Path("chart/templates/test3b.yaml")
+ assert helm_templates[1].manifest == KubernetesManifest({"foo": "bar"})
def test_raise_parse_error_when_helm_content_is_invalid(self):
stdout = dedent(
@@ -392,16 +383,15 @@ def test_load_manifest(self):
assert all(
isinstance(helm_template, HelmTemplate) for helm_template in helm_templates
)
- assert helm_templates[0].filepath == "chart/templates/test3a.yaml"
- assert helm_templates[0].template == {"data": [{"a": 1}, {"b": 2}]}
- assert helm_templates[1].filepath == "chart/templates/test3b.yaml"
- assert helm_templates[1].template == {"foo": "bar"}
+ assert helm_templates[0].filepath == Path("chart/templates/test3a.yaml")
+ assert helm_templates[0].manifest == KubernetesManifest(
+ {"data": [{"a": 1}, {"b": 2}]}
+ )
+ assert helm_templates[1].filepath == Path("chart/templates/test3b.yaml")
+ assert helm_templates[1].manifest == KubernetesManifest({"foo": "bar"})
- def test_helm_get_manifest(
- self, run_command: MagicMock, mock_get_version: MagicMock
- ):
- helm_wrapper = Helm(helm_config=HelmConfig())
- run_command.return_value = dedent(
+ def test_helm_get_manifest(self, helm: Helm, mock_execute: MagicMock):
+ mock_execute.return_value = dedent(
"""
---
# Source: chart/templates/test.yaml
@@ -410,10 +400,8 @@ def test_helm_get_manifest(
- b: 2
"""
)
- helm_templates = list(
- helm_wrapper.get_manifest("test-release", "test-namespace")
- )
- run_command.assert_called_once_with(
+ helm_templates = list(helm.get_manifest("test-release", "test-namespace"))
+ mock_execute.assert_called_once_with(
command=[
"helm",
"get",
@@ -424,18 +412,18 @@ def test_helm_get_manifest(
],
)
assert len(helm_templates) == 1
- assert helm_templates[0].filepath == "chart/templates/test.yaml"
- assert helm_templates[0].template == {"data": [{"a": 1}, {"b": 2}]}
+ assert helm_templates[0].filepath == Path("chart/templates/test.yaml")
+ assert helm_templates[0].manifest == KubernetesManifest(
+ {"data": [{"a": 1}, {"b": 2}]}
+ )
- run_command.side_effect = ReleaseNotFoundException()
- assert helm_wrapper.get_manifest("test-release", "test-namespace") == ()
+ mock_execute.side_effect = ReleaseNotFoundException()
+ assert helm.get_manifest("test-release", "test-namespace") == ()
def test_should_call_run_command_method_when_helm_template_with_optional_args(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm_wrapper = Helm(helm_config=HelmConfig())
-
- helm_wrapper.template(
+ helm.template(
release_name="test-release",
chart="bakdata-streams-bootstrap/streams-app",
namespace="test-ns",
@@ -446,7 +434,7 @@ def test_should_call_run_command_method_when_helm_template_with_optional_args(
cert_file=Path("a_file.pem"),
),
)
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
[
"helm",
"template",
@@ -469,18 +457,15 @@ def test_should_call_run_command_method_when_helm_template_with_optional_args(
)
def test_should_call_run_command_method_when_helm_template_without_optional_args(
- self, run_command: MagicMock, mock_get_version: MagicMock
+ self, helm: Helm, mock_execute: MagicMock
):
- helm_wrapper = Helm(helm_config=HelmConfig())
-
- helm_wrapper.template(
+ helm.template(
release_name="test-release",
chart="bakdata-streams-bootstrap/streams-app",
namespace="test-ns",
values={"commandLine": "test"},
- flags=HelmTemplateFlags(),
)
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
[
"helm",
"template",
@@ -507,14 +492,14 @@ def test_should_call_run_command_method_when_helm_template_without_optional_args
)
def test_should_call_helm_version(
self,
- run_command: MagicMock,
+ mock_execute: MagicMock,
raw_version: str,
expected_version: Version,
):
- run_command.return_value = raw_version
+ mock_execute.return_value = raw_version
helm = Helm(helm_config=HelmConfig())
- run_command.assert_called_once_with(
+ mock_execute.assert_called_once_with(
[
"helm",
"version",
@@ -525,9 +510,9 @@ def test_should_call_helm_version(
assert helm._version == expected_version
def test_should_raise_exception_if_helm_version_is_old(
- self, run_command: MagicMock
+ self, mock_execute: MagicMock
):
- run_command.return_value = "v2.9.0+gc9f554d"
+ mock_execute.return_value = "v2.9.0+gc9f554d"
with pytest.raises(
RuntimeError,
match="The supported Helm version is 3.x.x. The current Helm version is 2.9.0",
@@ -535,9 +520,9 @@ def test_should_raise_exception_if_helm_version_is_old(
Helm(helm_config=HelmConfig())
def test_should_raise_exception_if_helm_version_cannot_be_parsed(
- self, run_command: MagicMock
+ self, mock_execute: MagicMock
):
- run_command.return_value = "123"
+ mock_execute.return_value = "123"
with pytest.raises(
RuntimeError, match="Could not parse the Helm version.\n\nHelm output:\n123"
):
diff --git a/tests/component_handlers/helm_wrapper/test_utils.py b/tests/component_handlers/helm_wrapper/test_utils.py
index eef6ca14f..a0acc4707 100644
--- a/tests/component_handlers/helm_wrapper/test_utils.py
+++ b/tests/component_handlers/helm_wrapper/test_utils.py
@@ -1,29 +1,33 @@
-from kpops.component_handlers.helm_wrapper.utils import trim_release_name
+from kpops.component_handlers.helm_wrapper.utils import (
+ create_helm_release_name,
+)
-def test_trim_release_name_with_suffix():
- name = trim_release_name(
- "example-component-name-too-long-fake-fakefakefakefakefake-clean",
- suffix="-clean",
- )
- assert name == "example-component-name-too-long-fake-fakefakef-clean"
- assert len(name) == 52
+def test_helm_release_name_for_long_names():
+ long_release_name = "example-component-name-too-long-fake-fakefakefakefakefake"
+ actual_release_name = create_helm_release_name(long_release_name)
-def test_trim_release_name_without_suffix():
- name = trim_release_name(
- "example-component-name-too-long-fake-fakefakefakefakefake"
- )
- assert name == "example-component-name-too-long-fake-fakefakefakefak"
- assert len(name) == 52
+ expected_helm_release_name = "example-component-name-too-long-fake-fakefakefa-0a7fc"
+ assert expected_helm_release_name == actual_release_name
+ assert len(expected_helm_release_name) == 53
-def test_no_trim_release_name():
- assert (
- trim_release_name("normal-name-with-no-need-of-trim-clean", suffix="-clean")
- == "normal-name-with-no-need-of-trim-clean"
- )
- assert (
- trim_release_name("normal-name-with-no-need-of-trim")
- == "normal-name-with-no-need-of-trim"
+def test_helm_release_name_for_install_and_clean_must_be_different():
+ long_release_name = "example-component-name-too-long-fake-fakefakefakefakefake"
+
+ helm_clean_release_name = create_helm_release_name(long_release_name, "-clean")
+ expected_helm_release_name = (
+ "example-component-name-too-long-fake-fakefakef-0a7fc-clean"
)
+
+ assert expected_helm_release_name != helm_clean_release_name
+
+
+def test_helm_release_name_for_short_names():
+ short_release_name = "example-component-name"
+
+ actual_helm_release_name = create_helm_release_name(short_release_name)
+
+ assert actual_helm_release_name == short_release_name
+ assert len(actual_helm_release_name) <= 53
diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py
index 8e60d92a7..86eb0690b 100644
--- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py
+++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py
@@ -6,7 +6,6 @@
import pytest
from pytest_httpx import HTTPXMock
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers.kafka_connect.connect_wrapper import ConnectWrapper
from kpops.component_handlers.kafka_connect.exception import (
ConnectorNotFoundException,
@@ -17,22 +16,19 @@
KafkaConnectResponse,
)
from kpops.component_handlers.kafka_connect.timeout import timeout
+from kpops.config import KpopsConfig
HEADERS = {"Accept": "application/json", "Content-Type": "application/json"}
-HOST = "http://localhost:8083"
+DEFAULT_HOST = "http://localhost:8083"
DEFAULTS_PATH = Path(__file__).parent / "resources"
class TestConnectorApiWrapper:
@pytest.fixture(autouse=True)
def _setup(self):
- config = PipelineConfig(
- defaults_path=DEFAULTS_PATH,
- environment="development",
- kafka_connect_host=HOST,
- )
- self.connect_wrapper = ConnectWrapper(host=config.kafka_connect_host)
+ config = KpopsConfig(defaults_path=DEFAULTS_PATH)
+ self.connect_wrapper = ConnectWrapper(config.kafka_connect)
@pytest.fixture()
def connector_config(self) -> KafkaConnectorConfig:
@@ -43,19 +39,6 @@ def connector_config(self) -> KafkaConnectorConfig:
}
)
- def test_should_through_exception_when_host_is_not_set(self):
- config = PipelineConfig(
- defaults_path=DEFAULTS_PATH,
- environment="development",
- kafka_connect_host=None,
- )
- with pytest.raises(RuntimeError) as run_time_error:
- ConnectWrapper(host=config.kafka_connect_host)
- assert (
- str(run_time_error.value)
- == "The Kafka Connect host is not set. Please set the host in the config."
- )
-
@patch("httpx.post")
def test_should_create_post_requests_for_given_connector_configuration(
self, mock_post: MagicMock
@@ -75,11 +58,11 @@ def test_should_create_post_requests_for_given_connector_configuration(
self.connect_wrapper.create_connector(KafkaConnectorConfig(**configs))
mock_post.assert_called_with(
- url=f"{HOST}/connectors",
+ url=f"{DEFAULT_HOST}/connectors",
headers=HEADERS,
json={
"name": "test-connector",
- "config": KafkaConnectorConfig(**configs).dict(),
+ "config": KafkaConnectorConfig(**configs).model_dump(),
},
)
@@ -107,7 +90,7 @@ def test_should_return_correct_response_when_connector_created(
}
httpx_mock.add_response(
method="POST",
- url=f"{HOST}/connectors",
+ url=f"{DEFAULT_HOST}/connectors",
headers=HEADERS,
json=actual_response,
status_code=201,
@@ -124,7 +107,7 @@ def test_should_raise_connector_exists_exception_when_connector_exists(
):
httpx_mock.add_response(
method="POST",
- url=f"{HOST}/connectors",
+ url=f"{DEFAULT_HOST}/connectors",
json={},
status_code=409,
)
@@ -145,7 +128,7 @@ def test_should_create_correct_get_connector_request(self, mock_get: MagicMock):
self.connect_wrapper.get_connector(connector_name)
mock_get.assert_called_with(
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers={"Accept": "application/json", "Content-Type": "application/json"},
)
@@ -176,7 +159,7 @@ def test_should_return_correct_response_when_getting_connector(
}
httpx_mock.add_response(
method="GET",
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers=HEADERS,
json=actual_response,
status_code=200,
@@ -193,7 +176,7 @@ def test_should_raise_connector_not_found_when_getting_connector(
httpx_mock.add_response(
method="GET",
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers=HEADERS,
json={},
status_code=404,
@@ -213,7 +196,7 @@ def test_should_raise_rebalance_in_progress_when_getting_connector(
httpx_mock.add_response(
method="GET",
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers=HEADERS,
json={},
status_code=409,
@@ -247,9 +230,9 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc
)
mock_put.assert_called_with(
- url=f"{HOST}/connectors/{connector_name}/config",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}/config",
headers={"Accept": "application/json", "Content-Type": "application/json"},
- json=KafkaConnectorConfig(**configs).dict(),
+ json=KafkaConnectorConfig(**configs).model_dump(),
)
@patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info")
@@ -281,7 +264,7 @@ def test_should_return_correct_response_when_update_connector(
}
httpx_mock.add_response(
method="PUT",
- url=f"{HOST}/connectors/{connector_name}/config",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}/config",
headers=HEADERS,
json=actual_response,
status_code=200,
@@ -323,7 +306,7 @@ def test_should_return_correct_response_when_update_connector_created(
}
httpx_mock.add_response(
method="PUT",
- url=f"{HOST}/connectors/{connector_name}/config",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}/config",
headers=HEADERS,
json=actual_response,
status_code=201,
@@ -345,7 +328,7 @@ def test_should_raise_connector_exists_exception_when_update_connector(
httpx_mock.add_response(
method="PUT",
- url=f"{HOST}/connectors/{connector_name}/config",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}/config",
headers=HEADERS,
json={},
status_code=409,
@@ -369,7 +352,7 @@ def test_should_create_correct_delete_connector_request(
self.connect_wrapper.delete_connector(connector_name)
mock_delete.assert_called_with(
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers=HEADERS,
)
@@ -399,7 +382,7 @@ def test_should_return_correct_response_when_deleting_connector(
}
httpx_mock.add_response(
method="DELETE",
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers=HEADERS,
json=actual_response,
status_code=204,
@@ -416,7 +399,7 @@ def test_should_raise_connector_not_found_when_deleting_connector(
httpx_mock.add_response(
method="DELETE",
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers=HEADERS,
json={},
status_code=404,
@@ -436,7 +419,7 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector(
httpx_mock.add_response(
method="DELETE",
- url=f"{HOST}/connectors/{connector_name}",
+ url=f"{DEFAULT_HOST}/connectors/{connector_name}",
headers=HEADERS,
json={},
status_code=409,
@@ -467,9 +450,9 @@ def test_should_create_correct_validate_connector_config_request(
self.connect_wrapper.validate_connector_config(connector_config)
mock_put.assert_called_with(
- url=f"{HOST}/connector-plugins/FileStreamSinkConnector/config/validate",
+ url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate",
headers={"Accept": "application/json", "Content-Type": "application/json"},
- json=connector_config.dict(),
+ json=connector_config.model_dump(),
)
@patch("httpx.put")
@@ -489,9 +472,11 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added(
)
mock_put.assert_called_with(
- url=f"{HOST}/connector-plugins/{connector_name}/config/validate",
+ url=f"{DEFAULT_HOST}/connector-plugins/{connector_name}/config/validate",
headers={"Accept": "application/json", "Content-Type": "application/json"},
- json=KafkaConnectorConfig(**{"name": connector_name, **configs}).dict(),
+ json=KafkaConnectorConfig(
+ **{"name": connector_name, **configs}
+ ).model_dump(),
)
def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock):
@@ -501,7 +486,7 @@ def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock):
actual_response = json.load(f)
httpx_mock.add_response(
method="PUT",
- url=f"{HOST}/connector-plugins/FileStreamSinkConnector/config/validate",
+ url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate",
headers=HEADERS,
json=actual_response,
status_code=200,
diff --git a/tests/component_handlers/kubernetes/__init__.py b/tests/component_handlers/kubernetes/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/component_handlers/kubernetes/model.py b/tests/component_handlers/kubernetes/model.py
new file mode 100644
index 000000000..334c1f937
--- /dev/null
+++ b/tests/component_handlers/kubernetes/model.py
@@ -0,0 +1,38 @@
+from textwrap import dedent
+
+import pytest
+
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
+
+
+class TestKubernetesManifest:
+ @pytest.mark.parametrize(
+ ("helm_template", "expected_manifest"),
+ [
+ pytest.param(
+ dedent(
+ """
+ ---
+ # Source: chart/templates/test2.yaml
+ apiVersion: v1
+ kind: ServiceAccount
+ metadata:
+ labels:
+ foo: bar
+ """
+ ),
+ [
+ KubernetesManifest(
+ {
+ "apiVersion": "v1",
+ "kind": "ServiceAccount",
+ "metadata": {"labels": {"foo": "bar"}},
+ }
+ )
+ ],
+ )
+ ],
+ )
+ def test_from_yaml(self, helm_template: str, expected_manifest: list[KubernetesManifest]):
+ manifests = KubernetesManifest.from_yaml(helm_template)
+ assert list(manifests) == expected_manifest
diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py
index faf54ba09..81e31d35d 100644
--- a/tests/component_handlers/schema_handler/test_schema_handler.py
+++ b/tests/component_handlers/schema_handler/test_schema_handler.py
@@ -1,15 +1,13 @@
import json
-from pathlib import Path
from unittest import mock
from unittest.mock import MagicMock
import pytest
-from pydantic import BaseModel
+from pydantic import AnyHttpUrl, BaseModel, TypeAdapter
from pytest_mock import MockerFixture
from schema_registry.client.schema import AvroSchema
from schema_registry.client.utils import SchemaVersion
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler
from kpops.component_handlers.schema_handler.schema_provider import SchemaProvider
from kpops.components.base_components.models import TopicName
@@ -18,6 +16,7 @@
TopicConfig,
ToSection,
)
+from kpops.config import KpopsConfig, SchemaRegistryConfig
from kpops.utils.colorify import greenify, magentaify
from tests.pipeline.test_components import TestSchemaProvider
@@ -69,35 +68,34 @@ def to_section(topic_config: TopicConfig) -> ToSection:
return ToSection(topics={TopicName("topic-X"): topic_config})
-def test_load_schema_handler():
- config_enable = PipelineConfig(
- defaults_path=Path("fake"),
- environment="development",
- schema_registry_url="http://localhost:8081",
+@pytest.fixture()
+def kpops_config() -> KpopsConfig:
+ return KpopsConfig(
+ kafka_brokers="broker:9092",
+ schema_registry=SchemaRegistryConfig(
+ enabled=True,
+ url=TypeAdapter(AnyHttpUrl).validate_python("http://mock:8081"),
+ ),
+ components_module=TEST_SCHEMA_PROVIDER_MODULE,
)
- config_disable = config_enable.copy()
- config_disable.schema_registry_url = None
- assert (
- SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_disable)
- is None
- )
+def test_load_schema_handler(kpops_config: KpopsConfig):
assert isinstance(
- SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_enable),
+ SchemaHandler.load_schema_handler(kpops_config),
SchemaHandler,
)
+ config_disable = kpops_config.model_copy()
+ config_disable.schema_registry = SchemaRegistryConfig(enabled=False)
-def test_should_lazy_load_schema_provider(find_class_mock: MagicMock):
- config_enable = PipelineConfig(
- defaults_path=Path("fake"),
- environment="development",
- schema_registry_url="http://localhost:8081",
- )
- schema_handler = SchemaHandler.load_schema_handler(
- TEST_SCHEMA_PROVIDER_MODULE, config_enable
- )
+ assert SchemaHandler.load_schema_handler(config_disable) is None
+
+
+def test_should_lazy_load_schema_provider(
+ find_class_mock: MagicMock, kpops_config: KpopsConfig
+):
+ schema_handler = SchemaHandler.load_schema_handler(kpops_config)
assert schema_handler is not None
@@ -111,10 +109,11 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock):
find_class_mock.assert_called_once_with(TEST_SCHEMA_PROVIDER_MODULE, SchemaProvider)
-def test_should_raise_value_error_if_schema_provider_class_not_found():
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE
- )
+def test_should_raise_value_error_if_schema_provider_class_not_found(
+ kpops_config: KpopsConfig,
+):
+ kpops_config.components_module = NON_EXISTING_PROVIDER_MODULE
+ schema_handler = SchemaHandler(kpops_config)
with pytest.raises(
ValueError,
@@ -141,14 +140,10 @@ def test_should_raise_value_error_if_schema_provider_class_not_found():
],
)
def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty(
- components_module: str,
+ kpops_config: KpopsConfig, components_module: str | None
):
- config_enable = PipelineConfig(
- defaults_path=Path("fake"),
- environment="development",
- schema_registry_url="http://localhost:8081",
- )
- schema_handler = SchemaHandler.load_schema_handler(components_module, config_enable)
+ kpops_config.components_module = components_module
+ schema_handler = SchemaHandler.load_schema_handler(kpops_config)
assert schema_handler is not None
with pytest.raises(
ValueError,
@@ -160,11 +155,12 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_
def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true(
- to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock
+ to_section: ToSection,
+ log_info_mock: MagicMock,
+ schema_registry_mock: MagicMock,
+ kpops_config: KpopsConfig,
):
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE
- )
+ schema_handler = SchemaHandler(kpops_config)
schema_registry_mock.get_versions.return_value = []
@@ -181,10 +177,9 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true(
to_section: ToSection,
log_info_mock: MagicMock,
schema_registry_mock: MagicMock,
+ kpops_config: KpopsConfig,
):
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE
- )
+ schema_handler = SchemaHandler(kpops_config)
schema_registry_mock.get_versions.return_value = [1, 2, 3]
schema_registry_mock.check_version.return_value = None
@@ -202,11 +197,10 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl
topic_config: TopicConfig,
to_section: ToSection,
schema_registry_mock: MagicMock,
+ kpops_config: KpopsConfig,
):
schema_provider = TestSchemaProvider()
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE
- )
+ schema_handler = SchemaHandler(kpops_config)
schema_class = "com.bakdata.kpops.test.SchemaHandlerTest"
schema_registry_mock.get_versions.return_value = [1, 2, 3]
@@ -241,11 +235,10 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve
log_info_mock: MagicMock,
log_debug_mock: MagicMock,
schema_registry_mock: MagicMock,
+ kpops_config: KpopsConfig,
):
schema_provider = TestSchemaProvider()
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE
- )
+ schema_handler = SchemaHandler(kpops_config)
schema_class = "com.bakdata.kpops.test.SchemaHandlerTest"
schema = schema_provider.provide_schema(schema_class, {})
registered_version = SchemaVersion(topic_config.value_schema, 1, schema, 1)
@@ -275,13 +268,12 @@ def test_should_submit_non_existing_schema_when_not_dry(
to_section: ToSection,
log_info_mock: MagicMock,
schema_registry_mock: MagicMock,
+ kpops_config: KpopsConfig,
):
schema_provider = TestSchemaProvider()
schema_class = "com.bakdata.kpops.test.SchemaHandlerTest"
schema = schema_provider.provide_schema(schema_class, {})
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE
- )
+ schema_handler = SchemaHandler(kpops_config)
schema_registry_mock.get_versions.return_value = []
@@ -302,10 +294,9 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run(
to_section: ToSection,
log_info_mock: MagicMock,
schema_registry_mock: MagicMock,
+ kpops_config: KpopsConfig,
):
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE
- )
+ schema_handler = SchemaHandler(kpops_config)
schema_registry_mock.get_versions.return_value = []
@@ -319,11 +310,11 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run(
def test_should_delete_schemas_when_not_in_dry_run(
- to_section: ToSection, schema_registry_mock: MagicMock
+ to_section: ToSection,
+ schema_registry_mock: MagicMock,
+ kpops_config: KpopsConfig,
):
- schema_handler = SchemaHandler(
- url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE
- )
+ schema_handler = SchemaHandler(kpops_config)
schema_registry_mock.get_versions.return_value = []
diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py
index e26fb0e5a..f46c4b87f 100644
--- a/tests/component_handlers/topic/test_proxy_wrapper.py
+++ b/tests/component_handlers/topic/test_proxy_wrapper.py
@@ -4,19 +4,20 @@
from unittest.mock import MagicMock, patch
import pytest
+from pydantic import AnyHttpUrl
from pytest_httpx import HTTPXMock
from pytest_mock import MockerFixture
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers.topic.exception import (
KafkaRestProxyError,
TopicNotFoundException,
)
from kpops.component_handlers.topic.model import TopicResponse, TopicSpec
from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper
+from kpops.config import KpopsConfig
HEADERS = {"Content-Type": "application/json"}
-HOST = "http://localhost:8082"
+DEFAULT_HOST = "http://localhost:8082"
DEFAULTS_PATH = Path(__file__).parent.parent / "resources"
@@ -31,10 +32,8 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock:
@pytest.fixture(autouse=True)
def _setup(self, httpx_mock: HTTPXMock):
- config = PipelineConfig(
- defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST
- )
- self.proxy_wrapper = ProxyWrapper(pipeline_config=config)
+ config = KpopsConfig(defaults_path=DEFAULTS_PATH)
+ self.proxy_wrapper = ProxyWrapper(config.kafka_rest)
with Path(
DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json",
@@ -43,22 +42,13 @@ def _setup(self, httpx_mock: HTTPXMock):
httpx_mock.add_response(
method="GET",
- url=f"{HOST}/v3/clusters",
+ url=f"{DEFAULT_HOST}/v3/clusters",
json=cluster_response,
status_code=200,
)
- assert self.proxy_wrapper.host == HOST
+ assert self.proxy_wrapper.url == AnyHttpUrl(DEFAULT_HOST)
assert self.proxy_wrapper.cluster_id == "cluster-1"
- def test_should_raise_exception_when_host_is_not_set(self):
- config = PipelineConfig(defaults_path=DEFAULTS_PATH, environment="development")
- config.kafka_rest_host = None
- with pytest.raises(
- ValueError,
- match="The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST.",
- ):
- ProxyWrapper(pipeline_config=config)
-
@patch("httpx.post")
def test_should_create_topic_with_all_topic_configuration(
self, mock_post: MagicMock
@@ -77,7 +67,7 @@ def test_should_create_topic_with_all_topic_configuration(
self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec))
mock_post.assert_called_with(
- url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics",
+ url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics",
headers=HEADERS,
json=topic_spec,
)
@@ -90,7 +80,7 @@ def test_should_create_topic_with_no_configuration(self, mock_post: MagicMock):
self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec))
mock_post.assert_called_with(
- url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics",
+ url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics",
headers=HEADERS,
json=topic_spec,
)
@@ -103,7 +93,7 @@ def test_should_call_get_topic(self, mock_get: MagicMock):
self.proxy_wrapper.get_topic(topic_name=topic_name)
mock_get.assert_called_with(
- url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}",
+ url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}",
headers=HEADERS,
)
@@ -121,7 +111,7 @@ def test_should_call_batch_alter_topic_config(self, mock_put: MagicMock):
)
mock_put.assert_called_with(
- url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter",
+ url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter",
headers=HEADERS,
json={
"data": [
@@ -139,7 +129,7 @@ def test_should_call_delete_topic(self, mock_delete: MagicMock):
self.proxy_wrapper.delete_topic(topic_name=topic_name)
mock_delete.assert_called_with(
- url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}",
+ url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}",
headers=HEADERS,
)
@@ -149,7 +139,7 @@ def test_should_call_get_broker_config(self, mock_get: MagicMock):
self.proxy_wrapper.get_broker_config()
mock_get.assert_called_with(
- url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/brokers/-/configs",
+ url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/brokers/-/configs",
headers=HEADERS,
)
@@ -168,7 +158,7 @@ def test_should_log_topic_creation(
httpx_mock.add_response(
method="POST",
- url=f"{HOST}/v3/clusters/cluster-1/topics",
+ url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics",
json=topic_spec,
headers=HEADERS,
status_code=201,
@@ -183,7 +173,7 @@ def test_should_log_topic_deletion(
httpx_mock.add_response(
method="DELETE",
- url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}",
+ url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}",
headers=HEADERS,
status_code=204,
)
@@ -212,7 +202,7 @@ def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock
httpx_mock.add_response(
method="GET",
- url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}",
+ url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}",
headers=HEADERS,
status_code=200,
json=res,
@@ -230,7 +220,7 @@ def test_should_rais_topic_not_found_exception_get_topic(
httpx_mock.add_response(
method="GET",
- url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}",
+ url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}",
headers=HEADERS,
status_code=404,
json={
@@ -250,7 +240,7 @@ def test_should_log_reset_default_topic_config_when_deleted(
httpx_mock.add_response(
method="POST",
- url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter",
+ url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter",
headers=HEADERS,
json={"data": [{"name": config_name, "operation": "DELETE"}]},
status_code=204,
diff --git a/tests/components/resources/defaults.yaml b/tests/components/resources/defaults.yaml
index 40ed52a21..a854a6d95 100644
--- a/tests/components/resources/defaults.yaml
+++ b/tests/components/resources/defaults.yaml
@@ -5,5 +5,7 @@ child:
name: fake-child-name
nice:
fake-value: must-be-overwritten
+ nested:
+ foo: foo
env-var-test:
name: $pipeline_name
diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py
index d066d431b..360d40b6c 100644
--- a/tests/components/test_base_defaults_component.py
+++ b/tests/components/test_base_defaults_component.py
@@ -1,14 +1,17 @@
+from __future__ import annotations
+
from pathlib import Path
from unittest.mock import MagicMock
+import pydantic
import pytest
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers import ComponentHandlers
from kpops.components.base_components.base_defaults_component import (
BaseDefaultsComponent,
load_defaults,
)
+from kpops.config import KpopsConfig
from kpops.utils.environment import ENV
DEFAULTS_PATH = Path(__file__).parent / "resources"
@@ -21,10 +24,15 @@ class Parent(BaseDefaultsComponent):
hard_coded: str = "hard_coded_value"
+class Nested(pydantic.BaseModel):
+ model_config = pydantic.ConfigDict(extra="allow")
+
+
class Child(Parent):
__test__ = False
nice: dict | None = None
another_hard_coded: str = "another_hard_coded_value"
+ nested: Nested | None = None
class GrandChild(Child):
@@ -38,11 +46,8 @@ class EnvVarTest(BaseDefaultsComponent):
@pytest.fixture()
-def config() -> PipelineConfig:
- return PipelineConfig(
- defaults_path=DEFAULTS_PATH,
- environment="development",
- )
+def config() -> KpopsConfig:
+ return KpopsConfig(defaults_path=DEFAULTS_PATH)
@pytest.fixture()
@@ -72,6 +77,7 @@ class TestBaseDefaultsComponent:
"name": "fake-child-name",
"nice": {"fake-value": "must-be-overwritten"},
"value": 1.0,
+ "nested": {"foo": "foo"},
},
),
],
@@ -100,6 +106,7 @@ def test_load_defaults(
"name": "fake-child-name",
"nice": {"fake-value": "fake"},
"value": 2.0,
+ "nested": {"foo": "foo"},
},
),
],
@@ -116,9 +123,8 @@ def test_load_defaults_with_environment(
== defaults
)
- def test_inherit_defaults(
- self, config: PipelineConfig, handlers: ComponentHandlers
- ):
+ def test_inherit_defaults(self, config: KpopsConfig, handlers: ComponentHandlers):
+ ENV["environment"] = "development"
component = Child(config=config, handlers=handlers)
assert (
@@ -137,15 +143,15 @@ def test_inherit_defaults(
component.hard_coded == "hard_coded_value"
), "Defaults in code should be kept for parents"
- def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers):
+ def test_inherit(self, config: KpopsConfig, handlers: ComponentHandlers):
component = Child(
config=config,
handlers=handlers,
- name="name-defined-in-pipeline_generator",
+ name="name-defined-in-pipeline_parser",
)
assert (
- component.name == "name-defined-in-pipeline_generator"
+ component.name == "name-defined-in-pipeline_parser"
), "Kwargs should should overwrite all other values"
assert component.nice == {
"fake-value": "fake"
@@ -161,7 +167,7 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers):
), "Defaults in code should be kept for parents"
def test_multiple_generations(
- self, config: PipelineConfig, handlers: ComponentHandlers
+ self, config: KpopsConfig, handlers: ComponentHandlers
):
component = GrandChild(config=config, handlers=handlers)
@@ -183,7 +189,7 @@ def test_multiple_generations(
assert component.grand_child == "grand-child-value"
def test_env_var_substitution(
- self, config: PipelineConfig, handlers: ComponentHandlers
+ self, config: KpopsConfig, handlers: ComponentHandlers
):
ENV["pipeline_name"] = str(DEFAULTS_PATH)
component = EnvVarTest(config=config, handlers=handlers)
@@ -191,3 +197,10 @@ def test_env_var_substitution(
assert component.name == str(
DEFAULTS_PATH
), "Environment variables should be substituted"
+
+ def test_merge_defaults(self, config: KpopsConfig, handlers: ComponentHandlers):
+ component = GrandChild(
+ config=config, handlers=handlers, nested=Nested(**{"bar": False})
+ )
+ assert isinstance(component.nested, Nested)
+ assert component.nested == Nested(**{"foo": "foo", "bar": False})
diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py
new file mode 100644
index 000000000..f01f30d10
--- /dev/null
+++ b/tests/components/test_helm_app.py
@@ -0,0 +1,218 @@
+from pathlib import Path
+from unittest.mock import MagicMock
+
+import pytest
+from pytest_mock import MockerFixture
+from typing_extensions import override
+
+from kpops.component_handlers import ComponentHandlers
+from kpops.component_handlers.helm_wrapper.model import (
+ HelmDiffConfig,
+ HelmRepoConfig,
+ HelmUpgradeInstallFlags,
+ RepoAuthFlags,
+)
+from kpops.components.base_components.helm_app import HelmApp, HelmAppValues
+from kpops.config import KpopsConfig
+from kpops.utils.colorify import magentaify
+
+DEFAULTS_PATH = Path(__file__).parent / "resources"
+
+
+class TestHelmApp:
+ @pytest.fixture()
+ def config(self) -> KpopsConfig:
+ return KpopsConfig(
+ defaults_path=DEFAULTS_PATH,
+ helm_diff_config=HelmDiffConfig(),
+ )
+
+ @pytest.fixture()
+ def handlers(self) -> ComponentHandlers:
+ return ComponentHandlers(
+ schema_handler=MagicMock(),
+ connector_handler=MagicMock(),
+ topic_handler=MagicMock(),
+ )
+
+ @pytest.fixture()
+ def helm_mock(self, mocker: MockerFixture) -> MagicMock:
+ return mocker.patch(
+ "kpops.components.base_components.helm_app.Helm"
+ ).return_value
+
+ @pytest.fixture()
+ def log_info_mock(self, mocker: MockerFixture) -> MagicMock:
+ return mocker.patch("kpops.components.base_components.helm_app.log.info")
+
+ @pytest.fixture()
+ def app_values(self) -> HelmAppValues:
+ return HelmAppValues(**{"foo": "test-value"})
+
+ @pytest.fixture()
+ def repo_config(self) -> HelmRepoConfig:
+ return HelmRepoConfig(repository_name="test", url="https://bakdata.com")
+
+ @pytest.fixture()
+ def helm_app(
+ self,
+ config: KpopsConfig,
+ handlers: ComponentHandlers,
+ app_values: HelmAppValues,
+ repo_config: HelmRepoConfig,
+ ) -> HelmApp:
+ return HelmApp(
+ name="test-helm-app",
+ config=config,
+ handlers=handlers,
+ app=app_values,
+ namespace="test-namespace",
+ repo_config=repo_config,
+ )
+
+ def test_should_lazy_load_helm_wrapper_and_not_repo_add(
+ self,
+ helm_app: HelmApp,
+ mocker: MockerFixture,
+ helm_mock: MagicMock,
+ ):
+ helm_mock.add_repo.assert_not_called()
+
+ mocker.patch.object(
+ HelmApp,
+ "helm_chart",
+ return_value="test/test-chart",
+ new_callable=mocker.PropertyMock,
+ )
+
+ helm_app.deploy(False)
+
+ helm_mock.upgrade_install.assert_called_once_with(
+ "${pipeline.name}-test-helm-app",
+ "test/test-chart",
+ False,
+ "test-namespace",
+ {
+ "nameOverride": "${pipeline.name}-test-helm-app",
+ "foo": "test-value",
+ },
+ HelmUpgradeInstallFlags(),
+ )
+
+ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented(
+ self,
+ config: KpopsConfig,
+ handlers: ComponentHandlers,
+ helm_mock: MagicMock,
+ mocker: MockerFixture,
+ app_values: HelmAppValues,
+ ):
+ repo_config = HelmRepoConfig(
+ repository_name="test-repo", url="https://test.com/charts/"
+ )
+ helm_app = HelmApp(
+ name="test-helm-app",
+ config=config,
+ handlers=handlers,
+ app=app_values,
+ namespace="test-namespace",
+ repo_config=repo_config,
+ version="3.4.5",
+ )
+
+ mocker.patch.object(
+ HelmApp,
+ "helm_chart",
+ return_value="test/test-chart",
+ new_callable=mocker.PropertyMock,
+ )
+
+ helm_app.deploy(dry_run=False)
+
+ assert helm_mock.mock_calls == [
+ mocker.call.add_repo(
+ "test-repo",
+ "https://test.com/charts/",
+ RepoAuthFlags(),
+ ),
+ mocker.call.upgrade_install(
+ "${pipeline.name}-test-helm-app",
+ "test/test-chart",
+ False,
+ "test-namespace",
+ {
+ "nameOverride": "${pipeline.name}-test-helm-app",
+ "foo": "test-value",
+ },
+ HelmUpgradeInstallFlags(version="3.4.5"),
+ ),
+ ]
+
+ def test_should_deploy_app_with_local_helm_chart(
+ self,
+ config: KpopsConfig,
+ handlers: ComponentHandlers,
+ helm_mock: MagicMock,
+ app_values: HelmAppValues,
+ ):
+ class AppWithLocalChart(HelmApp):
+ repo_config: None = None
+
+ @property
+ @override
+ def helm_chart(self) -> str:
+ return "path/to/helm/charts/"
+
+ app_with_local_chart = AppWithLocalChart(
+ name="test-app-with-local-chart",
+ config=config,
+ handlers=handlers,
+ app=app_values,
+ namespace="test-namespace",
+ )
+
+ app_with_local_chart.deploy(dry_run=False)
+
+ helm_mock.add_repo.assert_not_called()
+
+ helm_mock.upgrade_install.assert_called_once_with(
+ "${pipeline.name}-test-app-with-local-chart",
+ "path/to/helm/charts/",
+ False,
+ "test-namespace",
+ {
+ "nameOverride": "${pipeline.name}-test-app-with-local-chart",
+ "foo": "test-value",
+ },
+ HelmUpgradeInstallFlags(),
+ )
+
+ def test_should_raise_not_implemented_error_when_helm_chart_is_not_set(
+ self,
+ helm_app: HelmApp,
+ helm_mock: MagicMock,
+ ):
+ with pytest.raises(NotImplementedError) as error:
+ helm_app.deploy(True)
+ helm_mock.add_repo.assert_called()
+ assert (
+ str(error.value)
+ == "Please implement the helm_chart property of the kpops.components.base_components.helm_app module."
+ )
+
+ def test_should_call_helm_uninstall_when_destroying_helm_app(
+ self,
+ helm_app: HelmApp,
+ helm_mock: MagicMock,
+ log_info_mock: MagicMock,
+ ):
+ stdout = 'HelmApp - release "test-helm-app" uninstalled'
+ helm_mock.uninstall.return_value = stdout
+
+ helm_app.destroy(True)
+
+ helm_mock.uninstall.assert_called_once_with(
+ "test-namespace", "${pipeline.name}-test-helm-app", True
+ )
+
+ log_info_mock.assert_called_once_with(magentaify(stdout))
diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py
index 98771d4af..16d178f02 100644
--- a/tests/components/test_kafka_connector.py
+++ b/tests/components/test_kafka_connector.py
@@ -5,30 +5,33 @@
import pytest
from pytest_mock import MockerFixture
-from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig
from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig
-from kpops.components.base_components.kafka_connector import KafkaConnector
+from kpops.components.base_components.kafka_connector import (
+ KafkaConnector,
+)
+from kpops.config import KpopsConfig, TopicNameConfig
DEFAULTS_PATH = Path(__file__).parent / "resources"
CONNECTOR_NAME = "test-connector-with-long-name-0123456789abcdefghijklmnop"
-CONNECTOR_FULL_NAME = "${pipeline_name}-" + CONNECTOR_NAME
-CONNECTOR_CLEAN_FULL_NAME = "${pipeline_name}-test-connector-with-long-name-clean"
+CONNECTOR_FULL_NAME = "${pipeline.name}-" + CONNECTOR_NAME
+CONNECTOR_CLEAN_FULL_NAME = CONNECTOR_FULL_NAME + "-clean"
+CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline.name}-test-connector-with-long-612f3-clean"
CONNECTOR_CLASS = "com.bakdata.connect.TestConnector"
+RESETTER_NAMESPACE = "test-namespace"
class TestKafkaConnector:
@pytest.fixture()
- def config(self) -> PipelineConfig:
- return PipelineConfig(
+ def config(self) -> KpopsConfig:
+ return KpopsConfig(
defaults_path=DEFAULTS_PATH,
- environment="development",
topic_name_config=TopicNameConfig(
default_error_topic_name="${component_type}-error-topic",
default_output_topic_name="${component_type}-output-topic",
),
- brokers="broker:9092",
+ kafka_brokers="broker:9092",
helm_diff_config=HelmDiffConfig(),
)
@@ -43,13 +46,13 @@ def handlers(self) -> ComponentHandlers:
@pytest.fixture(autouse=True)
def helm_mock(self, mocker: MockerFixture) -> MagicMock:
return mocker.patch(
- "kpops.components.base_components.kafka_connector.Helm"
+ "kpops.components.base_components.helm_app.Helm"
).return_value
@pytest.fixture()
- def dry_run_handler(self, mocker: MockerFixture) -> MagicMock:
+ def dry_run_handler_mock(self, mocker: MockerFixture) -> MagicMock:
return mocker.patch(
- "kpops.components.base_components.kafka_connector.DryRunHandler"
+ "kpops.components.base_components.helm_app.DryRunHandler"
).return_value
@pytest.fixture()
@@ -61,27 +64,35 @@ def connector_config(self) -> KafkaConnectorConfig:
}
)
- def test_connector_config_name_override(
+ @pytest.fixture()
+ def connector(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
connector_config: KafkaConnectorConfig,
- ):
- connector = KafkaConnector(
+ ) -> KafkaConnector:
+ return KafkaConnector(
name=CONNECTOR_NAME,
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
)
+
+ def test_connector_config_name_override(
+ self,
+ connector: KafkaConnector,
+ config: KpopsConfig,
+ handlers: ComponentHandlers,
+ ):
assert connector.app.name == CONNECTOR_FULL_NAME
connector = KafkaConnector(
name=CONNECTOR_NAME,
config=config,
handlers=handlers,
- app={"connector.class": CONNECTOR_CLASS}, # type: ignore[reportGeneralTypeIssues]
- namespace="test-namespace",
+ app={"connector.class": CONNECTOR_CLASS}, # type: ignore[reportGeneralTypeIssues], gets enriched
+ resetter_namespace=RESETTER_NAMESPACE,
)
assert connector.app.name == CONNECTOR_FULL_NAME
@@ -95,8 +106,7 @@ def test_connector_config_name_override(
name=CONNECTOR_NAME,
config=config,
handlers=handlers,
- app={"connector.class": CONNECTOR_CLASS, "name": "different-name"}, # type: ignore[reportGeneralTypeIssues]
- namespace="test-namespace",
+ app={"connector.class": CONNECTOR_CLASS, "name": "different-name"}, # type: ignore[reportGeneralTypeIssues], gets enriched
)
with pytest.raises(
@@ -109,6 +119,5 @@ def test_connector_config_name_override(
name=CONNECTOR_NAME,
config=config,
handlers=handlers,
- app={"connector.class": CONNECTOR_CLASS, "name": ""}, # type: ignore[reportGeneralTypeIssues]
- namespace="test-namespace",
+ app={"connector.class": CONNECTOR_CLASS, "name": ""}, # type: ignore[reportGeneralTypeIssues], gets enriched
)
diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py
index e8ed7aa22..ef4f7caa3 100644
--- a/tests/components/test_kafka_sink_connector.py
+++ b/tests/components/test_kafka_sink_connector.py
@@ -1,9 +1,8 @@
-from unittest.mock import MagicMock, call
+from unittest.mock import ANY, MagicMock, call
import pytest
from pytest_mock import MockerFixture
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.helm_wrapper.model import (
HelmUpgradeInstallFlags,
@@ -14,6 +13,7 @@
KafkaConnectorType,
)
from kpops.components import KafkaSinkConnector
+from kpops.components.base_components.kafka_connector import KafkaConnectorResetter
from kpops.components.base_components.models.from_section import (
FromSection,
FromTopic,
@@ -25,14 +25,19 @@
TopicConfig,
ToSection,
)
+from kpops.config import KpopsConfig
from kpops.utils.colorify import magentaify
from tests.components.test_kafka_connector import (
CONNECTOR_CLEAN_FULL_NAME,
+ CONNECTOR_CLEAN_RELEASE_NAME,
CONNECTOR_FULL_NAME,
CONNECTOR_NAME,
+ RESETTER_NAMESPACE,
TestKafkaConnector,
)
+CONNECTOR_TYPE = KafkaConnectorType.SINK.value
+
class TestKafkaSinkConnector(TestKafkaConnector):
@pytest.fixture()
@@ -42,7 +47,7 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock:
@pytest.fixture()
def connector(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
connector_config: KafkaConnectorConfig,
) -> KafkaSinkConnector:
@@ -51,7 +56,7 @@ def connector(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
to=ToSection(
topics={
TopicName("${output_topic_name}"): TopicConfig(
@@ -61,9 +66,15 @@ def connector(
),
)
+ def test_resetter_release_name(self, connector: KafkaSinkConnector):
+ assert connector.app.name == CONNECTOR_FULL_NAME
+ resetter = connector._resetter
+ assert isinstance(resetter, KafkaConnectorResetter)
+ assert connector._resetter.helm_release_name == CONNECTOR_CLEAN_RELEASE_NAME
+
def test_connector_config_parsing(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
connector_config: KafkaConnectorConfig,
):
@@ -73,9 +84,9 @@ def test_connector_config_parsing(
config=config,
handlers=handlers,
app=KafkaConnectorConfig(
- **{**connector_config.dict(), "topics": topic_name}
+ **{**connector_config.model_dump(), "topics": topic_name}
),
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
)
assert getattr(connector.app, "topics") == topic_name
@@ -85,15 +96,15 @@ def test_connector_config_parsing(
config=config,
handlers=handlers,
app=KafkaConnectorConfig(
- **{**connector_config.dict(), "topics.regex": topic_pattern}
+ **{**connector_config.model_dump(), "topics.regex": topic_pattern}
),
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
)
assert getattr(connector.app, "topics.regex") == topic_pattern
def test_from_section_parsing_input_topic(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
connector_config: KafkaConnectorConfig,
):
@@ -104,7 +115,7 @@ def test_from_section_parsing_input_topic(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type
topics={
topic1: FromTopic(type=InputTopicTypes.INPUT),
@@ -120,7 +131,7 @@ def test_from_section_parsing_input_topic(
def test_from_section_parsing_input_pattern(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
connector_config: KafkaConnectorConfig,
):
@@ -130,7 +141,7 @@ def test_from_section_parsing_input_pattern(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type
topics={topic_pattern: FromTopic(type=InputTopicTypes.PATTERN)}
),
@@ -176,18 +187,18 @@ def test_destroy(
def test_reset_when_dry_run_is_true(
self,
connector: KafkaSinkConnector,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
):
dry_run = True
connector.reset(dry_run=dry_run)
- dry_run_handler.print_helm_diff.assert_called_once()
+ dry_run_handler_mock.print_helm_diff.assert_called_once()
def test_reset_when_dry_run_is_false(
self,
connector: KafkaSinkConnector,
+ dry_run_handler_mock: MagicMock,
helm_mock: MagicMock,
- dry_run_handler: MagicMock,
mocker: MockerFixture,
):
mock_delete_topics = mocker.patch.object(
@@ -196,71 +207,78 @@ def test_reset_when_dry_run_is_false(
mock_clean_connector = mocker.patch.object(
connector.handlers.connector_handler, "clean_connector"
)
+ mock_resetter_reset = mocker.spy(connector._resetter, "reset")
+
mock = mocker.MagicMock()
mock.attach_mock(mock_clean_connector, "mock_clean_connector")
mock.attach_mock(helm_mock, "helm")
dry_run = False
connector.reset(dry_run=dry_run)
-
- assert mock.mock_calls == [
- mocker.call.helm.add_repo(
- "bakdata-kafka-connect-resetter",
- "https://bakdata.github.io/kafka-connect-resetter/",
- RepoAuthFlags(),
- ),
- mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=dry_run,
- ),
- mocker.call.helm.upgrade_install(
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- namespace="test-namespace",
- chart="bakdata-kafka-connect-resetter/kafka-connect-resetter",
- dry_run=dry_run,
- flags=HelmUpgradeInstallFlags(
- version="1.0.4",
- wait=True,
- wait_for_jobs=True,
+ mock_resetter_reset.assert_called_once_with(dry_run)
+
+ mock.assert_has_calls(
+ [
+ mocker.call.helm.add_repo(
+ "bakdata-kafka-connect-resetter",
+ "https://bakdata.github.io/kafka-connect-resetter/",
+ RepoAuthFlags(),
),
- values={
- "connectorType": "sink",
- "config": {
- "brokers": "broker:9092",
- "connector": CONNECTOR_FULL_NAME,
- "deleteConsumerGroup": False,
+ mocker.call.helm.uninstall(
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ mocker.call.helm.upgrade_install(
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ "bakdata-kafka-connect-resetter/kafka-connect-resetter",
+ dry_run,
+ RESETTER_NAMESPACE,
+ {
+ "nameOverride": CONNECTOR_CLEAN_FULL_NAME,
+ "connectorType": CONNECTOR_TYPE,
+ "config": {
+ "brokers": "broker:9092",
+ "connector": CONNECTOR_FULL_NAME,
+ "deleteConsumerGroup": False,
+ },
},
- "nameOverride": CONNECTOR_FULL_NAME,
- },
- ),
- mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=dry_run,
- ),
- ]
+ HelmUpgradeInstallFlags(
+ version="1.0.4",
+ wait=True,
+ wait_for_jobs=True,
+ ),
+ ),
+ mocker.call.helm.uninstall(
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ ]
+ )
- dry_run_handler.print_helm_diff.assert_not_called()
+ dry_run_handler_mock.print_helm_diff.assert_not_called()
mock_delete_topics.assert_not_called()
def test_clean_when_dry_run_is_true(
self,
connector: KafkaSinkConnector,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
):
dry_run = True
connector.clean(dry_run=dry_run)
- dry_run_handler.print_helm_diff.assert_called_once()
+ dry_run_handler_mock.print_helm_diff.assert_called_once()
def test_clean_when_dry_run_is_false(
self,
connector: KafkaSinkConnector,
- config: PipelineConfig,
- handlers: ComponentHandlers,
helm_mock: MagicMock,
log_info_mock: MagicMock,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
mocker: MockerFixture,
):
mock_delete_topics = mocker.patch.object(
@@ -300,43 +318,47 @@ def test_clean_when_dry_run_is_false(
RepoAuthFlags(),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=dry_run,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
mocker.call.helm.upgrade_install(
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- namespace="test-namespace",
- chart="bakdata-kafka-connect-resetter/kafka-connect-resetter",
- dry_run=dry_run,
- flags=HelmUpgradeInstallFlags(
- version="1.0.4",
- wait=True,
- wait_for_jobs=True,
- ),
- values={
- "connectorType": "sink",
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ "bakdata-kafka-connect-resetter/kafka-connect-resetter",
+ dry_run,
+ RESETTER_NAMESPACE,
+ {
+ "nameOverride": CONNECTOR_CLEAN_FULL_NAME,
+ "connectorType": CONNECTOR_TYPE,
"config": {
"brokers": "broker:9092",
"connector": CONNECTOR_FULL_NAME,
"deleteConsumerGroup": True,
},
- "nameOverride": CONNECTOR_FULL_NAME,
},
+ HelmUpgradeInstallFlags(
+ version="1.0.4",
+ wait=True,
+ wait_for_jobs=True,
+ ),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=dry_run,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
]
- dry_run_handler.print_helm_diff.assert_not_called()
+ dry_run_handler_mock.print_helm_diff.assert_not_called()
def test_clean_without_to_when_dry_run_is_true(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
connector_config: KafkaConnectorConfig,
):
connector = KafkaSinkConnector(
@@ -344,19 +366,19 @@ def test_clean_without_to_when_dry_run_is_true(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
)
dry_run = True
connector.clean(dry_run)
- dry_run_handler.print_helm_diff.assert_called_once()
+ dry_run_handler_mock.print_helm_diff.assert_called_once()
def test_clean_without_to_when_dry_run_is_false(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
helm_mock: MagicMock,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
mocker: MockerFixture,
connector_config: KafkaConnectorConfig,
):
@@ -365,7 +387,7 @@ def test_clean_without_to_when_dry_run_is_false(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
)
mock_delete_topics = mocker.patch.object(
@@ -394,36 +416,40 @@ def test_clean_without_to_when_dry_run_is_false(
),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=dry_run,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
mocker.call.helm.upgrade_install(
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- namespace="test-namespace",
- chart="bakdata-kafka-connect-resetter/kafka-connect-resetter",
- dry_run=dry_run,
- flags=HelmUpgradeInstallFlags(
- version="1.0.4",
- wait=True,
- wait_for_jobs=True,
- ),
- values={
- "connectorType": "sink",
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ "bakdata-kafka-connect-resetter/kafka-connect-resetter",
+ dry_run,
+ RESETTER_NAMESPACE,
+ {
+ "nameOverride": CONNECTOR_CLEAN_FULL_NAME,
+ "connectorType": CONNECTOR_TYPE,
"config": {
"brokers": "broker:9092",
"connector": CONNECTOR_FULL_NAME,
"deleteConsumerGroup": True,
},
- "nameOverride": CONNECTOR_FULL_NAME,
},
+ HelmUpgradeInstallFlags(
+ version="1.0.4",
+ wait=True,
+ wait_for_jobs=True,
+ ),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=dry_run,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
]
- dry_run_handler.print_helm_diff.assert_not_called()
+ dry_run_handler_mock.print_helm_diff.assert_not_called()
mock_delete_topics.assert_not_called()
diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py
index 169111ed3..31511e81f 100644
--- a/tests/components/test_kafka_source_connector.py
+++ b/tests/components/test_kafka_source_connector.py
@@ -1,16 +1,21 @@
-from unittest.mock import MagicMock
+from unittest.mock import ANY, MagicMock
import pytest
from pytest_mock import MockerFixture
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.helm_wrapper.model import (
HelmUpgradeInstallFlags,
RepoAuthFlags,
)
-from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig
-from kpops.components.base_components.kafka_connector import KafkaSourceConnector
+from kpops.component_handlers.kafka_connect.model import (
+ KafkaConnectorConfig,
+ KafkaConnectorType,
+)
+from kpops.components.base_components.kafka_connector import (
+ KafkaConnectorResetter,
+ KafkaSourceConnector,
+)
from kpops.components.base_components.models.from_section import (
FromSection,
FromTopic,
@@ -22,20 +27,27 @@
TopicConfig,
ToSection,
)
+from kpops.config import KpopsConfig
from kpops.utils.environment import ENV
from tests.components.test_kafka_connector import (
CONNECTOR_CLEAN_FULL_NAME,
+ CONNECTOR_CLEAN_RELEASE_NAME,
CONNECTOR_FULL_NAME,
CONNECTOR_NAME,
+ RESETTER_NAMESPACE,
TestKafkaConnector,
)
+CONNECTOR_TYPE = KafkaConnectorType.SOURCE.value
+CLEAN_SUFFIX = "-clean"
+OFFSETS_TOPIC = "kafka-connect-offsets"
+
class TestKafkaSourceConnector(TestKafkaConnector):
@pytest.fixture()
def connector(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
connector_config: KafkaConnectorConfig,
) -> KafkaSourceConnector:
@@ -44,7 +56,7 @@ def connector(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
to=ToSection(
topics={
TopicName("${output_topic_name}"): TopicConfig(
@@ -52,12 +64,18 @@ def connector(
),
}
),
- offset_topic="kafka-connect-offsets",
+ offset_topic=OFFSETS_TOPIC,
)
+ def test_resetter_release_name(self, connector: KafkaSourceConnector):
+ assert connector.app.name == CONNECTOR_FULL_NAME
+ resetter = connector._resetter
+ assert isinstance(resetter, KafkaConnectorResetter)
+ assert connector._resetter.helm_release_name == CONNECTOR_CLEAN_RELEASE_NAME
+
def test_from_section_raises_exception(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
connector_config: KafkaConnectorConfig,
):
@@ -67,7 +85,7 @@ def test_from_section_raises_exception(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
+ resetter_namespace=RESETTER_NAMESPACE,
from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type
topics={
TopicName("connector-topic"): FromTopic(
@@ -104,7 +122,7 @@ def test_destroy(
connector: KafkaSourceConnector,
mocker: MockerFixture,
):
- ENV["KPOPS_KAFKA_CONNECT_RESETTER_OFFSET_TOPIC"] = "kafka-connect-offsets"
+ ENV["KPOPS_KAFKA_CONNECT_RESETTER_OFFSET_TOPIC"] = OFFSETS_TOPIC
assert connector.handlers.connector_handler
mock_destroy_connector = mocker.patch.object(
@@ -120,18 +138,18 @@ def test_destroy(
def test_reset_when_dry_run_is_true(
self,
connector: KafkaSourceConnector,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
):
assert connector.handlers.connector_handler
connector.reset(dry_run=True)
- dry_run_handler.print_helm_diff.assert_called_once()
+ dry_run_handler_mock.print_helm_diff.assert_called_once()
def test_reset_when_dry_run_is_false(
self,
connector: KafkaSourceConnector,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
helm_mock: MagicMock,
mocker: MockerFixture,
):
@@ -156,55 +174,59 @@ def test_reset_when_dry_run_is_false(
RepoAuthFlags(),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=False,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ False,
),
+ ANY, # __bool__
+ ANY, # __str__
mocker.call.helm.upgrade_install(
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- namespace="test-namespace",
- chart="bakdata-kafka-connect-resetter/kafka-connect-resetter",
- dry_run=False,
- flags=HelmUpgradeInstallFlags(
- version="1.0.4",
- wait=True,
- wait_for_jobs=True,
- ),
- values={
- "connectorType": "source",
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ "bakdata-kafka-connect-resetter/kafka-connect-resetter",
+ False,
+ RESETTER_NAMESPACE,
+ {
+ "connectorType": CONNECTOR_TYPE,
"config": {
"brokers": "broker:9092",
"connector": CONNECTOR_FULL_NAME,
- "offsetTopic": "kafka-connect-offsets",
+ "offsetTopic": OFFSETS_TOPIC,
},
- "nameOverride": CONNECTOR_FULL_NAME,
+ "nameOverride": CONNECTOR_CLEAN_FULL_NAME,
},
+ HelmUpgradeInstallFlags(
+ version="1.0.4",
+ wait=True,
+ wait_for_jobs=True,
+ ),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=False,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ False,
),
+ ANY, # __bool__
+ ANY, # __str__
]
mock_delete_topics.assert_not_called()
- dry_run_handler.print_helm_diff.assert_not_called()
+ dry_run_handler_mock.print_helm_diff.assert_not_called()
def test_clean_when_dry_run_is_true(
self,
connector: KafkaSourceConnector,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
):
assert connector.handlers.connector_handler
connector.clean(dry_run=True)
- dry_run_handler.print_helm_diff.assert_called_once()
+ dry_run_handler_mock.print_helm_diff.assert_called_once()
def test_clean_when_dry_run_is_false(
self,
connector: KafkaSourceConnector,
helm_mock: MagicMock,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
mocker: MockerFixture,
):
assert connector.handlers.connector_handler
@@ -221,55 +243,60 @@ def test_clean_when_dry_run_is_false(
mock.attach_mock(mock_clean_connector, "mock_clean_connector")
mock.attach_mock(helm_mock, "helm")
- connector.clean(dry_run=False)
+ dry_run = False
+ connector.clean(dry_run)
assert mock.mock_calls == [
- mocker.call.mock_delete_topics(connector.to, dry_run=False),
+ mocker.call.mock_delete_topics(connector.to, dry_run=dry_run),
mocker.call.helm.add_repo(
"bakdata-kafka-connect-resetter",
"https://bakdata.github.io/kafka-connect-resetter/",
RepoAuthFlags(),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=False,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
mocker.call.helm.upgrade_install(
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- namespace="test-namespace",
- chart="bakdata-kafka-connect-resetter/kafka-connect-resetter",
- dry_run=False,
- flags=HelmUpgradeInstallFlags(
- version="1.0.4",
- wait=True,
- wait_for_jobs=True,
- ),
- values={
- "connectorType": "source",
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ "bakdata-kafka-connect-resetter/kafka-connect-resetter",
+ dry_run,
+ RESETTER_NAMESPACE,
+ {
+ "nameOverride": CONNECTOR_CLEAN_FULL_NAME,
+ "connectorType": CONNECTOR_TYPE,
"config": {
"brokers": "broker:9092",
"connector": CONNECTOR_FULL_NAME,
- "offsetTopic": "kafka-connect-offsets",
+ "offsetTopic": OFFSETS_TOPIC,
},
- "nameOverride": CONNECTOR_FULL_NAME,
},
+ HelmUpgradeInstallFlags(
+ version="1.0.4",
+ wait=True,
+ wait_for_jobs=True,
+ ),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=False,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
]
- dry_run_handler.print_helm_diff.assert_not_called()
+ dry_run_handler_mock.print_helm_diff.assert_not_called()
def test_clean_without_to_when_dry_run_is_false(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
helm_mock: MagicMock,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
mocker: MockerFixture,
connector_config: KafkaConnectorConfig,
):
@@ -278,8 +305,8 @@ def test_clean_without_to_when_dry_run_is_false(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
- offset_topic="kafka-connect-offsets",
+ resetter_namespace=RESETTER_NAMESPACE,
+ offset_topic=OFFSETS_TOPIC,
)
assert connector.to is None
@@ -297,7 +324,8 @@ def test_clean_without_to_when_dry_run_is_false(
mock.attach_mock(mock_clean_connector, "mock_clean_connector")
mock.attach_mock(helm_mock, "helm")
- connector.clean(dry_run=False)
+ dry_run = False
+ connector.clean(dry_run)
assert mock.mock_calls == [
mocker.call.helm.add_repo(
@@ -306,45 +334,49 @@ def test_clean_without_to_when_dry_run_is_false(
RepoAuthFlags(),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=False,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
mocker.call.helm.upgrade_install(
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- namespace="test-namespace",
- chart="bakdata-kafka-connect-resetter/kafka-connect-resetter",
- dry_run=False,
- flags=HelmUpgradeInstallFlags(
- version="1.0.4",
- wait=True,
- wait_for_jobs=True,
- ),
- values={
- "connectorType": "source",
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ "bakdata-kafka-connect-resetter/kafka-connect-resetter",
+ dry_run,
+ RESETTER_NAMESPACE,
+ {
+ "nameOverride": CONNECTOR_CLEAN_FULL_NAME,
+ "connectorType": CONNECTOR_TYPE,
"config": {
"brokers": "broker:9092",
"connector": CONNECTOR_FULL_NAME,
- "offsetTopic": "kafka-connect-offsets",
+ "offsetTopic": OFFSETS_TOPIC,
},
- "nameOverride": CONNECTOR_FULL_NAME,
},
+ HelmUpgradeInstallFlags(
+ version="1.0.4",
+ wait=True,
+ wait_for_jobs=True,
+ ),
),
mocker.call.helm.uninstall(
- namespace="test-namespace",
- release_name=CONNECTOR_CLEAN_FULL_NAME,
- dry_run=False,
+ RESETTER_NAMESPACE,
+ CONNECTOR_CLEAN_RELEASE_NAME,
+ dry_run,
),
+ ANY, # __bool__
+ ANY, # __str__
]
mock_delete_topics.assert_not_called()
- dry_run_handler.print_helm_diff.assert_not_called()
+ dry_run_handler_mock.print_helm_diff.assert_not_called()
def test_clean_without_to_when_dry_run_is_true(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
- dry_run_handler: MagicMock,
+ dry_run_handler_mock: MagicMock,
connector_config: KafkaConnectorConfig,
):
connector = KafkaSourceConnector(
@@ -352,8 +384,8 @@ def test_clean_without_to_when_dry_run_is_true(
config=config,
handlers=handlers,
app=connector_config,
- namespace="test-namespace",
- offset_topic="kafka-connect-offsets",
+ resetter_namespace=RESETTER_NAMESPACE,
+ offset_topic=OFFSETS_TOPIC,
)
assert connector.to is None
@@ -361,4 +393,4 @@ def test_clean_without_to_when_dry_run_is_true(
connector.clean(dry_run=True)
- dry_run_handler.print_helm_diff.assert_called_once()
+ dry_run_handler_mock.print_helm_diff.assert_called_once()
diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py
index 6583ac4bf..c949f9832 100644
--- a/tests/components/test_kubernetes_app.py
+++ b/tests/components/test_kubernetes_app.py
@@ -3,37 +3,31 @@
import pytest
from pytest_mock import MockerFixture
-from typing_extensions import override
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.helm_wrapper.model import (
- HelmDiffConfig,
HelmRepoConfig,
- HelmUpgradeInstallFlags,
- RepoAuthFlags,
)
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
from kpops.components.base_components.kubernetes_app import (
KubernetesApp,
- KubernetesAppConfig,
+ KubernetesAppValues,
)
-from kpops.utils.colorify import magentaify
+from kpops.config import KpopsConfig
+
+HELM_RELEASE_NAME = create_helm_release_name("${pipeline.name}-test-kubernetes-app")
DEFAULTS_PATH = Path(__file__).parent / "resources"
-class KubernetesTestValue(KubernetesAppConfig):
- name_override: str
+class KubernetesTestValues(KubernetesAppValues):
+ foo: str
class TestKubernetesApp:
@pytest.fixture()
- def config(self) -> PipelineConfig:
- return PipelineConfig(
- defaults_path=DEFAULTS_PATH,
- environment="development",
- helm_diff_config=HelmDiffConfig(),
- )
+ def config(self) -> KpopsConfig:
+ return KpopsConfig(defaults_path=DEFAULTS_PATH)
@pytest.fixture()
def handlers(self) -> ComponentHandlers:
@@ -43,19 +37,13 @@ def handlers(self) -> ComponentHandlers:
topic_handler=MagicMock(),
)
- @pytest.fixture()
- def helm_mock(self, mocker: MockerFixture) -> MagicMock:
- return mocker.patch(
- "kpops.components.base_components.kubernetes_app.Helm"
- ).return_value
-
@pytest.fixture()
def log_info_mock(self, mocker: MockerFixture) -> MagicMock:
return mocker.patch("kpops.components.base_components.kubernetes_app.log.info")
@pytest.fixture()
- def app_value(self) -> KubernetesTestValue:
- return KubernetesTestValue(**{"name_override": "test-value"})
+ def app_values(self) -> KubernetesTestValues:
+ return KubernetesTestValues(foo="foo")
@pytest.fixture()
def repo_config(self) -> HelmRepoConfig:
@@ -64,164 +52,23 @@ def repo_config(self) -> HelmRepoConfig:
@pytest.fixture()
def kubernetes_app(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
- app_value: KubernetesTestValue,
- repo_config: HelmRepoConfig,
+ app_values: KubernetesTestValues,
) -> KubernetesApp:
return KubernetesApp(
name="test-kubernetes-app",
config=config,
handlers=handlers,
- app=app_value,
- namespace="test-namespace",
- repo_config=repo_config,
- )
-
- def test_should_lazy_load_helm_wrapper_and_not_repo_add(
- self,
- kubernetes_app: KubernetesApp,
- mocker: MockerFixture,
- helm_mock: MagicMock,
- ):
- helm_mock.add_repo.assert_not_called()
-
- mocker.patch.object(
- KubernetesApp,
- "helm_chart",
- return_value="test/test-chart",
- new_callable=mocker.PropertyMock,
- )
-
- kubernetes_app.deploy(False)
-
- helm_mock.upgrade_install.assert_called_once_with(
- "${pipeline_name}-test-kubernetes-app",
- "test/test-chart",
- False,
- "test-namespace",
- {"nameOverride": "test-value"},
- HelmUpgradeInstallFlags(),
- )
-
- def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented(
- self,
- config: PipelineConfig,
- handlers: ComponentHandlers,
- helm_mock: MagicMock,
- mocker: MockerFixture,
- app_value: KubernetesTestValue,
- ):
- repo_config = HelmRepoConfig(
- repository_name="test-repo", url="https://test.com/charts/"
- )
- kubernetes_app = KubernetesApp(
- name="test-kubernetes-app",
- config=config,
- handlers=handlers,
- app=app_value,
+ app=app_values,
namespace="test-namespace",
- repo_config=repo_config,
- version="3.4.5",
- )
-
- mocker.patch.object(
- KubernetesApp,
- "helm_chart",
- return_value="test/test-chart",
- new_callable=mocker.PropertyMock,
)
- kubernetes_app.deploy(dry_run=False)
-
- assert helm_mock.mock_calls == [
- mocker.call.add_repo(
- "test-repo",
- "https://test.com/charts/",
- RepoAuthFlags(),
- ),
- mocker.call.upgrade_install(
- "${pipeline_name}-test-kubernetes-app",
- "test/test-chart",
- False,
- "test-namespace",
- {"nameOverride": "test-value"},
- HelmUpgradeInstallFlags(version="3.4.5"),
- ),
- ]
-
- def test_should_deploy_app_with_local_helm_chart(
- self,
- config: PipelineConfig,
- handlers: ComponentHandlers,
- helm_mock: MagicMock,
- app_value: KubernetesTestValue,
- ):
- class AppWithLocalChart(KubernetesApp):
- repo_config: None = None
-
- @property
- @override
- def helm_chart(self) -> str:
- return "path/to/helm/charts/"
-
- app_with_local_chart = AppWithLocalChart(
- name="test-app-with-local-chart",
- config=config,
- handlers=handlers,
- app=app_value,
- namespace="test-namespace",
- )
-
- app_with_local_chart.deploy(dry_run=False)
-
- helm_mock.add_repo.assert_not_called()
-
- helm_mock.upgrade_install.assert_called_once_with(
- "${pipeline_name}-test-app-with-local-chart",
- "path/to/helm/charts/",
- False,
- "test-namespace",
- {"nameOverride": "test-value"},
- HelmUpgradeInstallFlags(),
- )
-
- def test_should_raise_not_implemented_error_when_helm_chart_is_not_set(
- self,
- kubernetes_app: KubernetesApp,
- helm_mock: MagicMock,
- ):
- with pytest.raises(NotImplementedError) as error:
- kubernetes_app.deploy(True)
- helm_mock.add_repo.assert_called()
- assert (
- str(error.value)
- == "Please implement the helm_chart property of the kpops.components.base_components.kubernetes_app module."
- )
-
- def test_should_call_helm_uninstall_when_destroying_kubernetes_app(
- self,
- kubernetes_app: KubernetesApp,
- helm_mock: MagicMock,
- log_info_mock: MagicMock,
- ):
- stdout = 'KubernetesAppComponent - release "test-kubernetes-app" uninstalled'
- helm_mock.uninstall.return_value = stdout
-
- kubernetes_app.destroy(True)
-
- helm_mock.uninstall.assert_called_once_with(
- "test-namespace", "${pipeline_name}-test-kubernetes-app", True
- )
-
- log_info_mock.assert_called_once_with(magentaify(stdout))
-
def test_should_raise_value_error_when_name_is_not_valid(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
- app_value: KubernetesTestValue,
- repo_config: HelmRepoConfig,
+ app_values: KubernetesTestValues,
):
with pytest.raises(
ValueError, match=r"The component name .* is invalid for Kubernetes."
@@ -230,9 +77,8 @@ def test_should_raise_value_error_when_name_is_not_valid(
name="Not-Compatible*",
config=config,
handlers=handlers,
- app=app_value,
+ app=app_values,
namespace="test-namespace",
- repo_config=repo_config,
)
with pytest.raises(
@@ -242,16 +88,14 @@ def test_should_raise_value_error_when_name_is_not_valid(
name="snake_case*",
config=config,
handlers=handlers,
- app=app_value,
+ app=app_values,
namespace="test-namespace",
- repo_config=repo_config,
)
assert KubernetesApp(
name="valid-name",
config=config,
handlers=handlers,
- app=app_value,
+ app=app_values,
namespace="test-namespace",
- repo_config=repo_config,
)
diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py
index 84f9f86c6..e143e3f74 100644
--- a/tests/components/test_producer_app.py
+++ b/tests/components/test_producer_app.py
@@ -5,21 +5,30 @@
import pytest
from pytest_mock import MockerFixture
-from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.helm_wrapper.model import HelmUpgradeInstallFlags
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
from kpops.components import ProducerApp
from kpops.components.base_components.models.to_section import (
OutputTopicTypes,
TopicConfig,
)
+from kpops.config import KpopsConfig, TopicNameConfig
DEFAULTS_PATH = Path(__file__).parent / "resources"
+PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop"
+PRODUCER_APP_FULL_NAME = "${pipeline.name}-" + PRODUCER_APP_NAME
+PRODUCER_APP_RELEASE_NAME = create_helm_release_name(PRODUCER_APP_FULL_NAME)
+PRODUCER_APP_CLEAN_FULL_NAME = PRODUCER_APP_FULL_NAME + "-clean"
+PRODUCER_APP_CLEAN_RELEASE_NAME = create_helm_release_name(
+ PRODUCER_APP_CLEAN_FULL_NAME, "-clean"
+)
+
class TestProducerApp:
- PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop"
- PRODUCER_APP_CLEAN_NAME = "test-producer-app-with-long-n-clean"
+ def test_release_name(self):
+ assert PRODUCER_APP_CLEAN_RELEASE_NAME.endswith("-clean")
@pytest.fixture()
def handlers(self) -> ComponentHandlers:
@@ -30,10 +39,9 @@ def handlers(self) -> ComponentHandlers:
)
@pytest.fixture()
- def config(self) -> PipelineConfig:
- return PipelineConfig(
+ def config(self) -> KpopsConfig:
+ return KpopsConfig(
defaults_path=DEFAULTS_PATH,
- environment="development",
topic_name_config=TopicNameConfig(
default_error_topic_name="${component_type}-error-topic",
default_output_topic_name="${component_type}-output-topic",
@@ -42,10 +50,10 @@ def config(self) -> PipelineConfig:
@pytest.fixture()
def producer_app(
- self, config: PipelineConfig, handlers: ComponentHandlers
+ self, config: KpopsConfig, handlers: ComponentHandlers
) -> ProducerApp:
return ProducerApp(
- name=self.PRODUCER_APP_NAME,
+ name=PRODUCER_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -65,9 +73,9 @@ def producer_app(
},
)
- def test_output_topics(self, config: PipelineConfig, handlers: ComponentHandlers):
+ def test_output_topics(self, config: KpopsConfig, handlers: ComponentHandlers):
producer_app = ProducerApp(
- name=self.PRODUCER_APP_NAME,
+ name=PRODUCER_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -116,11 +124,12 @@ def test_deploy_order_when_dry_run_is_false(
assert mock.mock_calls == [
mocker.call.mock_create_topics(to_section=producer_app.to, dry_run=False),
mocker.call.mock_helm_upgrade_install(
- "${pipeline_name}-" + self.PRODUCER_APP_NAME,
+ PRODUCER_APP_RELEASE_NAME,
"bakdata-streams-bootstrap/producer-app",
False,
"test-namespace",
{
+ "nameOverride": PRODUCER_APP_FULL_NAME,
"streams": {
"brokers": "fake-broker:9092",
"outputTopic": "${output_topic_name}",
@@ -150,7 +159,7 @@ def test_destroy(
producer_app.destroy(dry_run=True)
mock_helm_uninstall.assert_called_once_with(
- "test-namespace", "${pipeline_name}-" + self.PRODUCER_APP_NAME, True
+ "test-namespace", PRODUCER_APP_RELEASE_NAME, True
)
def test_should_not_reset_producer_app(
@@ -159,11 +168,13 @@ def test_should_not_reset_producer_app(
mocker: MockerFixture,
):
mock_helm_upgrade_install = mocker.patch.object(
- producer_app.helm, "upgrade_install"
+ producer_app._cleaner.helm, "upgrade_install"
+ )
+ mock_helm_uninstall = mocker.patch.object(
+ producer_app._cleaner.helm, "uninstall"
)
- mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall")
mock_helm_print_helm_diff = mocker.patch.object(
- producer_app.dry_run_handler, "print_helm_diff"
+ producer_app._cleaner.dry_run_handler, "print_helm_diff"
)
mock = mocker.MagicMock()
@@ -173,44 +184,55 @@ def test_should_not_reset_producer_app(
producer_app.clean(dry_run=True)
- assert mock.mock_calls == [
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME,
- True,
- ),
- mocker.call.helm_upgrade_install(
- "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME,
- "bakdata-streams-bootstrap/producer-app-cleanup-job",
- True,
- "test-namespace",
- {
- "streams": {
- "brokers": "fake-broker:9092",
- "outputTopic": "${output_topic_name}",
+ mock.assert_has_calls(
+ [
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ PRODUCER_APP_CLEAN_RELEASE_NAME,
+ True,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ mocker.call.helm_upgrade_install(
+ PRODUCER_APP_CLEAN_RELEASE_NAME,
+ "bakdata-streams-bootstrap/producer-app-cleanup-job",
+ True,
+ "test-namespace",
+ {
+ "nameOverride": PRODUCER_APP_FULL_NAME,
+ "streams": {
+ "brokers": "fake-broker:9092",
+ "outputTopic": "${output_topic_name}",
+ },
},
- },
- HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True),
- ),
- mocker.call.print_helm_diff(
- ANY,
- "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME,
- logging.getLogger("KafkaApp"),
- ),
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME,
- True,
- ),
- ]
+ HelmUpgradeInstallFlags(
+ version="2.4.2", wait=True, wait_for_jobs=True
+ ),
+ ),
+ mocker.call.print_helm_diff(
+ ANY,
+ PRODUCER_APP_CLEAN_RELEASE_NAME,
+ logging.getLogger("HelmApp"),
+ ),
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ PRODUCER_APP_CLEAN_RELEASE_NAME,
+ True,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ ]
+ )
def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false(
self, mocker: MockerFixture, producer_app: ProducerApp
):
mock_helm_upgrade_install = mocker.patch.object(
- producer_app.helm, "upgrade_install"
+ producer_app._cleaner.helm, "upgrade_install"
+ )
+ mock_helm_uninstall = mocker.patch.object(
+ producer_app._cleaner.helm, "uninstall"
)
- mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall")
mock = mocker.MagicMock()
mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install")
@@ -218,28 +240,37 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w
producer_app.clean(dry_run=False)
- assert mock.mock_calls == [
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME,
- False,
- ),
- mocker.call.helm_upgrade_install(
- "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME,
- "bakdata-streams-bootstrap/producer-app-cleanup-job",
- False,
- "test-namespace",
- {
- "streams": {
- "brokers": "fake-broker:9092",
- "outputTopic": "${output_topic_name}",
+ mock.assert_has_calls(
+ [
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ PRODUCER_APP_CLEAN_RELEASE_NAME,
+ False,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ mocker.call.helm_upgrade_install(
+ PRODUCER_APP_CLEAN_RELEASE_NAME,
+ "bakdata-streams-bootstrap/producer-app-cleanup-job",
+ False,
+ "test-namespace",
+ {
+ "nameOverride": PRODUCER_APP_FULL_NAME,
+ "streams": {
+ "brokers": "fake-broker:9092",
+ "outputTopic": "${output_topic_name}",
+ },
},
- },
- HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True),
- ),
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME,
- False,
- ),
- ]
+ HelmUpgradeInstallFlags(
+ version="2.4.2", wait=True, wait_for_jobs=True
+ ),
+ ),
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ PRODUCER_APP_CLEAN_RELEASE_NAME,
+ False,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ ]
+ )
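The producer-app tests above replace the exact `assert mock.mock_calls == [...]` comparison with `mock.assert_has_calls([...])` plus positional `ANY` placeholders for the `__bool__`/`__str__` entries noted in the inline comments (and questioned in a later FIXME). A minimal, self-contained sketch of the `unittest.mock` behaviour this accommodates follows; it assumes the code under test evaluates a mocked result in a boolean context and stringifies it, which is one way such dunder calls end up in the parent mock's recorded call list (the real call sites in KPOps may differ).

```python
from unittest.mock import ANY, MagicMock, call

parent = MagicMock()
helm_uninstall = MagicMock()
parent.attach_mock(helm_uninstall, "helm_uninstall")

# Hypothetical code under test: call the mocked method, then use its result
# in a truthiness check and in a log message. MagicMock records these dunder
# calls on the attached parent as well.
result = helm_uninstall("test-namespace", "release-name", True)
if result:                 # records call.helm_uninstall().__bool__()
    message = str(result)  # records call.helm_uninstall().__str__()

# An exact comparison against parent.mock_calls would now fail, whereas
# assert_has_calls matches a contiguous subsequence and lets ANY stand in
# for the dunder entries.
parent.assert_has_calls(
    [
        call.helm_uninstall("test-namespace", "release-name", True),
        ANY,  # __bool__
        ANY,  # __str__
    ]
)
```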
diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py
index 0d9135b54..e76973773 100644
--- a/tests/components/test_streams_app.py
+++ b/tests/components/test_streams_app.py
@@ -1,15 +1,15 @@
from pathlib import Path
-from unittest.mock import MagicMock
+from unittest.mock import ANY, MagicMock
import pytest
from pytest_mock import MockerFixture
-from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.helm_wrapper.model import (
HelmDiffConfig,
HelmUpgradeInstallFlags,
)
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
from kpops.components import StreamsApp
from kpops.components.base_components.models import TopicName
from kpops.components.base_components.models.to_section import (
@@ -17,13 +17,23 @@
TopicConfig,
ToSection,
)
+from kpops.components.streams_bootstrap.streams.streams_app import StreamsAppCleaner
+from kpops.config import KpopsConfig, TopicNameConfig
DEFAULTS_PATH = Path(__file__).parent / "resources"
+STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop"
+STREAMS_APP_FULL_NAME = "${pipeline.name}-" + STREAMS_APP_NAME
+STREAMS_APP_RELEASE_NAME = create_helm_release_name(STREAMS_APP_FULL_NAME)
+STREAMS_APP_CLEAN_FULL_NAME = STREAMS_APP_FULL_NAME + "-clean"
+STREAMS_APP_CLEAN_RELEASE_NAME = create_helm_release_name(
+ STREAMS_APP_CLEAN_FULL_NAME, "-clean"
+)
+
class TestStreamsApp:
- STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop"
- STREAMS_APP_CLEAN_NAME = "test-streams-app-with-long-na-clean"
+ def test_release_name(self):
+ assert STREAMS_APP_CLEAN_RELEASE_NAME.endswith("-clean")
@pytest.fixture()
def handlers(self) -> ComponentHandlers:
@@ -34,10 +44,9 @@ def handlers(self) -> ComponentHandlers:
)
@pytest.fixture()
- def config(self) -> PipelineConfig:
- return PipelineConfig(
+ def config(self) -> KpopsConfig:
+ return KpopsConfig(
defaults_path=DEFAULTS_PATH,
- environment="development",
topic_name_config=TopicNameConfig(
default_error_topic_name="${component_type}-error-topic",
default_output_topic_name="${component_type}-output-topic",
@@ -47,10 +56,10 @@ def config(self) -> PipelineConfig:
@pytest.fixture()
def streams_app(
- self, config: PipelineConfig, handlers: ComponentHandlers
+ self, config: KpopsConfig, handlers: ComponentHandlers
) -> StreamsApp:
return StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -68,9 +77,9 @@ def streams_app(
},
)
- def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers):
+ def test_set_topics(self, config: KpopsConfig, handlers: ComponentHandlers):
streams_app = StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -113,10 +122,10 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers):
assert "extraInputPatterns" in streams_config
def test_no_empty_input_topic(
- self, config: PipelineConfig, handlers: ComponentHandlers
+ self, config: KpopsConfig, handlers: ComponentHandlers
):
streams_app = StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -143,13 +152,13 @@ def test_no_empty_input_topic(
assert "inputPattern" in streams_config
assert "extraInputPatterns" not in streams_config
- def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandlers):
+ def test_should_validate(self, config: KpopsConfig, handlers: ComponentHandlers):
# An exception should be raised when both role and type are defined and type is input
with pytest.raises(
ValueError, match="Define role only if `type` is `pattern` or `None`"
):
StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -173,7 +182,7 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle
ValueError, match="Define `role` only if `type` is undefined"
):
StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -193,10 +202,10 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle
)
def test_set_streams_output_from_to(
- self, config: PipelineConfig, handlers: ComponentHandlers
+ self, config: KpopsConfig, handlers: ComponentHandlers
):
streams_app = StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -232,10 +241,10 @@ def test_set_streams_output_from_to(
assert streams_app.app.streams.error_topic == "${error_topic_name}"
def test_weave_inputs_from_prev_component(
- self, config: PipelineConfig, handlers: ComponentHandlers
+ self, config: KpopsConfig, handlers: ComponentHandlers
):
streams_app = StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -269,12 +278,12 @@ def test_weave_inputs_from_prev_component(
def test_deploy_order_when_dry_run_is_false(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
mocker: MockerFixture,
):
streams_app = StreamsApp(
- name=self.STREAMS_APP_NAME,
+ name=STREAMS_APP_NAME,
config=config,
handlers=handlers,
**{
@@ -319,11 +328,12 @@ def test_deploy_order_when_dry_run_is_false(
assert mock.mock_calls == [
mocker.call.mock_create_topics(to_section=streams_app.to, dry_run=dry_run),
mocker.call.mock_helm_upgrade_install(
- "${pipeline_name}-" + self.STREAMS_APP_NAME,
+ STREAMS_APP_RELEASE_NAME,
"bakdata-streams-bootstrap/streams-app",
dry_run,
"test-namespace",
{
+ "nameOverride": STREAMS_APP_FULL_NAME,
"streams": {
"brokers": "fake-broker:9092",
"extraOutputTopics": {
@@ -332,7 +342,7 @@ def test_deploy_order_when_dry_run_is_false(
},
"outputTopic": "${output_topic_name}",
"errorTopic": "${error_topic_name}",
- }
+ },
},
HelmUpgradeInstallFlags(
create_namespace=False,
@@ -355,16 +365,17 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture):
streams_app.destroy(dry_run=True)
mock_helm_uninstall.assert_called_once_with(
- "test-namespace", "${pipeline_name}-" + self.STREAMS_APP_NAME, True
+ "test-namespace", STREAMS_APP_RELEASE_NAME, True
)
def test_reset_when_dry_run_is_false(
self, streams_app: StreamsApp, mocker: MockerFixture
):
- mock_helm_upgrade_install = mocker.patch.object(
- streams_app.helm, "upgrade_install"
- )
- mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall")
+ cleaner = streams_app._cleaner
+ assert isinstance(cleaner, StreamsAppCleaner)
+
+ mock_helm_upgrade_install = mocker.patch.object(cleaner.helm, "upgrade_install")
+ mock_helm_uninstall = mocker.patch.object(cleaner.helm, "uninstall")
mock = mocker.MagicMock()
mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install")
@@ -373,32 +384,41 @@ def test_reset_when_dry_run_is_false(
dry_run = False
streams_app.reset(dry_run=dry_run)
- assert mock.mock_calls == [
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME,
- dry_run,
- ),
- mocker.call.helm_upgrade_install(
- "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME,
- "bakdata-streams-bootstrap/streams-app-cleanup-job",
- dry_run,
- "test-namespace",
- {
- "streams": {
- "brokers": "fake-broker:9092",
- "outputTopic": "${output_topic_name}",
- "deleteOutput": False,
+ mock.assert_has_calls(
+ [
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ STREAMS_APP_CLEAN_RELEASE_NAME,
+ dry_run,
+ ),
+ ANY, # __bool__ # FIXME: why is this in the call stack?
+ ANY, # __str__
+ mocker.call.helm_upgrade_install(
+ STREAMS_APP_CLEAN_RELEASE_NAME,
+ "bakdata-streams-bootstrap/streams-app-cleanup-job",
+ dry_run,
+ "test-namespace",
+ {
+ "nameOverride": STREAMS_APP_FULL_NAME,
+ "streams": {
+ "brokers": "fake-broker:9092",
+ "outputTopic": "${output_topic_name}",
+ "deleteOutput": False,
+ },
},
- },
- HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True),
- ),
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME,
- dry_run,
- ),
- ]
+ HelmUpgradeInstallFlags(
+ version="2.9.0", wait=True, wait_for_jobs=True
+ ),
+ ),
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ STREAMS_APP_CLEAN_RELEASE_NAME,
+ dry_run,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ ]
+ )
def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up(
self,
@@ -406,9 +426,11 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up(
mocker: MockerFixture,
):
mock_helm_upgrade_install = mocker.patch.object(
- streams_app.helm, "upgrade_install"
+ streams_app._cleaner.helm, "upgrade_install"
+ )
+ mock_helm_uninstall = mocker.patch.object(
+ streams_app._cleaner.helm, "uninstall"
)
- mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall")
mock = mocker.MagicMock()
mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install")
@@ -417,29 +439,38 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up(
dry_run = False
streams_app.clean(dry_run=dry_run)
- assert mock.mock_calls == [
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME,
- dry_run,
- ),
- mocker.call.helm_upgrade_install(
- "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME,
- "bakdata-streams-bootstrap/streams-app-cleanup-job",
- dry_run,
- "test-namespace",
- {
- "streams": {
- "brokers": "fake-broker:9092",
- "outputTopic": "${output_topic_name}",
- "deleteOutput": True,
+ mock.assert_has_calls(
+ [
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ STREAMS_APP_CLEAN_RELEASE_NAME,
+ dry_run,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ mocker.call.helm_upgrade_install(
+ STREAMS_APP_CLEAN_RELEASE_NAME,
+ "bakdata-streams-bootstrap/streams-app-cleanup-job",
+ dry_run,
+ "test-namespace",
+ {
+ "nameOverride": STREAMS_APP_FULL_NAME,
+ "streams": {
+ "brokers": "fake-broker:9092",
+ "outputTopic": "${output_topic_name}",
+ "deleteOutput": True,
+ },
},
- },
- HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True),
- ),
- mocker.call.helm_uninstall(
- "test-namespace",
- "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME,
- dry_run,
- ),
- ]
+ HelmUpgradeInstallFlags(
+ version="2.9.0", wait=True, wait_for_jobs=True
+ ),
+ ),
+ mocker.call.helm_uninstall(
+ "test-namespace",
+ STREAMS_APP_CLEAN_RELEASE_NAME,
+ dry_run,
+ ),
+ ANY, # __bool__
+ ANY, # __str__
+ ]
+ )
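Both test modules now derive their expected release names from `create_helm_release_name` (imported from `kpops.component_handlers.helm_wrapper.utils`) instead of hard-coding pre-trimmed strings such as `test-streams-app-with-long-na-clean`. The helper itself is not part of this diff; the snippet below is only an illustrative sketch of a trimming strategy consistent with Helm's 53-character release-name limit and with the test expectation that the `-clean` suffix survives trimming — the real function may work differently.

```python
import hashlib

# Assumption: Helm rejects release names longer than 53 characters.
HELM_RELEASE_NAME_MAX_LEN = 53


def trim_release_name(name: str, suffix: str = "") -> str:
    """Return a Helm-safe release name, keeping the given suffix intact."""
    if len(name) <= HELM_RELEASE_NAME_MAX_LEN:
        return name
    # A short digest keeps different long names distinguishable after trimming.
    digest = hashlib.sha1(name.encode()).hexdigest()[:6]
    keep = HELM_RELEASE_NAME_MAX_LEN - len(suffix) - len(digest) - 1
    return f"{name[:keep]}-{digest}{suffix}"


release = trim_release_name("${pipeline.name}-" + "x" * 60 + "-clean", "-clean")
assert len(release) <= HELM_RELEASE_NAME_MAX_LEN and release.endswith("-clean")
```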
diff --git a/tests/components/test_kafka_app.py b/tests/components/test_streams_bootstrap.py
similarity index 64%
rename from tests/components/test_kafka_app.py
rename to tests/components/test_streams_bootstrap.py
index 8fd0d98ec..127485e30 100644
--- a/tests/components/test_kafka_app.py
+++ b/tests/components/test_streams_bootstrap.py
@@ -4,24 +4,24 @@
import pytest
from pytest_mock import MockerFixture
-from kpops.cli.pipeline_config import PipelineConfig
from kpops.component_handlers import ComponentHandlers
from kpops.component_handlers.helm_wrapper.model import (
HelmDiffConfig,
HelmRepoConfig,
HelmUpgradeInstallFlags,
)
-from kpops.components.base_components import KafkaApp
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
+from kpops.components.streams_bootstrap import StreamsBootstrap
+from kpops.config import KpopsConfig
DEFAULTS_PATH = Path(__file__).parent / "resources"
-class TestKafkaApp:
+class TestStreamsBootstrap:
@pytest.fixture()
- def config(self) -> PipelineConfig:
- return PipelineConfig(
+ def config(self) -> KpopsConfig:
+ return KpopsConfig(
defaults_path=DEFAULTS_PATH,
- environment="development",
helm_diff_config=HelmDiffConfig(),
)
@@ -33,37 +33,30 @@ def handlers(self) -> ComponentHandlers:
topic_handler=MagicMock(),
)
- def test_default_configs(self, config: PipelineConfig, handlers: ComponentHandlers):
- kafka_app = KafkaApp(
+ def test_default_configs(self, config: KpopsConfig, handlers: ComponentHandlers):
+ streams_bootstrap = StreamsBootstrap(
name="example-name",
config=config,
handlers=handlers,
**{
"namespace": "test-namespace",
- "app": {
- "streams": {
- "outputTopic": "test",
- "brokers": "fake-broker:9092",
- },
- },
+ "app": {},
},
)
- assert kafka_app.app.streams.brokers == "fake-broker:9092"
-
- assert kafka_app.repo_config == HelmRepoConfig(
+ assert streams_bootstrap.repo_config == HelmRepoConfig(
repository_name="bakdata-streams-bootstrap",
url="https://bakdata.github.io/streams-bootstrap/",
)
- assert kafka_app.version == "2.9.0"
- assert kafka_app.namespace == "test-namespace"
+ assert streams_bootstrap.version == "2.9.0"
+ assert streams_bootstrap.namespace == "test-namespace"
- def test_should_deploy_kafka_app(
+ def test_should_deploy_streams_bootstrap_app(
self,
- config: PipelineConfig,
+ config: KpopsConfig,
handlers: ComponentHandlers,
mocker: MockerFixture,
):
- kafka_app = KafkaApp(
+ streams_bootstrap = StreamsBootstrap(
name="example-name",
config=config,
handlers=handlers,
@@ -78,26 +71,29 @@ def test_should_deploy_kafka_app(
"version": "1.2.3",
},
)
- helm_upgrade_install = mocker.patch.object(kafka_app.helm, "upgrade_install")
+ helm_upgrade_install = mocker.patch.object(
+ streams_bootstrap.helm, "upgrade_install"
+ )
print_helm_diff = mocker.patch.object(
- kafka_app.dry_run_handler, "print_helm_diff"
+ streams_bootstrap.dry_run_handler, "print_helm_diff"
)
mocker.patch.object(
- KafkaApp,
+ StreamsBootstrap,
"helm_chart",
return_value="test/test-chart",
new_callable=mocker.PropertyMock,
)
- kafka_app.deploy(dry_run=True)
+ streams_bootstrap.deploy(dry_run=True)
print_helm_diff.assert_called_once()
helm_upgrade_install.assert_called_once_with(
- "${pipeline_name}-example-name",
+ create_helm_release_name("${pipeline.name}-example-name"),
"test/test-chart",
True,
"test-namespace",
{
+ "nameOverride": "${pipeline.name}-example-name",
"streams": {"brokers": "fake-broker:9092", "outputTopic": "test"},
},
HelmUpgradeInstallFlags(version="1.2.3"),
diff --git a/tests/conftest.py b/tests/conftest.py
index cb88c2294..479672e86 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -4,6 +4,8 @@
import pytest
+from kpops.utils.yaml import load_yaml_file
+
@pytest.fixture()
def mock_env() -> Iterator[os._Environ[str]]:
@@ -14,3 +16,9 @@ def mock_env() -> Iterator[os._Environ[str]]:
"""
with mock.patch.dict(os.environ, clear=True):
yield os.environ
+
+
+@pytest.fixture()
+def load_yaml_file_clear_cache() -> Iterator[None]:
+ yield
+ load_yaml_file.cache.clear()
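The new `load_yaml_file_clear_cache` fixture empties a cache attribute on `kpops.utils.yaml.load_yaml_file` after each test that requests it, so YAML cached by one test cannot leak into the next. The loader is not shown in this diff; the sketch below assumes a cachetools-style decorator that exposes its backing dict as a function attribute, which matches the `load_yaml_file.cache.clear()` call but may not match the actual implementation.

```python
from pathlib import Path
from typing import Any

import yaml
from cachetools import cached
from cachetools.keys import hashkey


# The decorator attaches the backing dict as `load_yaml_file.cache`,
# which is exactly what the fixture clears between tests.
@cached(cache={}, key=lambda path: hashkey(str(path)))
def load_yaml_file(path: Path) -> dict[str, Any]:
    with path.open() as f:
        return yaml.safe_load(f)
```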
diff --git a/tests/defaults.yaml b/tests/defaults.yaml
new file mode 100644
index 000000000..09fd863b3
--- /dev/null
+++ b/tests/defaults.yaml
@@ -0,0 +1,2 @@
+streams-app:
+ namespace: "namespace"
diff --git a/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml b/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml
index e01434ceb..17eba50a2 100644
--- a/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml
+++ b/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml
@@ -1,6 +1,6 @@
- type: converter
app:
labels:
- l_1: ${component_app_labels_l_2}
- l_2: ${component_app_labels_l_1}
- infinite_nesting: ${component_app_labels}
+ l_1: ${component.app.labels.l_2}
+ l_2: ${component.app.labels.l_1}
+ infinite_nesting: ${component.app.labels}
diff --git a/tests/pipeline/resources/component-type-substitution/pipeline.yaml b/tests/pipeline/resources/component-type-substitution/pipeline.yaml
index 16e042586..8a4cf60ea 100644
--- a/tests/pipeline/resources/component-type-substitution/pipeline.yaml
+++ b/tests/pipeline/resources/component-type-substitution/pipeline.yaml
@@ -1,9 +1,9 @@
- type: scheduled-producer
app:
labels:
- app_type: "${component_type}"
- app_name: "${component_name}"
- app_schedule: "${component_app_schedule}"
+ app_type: "${component.type}"
+ app_name: "${component.name}"
+ app_schedule: "${component.app.schedule}"
commandLine:
FAKE_ARG: "fake-arg-value"
schedule: "30 3/8 * * *"
@@ -20,11 +20,11 @@
name: "filter-app"
app:
labels:
- app_type: "${component_type}"
- app_name: "${component_name}"
- app_resources_requests_memory: "${component_app_resources_requests_memory}"
- ${component_type}: "${component_app_labels_app_name}-${component_app_labels_app_type}"
- test_placeholder_in_placeholder: "${component_app_labels_${component_type}}"
+ app_type: "${component.type}"
+ app_name: "${component.name}"
+ app_resources_requests_memory: "${component.app.resources.requests.memory}"
+ ${component.type}: "${component.app.labels.app_name}-${component.app.labels.app_type}"
+ test_placeholder_in_placeholder: "${component.app.labels.${component.type}}"
commandLine:
TYPE: "nothing"
resources:
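These resources switch the substitution syntax from underscore-flattened names such as `${component_app_labels_l_1}` to dotted paths like `${component.app.labels.l_1}`, including placeholders nested inside placeholders and the deliberately cyclic references kept in `infinite_pipeline.yaml`. As an illustration only — not the KPOps implementation — a dotted placeholder can be resolved against a nested mapping by substituting innermost matches first, which also covers the nested case; a cyclic definition would simply never terminate, which is presumably what the infinite-pipeline resource exercises.

```python
import re
from typing import Any

# Matches the innermost ${...} placeholder (no nested braces inside).
PLACEHOLDER = re.compile(r"\$\{([^${}]+)\}")


def lookup(context: dict[str, Any], dotted: str) -> Any:
    value: Any = context
    for part in dotted.split("."):
        value = value[part]
    return value


def substitute(text: str, context: dict[str, Any]) -> str:
    while (match := PLACEHOLDER.search(text)) is not None:
        text = text.replace(match.group(0), str(lookup(context, match.group(1))), 1)
    return text


context = {"component": {"type": "filter", "app": {"labels": {"filter": "x"}}}}
assert substitute("${component.app.labels.${component.type}}", context) == "x"
```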
diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml
index 2707ee0fa..aa9f84287 100644
--- a/tests/pipeline/resources/custom-config/config.yaml
+++ b/tests/pipeline/resources/custom-config/config.yaml
@@ -1,11 +1,15 @@
-environment: development
defaults_path: ../no-topics-defaults
topic_name_config:
- default_error_topic_name: "${component_name}-dead-letter-topic"
- default_output_topic_name: "${component_name}-test-topic"
-brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
-kafka_connect_host: "http://localhost:8083"
-kafka_rest_host: "http://localhost:8082"
-schema_registry_url: "http://localhost:8081"
+ default_error_topic_name: "${component.name}-dead-letter-topic"
+ default_output_topic_name: "${component.name}-test-topic"
+kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+kafka_connect:
+ url: "http://localhost:8083"
+kafka_rest:
+ url: "http://localhost:8082"
+schema_registry:
+ enabled: true
+ url: "http://localhost:8081"
helm_config:
api_version: "2.1.1"
+pipeline_base_dir: tests/pipeline
diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml
index c4e2aa259..810e6b5f8 100644
--- a/tests/pipeline/resources/defaults.yaml
+++ b/tests/pipeline/resources/defaults.yaml
@@ -1,12 +1,14 @@
+pipeline-component:
+ name: ${component.type}
+
kubernetes-app:
- name: "${component_type}"
namespace: example-namespace
kafka-app:
app:
streams:
- brokers: "${brokers}"
- schema_registry_url: "${schema_registry_url}"
+ brokers: ${config.kafka_brokers}
+ schema_registry_url: ${config.schema_registry.url}
version: "2.4.2"
producer-app: {} # inherits from kafka-app
@@ -49,7 +51,7 @@ converter:
enabled: true
consumerGroup: converter-${output_topic_name}
maxReplicas: 1
- lagThreshold: "10000"
+ lagThreshold: 10000
to:
topics:
${output_topic_name}:
@@ -72,7 +74,7 @@ filter:
autoscaling:
enabled: true
maxReplicas: 1
- lagThreshold: "10000"
+ lagThreshold: 10000
consumerGroup: filter-${output_topic_name}
topics:
- "${output_topic_name}"
@@ -91,7 +93,7 @@ should-inflate:
autoscaling:
enabled: true
maxReplicas: 1
- lagThreshold: "10000"
+ lagThreshold: 10000
consumerGroup: filter-${output_topic_name}
topics:
- "${output_topic_name}"
@@ -103,9 +105,7 @@ should-inflate:
configs:
retention.ms: "-1"
-kafka-connector:
- name: "sink-connector"
- namespace: "example-namespace"
+kafka-sink-connector:
app:
batch.size: "2000"
behavior.on.malformed.documents: "warn"
diff --git a/tests/pipeline/resources/dotenv/.env b/tests/pipeline/resources/dotenv/.env
new file mode 100644
index 000000000..9829b1218
--- /dev/null
+++ b/tests/pipeline/resources/dotenv/.env
@@ -0,0 +1,3 @@
+KPOPS_environment="default"
+KPOPS_schema_registry__enabled="true"
+KPOPS_schema_registry__url="http://localhost:8081"
diff --git a/tests/pipeline/resources/dotenv/config.yaml b/tests/pipeline/resources/dotenv/config.yaml
new file mode 100644
index 000000000..3abfdffd4
--- /dev/null
+++ b/tests/pipeline/resources/dotenv/config.yaml
@@ -0,0 +1,8 @@
+defaults_path: ../defaults.yaml
+kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+kafka_connect:
+ url: "http://localhost:8083"
+kafka_rest:
+ url: "http://localhost:8082"
+helm_config:
+ api_version: "2.1.1"
diff --git a/tests/pipeline/resources/dotenv/custom.env b/tests/pipeline/resources/dotenv/custom.env
new file mode 100644
index 000000000..3e5371e98
--- /dev/null
+++ b/tests/pipeline/resources/dotenv/custom.env
@@ -0,0 +1,3 @@
+KPOPS_environment="custom"
+KPOPS_schema_registry__enabled="false"
+KPOPS_schema_registry__url="http://notlocalhost:8081"
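The dotenv resources configure nested options through environment variables such as `KPOPS_schema_registry__url`: a `KPOPS_` prefix with `__` as the nesting delimiter. As a rough sketch of how such variables can map onto a nested settings model — assuming a pydantic-settings configuration with `env_prefix` and `env_nested_delimiter`, which may not match the actual `KpopsConfig` — consider:

```python
from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class SchemaRegistryConfig(BaseModel):
    enabled: bool = False
    url: str = "http://localhost:8081"


class ExampleSettings(BaseSettings):
    model_config = SettingsConfigDict(
        env_prefix="KPOPS_",
        env_nested_delimiter="__",
        env_file=".env",  # custom.env could be passed instead
    )

    environment: str = "default"
    schema_registry: SchemaRegistryConfig = SchemaRegistryConfig()


# With custom.env loaded, KPOPS_schema_registry__url overrides the nested
# default, e.g. ExampleSettings().schema_registry.url == "http://notlocalhost:8081".
```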
diff --git a/tests/pipeline/resources/env-specific-config-only/config_production.yaml b/tests/pipeline/resources/env-specific-config-only/config_production.yaml
new file mode 100644
index 000000000..2e40128d4
--- /dev/null
+++ b/tests/pipeline/resources/env-specific-config-only/config_production.yaml
@@ -0,0 +1,11 @@
+defaults_path: ../no-topics-defaults
+kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+kafka_connect:
+ url: "http://localhost:8083"
+kafka_rest:
+ url: "http://localhost:8082"
+schema_registry:
+ enabled: true
+ url: "http://localhost:8081"
+helm_config:
+ api_version: "2.1.1"
diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml
index 6b7c754ab..572b695c7 100644
--- a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml
+++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml
@@ -1,10 +1,11 @@
-environment: development
defaults_path: ..
-brokers: "broker:9092"
+kafka_brokers: "broker:9092"
topic_name_config:
- default_error_topic_name: ${component_type}-error-topic
- default_output_topic_name: ${component_type}-output-topic
+ default_error_topic_name: ${component.type}-error-topic
+ default_output_topic_name: ${component.type}-output-topic
helm_diff_config:
enable: false
-kafka_connect_host: "kafka_connect_host:8083"
-kafka_rest_host: "kafka_rest_host:8082"
+kafka_connect:
+ url: "http://kafka_connect_url:8083"
+kafka_rest:
+ url: "http://kafka_rest_url:8082"
diff --git a/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml b/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml
index 02a28015a..fc012737a 100644
--- a/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml
+++ b/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml
@@ -12,7 +12,6 @@
type: output
- type: kafka-sink-connector
- namespace: example-namespace
name: es-sink-connector
app:
connector.class: io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
diff --git a/tests/pipeline/resources/multi-config/config.yaml b/tests/pipeline/resources/multi-config/config.yaml
new file mode 100644
index 000000000..2e40128d4
--- /dev/null
+++ b/tests/pipeline/resources/multi-config/config.yaml
@@ -0,0 +1,11 @@
+defaults_path: ../no-topics-defaults
+kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+kafka_connect:
+ url: "http://localhost:8083"
+kafka_rest:
+ url: "http://localhost:8082"
+schema_registry:
+ enabled: true
+ url: "http://localhost:8081"
+helm_config:
+ api_version: "2.1.1"
diff --git a/tests/pipeline/resources/multi-config/config_development.yaml b/tests/pipeline/resources/multi-config/config_development.yaml
new file mode 100644
index 000000000..faa196075
--- /dev/null
+++ b/tests/pipeline/resources/multi-config/config_development.yaml
@@ -0,0 +1,3 @@
+schema_registry:
+ enabled: true
+ url: "http://development:8081"
diff --git a/tests/pipeline/resources/multi-config/config_production.yaml b/tests/pipeline/resources/multi-config/config_production.yaml
new file mode 100644
index 000000000..3d2b6cc98
--- /dev/null
+++ b/tests/pipeline/resources/multi-config/config_production.yaml
@@ -0,0 +1,3 @@
+schema_registry:
+ enabled: true
+ url: "http://production:8081"
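The `multi-config` resources pair a base `config.yaml` with `config_development.yaml` and `config_production.yaml` overlays that carry only the keys they override (here the schema registry URL). Conceptually this is a deep merge of the environment-specific file over the base; a small sketch of that idea, not tied to KPOps' loader, could look like:

```python
from pathlib import Path
from typing import Any

import yaml


def deep_merge(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
    """Merge override into base, recursing into nested mappings."""
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged


def load_config(config_dir: Path, environment: str | None = None) -> dict[str, Any]:
    config: dict[str, Any] = yaml.safe_load((config_dir / "config.yaml").read_text())
    if environment is not None:
        env_file = config_dir / f"config_{environment}.yaml"
        if env_file.exists():
            config = deep_merge(config, yaml.safe_load(env_file.read_text()))
    return config
```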
diff --git a/tests/pipeline/resources/no-topics-defaults/defaults.yaml b/tests/pipeline/resources/no-topics-defaults/defaults.yaml
index 47de626e6..7820898a3 100644
--- a/tests/pipeline/resources/no-topics-defaults/defaults.yaml
+++ b/tests/pipeline/resources/no-topics-defaults/defaults.yaml
@@ -1,8 +1,8 @@
kafka-app:
app:
streams:
- brokers: "${brokers}"
- schemaRegistryUrl: "${schema_registry_url}"
+ brokers: "${config.kafka_brokers}"
+ schemaRegistryUrl: "${config.schema_registry.url}"
producer-app:
to:
@@ -14,7 +14,7 @@ producer-app:
streams-app:
app:
labels:
- pipeline: ${pipeline_name}
+ pipeline: ${pipeline.name}
to:
topics:
${error_topic_name}:
diff --git a/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml b/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml
index 035691c2e..b6a05220f 100644
--- a/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml
+++ b/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml
@@ -1,3 +1,3 @@
kubernetes-app:
- name: "${component_type}-development"
+ name: "${component.type}-development"
namespace: development-namespace
diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml
index dfbe23db9..ff053e990 100644
--- a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml
+++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml
@@ -7,5 +7,5 @@ kubernetes-app:
kafka-app:
app:
streams:
- brokers: ${brokers}
- schemaRegistryUrl: ${schema_registry_url}
+ brokers: ${config.kafka_brokers}
+ schemaRegistryUrl: ${config.schema_registry.url}
diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml
index 8faa86ad5..b8aeb6137 100644
--- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml
+++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml
@@ -1,11 +1,11 @@
kubernetes-app:
- name: ${component_type}
+ name: ${component.type}
namespace: example-namespace
kafka-app:
app:
streams:
- brokers: "${brokers}"
- schemaRegistryUrl: "${schema_registry_url}"
+ brokers: "${config.kafka_brokers}"
+ schemaRegistryUrl: "${config.schema_registry.url}"
producer-app: {} # inherits from kafka-app
diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml
index c7b863a92..80987e36e 100644
--- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml
+++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml
@@ -1,3 +1,3 @@
kubernetes-app:
- name: ${component_type}-development
+ name: ${component.type}-development
namespace: development-namespace
diff --git a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml
index 00b3b2673..cf3b4831b 100644
--- a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml
+++ b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml
@@ -5,7 +5,7 @@ kubernetes-app:
kafka-app:
app:
streams:
- brokers: "${broker}"
+ brokers: "${config.kafka_brokers}"
schema_registry_url: "${schema_registry_url}"
version: "2.4.2"
diff --git a/tests/pipeline/resources/read-from-component/pipeline.yaml b/tests/pipeline/resources/read-from-component/pipeline.yaml
index 902e8edd9..cc6bf72c7 100644
--- a/tests/pipeline/resources/read-from-component/pipeline.yaml
+++ b/tests/pipeline/resources/read-from-component/pipeline.yaml
@@ -44,7 +44,7 @@
name: consumer3
from:
topics:
- ${pipeline_name}-producer1:
+ ${pipeline.name}-producer1:
type: input
components:
producer2:
diff --git a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml
new file mode 100644
index 000000000..c895105b7
--- /dev/null
+++ b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml
@@ -0,0 +1,23 @@
+kubernetes-app:
+ namespace: example-namespace
+
+kafka-app:
+ app:
+ streams:
+ brokers: "${config.kafka_brokers}"
+ schema_registry_url: "${schema_registry_url}"
+ version: "2.4.2"
+
+streams-app: # inherits from kafka-app
+ app:
+ streams:
+ config:
+ large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator
+ to:
+ topics:
+ ${error_topic_name}:
+ type: error
+ value_schema: com.bakdata.kafka.DeadLetter
+ partitions_count: 1
+ configs:
+ cleanup.policy: compact,delete
diff --git a/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml
new file mode 100644
index 000000000..d61d6c9ba
--- /dev/null
+++ b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml
@@ -0,0 +1,6 @@
+- type: streams-app
+ name: in-order-to-have-len-fifty-two-name-should-end--here
+ app:
+ streams:
+ config:
+ max.poll.records: 100
diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py
index 95d63ab70..77ba66496 100644
--- a/tests/pipeline/snapshots/snap_test_example.py
+++ b/tests/pipeline/snapshots/snap_test_example.py
@@ -7,333 +7,311 @@
snapshots = Snapshot()
-snapshots['TestExample.test_atm_fraud atm-fraud-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'debug': True,
- 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer',
- 'imageTag': '1.0.0',
- 'nameOverride': 'account-producer',
- 'prometheus': {
- 'jmx': {
- 'enabled': False
- }
- },
- 'replicaCount': 1,
- 'schedule': '0 12 * * *',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'optimizeLeaveGroupBehavior': False,
- 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic',
- 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
- },
- 'suspend': True
+snapshots['TestExample.test_atm_fraud atm-fraud-pipeline'] = [
+ {
+ 'app': {
+ 'debug': True,
+ 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer',
+ 'imageTag': '1.0.0',
+ 'prometheus': {
+ 'jmx': {
+ 'enabled': False
+ }
},
- 'name': 'account-producer',
- 'namespace': '${NAMESPACE}',
- 'prefix': '',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
+ 'replicaCount': 1,
+ 'schedule': '0 12 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
},
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ 'optimizeLeaveGroupBehavior': False,
+ 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic',
+ 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
},
- 'to': {
- 'models': {
- },
- 'topics': {
- 'bakdata-atm-fraud-detection-account-producer-topic': {
- 'configs': {
- },
- 'partitions_count': 3
- }
- }
+ 'suspend': True
+ },
+ 'name': 'account-producer',
+ 'namespace': '${NAMESPACE}',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
},
- 'type': 'producer-app',
- 'version': '2.9.0'
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
},
- {
- 'app': {
- 'commandLine': {
- 'ITERATION': 20,
- 'REAL_TX': 19
- },
- 'debug': True,
- 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer',
- 'imageTag': '1.0.0',
- 'nameOverride': 'transaction-avro-producer',
- 'prometheus': {
- 'jmx': {
- 'enabled': False
- }
- },
- 'replicaCount': 1,
- 'schedule': '0 12 * * *',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092',
- 'extraOutputTopics': {
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'bakdata-atm-fraud-detection-account-producer-topic': {
+ 'configs': {
},
- 'optimizeLeaveGroupBehavior': False,
- 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic',
- 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
- },
- 'suspend': True
+ 'partitions_count': 3
+ }
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'commandLine': {
+ 'ITERATION': 20,
+ 'REAL_TX': 19
},
- 'name': 'transaction-avro-producer',
- 'namespace': '${NAMESPACE}',
- 'prefix': '',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ 'debug': True,
+ 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer',
+ 'imageTag': '1.0.0',
+ 'prometheus': {
+ 'jmx': {
+ 'enabled': False
+ }
},
- 'to': {
- 'models': {
+ 'replicaCount': 1,
+ 'schedule': '0 12 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
},
- 'topics': {
- 'bakdata-atm-fraud-detection-transaction-avro-producer-topic': {
- 'configs': {
- },
- 'partitions_count': 3
- }
- }
+ 'optimizeLeaveGroupBehavior': False,
+ 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic',
+ 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
},
- 'type': 'producer-app',
- 'version': '2.9.0'
+ 'suspend': True
},
- {
- 'app': {
- 'annotations': {
- 'consumerGroup': 'atm-transactionjoiner-atm-fraud-joinedtransactions-topic'
- },
- 'commandLine': {
- 'PRODUCTIVE': False
- },
- 'debug': True,
- 'image': '${DOCKER_REGISTRY}/atm-demo-transactionjoiner',
- 'imageTag': '1.0.0',
- 'labels': {
- 'pipeline': 'bakdata-atm-fraud-detection'
- },
- 'nameOverride': 'transaction-joiner',
- 'prometheus': {
- 'jmx': {
- 'enabled': False
- }
- },
- 'replicaCount': 1,
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092',
- 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic',
- 'inputTopics': [
- 'bakdata-atm-fraud-detection-transaction-avro-producer-topic'
- ],
- 'optimizeLeaveGroupBehavior': False,
- 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic',
- 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
- }
+ 'name': 'transaction-avro-producer',
+ 'namespace': '${NAMESPACE}',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
},
- 'name': 'transaction-joiner',
- 'namespace': '${NAMESPACE}',
- 'prefix': '',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'to': {
- 'models': {
- },
- 'topics': {
- 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic': {
- 'configs': {
- },
- 'partitions_count': 1,
- 'type': 'error'
+ 'topics': {
+ 'bakdata-atm-fraud-detection-transaction-avro-producer-topic': {
+ 'configs': {
},
- 'bakdata-atm-fraud-detection-transaction-joiner-topic': {
- 'configs': {
- },
- 'partitions_count': 3
- }
+ 'partitions_count': 3
}
- },
- 'type': 'streams-app',
- 'version': '2.9.0'
+ }
},
- {
- 'app': {
- 'annotations': {
- 'consumerGroup': 'atm-frauddetector-atm-fraud-possiblefraudtransactions-topic'
- },
- 'commandLine': {
- 'PRODUCTIVE': False
- },
- 'debug': True,
- 'image': '${DOCKER_REGISTRY}/atm-demo-frauddetector',
- 'imageTag': '1.0.0',
- 'labels': {
- 'pipeline': 'bakdata-atm-fraud-detection'
- },
- 'nameOverride': 'fraud-detector',
- 'prometheus': {
- 'jmx': {
- 'enabled': False
- }
- },
- 'replicaCount': 1,
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092',
- 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic',
- 'inputTopics': [
- 'bakdata-atm-fraud-detection-transaction-joiner-topic'
- ],
- 'optimizeLeaveGroupBehavior': False,
- 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic',
- 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'annotations': {
+ 'consumerGroup': 'atm-transactionjoiner-atm-fraud-joinedtransactions-topic'
+ },
+ 'commandLine': {
+ 'PRODUCTIVE': False
+ },
+ 'debug': True,
+ 'image': '${DOCKER_REGISTRY}/atm-demo-transactionjoiner',
+ 'imageTag': '1.0.0',
+ 'labels': {
+ 'pipeline': 'bakdata-atm-fraud-detection'
+ },
+ 'prometheus': {
+ 'jmx': {
+ 'enabled': False
}
},
- 'name': 'fraud-detector',
- 'namespace': '${NAMESPACE}',
- 'prefix': '',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ 'replicaCount': 1,
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic',
+ 'inputTopics': [
+ 'bakdata-atm-fraud-detection-transaction-avro-producer-topic'
+ ],
+ 'optimizeLeaveGroupBehavior': False,
+ 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic',
+ 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
+ }
+ },
+ 'name': 'transaction-joiner',
+ 'namespace': '${NAMESPACE}',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'to': {
- 'models': {
+ 'topics': {
+ 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
},
- 'topics': {
- 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic': {
- 'configs': {
- },
- 'partitions_count': 1,
- 'type': 'error'
+ 'bakdata-atm-fraud-detection-transaction-joiner-topic': {
+ 'configs': {
},
- 'bakdata-atm-fraud-detection-fraud-detector-topic': {
- 'configs': {
- },
- 'partitions_count': 3
- }
+ 'partitions_count': 3
}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'annotations': {
+ 'consumerGroup': 'atm-frauddetector-atm-fraud-possiblefraudtransactions-topic'
+ },
+ 'commandLine': {
+ 'PRODUCTIVE': False
},
- 'type': 'streams-app',
- 'version': '2.9.0'
+ 'debug': True,
+ 'image': '${DOCKER_REGISTRY}/atm-demo-frauddetector',
+ 'imageTag': '1.0.0',
+ 'labels': {
+ 'pipeline': 'bakdata-atm-fraud-detection'
+ },
+ 'prometheus': {
+ 'jmx': {
+ 'enabled': False
+ }
+ },
+ 'replicaCount': 1,
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic',
+ 'inputTopics': [
+ 'bakdata-atm-fraud-detection-transaction-joiner-topic'
+ ],
+ 'optimizeLeaveGroupBehavior': False,
+ 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic',
+ 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
+ }
},
- {
- 'app': {
- 'annotations': {
- 'consumerGroup': 'atm-accountlinker-atm-fraud-output-topic'
- },
- 'commandLine': {
- 'PRODUCTIVE': False
- },
- 'debug': True,
- 'image': '${DOCKER_REGISTRY}/atm-demo-accountlinker',
- 'imageTag': '1.0.0',
- 'labels': {
- 'pipeline': 'bakdata-atm-fraud-detection'
- },
- 'nameOverride': 'account-linker',
- 'prometheus': {
- 'jmx': {
- 'enabled': False
- }
+ 'name': 'fraud-detector',
+ 'namespace': '${NAMESPACE}',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
},
- 'replicaCount': 1,
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092',
- 'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic',
- 'extraInputTopics': {
- 'accounts': [
- 'bakdata-atm-fraud-detection-account-producer-topic'
- ]
+ 'bakdata-atm-fraud-detection-fraud-detector-topic': {
+ 'configs': {
},
- 'inputTopics': [
- 'bakdata-atm-fraud-detection-fraud-detector-topic'
- ],
- 'optimizeLeaveGroupBehavior': False,
- 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic',
- 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
+ 'partitions_count': 3
}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'annotations': {
+ 'consumerGroup': 'atm-accountlinker-atm-fraud-output-topic'
},
- 'from': {
- 'components': {
- 'account-producer': {
- 'role': 'accounts'
- },
- 'fraud-detector': {
- 'type': 'input'
- }
- },
- 'topics': {
+ 'commandLine': {
+ 'PRODUCTIVE': False
+ },
+ 'debug': True,
+ 'image': '${DOCKER_REGISTRY}/atm-demo-accountlinker',
+ 'imageTag': '1.0.0',
+ 'labels': {
+ 'pipeline': 'bakdata-atm-fraud-detection'
+ },
+ 'prometheus': {
+ 'jmx': {
+ 'enabled': False
}
},
- 'name': 'account-linker',
- 'namespace': '${NAMESPACE}',
- 'prefix': '',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
+ 'replicaCount': 1,
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic',
+ 'extraInputTopics': {
+ 'accounts': [
+ 'bakdata-atm-fraud-detection-account-producer-topic'
+ ]
},
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ 'inputTopics': [
+ 'bakdata-atm-fraud-detection-fraud-detector-topic'
+ ],
+ 'optimizeLeaveGroupBehavior': False,
+ 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic',
+ 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'account-producer': {
+ 'role': 'accounts'
+ },
+ 'fraud-detector': {
+ 'type': 'input'
+ }
},
- 'to': {
- 'models': {
+ 'topics': {
+ }
+ },
+ 'name': 'account-linker',
+ 'namespace': '${NAMESPACE}',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
},
- 'topics': {
- 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic': {
- 'configs': {
- },
- 'partitions_count': 1,
- 'type': 'error'
+ 'bakdata-atm-fraud-detection-account-linker-topic': {
+ 'configs': {
},
- 'bakdata-atm-fraud-detection-account-linker-topic': {
- 'configs': {
- },
- 'partitions_count': 3
- }
+ 'partitions_count': 3
}
- },
- 'type': 'streams-app',
- 'version': '2.9.0'
+ }
},
- {
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ },
+ {
+ '_resetter': {
'app': {
- 'auto.create': True,
- 'connection.ds.pool.size': 5,
- 'connection.password': 'AppPassword',
- 'connection.url': 'jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db',
- 'connection.user': 'app1',
- 'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector',
- 'errors.deadletterqueue.context.headers.enable': True,
- 'errors.deadletterqueue.topic.name': 'postgres-request-sink-dead-letters',
- 'errors.deadletterqueue.topic.replication.factor': 1,
- 'errors.tolerance': 'all',
- 'insert.mode': 'insert',
- 'insert.mode.databaselevel': True,
- 'key.converter': 'org.apache.kafka.connect.storage.StringConverter',
- 'name': 'postgresql-connector',
- 'pk.mode': 'record_value',
- 'table.name.format': 'fraud_transactions',
- 'tasks.max': 1,
- 'topics': 'bakdata-atm-fraud-detection-account-linker-topic',
- 'transforms': 'flatten',
- 'transforms.flatten.type': 'org.apache.kafka.connect.transforms.Flatten$Value',
- 'value.converter': 'io.confluent.connect.avro.AvroConverter',
- 'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
+ 'config': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'connector': 'postgresql-connector'
+ },
+ 'connectorType': 'sink'
},
'name': 'postgresql-connector',
'namespace': '${NAMESPACE}',
@@ -345,10 +323,38 @@
'repository_name': 'bakdata-kafka-connect-resetter',
'url': 'https://bakdata.github.io/kafka-connect-resetter/'
},
- 'resetter_values': {
- },
- 'type': 'kafka-sink-connector',
+ 'suffix': '-clean',
+ 'type': 'kafka-connector-resetter',
'version': '1.0.4'
- }
- ]
-}
+ },
+ 'app': {
+ 'auto.create': True,
+ 'connection.ds.pool.size': 5,
+ 'connection.password': 'AppPassword',
+ 'connection.url': 'jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db',
+ 'connection.user': 'app1',
+ 'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector',
+ 'errors.deadletterqueue.context.headers.enable': True,
+ 'errors.deadletterqueue.topic.name': 'postgres-request-sink-dead-letters',
+ 'errors.deadletterqueue.topic.replication.factor': 1,
+ 'errors.tolerance': 'all',
+ 'insert.mode': 'insert',
+ 'insert.mode.databaselevel': True,
+ 'key.converter': 'org.apache.kafka.connect.storage.StringConverter',
+ 'name': 'postgresql-connector',
+ 'pk.mode': 'record_value',
+ 'table.name.format': 'fraud_transactions',
+ 'tasks.max': 1,
+ 'topics': 'bakdata-atm-fraud-detection-account-linker-topic',
+ 'transforms': 'flatten',
+ 'transforms.flatten.type': 'org.apache.kafka.connect.transforms.Flatten$Value',
+ 'value.converter': 'io.confluent.connect.avro.AvroConverter',
+ 'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
+ },
+ 'name': 'postgresql-connector',
+ 'prefix': '',
+ 'resetter_values': {
+ },
+ 'type': 'kafka-sink-connector'
+ }
+]
diff --git a/tests/pipeline/snapshots/snap_test_generate.py b/tests/pipeline/snapshots/snap_test_generate.py
new file mode 100644
index 000000000..436d7e9a2
--- /dev/null
+++ b/tests/pipeline/snapshots/snap_test_generate.py
@@ -0,0 +1,2357 @@
+# -*- coding: utf-8 -*-
+# snapshottest: v1 - https://goo.gl/zC4yUc
+from __future__ import unicode_literals
+
+from snapshottest import Snapshot
+
+
+snapshots = Snapshot()
+
+snapshots['TestGenerate.test_default_config test-pipeline'] = [
+ {
+ 'app': {
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-custom-config-app1',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-custom-config-app1': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'image': 'some-image',
+ 'labels': {
+ 'pipeline': 'resources-custom-config'
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'resources-custom-config-app2-error',
+ 'inputTopics': [
+ 'resources-custom-config-app1'
+ ],
+ 'outputTopic': 'resources-custom-config-app2',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-custom-config-app2': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
+ },
+ 'resources-custom-config-app2-error': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ }
+]
+
+snapshots['TestGenerate.test_inflate_pipeline test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'fake-arg-value'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'schedule': '30 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'scheduled-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
+ },
+ 'topics': {
+ 'resources-pipeline-with-inflate-scheduled-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
+ }
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-inflate-converter-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-inflate-scheduled-producer'
+ ],
+ 'outputTopic': 'resources-pipeline-with-inflate-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-pipeline-with-inflate-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-inflate-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-pipeline-with-inflate-should-inflate'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-inflate-converter'
+ ],
+ 'outputTopic': 'resources-pipeline-with-inflate-should-inflate',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'should-inflate',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-pipeline-with-inflate-should-inflate': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-inflate-should-inflate-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'should-inflate',
+ 'version': '2.4.2'
+ },
+ {
+ '_resetter': {
+ 'app': {
+ 'config': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'connector': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector'
+ },
+ 'connectorType': 'sink'
+ },
+ 'name': 'should-inflate-inflated-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'suffix': '-clean',
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'should-inflate-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ }
+ }
+ },
+ 'type': 'kafka-connector-resetter',
+ 'version': '1.0.4'
+ },
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'resources-pipeline-with-inflate-should-inflate',
+ 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1'
+ },
+ 'name': 'should-inflate-inflated-sink-connector',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'resetter_values': {
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'should-inflate-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ }
+ }
+ },
+ 'type': 'kafka-sink-connector'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error',
+ 'inputTopics': [
+ 'kafka-sink-connector'
+ ],
+ 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'should-inflate-inflated-streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ },
+ 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': {
+ 'configs': {
+ },
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_kafka_connect_sink_weave_from_topics test-pipeline'] = [
+ {
+ 'app': {
+ 'image': 'fake-image',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-kafka-connect-sink-streams-app-error',
+ 'inputTopics': [
+ 'example-topic'
+ ],
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ },
+ 'topics': {
+ 'example-topic': {
+ 'type': 'input'
+ }
+ }
+ },
+ 'name': 'streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-kafka-connect-sink-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ '_resetter': {
+ 'app': {
+ 'config': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'connector': 'resources-kafka-connect-sink-es-sink-connector'
+ },
+ 'connectorType': 'sink'
+ },
+ 'name': 'es-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'suffix': '-clean',
+ 'type': 'kafka-connector-resetter',
+ 'version': '1.0.4'
+ },
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-kafka-connect-sink-es-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'example-output'
+ },
+ 'name': 'es-sink-connector',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'resetter_values': {
+ },
+ 'type': 'kafka-sink-connector'
+ }
+]
+
+snapshots['TestGenerate.test_load_pipeline test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'fake-arg-value'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'schedule': '30 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-first-pipeline-scheduled-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'scheduled-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-first-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
+ },
+ 'topics': {
+ 'resources-first-pipeline-scheduled-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
+ }
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-first-pipeline-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-first-pipeline-converter-error',
+ 'inputTopics': [
+ 'resources-first-pipeline-scheduled-producer'
+ ],
+ 'outputTopic': 'resources-first-pipeline-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-first-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-first-pipeline-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-first-pipeline-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error',
+ 'inputTopics': [
+ 'resources-first-pipeline-converter'
+ ],
+ 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-first-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'filter',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_model_serialization test-pipeline'] = [
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'test',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'out',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'account-producer',
+ 'namespace': 'test',
+ 'prefix': 'resources-pipeline-with-paths-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'ca_file': 'my-cert.cert',
+ 'insecure_skip_tls_verify': False,
+ 'password': '$CI_JOB_TOKEN',
+ 'username': 'masked'
+ },
+ 'repository_name': 'masked',
+ 'url': 'masked'
+ },
+ 'type': 'producer-app',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_no_input_topic test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-no-input-topic-pipeline-app1-error',
+ 'inputPattern': '.*',
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ },
+ 'topics': {
+ '.*': {
+ 'type': 'pattern'
+ }
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-no-input-topic-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-no-input-topic-pipeline-app1-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-no-input-topic-pipeline-app2-error',
+ 'extraOutputTopics': {
+ 'extra': 'example-output-extra',
+ 'test-output': 'test-output-extra'
+ },
+ 'inputTopics': [
+ 'example-output'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-no-input-topic-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'example-output-extra': {
+ 'configs': {
+ },
+ 'role': 'extra'
+ },
+ 'resources-no-input-topic-pipeline-app2-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ },
+ 'test-output-extra': {
+ 'configs': {
+ },
+ 'role': 'test-output'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_no_user_defined_components test-pipeline'] = [
+ {
+ 'app': {
+ 'image': 'fake-image',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-no-user-defined-components-streams-app-error',
+ 'inputTopics': [
+ 'example-topic'
+ ],
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ },
+ 'topics': {
+ 'example-topic': {
+ 'type': 'input'
+ }
+ }
+ },
+ 'name': 'streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-no-user-defined-components-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-no-user-defined-components-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_pipelines_with_env_values test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'override-arg'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'schedule': '20 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-pipeline-with-envs-input-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'input-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-envs-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
+ },
+ 'topics': {
+ 'resources-pipeline-with-envs-input-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
+ }
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-pipeline-with-envs-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-envs-converter-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-envs-input-producer'
+ ],
+ 'outputTopic': 'resources-pipeline-with-envs-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-envs-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-pipeline-with-envs-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-envs-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-pipeline-with-envs-filter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-pipeline-with-envs-filter'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-envs-filter-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-envs-converter'
+ ],
+ 'outputTopic': 'resources-pipeline-with-envs-filter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'filter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-envs-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-pipeline-with-envs-filter': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-envs-filter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'filter',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_prefix_pipeline_component test-pipeline'] = [
+ {
+ 'app': {
+ 'debug': True,
+ 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer',
+ 'imageTag': '1.0.0',
+ 'prometheus': {
+ 'jmx': {
+ 'enabled': False
+ }
+ },
+ 'replicaCount': 1,
+ 'schedule': '0 12 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ },
+ 'suspend': True
+ },
+ 'name': 'account-producer',
+ 'namespace': '${NAMESPACE}',
+ 'prefix': 'from-pipeline-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ }
+]
+
+snapshots['TestGenerate.test_read_from_component test-pipeline'] = [
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-read-from-component-producer1',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'producer1',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-producer1': {
+ 'configs': {
+ },
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-read-from-component-producer2',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'producer2',
+ 'namespace': 'example-namespace',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-producer2': {
+ 'configs': {
+ },
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-read-from-component-inflate-step',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-read-from-component-inflate-step'
+ ]
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer2'
+ ],
+ 'outputTopic': 'resources-read-from-component-inflate-step',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-inflate-step': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-read-from-component-inflate-step-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'should-inflate',
+ 'version': '2.4.2'
+ },
+ {
+ '_resetter': {
+ 'app': {
+ 'config': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'connector': 'resources-read-from-component-inflate-step-inflated-sink-connector'
+ },
+ 'connectorType': 'sink'
+ },
+ 'name': 'inflate-step-inflated-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'suffix': '-clean',
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'inflate-step-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ },
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'kafka-connector-resetter',
+ 'version': '1.0.4'
+ },
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-read-from-component-inflate-step-inflated-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'resources-read-from-component-inflate-step',
+ 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1'
+ },
+ 'name': 'inflate-step-inflated-sink-connector',
+ 'prefix': 'resources-read-from-component-',
+ 'resetter_values': {
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'inflate-step-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ },
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'kafka-sink-connector'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error',
+ 'inputTopics': [
+ 'kafka-sink-connector'
+ ],
+ 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step-inflated-streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-read-from-component-inflate-step-inflated-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-read-from-component-inflate-step-without-prefix'
+ ]
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error',
+ 'inputTopics': [
+ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
+ ],
+ 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step-without-prefix',
+ 'namespace': 'example-namespace',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-inflate-step-without-prefix': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-read-from-component-inflate-step-without-prefix-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'should-inflate',
+ 'version': '2.4.2'
+ },
+ {
+ '_resetter': {
+ 'app': {
+ 'config': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'connector': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector'
+ },
+ 'connectorType': 'sink'
+ },
+ 'name': 'inflate-step-without-prefix-inflated-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'suffix': '-clean',
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'inflate-step-without-prefix-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ },
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'kafka-connector-resetter',
+ 'version': '1.0.4'
+ },
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'resources-read-from-component-inflate-step-without-prefix',
+ 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1'
+ },
+ 'name': 'inflate-step-without-prefix-inflated-sink-connector',
+ 'prefix': 'resources-read-from-component-',
+ 'resetter_values': {
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'inflate-step-without-prefix-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ },
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'kafka-sink-connector'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error',
+ 'inputTopics': [
+ 'kafka-sink-connector'
+ ],
+ 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step-without-prefix-inflated-streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer1-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer1'
+ ],
+ 'outputTopic': 'resources-read-from-component-consumer1',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'producer1': {
+ 'type': 'input'
+ }
+ },
+ 'topics': {
+ }
+ },
+ 'name': 'consumer1',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-consumer1': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-read-from-component-consumer1-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer2-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer1',
+ 'resources-read-from-component-consumer1'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'consumer1': {
+ 'type': 'input'
+ },
+ 'producer1': {
+ 'type': 'input'
+ }
+ },
+ 'topics': {
+ }
+ },
+ 'name': 'consumer2',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-consumer2-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer3-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer1',
+ 'resources-read-from-component-producer2'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'producer2': {
+ 'type': 'input'
+ }
+ },
+ 'topics': {
+ 'resources-read-from-component-producer1': {
+ 'type': 'input'
+ }
+ }
+ },
+ 'name': 'consumer3',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-consumer3-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer4-error',
+ 'inputTopics': [
+ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'inflate-step': {
+ 'type': 'input'
+ }
+ },
+ 'topics': {
+ }
+ },
+ 'name': 'consumer4',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-consumer4-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer5-error',
+ 'inputTopics': [
+ 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'inflate-step-without-prefix': {
+ 'type': 'input'
+ }
+ },
+ 'topics': {
+ }
+ },
+ 'name': 'consumer5',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-read-from-component-consumer5-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_substitute_in_component test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'fake-arg-value'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'labels': {
+ 'app_name': 'scheduled-producer',
+ 'app_schedule': '30 3/8 * * *',
+ 'app_type': 'scheduled-producer'
+ },
+ 'schedule': '30 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-component-type-substitution-scheduled-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'scheduled-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-component-type-substitution-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
+ },
+ 'topics': {
+ 'resources-component-type-substitution-scheduled-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
+ }
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-component-type-substitution-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-component-type-substitution-converter-error',
+ 'inputTopics': [
+ 'resources-component-type-substitution-scheduled-producer'
+ ],
+ 'outputTopic': 'resources-component-type-substitution-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-component-type-substitution-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-component-type-substitution-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-component-type-substitution-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-component-type-substitution-filter-app',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-component-type-substitution-filter-app'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'labels': {
+ 'app_name': 'filter-app',
+ 'app_resources_requests_memory': '3G',
+ 'app_type': 'filter',
+ 'filter': 'filter-app-filter',
+ 'test_placeholder_in_placeholder': 'filter-app-filter'
+ },
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-component-type-substitution-filter-app-error',
+ 'inputTopics': [
+ 'resources-component-type-substitution-converter'
+ ],
+ 'outputTopic': 'resources-component-type-substitution-filter-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'filter-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-component-type-substitution-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'resources-component-type-substitution-filter-app': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-component-type-substitution-filter-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'filter',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestGenerate.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = [
+ {
+ 'app': {
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'app1-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'app1-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'image': 'some-image',
+ 'labels': {
+ 'pipeline': 'resources-custom-config'
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'app2-dead-letter-topic',
+ 'inputTopics': [
+ 'app1-test-topic'
+ ],
+ 'outputTopic': 'app2-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'app2-dead-letter-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
+ },
+ 'app2-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ }
+]
+
+snapshots['TestGenerate.test_with_custom_config_with_relative_defaults_path test-pipeline'] = [
+ {
+ 'app': {
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'app1-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'app1-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'image': 'some-image',
+ 'labels': {
+ 'pipeline': 'resources-custom-config'
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'app2-dead-letter-topic',
+ 'inputTopics': [
+ 'app1-test-topic'
+ ],
+ 'outputTopic': 'app2-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'app2-dead-letter-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
+ },
+ 'app2-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ }
+]
+
+snapshots['TestGenerate.test_with_env_defaults test-pipeline'] = [
+ {
+ 'app': {
+ 'image': 'fake-image',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error',
+ 'inputTopics': [
+ 'example-topic'
+ ],
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ },
+ 'topics': {
+ 'example-topic': {
+ 'type': 'input'
+ }
+ }
+ },
+ 'name': 'streams-app-development',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ },
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-kafka-connect-sink-streams-app-development-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ },
+ {
+ '_resetter': {
+ 'app': {
+ 'config': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'connector': 'resources-kafka-connect-sink-es-sink-connector'
+ },
+ 'connectorType': 'sink'
+ },
+ 'name': 'es-sink-connector',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'suffix': '-clean',
+ 'type': 'kafka-connector-resetter',
+ 'version': '1.0.4'
+ },
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-kafka-connect-sink-es-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'example-output'
+ },
+ 'name': 'es-sink-connector',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'resetter_values': {
+ },
+ 'type': 'kafka-sink-connector'
+ }
+]
diff --git a/tests/pipeline/snapshots/snap_test_manifest.py b/tests/pipeline/snapshots/snap_test_manifest.py
new file mode 100644
index 000000000..044b51c92
--- /dev/null
+++ b/tests/pipeline/snapshots/snap_test_manifest.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# snapshottest: v1 - https://goo.gl/zC4yUc
+from __future__ import unicode_literals
+
+from snapshottest import GenericRepr, Snapshot
+
+
+snapshots = Snapshot()
+
+snapshots['TestManifest.test_python_api resource 0'] = [
+ GenericRepr("{'apiVersion': 'batch/v1', 'kind': 'Job', 'metadata': {'name': 'resources-custom-config-app1', 'labels': {'app': 'resources-custom-config-app1', 'chart': 'producer-app-2.9.0', 'release': 'resources-custom-config-app1'}}, 'spec': {'template': {'metadata': {'labels': {'app': 'resources-custom-config-app1', 'release': 'resources-custom-config-app1'}}, 'spec': {'restartPolicy': 'OnFailure', 'affinity': None, 'containers': [{'name': 'resources-custom-config-app1', 'image': 'producerApp:latest', 'imagePullPolicy': 'Always', 'resources': {'limits': {'cpu': '500m', 'memory': '2G'}, 'requests': {'cpu': '200m', 'memory': '2G'}}, 'env': [{'name': 'ENV_PREFIX', 'value': 'APP_'}, {'name': 'APP_BROKERS', 'value': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092'}, {'name': 'APP_SCHEMA_REGISTRY_URL', 'value': 'http://localhost:8081/'}, {'name': 'APP_DEBUG', 'value': 'false'}, {'name': 'APP_OUTPUT_TOPIC', 'value': 'resources-custom-config-app1'}, {'name': 'JAVA_TOOL_OPTIONS', 'value': '-XX:MaxRAMPercentage=75.0 '}]}]}}, 'backoffLimit': 6}}")
+]
+
+snapshots['TestManifest.test_python_api resource 1'] = [
+ GenericRepr('{\'apiVersion\': \'v1\', \'kind\': \'ConfigMap\', \'metadata\': {\'name\': \'resources-custom-config-app2-jmx-configmap\', \'labels\': {\'app\': \'resources-custom-config-app2\', \'chart\': \'streams-app-2.9.0\', \'release\': \'resources-custom-config-app2\', \'heritage\': \'Helm\'}}, \'data\': {\'jmx-kafka-streams-app-prometheus.yml\': \'jmxUrl: service:jmx:rmi:///jndi/rmi://localhost:5555/jmxrmi\\nlowercaseOutputName: true\\nlowercaseOutputLabelNames: true\\nssl: false\\nrules:\\n - pattern: ".*"\\n\'}}'),
+ GenericRepr("{'apiVersion': 'apps/v1', 'kind': 'Deployment', 'metadata': {'name': 'resources-custom-config-app2', 'labels': {'app': 'resources-custom-config-app2', 'chart': 'streams-app-2.9.0', 'release': 'resources-custom-config-app2', 'pipeline': 'resources-custom-config'}}, 'spec': {'replicas': 1, 'selector': {'matchLabels': {'app': 'resources-custom-config-app2', 'release': 'resources-custom-config-app2'}}, 'template': {'metadata': {'annotations': {'prometheus.io/scrape': 'true', 'prometheus.io/port': '5556'}, 'labels': {'app': 'resources-custom-config-app2', 'release': 'resources-custom-config-app2', 'pipeline': 'resources-custom-config'}}, 'spec': {'affinity': {'podAntiAffinity': {'preferredDuringSchedulingIgnoredDuringExecution': [{'weight': 1, 'podAffinityTerm': {'topologyKey': 'kubernetes.io/hostname', 'labelSelector': {'matchExpressions': [{'key': 'app', 'operator': 'In', 'values': ['resources-custom-config-app2']}]}}}]}}, 'containers': [{'name': 'resources-custom-config-app2', 'image': 'some-image:latest', 'imagePullPolicy': 'Always', 'resources': {'limits': {'cpu': '500m', 'memory': '2G'}, 'requests': {'cpu': '200m', 'memory': '300Mi'}}, 'env': [{'name': 'ENV_PREFIX', 'value': 'APP_'}, {'name': 'KAFKA_JMX_PORT', 'value': '5555'}, {'name': 'APP_VOLATILE_GROUP_INSTANCE_ID', 'value': 'true'}, {'name': 'APP_BROKERS', 'value': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092'}, {'name': 'APP_SCHEMA_REGISTRY_URL', 'value': 'http://localhost:8081/'}, {'name': 'APP_DEBUG', 'value': 'false'}, {'name': 'APP_INPUT_TOPICS', 'value': 'resources-custom-config-app1'}, {'name': 'APP_OUTPUT_TOPIC', 'value': 'resources-custom-config-app2'}, {'name': 'APP_ERROR_TOPIC', 'value': 'resources-custom-config-app2-error'}, {'name': 'JAVA_TOOL_OPTIONS', 'value': '-Dcom.sun.management.jmxremote.port=5555 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -XX:MaxRAMPercentage=75.0 '}], 'ports': [{'containerPort': 5555, 'name': 'jmx'}]}, {'name': 'prometheus-jmx-exporter', 'image': 'solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143', 'command': ['java', '-XX:+UnlockExperimentalVMOptions', '-XX:+UseCGroupMemoryLimitForHeap', '-XX:MaxRAMFraction=1', '-XshowSettings:vm', '-jar', 'jmx_prometheus_httpserver.jar', '5556', '/etc/jmx-streams-app/jmx-kafka-streams-app-prometheus.yml'], 'ports': [{'containerPort': 5556}], 'resources': {'limits': {'cpu': '300m', 'memory': '2G'}, 'requests': {'cpu': '100m', 'memory': '500Mi'}}, 'volumeMounts': [{'name': 'jmx-config', 'mountPath': '/etc/jmx-streams-app'}]}], 'volumes': [{'name': 'jmx-config', 'configMap': {'name': 'resources-custom-config-app2-jmx-configmap'}}]}}}}")
+]
diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py
index c2e339fbc..0da4f9260 100644
--- a/tests/pipeline/snapshots/snap_test_pipeline.py
+++ b/tests/pipeline/snapshots/snap_test_pipeline.py
@@ -7,2303 +7,2238 @@
snapshots = Snapshot()
-snapshots['TestPipeline.test_default_config test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'nameOverride': 'resources-custom-config-app1',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'resources-custom-config-app1',
- 'schemaRegistryUrl': 'http://localhost:8081'
+snapshots['TestPipeline.test_default_config test-pipeline'] = [
+ {
+ 'app': {
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
}
},
- 'name': 'app1',
- 'namespace': 'development-namespace',
- 'prefix': 'resources-custom-config-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-custom-config-app1': {
- 'configs': {
- },
- 'partitions_count': 3,
- 'type': 'output'
- }
- }
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-custom-config-app1',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'producer-app',
- 'version': '2.9.0'
- },
- {
- 'app': {
- 'image': 'some-image',
- 'labels': {
- 'pipeline': 'resources-custom-config'
- },
- 'nameOverride': 'resources-custom-config-app2',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'errorTopic': 'resources-custom-config-app2-error',
- 'inputTopics': [
- 'resources-custom-config-app1'
- ],
- 'outputTopic': 'resources-custom-config-app2',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-custom-config-app1': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
}
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'image': 'some-image',
+ 'labels': {
+ 'pipeline': 'resources-custom-config'
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'resources-custom-config-app2-error',
+ 'inputTopics': [
+ 'resources-custom-config-app1'
+ ],
+ 'outputTopic': 'resources-custom-config-app2',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'app2',
- 'namespace': 'development-namespace',
- 'prefix': 'resources-custom-config-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-custom-config-app2': {
- 'configs': {
- },
- 'partitions_count': 3,
- 'type': 'output'
- },
- 'resources-custom-config-app2-error': {
- 'configs': {
- },
- 'partitions_count': 1,
- 'type': 'error'
- }
+ 'topics': {
+ 'resources-custom-config-app2': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
+ },
+ 'resources-custom-config-app2-error': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
}
- },
- 'type': 'streams-app',
- 'version': '2.9.0'
- }
- ]
-}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ }
+]
-snapshots['TestPipeline.test_inflate_pipeline test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'commandLine': {
- 'FAKE_ARG': 'fake-arg-value'
- },
- 'image': 'example-registry/fake-image',
- 'imageTag': '0.0.1',
- 'nameOverride': 'resources-pipeline-with-inflate-scheduled-producer',
- 'schedule': '30 3/8 * * *',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'scheduled-producer',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-inflate-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- 'com/bakdata/kafka/fake': '1.0.0'
- },
- 'topics': {
- 'resources-pipeline-with-inflate-scheduled-producer': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 12,
- 'type': 'output',
- 'value_schema': 'com.bakdata.fake.Produced'
- }
- }
- },
- 'type': 'scheduled-producer',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 1,
- 'minReplicas': 0,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- ]
- },
- 'commandLine': {
- 'CONVERT_XML': True
- },
- 'nameOverride': 'resources-pipeline-with-inflate-converter',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-pipeline-with-inflate-converter-error',
- 'inputTopics': [
- 'resources-pipeline-with-inflate-scheduled-producer'
- ],
- 'outputTopic': 'resources-pipeline-with-inflate-converter',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'converter',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-inflate-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-pipeline-with-inflate-converter': {
- 'configs': {
- 'cleanup.policy': 'compact,delete',
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-pipeline-with-inflate-converter-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 10,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
- },
- 'type': 'converter',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 4,
- 'minReplicas': 4,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- 'resources-pipeline-with-inflate-should-inflate'
- ]
- },
- 'commandLine': {
- 'TYPE': 'nothing'
- },
- 'image': 'fake-registry/filter',
- 'imageTag': '2.4.1',
- 'nameOverride': 'resources-pipeline-with-inflate-should-inflate',
- 'replicaCount': 4,
- 'resources': {
- 'requests': {
- 'memory': '3G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error',
- 'inputTopics': [
- 'resources-pipeline-with-inflate-converter'
- ],
- 'outputTopic': 'resources-pipeline-with-inflate-should-inflate',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'should-inflate',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-inflate-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-pipeline-with-inflate-should-inflate': {
- 'configs': {
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-pipeline-with-inflate-should-inflate-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
- },
- 'type': 'should-inflate',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'batch.size': '2000',
- 'behavior.on.malformed.documents': 'warn',
- 'behavior.on.null.values': 'delete',
- 'connection.compression': 'true',
- 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
- 'key.ignore': 'false',
- 'linger.ms': '5000',
- 'max.buffered.records': '20000',
- 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector',
- 'read.timeout.ms': '120000',
- 'tasks.max': '1',
- 'topics': 'resources-pipeline-with-inflate-should-inflate',
- 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1'
- },
- 'name': 'should-inflate-inflated-sink-connector',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-inflate-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-kafka-connect-resetter',
- 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
- },
- 'resetter_values': {
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'kafka-sink-connector': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'should-inflate-inflated-sink-connector': {
- 'configs': {
- },
- 'role': 'test'
- }
- }
+snapshots['TestPipeline.test_inflate_pipeline test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'fake-arg-value'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'schedule': '30 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'scheduled-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
},
- 'type': 'kafka-sink-connector',
- 'version': '1.0.4'
- },
- {
- 'app': {
- 'nameOverride': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error',
- 'inputTopics': [
- 'kafka-sink-connector'
- ],
- 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-pipeline-with-inflate-scheduled-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
}
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-inflate-converter-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-inflate-scheduled-producer'
+ ],
+ 'outputTopic': 'resources-pipeline-with-inflate-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'should-inflate-inflated-streams-app',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-inflate-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- },
- 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': {
- 'configs': {
- },
- 'type': 'output'
- }
+ 'topics': {
+ 'resources-pipeline-with-inflate-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-inflate-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-pipeline-with-inflate-should-inflate'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-inflate-converter'
+ ],
+ 'outputTopic': 'resources-pipeline-with-inflate-should-inflate',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'should-inflate',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'streams-app',
- 'version': '2.4.2'
- }
- ]
-}
-
-snapshots['TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'image': 'fake-image',
- 'nameOverride': 'resources-kafka-connect-sink-streams-app',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-kafka-connect-sink-streams-app-error',
- 'inputTopics': [
- 'example-topic'
- ],
- 'outputTopic': 'example-output',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-pipeline-with-inflate-should-inflate': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-inflate-should-inflate-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'should-inflate',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'resources-pipeline-with-inflate-should-inflate',
+ 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1'
+ },
+ 'name': 'should-inflate-inflated-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'resetter_values': {
+ },
+ 'to': {
+ 'models': {
},
- 'from': {
- 'components': {
+ 'topics': {
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
},
- 'topics': {
- 'example-topic': {
- 'type': 'input'
- }
+ 'should-inflate-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
}
+ }
+ },
+ 'type': 'kafka-sink-connector',
+ 'version': '1.0.4'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error',
+ 'inputTopics': [
+ 'kafka-sink-connector'
+ ],
+ 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'should-inflate-inflated-streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-inflate-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'streams-app',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-kafka-connect-sink-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'example-output': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'resources-kafka-connect-sink-streams-app-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ },
+ 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': {
+ 'configs': {
+ },
+ 'type': 'output'
}
- },
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'batch.size': '2000',
- 'behavior.on.malformed.documents': 'warn',
- 'behavior.on.null.values': 'delete',
- 'connection.compression': 'true',
- 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
- 'key.ignore': 'false',
- 'linger.ms': '5000',
- 'max.buffered.records': '20000',
- 'name': 'resources-kafka-connect-sink-es-sink-connector',
- 'read.timeout.ms': '120000',
- 'tasks.max': '1',
- 'topics': 'example-output'
- },
- 'name': 'es-sink-connector',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-kafka-connect-sink-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-kafka-connect-resetter',
- 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
- },
- 'resetter_values': {
- },
- 'type': 'kafka-sink-connector',
- 'version': '1.0.4'
- }
- ]
-}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
-snapshots['TestPipeline.test_load_pipeline test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'commandLine': {
- 'FAKE_ARG': 'fake-arg-value'
- },
- 'image': 'example-registry/fake-image',
- 'imageTag': '0.0.1',
- 'nameOverride': 'resources-first-pipeline-scheduled-producer',
- 'schedule': '30 3/8 * * *',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'resources-first-pipeline-scheduled-producer',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'scheduled-producer',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-first-pipeline-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- 'com/bakdata/kafka/fake': '1.0.0'
- },
- 'topics': {
- 'resources-first-pipeline-scheduled-producer': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 12,
- 'type': 'output',
- 'value_schema': 'com.bakdata.fake.Produced'
- }
- }
- },
- 'type': 'scheduled-producer',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'converter-resources-first-pipeline-converter',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 1,
- 'minReplicas': 0,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- ]
- },
- 'commandLine': {
- 'CONVERT_XML': True
- },
- 'nameOverride': 'resources-first-pipeline-converter',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-first-pipeline-converter-error',
- 'inputTopics': [
- 'resources-first-pipeline-scheduled-producer'
- ],
- 'outputTopic': 'resources-first-pipeline-converter',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'converter',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-first-pipeline-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-first-pipeline-converter': {
- 'configs': {
- 'cleanup.policy': 'compact,delete',
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-first-pipeline-converter-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 10,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
+snapshots['TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline'] = [
+ {
+ 'app': {
+ 'image': 'fake-image',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-kafka-connect-sink-streams-app-error',
+ 'inputTopics': [
+ 'example-topic'
+ ],
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
},
- 'type': 'converter',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 4,
- 'minReplicas': 4,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name'
- ]
- },
- 'commandLine': {
- 'TYPE': 'nothing'
- },
- 'image': 'fake-registry/filter',
- 'imageTag': '2.4.1',
- 'nameOverride': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
- 'replicaCount': 4,
- 'resources': {
- 'requests': {
- 'memory': '3G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error',
- 'inputTopics': [
- 'resources-first-pipeline-converter'
- ],
- 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'example-topic': {
+ 'type': 'input'
}
+ }
+ },
+ 'name': 'streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-first-pipeline-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': {
- 'configs': {
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-kafka-connect-sink-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'type': 'filter',
- 'version': '2.4.2'
- }
- ]
-}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-kafka-connect-sink-es-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'example-output'
+ },
+ 'name': 'es-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'resetter_values': {
+ },
+ 'type': 'kafka-sink-connector',
+ 'version': '1.0.4'
+ }
+]
-snapshots['TestPipeline.test_model_serialization test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'nameOverride': 'resources-pipeline-with-paths-account-producer',
- 'streams': {
- 'brokers': 'test',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'out',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
+snapshots['TestPipeline.test_load_pipeline test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'fake-arg-value'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'schedule': '30 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-first-pipeline-scheduled-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'scheduled-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-first-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
},
- 'name': 'account-producer',
- 'namespace': 'test',
- 'prefix': 'resources-pipeline-with-paths-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'ca_file': 'my-cert.cert',
- 'insecure_skip_tls_verify': False,
- 'password': '$CI_JOB_TOKEN',
- 'username': 'masked'
- },
- 'repository_name': 'masked',
- 'url': 'masked'
- },
- 'type': 'producer-app',
- 'version': '2.4.2'
- }
- ]
-}
-
-snapshots['TestPipeline.test_no_input_topic test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'commandLine': {
- 'CONVERT_XML': True
- },
- 'nameOverride': 'resources-no-input-topic-pipeline-app1',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-no-input-topic-pipeline-app1-error',
- 'inputPattern': '.*',
- 'outputTopic': 'example-output',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-first-pipeline-scheduled-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
}
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-first-pipeline-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-first-pipeline-converter-error',
+ 'inputTopics': [
+ 'resources-first-pipeline-scheduled-producer'
+ ],
+ 'outputTopic': 'resources-first-pipeline-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-first-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'from': {
- 'components': {
+ 'topics': {
+ 'resources-first-pipeline-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
},
- 'topics': {
- '.*': {
- 'type': 'pattern'
- }
- }
- },
- 'name': 'app1',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-no-input-topic-pipeline-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'example-output': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'resources-no-input-topic-pipeline-app1-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
- },
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'nameOverride': 'resources-no-input-topic-pipeline-app2',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-no-input-topic-pipeline-app2-error',
- 'extraOutputTopics': {
- 'extra': 'example-output-extra',
- 'test-output': 'test-output-extra'
- },
- 'inputTopics': [
- 'example-output'
- ],
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'resources-first-pipeline-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error',
+ 'inputTopics': [
+ 'resources-first-pipeline-converter'
+ ],
+ 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-first-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'app2',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-no-input-topic-pipeline-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'example-output-extra': {
- 'configs': {
- },
- 'role': 'extra'
- },
- 'resources-no-input-topic-pipeline-app2-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- },
- 'test-output-extra': {
- 'configs': {
- },
- 'role': 'test-output'
- }
+ 'topics': {
+ 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'type': 'streams-app',
- 'version': '2.4.2'
- }
- ]
-}
+ }
+ },
+ 'type': 'filter',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestPipeline.test_model_serialization test-pipeline'] = [
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'test',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'out',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'account-producer',
+ 'namespace': 'test',
+ 'prefix': 'resources-pipeline-with-paths-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'ca_file': 'my-cert.cert',
+ 'insecure_skip_tls_verify': False,
+ 'password': '$CI_JOB_TOKEN',
+ 'username': 'masked'
+ },
+ 'repository_name': 'masked',
+ 'url': 'masked'
+ },
+ 'type': 'producer-app',
+ 'version': '2.4.2'
+ }
+]
-snapshots['TestPipeline.test_no_user_defined_components test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'image': 'fake-image',
- 'nameOverride': 'resources-no-user-defined-components-streams-app',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-no-user-defined-components-streams-app-error',
- 'inputTopics': [
- 'example-topic'
- ],
- 'outputTopic': 'example-output',
- 'schemaRegistryUrl': 'http://localhost:8081'
+snapshots['TestPipeline.test_no_input_topic test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-no-input-topic-pipeline-app1-error',
+ 'inputPattern': '.*',
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ },
+ 'topics': {
+ '.*': {
+ 'type': 'pattern'
}
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-no-input-topic-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'from': {
- 'components': {
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
},
- 'topics': {
- 'example-topic': {
- 'type': 'input'
- }
+ 'resources-no-input-topic-pipeline-app1-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-no-input-topic-pipeline-app2-error',
+ 'extraOutputTopics': {
+ 'extra': 'example-output-extra',
+ 'test-output': 'test-output-extra'
+ },
+ 'inputTopics': [
+ 'example-output'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-no-input-topic-pipeline-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'streams-app',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-no-user-defined-components-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'example-output': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'resources-no-user-defined-components-streams-app-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'example-output-extra': {
+ 'configs': {
+ },
+ 'role': 'extra'
+ },
+ 'resources-no-input-topic-pipeline-app2-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ },
+ 'test-output-extra': {
+ 'configs': {
+ },
+ 'role': 'test-output'
}
- },
- 'type': 'streams-app',
- 'version': '2.4.2'
- }
- ]
-}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
-snapshots['TestPipeline.test_pipelines_with_env_values test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'commandLine': {
- 'FAKE_ARG': 'override-arg'
- },
- 'image': 'example-registry/fake-image',
- 'imageTag': '0.0.1',
- 'nameOverride': 'resources-pipeline-with-envs-input-producer',
- 'schedule': '20 3/8 * * *',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'resources-pipeline-with-envs-input-producer',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
+snapshots['TestPipeline.test_no_user_defined_components test-pipeline'] = [
+ {
+ 'app': {
+ 'image': 'fake-image',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-no-user-defined-components-streams-app-error',
+ 'inputTopics': [
+ 'example-topic'
+ ],
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
},
- 'name': 'input-producer',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-envs-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- 'com/bakdata/kafka/fake': '1.0.0'
- },
- 'topics': {
- 'resources-pipeline-with-envs-input-producer': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 12,
- 'type': 'output',
- 'value_schema': 'com.bakdata.fake.Produced'
- }
+ 'topics': {
+ 'example-topic': {
+ 'type': 'input'
}
+ }
+ },
+ 'name': 'streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-no-user-defined-components-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'scheduled-producer',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'converter-resources-pipeline-with-envs-converter',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 1,
- 'minReplicas': 0,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- ]
- },
- 'commandLine': {
- 'CONVERT_XML': True
- },
- 'nameOverride': 'resources-pipeline-with-envs-converter',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-pipeline-with-envs-converter-error',
- 'inputTopics': [
- 'resources-pipeline-with-envs-input-producer'
- ],
- 'outputTopic': 'resources-pipeline-with-envs-converter',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-no-user-defined-components-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestPipeline.test_pipelines_with_env_values test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'override-arg'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'schedule': '20 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-pipeline-with-envs-input-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'input-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-envs-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
},
- 'name': 'converter',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-envs-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-pipeline-with-envs-converter': {
- 'configs': {
- 'cleanup.policy': 'compact,delete',
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-pipeline-with-envs-converter-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 10,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-pipeline-with-envs-input-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
}
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-pipeline-with-envs-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-envs-converter-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-envs-input-producer'
+ ],
+ 'outputTopic': 'resources-pipeline-with-envs-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-envs-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'converter',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'filter-resources-pipeline-with-envs-filter',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 4,
- 'minReplicas': 4,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- 'resources-pipeline-with-envs-filter'
- ]
- },
- 'commandLine': {
- 'TYPE': 'nothing'
- },
- 'image': 'fake-registry/filter',
- 'imageTag': '2.4.1',
- 'nameOverride': 'resources-pipeline-with-envs-filter',
- 'replicaCount': 4,
- 'resources': {
- 'requests': {
- 'memory': '3G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-pipeline-with-envs-filter-error',
- 'inputTopics': [
- 'resources-pipeline-with-envs-converter'
- ],
- 'outputTopic': 'resources-pipeline-with-envs-filter',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-pipeline-with-envs-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-envs-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-pipeline-with-envs-filter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-pipeline-with-envs-filter'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-pipeline-with-envs-filter-error',
+ 'inputTopics': [
+ 'resources-pipeline-with-envs-converter'
+ ],
+ 'outputTopic': 'resources-pipeline-with-envs-filter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'filter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-pipeline-with-envs-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'filter',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-pipeline-with-envs-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-pipeline-with-envs-filter': {
- 'configs': {
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-pipeline-with-envs-filter-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-pipeline-with-envs-filter': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-pipeline-with-envs-filter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'type': 'filter',
- 'version': '2.4.2'
- }
- ]
-}
+ }
+ },
+ 'type': 'filter',
+ 'version': '2.4.2'
+ }
+]
-snapshots['TestPipeline.test_prefix_pipeline_component test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'debug': True,
- 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer',
- 'imageTag': '1.0.0',
- 'nameOverride': 'from-pipeline-component-account-producer',
- 'prometheus': {
- 'jmx': {
- 'enabled': False
- }
- },
- 'replicaCount': 1,
- 'schedule': '0 12 * * *',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'schemaRegistryUrl': 'http://localhost:8081'
- },
- 'suspend': True
- },
- 'name': 'account-producer',
- 'namespace': '${NAMESPACE}',
- 'prefix': 'from-pipeline-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'type': 'producer-app',
- 'version': '2.9.0'
- }
- ]
-}
+snapshots['TestPipeline.test_prefix_pipeline_component test-pipeline'] = [
+ {
+ 'app': {
+ 'debug': True,
+ 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer',
+ 'imageTag': '1.0.0',
+ 'prometheus': {
+ 'jmx': {
+ 'enabled': False
+ }
+ },
+ 'replicaCount': 1,
+ 'schedule': '0 12 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ },
+ 'suspend': True
+ },
+ 'name': 'account-producer',
+ 'namespace': '${NAMESPACE}',
+ 'prefix': 'from-pipeline-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ }
+]
-snapshots['TestPipeline.test_read_from_component test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-producer1',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'resources-read-from-component-producer1',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'producer1',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-producer1': {
- 'configs': {
- },
- 'type': 'output'
- }
- }
- },
- 'type': 'producer-app',
- 'version': '2.4.2'
+snapshots['TestPipeline.test_read_from_component test-pipeline'] = [
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-read-from-component-producer1',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
},
- {
- 'app': {
- 'nameOverride': 'producer2',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'resources-read-from-component-producer2',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'producer2',
- 'namespace': 'example-namespace',
- 'prefix': '',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-producer2': {
- 'configs': {
- },
- 'type': 'output'
- }
- }
- },
- 'type': 'producer-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'filter-resources-read-from-component-inflate-step',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 1,
- 'minReplicas': 0,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- 'resources-read-from-component-inflate-step'
- ]
- },
- 'image': 'fake-registry/filter',
- 'imageTag': '2.4.1',
- 'nameOverride': 'resources-read-from-component-inflate-step',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-inflate-step-error',
- 'inputTopics': [
- 'resources-read-from-component-producer2'
- ],
- 'outputTopic': 'resources-read-from-component-inflate-step',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
- },
- 'name': 'inflate-step',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-inflate-step': {
- 'configs': {
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-read-from-component-inflate-step-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
- },
- 'type': 'should-inflate',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'batch.size': '2000',
- 'behavior.on.malformed.documents': 'warn',
- 'behavior.on.null.values': 'delete',
- 'connection.compression': 'true',
- 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
- 'key.ignore': 'false',
- 'linger.ms': '5000',
- 'max.buffered.records': '20000',
- 'name': 'resources-read-from-component-inflate-step-inflated-sink-connector',
- 'read.timeout.ms': '120000',
- 'tasks.max': '1',
- 'topics': 'resources-read-from-component-inflate-step',
- 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1'
- },
- 'name': 'inflate-step-inflated-sink-connector',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-kafka-connect-resetter',
- 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
- },
- 'resetter_values': {
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'inflate-step-inflated-sink-connector': {
- 'configs': {
- },
- 'role': 'test'
- },
- 'kafka-sink-connector': {
- 'configs': {
- },
- 'type': 'output'
- }
- }
- },
- 'type': 'kafka-sink-connector',
- 'version': '1.0.4'
- },
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-inflate-step-inflated-streams-app',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error',
- 'inputTopics': [
- 'kafka-sink-connector'
- ],
- 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
+ 'name': 'producer1',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'inflate-step-inflated-streams-app',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'resources-read-from-component-inflate-step-inflated-streams-app-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-read-from-component-producer1': {
+ 'configs': {
+ },
+ 'type': 'output'
}
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-read-from-component-producer2',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'producer2',
+ 'namespace': 'example-namespace',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 1,
- 'minReplicas': 0,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- 'resources-read-from-component-inflate-step-without-prefix'
- ]
- },
- 'image': 'fake-registry/filter',
- 'imageTag': '2.4.1',
- 'nameOverride': 'inflate-step-without-prefix',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error',
- 'inputTopics': [
- 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
- ],
- 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-read-from-component-producer2': {
+ 'configs': {
+ },
+ 'type': 'output'
}
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-read-from-component-inflate-step',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-read-from-component-inflate-step'
+ ]
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer2'
+ ],
+ 'outputTopic': 'resources-read-from-component-inflate-step',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'inflate-step-without-prefix',
- 'namespace': 'example-namespace',
- 'prefix': '',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-inflate-step-without-prefix': {
- 'configs': {
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-read-from-component-inflate-step-without-prefix-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-read-from-component-inflate-step': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-read-from-component-inflate-step-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'should-inflate',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-read-from-component-inflate-step-inflated-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'resources-read-from-component-inflate-step',
+ 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1'
+ },
+ 'name': 'inflate-step-inflated-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'resetter_values': {
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'should-inflate',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'batch.size': '2000',
- 'behavior.on.malformed.documents': 'warn',
- 'behavior.on.null.values': 'delete',
- 'connection.compression': 'true',
- 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
- 'key.ignore': 'false',
- 'linger.ms': '5000',
- 'max.buffered.records': '20000',
- 'name': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector',
- 'read.timeout.ms': '120000',
- 'tasks.max': '1',
- 'topics': 'resources-read-from-component-inflate-step-without-prefix',
- 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1'
- },
- 'name': 'inflate-step-without-prefix-inflated-sink-connector',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-kafka-connect-resetter',
- 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
- },
- 'resetter_values': {
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'inflate-step-without-prefix-inflated-sink-connector': {
- 'configs': {
- },
- 'role': 'test'
- },
- 'kafka-sink-connector': {
- 'configs': {
- },
- 'type': 'output'
- }
+ 'topics': {
+ 'inflate-step-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ },
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
}
+ }
+ },
+ 'type': 'kafka-sink-connector',
+ 'version': '1.0.4'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error',
+ 'inputTopics': [
+ 'kafka-sink-connector'
+ ],
+ 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step-inflated-streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'kafka-sink-connector',
- 'version': '1.0.4'
- },
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error',
- 'inputTopics': [
- 'kafka-sink-connector'
- ],
- 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-read-from-component-inflate-step-inflated-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-read-from-component-inflate-step-without-prefix'
+ ]
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error',
+ 'inputTopics': [
+ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
+ ],
+ 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step-without-prefix',
+ 'namespace': 'example-namespace',
+ 'prefix': '',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'inflate-step-without-prefix-inflated-streams-app',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-read-from-component-inflate-step-without-prefix': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-read-from-component-inflate-step-without-prefix-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'should-inflate',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'resources-read-from-component-inflate-step-without-prefix',
+ 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1'
+ },
+ 'name': 'inflate-step-without-prefix-inflated-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'resetter_values': {
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-consumer1',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-consumer1-error',
- 'inputTopics': [
- 'resources-read-from-component-producer1'
- ],
- 'outputTopic': 'resources-read-from-component-consumer1',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'inflate-step-without-prefix-inflated-sink-connector': {
+ 'configs': {
+ },
+ 'role': 'test'
+ },
+ 'kafka-sink-connector': {
+ 'configs': {
+ },
+ 'type': 'output'
}
+ }
+ },
+ 'type': 'kafka-sink-connector',
+ 'version': '1.0.4'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error',
+ 'inputTopics': [
+ 'kafka-sink-connector'
+ ],
+ 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'inflate-step-without-prefix-inflated-streams-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'from': {
- 'components': {
- 'producer1': {
- 'type': 'input'
- }
+ 'topics': {
+ 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': {
+ 'configs': {
+ },
+ 'type': 'output'
},
- 'topics': {
+ 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'name': 'consumer1',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-consumer1': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'resources-read-from-component-consumer1-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer1-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer1'
+ ],
+ 'outputTopic': 'resources-read-from-component-consumer1',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'producer1': {
+ 'type': 'input'
}
},
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-consumer2',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-consumer2-error',
- 'inputTopics': [
- 'resources-read-from-component-producer1',
- 'resources-read-from-component-consumer1'
- ],
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
+ 'topics': {
+ }
+ },
+ 'name': 'consumer1',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'from': {
- 'components': {
- 'consumer1': {
- 'type': 'input'
+ 'topics': {
+ 'resources-read-from-component-consumer1': {
+ 'configs': {
},
- 'producer1': {
- 'type': 'input'
- }
+ 'type': 'output'
},
- 'topics': {
- }
- },
- 'name': 'consumer2',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-consumer2-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
- },
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-consumer3',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-consumer3-error',
- 'inputTopics': [
- 'resources-read-from-component-producer1',
- 'resources-read-from-component-producer2'
- ],
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'resources-read-from-component-consumer1-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'from': {
- 'components': {
- 'producer2': {
- 'type': 'input'
- }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer2-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer1',
+ 'resources-read-from-component-consumer1'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'consumer1': {
+ 'type': 'input'
},
- 'topics': {
- 'resources-read-from-component-producer1': {
- 'type': 'input'
- }
+ 'producer1': {
+ 'type': 'input'
}
},
- 'name': 'consumer3',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-consumer3-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
+ 'topics': {
+ }
+ },
+ 'name': 'consumer2',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-consumer4',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-consumer4-error',
- 'inputTopics': [
- 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
- ],
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-read-from-component-consumer2-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'from': {
- 'components': {
- 'inflate-step': {
- 'type': 'input'
- }
- },
- 'topics': {
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer3-error',
+ 'inputTopics': [
+ 'resources-read-from-component-producer1',
+ 'resources-read-from-component-producer2'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'producer2': {
+ 'type': 'input'
}
},
- 'name': 'consumer4',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-consumer4-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-read-from-component-producer1': {
+ 'type': 'input'
}
+ }
+ },
+ 'name': 'consumer3',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'streams-app',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'nameOverride': 'resources-read-from-component-consumer5',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-read-from-component-consumer5-error',
- 'inputTopics': [
- 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app'
- ],
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-read-from-component-consumer3-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'from': {
- 'components': {
- 'inflate-step-without-prefix': {
- 'type': 'input'
- }
- },
- 'topics': {
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer4-error',
+ 'inputTopics': [
+ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'inflate-step': {
+ 'type': 'input'
}
},
- 'name': 'consumer5',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-read-from-component-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-read-from-component-consumer5-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
- }
+ 'topics': {
+ }
+ },
+ 'name': 'consumer4',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'streams-app',
- 'version': '2.4.2'
- }
- ]
-}
-
-snapshots['TestPipeline.test_substitute_in_component test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'commandLine': {
- 'FAKE_ARG': 'fake-arg-value'
- },
- 'image': 'example-registry/fake-image',
- 'imageTag': '0.0.1',
- 'labels': {
- 'app_name': 'scheduled-producer',
- 'app_schedule': '30 3/8 * * *',
- 'app_type': 'scheduled-producer'
- },
- 'nameOverride': 'resources-component-type-substitution-scheduled-producer',
- 'schedule': '30 3/8 * * *',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'resources-component-type-substitution-scheduled-producer',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-read-from-component-consumer4-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'name': 'scheduled-producer',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-component-type-substitution-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- 'com/bakdata/kafka/fake': '1.0.0'
- },
- 'topics': {
- 'resources-component-type-substitution-scheduled-producer': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 12,
- 'type': 'output',
- 'value_schema': 'com.bakdata.fake.Produced'
- }
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-read-from-component-consumer5-error',
+ 'inputTopics': [
+ 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app'
+ ],
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
+ 'inflate-step-without-prefix': {
+ 'type': 'input'
}
},
- 'type': 'scheduled-producer',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'converter-resources-component-type-substitution-converter',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 1,
- 'minReplicas': 0,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- ]
- },
- 'commandLine': {
- 'CONVERT_XML': True
- },
- 'nameOverride': 'resources-component-type-substitution-converter',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-component-type-substitution-converter-error',
- 'inputTopics': [
- 'resources-component-type-substitution-scheduled-producer'
- ],
- 'outputTopic': 'resources-component-type-substitution-converter',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
+ 'topics': {
+ }
+ },
+ 'name': 'consumer5',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-read-from-component-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'converter',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-component-type-substitution-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-component-type-substitution-converter': {
- 'configs': {
- 'cleanup.policy': 'compact,delete',
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-component-type-substitution-converter-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 10,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-read-from-component-consumer5-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.4.2'
+ }
+]
+
+snapshots['TestPipeline.test_substitute_in_component test-pipeline'] = [
+ {
+ 'app': {
+ 'commandLine': {
+ 'FAKE_ARG': 'fake-arg-value'
+ },
+ 'image': 'example-registry/fake-image',
+ 'imageTag': '0.0.1',
+ 'labels': {
+ 'app_name': 'scheduled-producer',
+ 'app_schedule': '30 3/8 * * *',
+ 'app_type': 'scheduled-producer'
+ },
+ 'schedule': '30 3/8 * * *',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'resources-component-type-substitution-scheduled-producer',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'scheduled-producer',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-component-type-substitution-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
+ 'com/bakdata/kafka/fake': '1.0.0'
},
- 'type': 'converter',
- 'version': '2.4.2'
- },
- {
- 'app': {
- 'autoscaling': {
- 'consumerGroup': 'filter-resources-component-type-substitution-filter-app',
- 'cooldownPeriod': 300,
- 'enabled': True,
- 'lagThreshold': 10000,
- 'maxReplicas': 4,
- 'minReplicas': 4,
- 'offsetResetPolicy': 'earliest',
- 'pollingInterval': 30,
- 'topics': [
- 'resources-component-type-substitution-filter-app'
- ]
- },
- 'commandLine': {
- 'TYPE': 'nothing'
- },
- 'image': 'fake-registry/filter',
- 'imageTag': '2.4.1',
- 'labels': {
- 'app_name': 'filter-app',
- 'app_resources_requests_memory': '3G',
- 'app_type': 'filter',
- 'filter': 'filter-app-filter',
- 'test_placeholder_in_placeholder': 'filter-app-filter'
- },
- 'nameOverride': 'resources-component-type-substitution-filter-app',
- 'replicaCount': 4,
- 'resources': {
- 'requests': {
- 'memory': '3G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-component-type-substitution-filter-app-error',
- 'inputTopics': [
- 'resources-component-type-substitution-converter'
- ],
- 'outputTopic': 'resources-component-type-substitution-filter-app',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'resources-component-type-substitution-scheduled-producer': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 12,
+ 'type': 'output',
+ 'value_schema': 'com.bakdata.fake.Produced'
}
+ }
+ },
+ 'type': 'scheduled-producer',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'converter-resources-component-type-substitution-converter',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 1,
+ 'minReplicas': 0,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ ]
+ },
+ 'commandLine': {
+ 'CONVERT_XML': True
+ },
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-component-type-substitution-converter-error',
+ 'inputTopics': [
+ 'resources-component-type-substitution-scheduled-producer'
+ ],
+ 'outputTopic': 'resources-component-type-substitution-converter',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'converter',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-component-type-substitution-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'filter-app',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-component-type-substitution-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'resources-component-type-substitution-filter-app': {
- 'configs': {
- 'retention.ms': '-1'
- },
- 'partitions_count': 50,
- 'type': 'output'
- },
- 'resources-component-type-substitution-filter-app-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'resources-component-type-substitution-converter': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete',
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-component-type-substitution-converter-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 10,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
+ }
+ },
+ 'type': 'converter',
+ 'version': '2.4.2'
+ },
+ {
+ 'app': {
+ 'autoscaling': {
+ 'consumerGroup': 'filter-resources-component-type-substitution-filter-app',
+ 'cooldownPeriod': 300,
+ 'enabled': True,
+ 'lagThreshold': 10000,
+ 'maxReplicas': 4,
+ 'minReplicas': 4,
+ 'offsetResetPolicy': 'earliest',
+ 'pollingInterval': 30,
+ 'topics': [
+ 'resources-component-type-substitution-filter-app'
+ ]
+ },
+ 'commandLine': {
+ 'TYPE': 'nothing'
+ },
+ 'image': 'fake-registry/filter',
+ 'imageTag': '2.4.1',
+ 'labels': {
+ 'app_name': 'filter-app',
+ 'app_resources_requests_memory': '3G',
+ 'app_type': 'filter',
+ 'filter': 'filter-app-filter',
+ 'test_placeholder_in_placeholder': 'filter-app-filter'
+ },
+ 'replicaCount': 4,
+ 'resources': {
+ 'requests': {
+ 'memory': '3G'
+ }
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-component-type-substitution-filter-app-error',
+ 'inputTopics': [
+ 'resources-component-type-substitution-converter'
+ ],
+ 'outputTopic': 'resources-component-type-substitution-filter-app',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'filter-app',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-component-type-substitution-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'filter',
- 'version': '2.4.2'
- }
- ]
-}
+ 'topics': {
+ 'resources-component-type-substitution-filter-app': {
+ 'configs': {
+ 'retention.ms': '-1'
+ },
+ 'partitions_count': 50,
+ 'type': 'output'
+ },
+ 'resources-component-type-substitution-filter-app-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
+ }
+ }
+ },
+ 'type': 'filter',
+ 'version': '2.4.2'
+ }
+]
-snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'nameOverride': 'resources-custom-config-app1',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'app1-test-topic',
- 'schemaRegistryUrl': 'http://localhost:8081'
+snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = [
+ {
+ 'app': {
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
}
},
- 'name': 'app1',
- 'namespace': 'development-namespace',
- 'prefix': 'resources-custom-config-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'app1-test-topic': {
- 'configs': {
- },
- 'partitions_count': 3,
- 'type': 'output'
- }
- }
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'app1-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'producer-app',
- 'version': '2.9.0'
- },
- {
- 'app': {
- 'image': 'some-image',
- 'labels': {
- 'pipeline': 'resources-custom-config'
- },
- 'nameOverride': 'resources-custom-config-app2',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'errorTopic': 'app2-dead-letter-topic',
- 'inputTopics': [
- 'app1-test-topic'
- ],
- 'outputTopic': 'app2-test-topic',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'app1-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
}
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'image': 'some-image',
+ 'labels': {
+ 'pipeline': 'resources-custom-config'
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'app2-dead-letter-topic',
+ 'inputTopics': [
+ 'app1-test-topic'
+ ],
+ 'outputTopic': 'app2-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'app2',
- 'namespace': 'development-namespace',
- 'prefix': 'resources-custom-config-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'app2-dead-letter-topic': {
- 'configs': {
- },
- 'partitions_count': 1,
- 'type': 'error'
- },
- 'app2-test-topic': {
- 'configs': {
- },
- 'partitions_count': 3,
- 'type': 'output'
- }
+ 'topics': {
+ 'app2-dead-letter-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
+ },
+ 'app2-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
}
- },
- 'type': 'streams-app',
- 'version': '2.9.0'
- }
- ]
-}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ }
+]
-snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'nameOverride': 'resources-custom-config-app1',
- 'resources': {
- 'limits': {
- 'memory': '2G'
- },
- 'requests': {
- 'memory': '2G'
- }
- },
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'extraOutputTopics': {
- },
- 'outputTopic': 'app1-test-topic',
- 'schemaRegistryUrl': 'http://localhost:8081'
+snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = [
+ {
+ 'app': {
+ 'resources': {
+ 'limits': {
+ 'memory': '2G'
+ },
+ 'requests': {
+ 'memory': '2G'
}
},
- 'name': 'app1',
- 'namespace': 'development-namespace',
- 'prefix': 'resources-custom-config-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'app1-test-topic': {
- 'configs': {
- },
- 'partitions_count': 3,
- 'type': 'output'
- }
- }
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'extraOutputTopics': {
+ },
+ 'outputTopic': 'app1-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app1',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'type': 'producer-app',
- 'version': '2.9.0'
- },
- {
- 'app': {
- 'image': 'some-image',
- 'labels': {
- 'pipeline': 'resources-custom-config'
- },
- 'nameOverride': 'resources-custom-config-app2',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'errorTopic': 'app2-dead-letter-topic',
- 'inputTopics': [
- 'app1-test-topic'
- ],
- 'outputTopic': 'app2-test-topic',
- 'schemaRegistryUrl': 'http://localhost:8081'
+ 'topics': {
+ 'app1-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
}
+ }
+ },
+ 'type': 'producer-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'image': 'some-image',
+ 'labels': {
+ 'pipeline': 'resources-custom-config'
+ },
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'errorTopic': 'app2-dead-letter-topic',
+ 'inputTopics': [
+ 'app1-test-topic'
+ ],
+ 'outputTopic': 'app2-test-topic',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'name': 'app2',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-custom-config-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'app2',
- 'namespace': 'development-namespace',
- 'prefix': 'resources-custom-config-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'app2-dead-letter-topic': {
- 'configs': {
- },
- 'partitions_count': 1,
- 'type': 'error'
- },
- 'app2-test-topic': {
- 'configs': {
- },
- 'partitions_count': 3,
- 'type': 'output'
- }
+ 'topics': {
+ 'app2-dead-letter-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 1,
+ 'type': 'error'
+ },
+ 'app2-test-topic': {
+ 'configs': {
+ },
+ 'partitions_count': 3,
+ 'type': 'output'
}
- },
- 'type': 'streams-app',
- 'version': '2.9.0'
- }
- ]
-}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ }
+]
-snapshots['TestPipeline.test_with_env_defaults test-pipeline'] = {
- 'components': [
- {
- 'app': {
- 'image': 'fake-image',
- 'nameOverride': 'resources-kafka-connect-sink-streams-app-development',
- 'streams': {
- 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
- 'config': {
- 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
- },
- 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error',
- 'inputTopics': [
- 'example-topic'
- ],
- 'outputTopic': 'example-output',
- 'schemaRegistryUrl': 'http://localhost:8081'
- }
+snapshots['TestPipeline.test_with_env_defaults test-pipeline'] = [
+ {
+ 'app': {
+ 'image': 'fake-image',
+ 'streams': {
+ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+ 'config': {
+ 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator'
+ },
+ 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error',
+ 'inputTopics': [
+ 'example-topic'
+ ],
+ 'outputTopic': 'example-output',
+ 'schemaRegistryUrl': 'http://localhost:8081/'
+ }
+ },
+ 'from': {
+ 'components': {
},
- 'from': {
- 'components': {
- },
- 'topics': {
- 'example-topic': {
- 'type': 'input'
- }
+ 'topics': {
+ 'example-topic': {
+ 'type': 'input'
}
+ }
+ },
+ 'name': 'streams-app-development',
+ 'namespace': 'development-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-streams-bootstrap',
+ 'url': 'https://bakdata.github.io/streams-bootstrap/'
+ },
+ 'to': {
+ 'models': {
},
- 'name': 'streams-app-development',
- 'namespace': 'development-namespace',
- 'prefix': 'resources-kafka-connect-sink-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-streams-bootstrap',
- 'url': 'https://bakdata.github.io/streams-bootstrap/'
- },
- 'to': {
- 'models': {
- },
- 'topics': {
- 'example-output': {
- 'configs': {
- },
- 'type': 'output'
- },
- 'resources-kafka-connect-sink-streams-app-development-error': {
- 'configs': {
- 'cleanup.policy': 'compact,delete'
- },
- 'partitions_count': 1,
- 'type': 'error',
- 'value_schema': 'com.bakdata.kafka.DeadLetter'
- }
+ 'topics': {
+ 'example-output': {
+ 'configs': {
+ },
+ 'type': 'output'
+ },
+ 'resources-kafka-connect-sink-streams-app-development-error': {
+ 'configs': {
+ 'cleanup.policy': 'compact,delete'
+ },
+ 'partitions_count': 1,
+ 'type': 'error',
+ 'value_schema': 'com.bakdata.kafka.DeadLetter'
}
- },
- 'type': 'streams-app',
- 'version': '2.9.0'
- },
- {
- 'app': {
- 'batch.size': '2000',
- 'behavior.on.malformed.documents': 'warn',
- 'behavior.on.null.values': 'delete',
- 'connection.compression': 'true',
- 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
- 'key.ignore': 'false',
- 'linger.ms': '5000',
- 'max.buffered.records': '20000',
- 'name': 'resources-kafka-connect-sink-es-sink-connector',
- 'read.timeout.ms': '120000',
- 'tasks.max': '1',
- 'topics': 'example-output'
- },
- 'name': 'es-sink-connector',
- 'namespace': 'example-namespace',
- 'prefix': 'resources-kafka-connect-sink-',
- 'repo_config': {
- 'repo_auth_flags': {
- 'insecure_skip_tls_verify': False
- },
- 'repository_name': 'bakdata-kafka-connect-resetter',
- 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
- },
- 'resetter_values': {
- },
- 'type': 'kafka-sink-connector',
- 'version': '1.0.4'
- }
- ]
-}
+ }
+ },
+ 'type': 'streams-app',
+ 'version': '2.9.0'
+ },
+ {
+ 'app': {
+ 'batch.size': '2000',
+ 'behavior.on.malformed.documents': 'warn',
+ 'behavior.on.null.values': 'delete',
+ 'connection.compression': 'true',
+ 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
+ 'key.ignore': 'false',
+ 'linger.ms': '5000',
+ 'max.buffered.records': '20000',
+ 'name': 'resources-kafka-connect-sink-es-sink-connector',
+ 'read.timeout.ms': '120000',
+ 'tasks.max': '1',
+ 'topics': 'example-output'
+ },
+ 'name': 'es-sink-connector',
+ 'namespace': 'example-namespace',
+ 'prefix': 'resources-kafka-connect-sink-',
+ 'repo_config': {
+ 'repo_auth_flags': {
+ 'insecure_skip_tls_verify': False
+ },
+ 'repository_name': 'bakdata-kafka-connect-resetter',
+ 'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+ },
+ 'resetter_values': {
+ },
+ 'type': 'kafka-sink-connector',
+ 'version': '1.0.4'
+ }
+]
diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py
index 86e2c8b8e..7964b2102 100644
--- a/tests/pipeline/test_components/components.py
+++ b/tests/pipeline/test_components/components.py
@@ -5,15 +5,18 @@
Schema,
SchemaProvider,
)
-from kpops.components import KafkaSinkConnector
-from kpops.components.base_components import PipelineComponent
+from kpops.components import (
+ KafkaSinkConnector,
+ PipelineComponent,
+ ProducerApp,
+ StreamsApp,
+)
from kpops.components.base_components.models import ModelName, ModelVersion, TopicName
from kpops.components.base_components.models.to_section import (
OutputTopicTypes,
TopicConfig,
ToSection,
)
-from kpops.components.streams_bootstrap import ProducerApp, StreamsApp
class ScheduledProducer(ProducerApp):
@@ -43,18 +46,16 @@ def inflate(self) -> list[PipelineComponent]:
name=f"{self.name}-inflated-sink-connector",
config=self.config,
handlers=self.handlers,
- namespace="example-namespace",
- # FIXME
- app={ # type: ignore[reportGeneralTypeIssues]
+ app={ # type: ignore[reportGeneralTypeIssues]; the required `connector.class` comes from defaults during enrichment
"topics": topic_name,
"transforms.changeTopic.replacement": f"{topic_name}-index-v1",
},
to=ToSection(
topics={
- TopicName("${component_type}"): TopicConfig(
+ TopicName("${component.type}"): TopicConfig(
type=OutputTopicTypes.OUTPUT
),
- TopicName("${component_name}"): TopicConfig(
+ TopicName("${component.name}"): TopicConfig(
type=None, role="test"
),
}
@@ -68,10 +69,10 @@ def inflate(self) -> list[PipelineComponent]:
to=ToSection( # type: ignore[reportGeneralTypeIssues]
topics={
TopicName(
- f"{self.full_name}-" + "${component_name}"
+ f"{self.full_name}-" + "${component.name}"
): TopicConfig(type=OutputTopicTypes.OUTPUT)
}
- ).dict(),
+ ).model_dump(),
)
inflate_steps.append(streams_app)
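As a minimal illustration of the model_dump() call introduced in the hunk above: Pydantic v2 renames BaseModel.dict() to model_dump(), and both return a plain dict. The TopicStub model below is a hypothetical stand-in, not a model from the kpops codebase.

from pydantic import BaseModel


class TopicStub(BaseModel):
    """Hypothetical stand-in for a to-section topic config."""

    type: str | None = None
    role: str | None = None


# Equivalent of the removed .dict() call; returns a plain dict.
print(TopicStub(role="test").model_dump())  # {'type': None, 'role': 'test'}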
diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py
index d5684178c..c87c668a0 100644
--- a/tests/pipeline/test_components_without_schema_handler/components.py
+++ b/tests/pipeline/test_components_without_schema_handler/components.py
@@ -1,10 +1,13 @@
from typing_extensions import override
from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig
-from kpops.components import KafkaSinkConnector
-from kpops.components.base_components import PipelineComponent
+from kpops.components import (
+ KafkaSinkConnector,
+ PipelineComponent,
+ ProducerApp,
+ StreamsApp,
+)
from kpops.components.base_components.models.to_section import OutputTopicTypes
-from kpops.components.streams_bootstrap import ProducerApp, StreamsApp
class ScheduledProducer(ProducerApp):
@@ -28,7 +31,6 @@ def inflate(self) -> list[PipelineComponent]:
name="sink-connector",
config=self.config,
handlers=self.handlers,
- namespace="example-namespace",
app=KafkaConnectorConfig(
**{
"topics": topic_name,
diff --git a/tests/pipeline/test_example.py b/tests/pipeline/test_example.py
index e3b3e5286..5d6d587d0 100644
--- a/tests/pipeline/test_example.py
+++ b/tests/pipeline/test_example.py
@@ -16,15 +16,13 @@ def test_atm_fraud(self, snapshot: SnapshotTest):
[
"generate",
"./examples/bakdata/atm-fraud-detection/pipeline.yaml",
- "--pipeline-base-dir",
- "examples",
"--config",
- "./examples/bakdata/atm-fraud-detection/config.yaml",
+ "./examples/bakdata/atm-fraud-detection",
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "atm-fraud-pipeline")
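For reference, a minimal sketch of the same invocation outside pytest, assuming it runs from a repository checkout that contains the example pipeline. The typer.testing.CliRunner import is an assumption based on how these test modules construct their runner; it is not shown in this hunk.

import yaml
from typer.testing import CliRunner  # assumed import; the tests invoke the kpops CLI app the same way

from kpops.cli.main import app

runner = CliRunner()
result = runner.invoke(
    app,
    [
        "generate",
        "./examples/bakdata/atm-fraud-detection/pipeline.yaml",
        "--config",
        "./examples/bakdata/atm-fraud-detection",  # directory that contains the pipeline's config.yaml
    ],
    catch_exceptions=False,
)
assert result.exit_code == 0, result.stdout
enriched_pipeline = yaml.safe_load(result.stdout)
print(len(enriched_pipeline))  # number of enriched components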
diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_generate.py
similarity index 66%
rename from tests/pipeline/test_pipeline.py
rename to tests/pipeline/test_generate.py
index bc584dbfa..68e8efd92 100644
--- a/tests/pipeline/test_pipeline.py
+++ b/tests/pipeline/test_generate.py
@@ -1,4 +1,3 @@
-import logging
from pathlib import Path
import pytest
@@ -8,22 +7,20 @@
import kpops
from kpops.cli.main import app
-from kpops.pipeline_generator.pipeline import ParsingException, ValidationError
+from kpops.pipeline import ParsingException, ValidationError
runner = CliRunner()
RESOURCE_PATH = Path(__file__).parent / "resources"
-PIPELINE_BASE_DIR_PATH = RESOURCE_PATH.parent
-@pytest.mark.usefixtures("mock_env")
-class TestPipeline:
+@pytest.mark.usefixtures("mock_env", "load_yaml_file_clear_cache")
+class TestGenerate:
def test_python_api(self):
pipeline = kpops.generate(
RESOURCE_PATH / "first-pipeline" / "pipeline.yaml",
- "tests.pipeline.test_components",
- pipeline_base_dir=PIPELINE_BASE_DIR_PATH,
defaults=RESOURCE_PATH,
+ output=False,
)
assert len(pipeline) == 3
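A standalone sketch of the programmatic call exercised above, assuming it is run from the repository root so the test resources resolve; the meaning of output=False is inferred from this test rather than from documentation.

from pathlib import Path

import kpops

RESOURCE_PATH = Path("tests/pipeline/resources")

pipeline = kpops.generate(
    RESOURCE_PATH / "first-pipeline" / "pipeline.yaml",
    defaults=RESOURCE_PATH,
    output=False,  # inferred: suppress printing and work with the returned pipeline object
)
print(len(pipeline))  # the test above expects 3 components for this resource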
@@ -32,90 +29,53 @@ def test_load_pipeline(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "first-pipeline/pipeline.yaml"),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
- def test_generate_with_steps_flag_should_write_log_warning(
- self, caplog: pytest.LogCaptureFixture
- ):
- result = runner.invoke(
- app,
- [
- "generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
- str(RESOURCE_PATH / "first-pipeline/pipeline.yaml"),
- "tests.pipeline.test_components",
- "--defaults",
- str(RESOURCE_PATH),
- "--steps",
- "a",
- ],
- catch_exceptions=False,
- )
-
- assert caplog.record_tuples == [
- (
- "root",
- logging.WARNING,
- "The following flags are considered only when `--template` is set: \n \
- '--steps'",
- )
- ]
-
- assert result.exit_code == 0
-
def test_name_equal_prefix_name_concatenation(self):
result = runner.invoke(
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "name_prefix_concatenation/pipeline.yaml"),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
- assert enriched_pipeline["components"][0]["prefix"] == "my-fake-prefix-"
- assert enriched_pipeline["components"][0]["name"] == "my-streams-app"
+ assert enriched_pipeline[0]["prefix"] == "my-fake-prefix-"
+ assert enriched_pipeline[0]["name"] == "my-streams-app"
def test_pipelines_with_env_values(self, snapshot: SnapshotTest):
result = runner.invoke(
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "pipeline-with-envs/pipeline.yaml"),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
+ "--environment",
+ "development",
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -125,17 +85,14 @@ def test_inflate_pipeline(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "pipeline-with-inflate/pipeline.yaml"),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -145,47 +102,39 @@ def test_substitute_in_component(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "component-type-substitution/pipeline.yaml"),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
assert (
- enriched_pipeline["components"][0]["prefix"]
- == "resources-component-type-substitution-"
+ enriched_pipeline[0]["prefix"] == "resources-component-type-substitution-"
)
- assert enriched_pipeline["components"][0]["name"] == "scheduled-producer"
+ assert enriched_pipeline[0]["name"] == "scheduled-producer"
- labels = enriched_pipeline["components"][0]["app"]["labels"]
+ labels = enriched_pipeline[0]["app"]["labels"]
assert labels["app_name"] == "scheduled-producer"
assert labels["app_type"] == "scheduled-producer"
assert labels["app_schedule"] == "30 3/8 * * *"
assert (
- enriched_pipeline["components"][2]["app"]["labels"][
- "app_resources_requests_memory"
- ]
+ enriched_pipeline[2]["app"]["labels"]["app_resources_requests_memory"]
== "3G"
)
assert (
"resources-component-type-substitution-scheduled-producer"
- in enriched_pipeline["components"][0]["to"]["topics"]
+ in enriched_pipeline[0]["to"]["topics"]
)
assert (
"resources-component-type-substitution-converter-error"
- in enriched_pipeline["components"][1]["to"]["topics"]
+ in enriched_pipeline[1]["to"]["topics"]
)
assert (
- enriched_pipeline["components"][2]["app"]["labels"][
- "test_placeholder_in_placeholder"
- ]
+ enriched_pipeline[2]["app"]["labels"]["test_placeholder_in_placeholder"]
== "filter-app-filter"
)
@@ -198,13 +147,10 @@ def test_substitute_in_component_infinite_loop(self):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(
RESOURCE_PATH
/ "component-type-substitution/infinite_pipeline.yaml",
),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
@@ -216,18 +162,16 @@ def test_kafka_connector_config_parsing(self):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "kafka-connect-sink-config/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH),
"--config",
- str(RESOURCE_PATH / "kafka-connect-sink-config/config.yaml"),
+ str(RESOURCE_PATH / "kafka-connect-sink-config"),
],
catch_exceptions=False,
)
enriched_pipeline: dict = yaml.safe_load(result.stdout)
- sink_connector = enriched_pipeline["components"][0]
+ sink_connector = enriched_pipeline[0]
assert (
sink_connector["app"]["errors.deadletterqueue.topic.name"]
== "kafka-sink-connector-error-topic"
@@ -238,17 +182,14 @@ def test_no_input_topic(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "no-input-topic-pipeline/pipeline.yaml"),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -258,8 +199,6 @@ def test_no_user_defined_components(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "no-user-defined-components/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH),
@@ -267,7 +206,7 @@ def test_no_user_defined_components(self, snapshot: SnapshotTest):
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -278,8 +217,6 @@ def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "kafka-connect-sink/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH),
@@ -287,7 +224,7 @@ def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest):
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -297,17 +234,14 @@ def test_read_from_component(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "read-from-component/pipeline.yaml"),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -317,16 +251,16 @@ def test_with_env_defaults(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "kafka-connect-sink/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH / "pipeline-with-env-defaults"),
+ "--environment",
+ "development",
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -336,8 +270,6 @@ def test_prefix_pipeline_component(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(
RESOURCE_PATH
/ "pipeline-component-should-have-prefix/pipeline.yaml",
@@ -348,7 +280,7 @@ def test_prefix_pipeline_component(self, snapshot: SnapshotTest):
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
@@ -361,23 +293,23 @@ def test_with_custom_config_with_relative_defaults_path(
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
"--config",
- str(RESOURCE_PATH / "custom-config/config.yaml"),
+ str(RESOURCE_PATH / "custom-config"),
+ "--environment",
+ "development",
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
- producer_details = enriched_pipeline["components"][0]
+ producer_details = enriched_pipeline[0]
output_topic = producer_details["app"]["streams"]["outputTopic"]
assert output_topic == "app1-test-topic"
- streams_app_details = enriched_pipeline["components"][1]
+ streams_app_details = enriched_pipeline[1]
output_topic = streams_app_details["app"]["streams"]["outputTopic"]
assert output_topic == "app2-test-topic"
error_topic = streams_app_details["app"]["streams"]["errorTopic"]
@@ -396,7 +328,7 @@ def test_with_custom_config_with_absolute_defaults_path(
config_dict["defaults_path"] = str(
(RESOURCE_PATH / "no-topics-defaults").absolute(),
)
- temp_config_path = RESOURCE_PATH / "custom-config/temp_config.yaml"
+ temp_config_path = RESOURCE_PATH / "custom-config/config_custom.yaml"
try:
with temp_config_path.open("w") as abs_config_yaml:
yaml.dump(config_dict, abs_config_yaml)
@@ -404,23 +336,23 @@ def test_with_custom_config_with_absolute_defaults_path(
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
"--config",
- str(temp_config_path),
+ str(temp_config_path.parent),
+ "--environment",
+ "development",
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
- producer_details = enriched_pipeline["components"][0]
+ producer_details = enriched_pipeline[0]
output_topic = producer_details["app"]["streams"]["outputTopic"]
assert output_topic == "app1-test-topic"
- streams_app_details = enriched_pipeline["components"][1]
+ streams_app_details = enriched_pipeline[1]
output_topic = streams_app_details["app"]["streams"]["outputTopic"]
assert output_topic == "app2-test-topic"
error_topic = streams_app_details["app"]["streams"]["errorTopic"]
@@ -435,23 +367,23 @@ def test_default_config(self, snapshot: SnapshotTest):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH / "no-topics-defaults"),
+ "--environment",
+ "development",
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
- producer_details = enriched_pipeline["components"][0]
+ producer_details = enriched_pipeline[0]
output_topic = producer_details["app"]["streams"]["outputTopic"]
assert output_topic == "resources-custom-config-app1"
- streams_app_details = enriched_pipeline["components"][1]
+ streams_app_details = enriched_pipeline[1]
output_topic = streams_app_details["app"]["streams"]["outputTopic"]
assert output_topic == "resources-custom-config-app2"
error_topic = streams_app_details["app"]["streams"]["errorTopic"]
@@ -459,40 +391,131 @@ def test_default_config(self, snapshot: SnapshotTest):
snapshot.assert_match(enriched_pipeline, "test-pipeline")
- def test_env_vars_precedence_over_config(
- self,
- monkeypatch: pytest.MonkeyPatch,
- snapshot: SnapshotTest,
- ):
+ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch):
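+ # Environment variables should take precedence over values defined in the config files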
monkeypatch.setenv(name="KPOPS_KAFKA_BROKERS", value="env_broker")
result = runner.invoke(
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
"--config",
- str(RESOURCE_PATH / "custom-config/config.yaml"),
+ str(RESOURCE_PATH / "custom-config"),
+ "--environment",
+ "development",
+ ],
+ catch_exceptions=False,
+ )
+ assert result.exit_code == 0, result.stdout
+ enriched_pipeline: dict = yaml.safe_load(result.stdout)
+ assert enriched_pipeline[0]["app"]["streams"]["brokers"] == "env_broker"
+
+ def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch):
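+ # Nested config fields are set from env vars using the "__" delimiter, e.g. KPOPS_SCHEMA_REGISTRY__URL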
+ monkeypatch.setenv(
+ name="KPOPS_SCHEMA_REGISTRY__URL", value="http://somename:1234"
+ )
+
+ result = runner.invoke(
+ app,
+ [
+ "generate",
+ str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
+ "--config",
+ str(RESOURCE_PATH / "custom-config"),
+ "--environment",
+ "development",
+ ],
+ catch_exceptions=False,
+ )
+ assert result.exit_code == 0, result.stdout
+ enriched_pipeline: dict = yaml.safe_load(result.stdout)
+ assert (
+ enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"]
+ == "http://somename:1234/"
+ )
+
+ def test_env_specific_config_env_def_in_env_var(
+ self, monkeypatch: pytest.MonkeyPatch
+ ):
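+ # The environment is provided via the KPOPS_ENVIRONMENT variable instead of the --environment flag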
+ monkeypatch.setenv(name="KPOPS_ENVIRONMENT", value="production")
+ config_path = str(RESOURCE_PATH / "multi-config")
+ result = runner.invoke(
+ app,
+ [
+ "generate",
+ str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
+ "--config",
+ config_path,
+ "--defaults",
+ str(RESOURCE_PATH),
],
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
assert (
- enriched_pipeline["components"][0]["app"]["streams"]["brokers"]
- == "env_broker"
+ enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"]
+ == "http://production:8081/"
)
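+ # Each case provides a config directory and the schema registry URL expected for the production environment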
+ @pytest.mark.parametrize(
+ ("config_dir", "expected_url"),
+ [
+ pytest.param("multi-config", "http://production:8081/", id="multi-config"),
+ pytest.param(
+ "env-specific-config-only",
+ "http://localhost:8081/",
+ id="env-specific-config-only",
+ ),
+ ],
+ )
+ def test_env_specific_config_env_def_in_cli(
+ self, config_dir: str, expected_url: str
+ ):
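+ # Here the environment is passed explicitly with the --environment CLI flag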
+ config_path = str(RESOURCE_PATH / config_dir)
+ result = runner.invoke(
+ app,
+ [
+ "generate",
+ str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
+ "--config",
+ config_path,
+ "--defaults",
+ str(RESOURCE_PATH),
+ "--environment",
+ "production",
+ ],
+ catch_exceptions=False,
+ )
+ assert result.exit_code == 0, result.stdout
+ enriched_pipeline: dict = yaml.safe_load(result.stdout)
+ assert (
+ enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"] == expected_url
+ )
+
+ def test_config_dir_doesnt_exist(self):
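+ # A --config directory that does not exist should cause the command to fail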
+ result = runner.invoke(
+ app,
+ [
+ "generate",
+ str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
+ "--config",
+ "./non-existent-dir",
+ "--defaults",
+ str(RESOURCE_PATH),
+ "--environment",
+ "production",
+ ],
+ catch_exceptions=False,
+ )
+ assert result.exit_code != 0
+
def test_model_serialization(self, snapshot: SnapshotTest):
"""Test model serialization of component containing pathlib.Path attribute."""
result = runner.invoke(
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "pipeline-with-paths/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH),
@@ -500,18 +523,41 @@ def test_model_serialization(self, snapshot: SnapshotTest):
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
snapshot.assert_match(enriched_pipeline, "test-pipeline")
+ def test_dotenv_support(self):
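+ # Values from the given dotenv files should be reflected in the resolved config (here the schema registry URL)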
+ result = runner.invoke(
+ app,
+ [
+ "generate",
+ str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
+ "--defaults",
+ str(RESOURCE_PATH),
+ "--config",
+ str(RESOURCE_PATH / "dotenv"),
+ "--dotenv",
+ str(RESOURCE_PATH / "dotenv/.env"),
+ "--dotenv",
+ str(RESOURCE_PATH / "dotenv/custom.env"),
+ ],
+ catch_exceptions=False,
+ )
+ assert result.exit_code == 0, result.stdout
+
+ enriched_pipeline: dict = yaml.safe_load(result.stdout)
+ assert (
+ enriched_pipeline[1]["app"]["streams"]["schemaRegistryUrl"]
+ == "http://notlocalhost:8081/"
+ )
+
def test_short_topic_definition(self):
result = runner.invoke(
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "pipeline-with-short-topics/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH / "pipeline-with-short-topics"),
@@ -519,13 +565,13 @@ def test_short_topic_definition(self):
catch_exceptions=False,
)
- assert result.exit_code == 0
+ assert result.exit_code == 0, result.stdout
enriched_pipeline: dict = yaml.safe_load(result.stdout)
- output_topics = enriched_pipeline["components"][4]["to"]["topics"]
- input_topics = enriched_pipeline["components"][4]["from"]["topics"]
- input_components = enriched_pipeline["components"][4]["from"]["components"]
+ output_topics = enriched_pipeline[4]["to"]["topics"]
+ input_topics = enriched_pipeline[4]["from"]["topics"]
+ input_components = enriched_pipeline[4]["from"]["components"]
assert "type" not in output_topics["output-topic"]
assert output_topics["error-topic"]["type"] == "error"
assert "type" not in output_topics["extra-topic"]
@@ -557,13 +603,10 @@ def test_kubernetes_app_name_validation(self):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(
RESOURCE_PATH
/ "pipeline-with-illegal-kubernetes-name/pipeline.yaml",
),
- "tests.pipeline.test_components",
"--defaults",
str(RESOURCE_PATH),
],
@@ -579,11 +622,27 @@ def test_validate_unique_step_names(self):
app,
[
"generate",
- "--pipeline-base-dir",
- str(PIPELINE_BASE_DIR_PATH),
str(RESOURCE_PATH / "pipeline-duplicate-step-names/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH),
],
catch_exceptions=False,
)
+
+ def test_temp_trim_release_name(self):
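+ # The component name should be trimmed down to 52 characters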
+ result = runner.invoke(
+ app,
+ [
+ "generate",
+ str(RESOURCE_PATH / "temp-trim-release-name/pipeline.yaml"),
+ "--defaults",
+ str(RESOURCE_PATH / "temp-trim-release-name"),
+ ],
+ catch_exceptions=False,
+ )
+ assert result.exit_code == 0, result.stdout
+ enriched_pipeline: dict = yaml.safe_load(result.stdout)
+ assert (
+ enriched_pipeline[0]["name"]
+ == "in-order-to-have-len-fifty-two-name-should-end--here"
+ )
diff --git a/tests/pipeline/test_template.py b/tests/pipeline/test_manifest.py
similarity index 51%
rename from tests/pipeline/test_template.py
rename to tests/pipeline/test_manifest.py
index a43fbec5b..0910c2dac 100644
--- a/tests/pipeline/test_template.py
+++ b/tests/pipeline/test_manifest.py
@@ -3,40 +3,50 @@
import pytest
from pytest_mock import MockerFixture
+from snapshottest.module import SnapshotTest
from typer.testing import CliRunner
+import kpops
from kpops.cli.main import app
from kpops.component_handlers.helm_wrapper.helm import Helm
+from kpops.component_handlers.helm_wrapper.model import HelmConfig, Version
runner = CliRunner()
RESOURCE_PATH = Path(__file__).parent / "resources"
-PIPELINE_BASE_DIR = str(RESOURCE_PATH.parent)
-class TestTemplate:
+class TestManifest:
@pytest.fixture()
- def run_command(self, mocker: MockerFixture) -> MagicMock:
- return mocker.patch.object(Helm, "_Helm__execute")
+ def mock_execute(self, mocker: MockerFixture) -> MagicMock:
+ mock_execute = mocker.patch.object(Helm, "_Helm__execute")
+ mock_execute.return_value = "" # Helm Template
+ return mock_execute
- def test_default_template_config(self, run_command: MagicMock):
- run_command.return_value = "v3.12.0+gc9f554d"
+ @pytest.fixture()
+ def mock_get_version(self, mocker: MockerFixture) -> MagicMock:
+ mock_get_version = mocker.patch.object(Helm, "get_version")
+ mock_get_version.return_value = Version(major=3, minor=12, patch=0)
+ return mock_get_version
+
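+ # autouse fixture: build a Helm instance for every test with the version check mocked via mock_get_version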
+ @pytest.fixture(autouse=True)
+ def helm(self, mock_get_version: MagicMock) -> Helm:
+ return Helm(helm_config=HelmConfig())
+ def test_default_config(self, mock_execute: MagicMock):
result = runner.invoke(
app,
[
- "generate",
- "--pipeline-base-dir",
- PIPELINE_BASE_DIR,
+ "manifest",
str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH / "no-topics-defaults"),
- "--template",
+ "--environment",
+ "development",
],
catch_exceptions=False,
)
-
- run_command.assert_called_with(
+ mock_execute.assert_called_with(
[
"helm",
"template",
@@ -53,29 +63,24 @@ def test_default_template_config(self, run_command: MagicMock):
"--wait",
],
)
+ assert result.exit_code == 0, result.stdout
- assert result.exit_code == 0
-
- def test_template_config_with_flags(self, run_command: MagicMock):
- run_command.return_value = "v3.12.0+gc9f554d"
-
+ def test_custom_config(self, mock_execute: MagicMock):
result = runner.invoke(
app,
[
- "generate",
- "--pipeline-base-dir",
- PIPELINE_BASE_DIR,
+ "manifest",
str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
"--defaults",
str(RESOURCE_PATH / "no-topics-defaults"),
"--config",
- str(RESOURCE_PATH / "custom-config/config.yaml"),
- "--template",
+ str(RESOURCE_PATH / "custom-config"),
+ "--environment",
+ "development",
],
catch_exceptions=False,
)
-
- run_command.assert_called_with(
+ mock_execute.assert_called_with(
[
"helm",
"template",
@@ -94,5 +99,16 @@ def test_template_config_with_flags(self, run_command: MagicMock):
"2.1.1",
],
)
+ assert result.exit_code == 0, result.stdout
- assert result.exit_code == 0
+ def test_python_api(self, snapshot: SnapshotTest):
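+ # The same pipeline can be manifested through the Python API; the returned resources are snapshot-tested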
+ resources = kpops.manifest(
+ RESOURCE_PATH / "custom-config/pipeline.yaml",
+ defaults=RESOURCE_PATH / "no-topics-defaults",
+ output=False,
+ environment="development",
+ )
+ assert isinstance(resources, list)
+ assert len(resources) == 2
+ for i, resource in enumerate(resources):
+ snapshot.assert_match(resource, f"resource {i}")
diff --git a/tests/utils/resources/nested_base_settings.py b/tests/utils/resources/nested_base_settings.py
index f7f92358a..97e755e71 100644
--- a/tests/utils/resources/nested_base_settings.py
+++ b/tests/utils/resources/nested_base_settings.py
@@ -1,4 +1,5 @@
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
class NestedSettings(BaseSettings):
@@ -10,5 +11,5 @@ class ParentSettings(BaseSettings):
nested_field: NestedSettings = Field(...)
field_with_env_defined: str = Field(
default=...,
- env="FIELD_WITH_ENV_DEFINED",
+ alias="FIELD_WITH_ENV_DEFINED",
)
diff --git a/tests/utils/test_dict_ops.py b/tests/utils/test_dict_ops.py
index 1ea410770..197c1013a 100644
--- a/tests/utils/test_dict_ops.py
+++ b/tests/utils/test_dict_ops.py
@@ -4,6 +4,7 @@
from pydantic import BaseModel
from kpops.utils.dict_ops import generate_substitution, update_nested_pair
+from kpops.utils.types import JsonType
class TestDictOps:
@@ -47,7 +48,12 @@ class TestDictOps:
),
],
)
- def test_update_nested_pair(self, d1: dict, d2: dict, expected: dict):
+ def test_update_nested_pair(
+ self,
+ d1: dict[str, JsonType],
+ d2: dict[str, JsonType],
+ expected: dict[str, JsonType],
+ ):
assert update_nested_pair(d1, d2) == expected
def test_substitution_generation(self):
@@ -70,7 +76,7 @@ class SimpleModel(BaseModel):
},
},
problems=99,
- ).json()
+ ).model_dump_json()
)
existing_substitution = {
"key1": "Everything",
diff --git a/tests/utils/test_doc_gen.py b/tests/utils/test_doc_gen.py
index d234bd79d..5ad065f2c 100644
--- a/tests/utils/test_doc_gen.py
+++ b/tests/utils/test_doc_gen.py
@@ -6,25 +6,13 @@
from hooks.gen_docs.gen_docs_env_vars import (
EnvVarAttrs,
append_csv_to_dotenv_file,
- collect_fields,
csv_append_env_var,
write_csv_to_md_file,
write_title_to_dotenv_file,
)
-from tests.utils.resources.nested_base_settings import ParentSettings
class TestEnvDocGen:
- def test_collect_fields(self):
- expected: list[Any] = [
- "not_nested_field",
- "attr",
- Ellipsis,
- Ellipsis,
- ]
- actual = [field.field_info.default for field in collect_fields(ParentSettings)]
- assert actual == expected
-
@pytest.mark.parametrize(
("var_name", "default_value", "description", "extra_args", "expected_outcome"),
[