From 025db7383d5e33120ce017461c7a24f0878b5e4a Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 13 Sep 2023 17:52:23 +0200 Subject: [PATCH 01/34] Move GitHub action to repository root (#356) Closes #355 --- .github/actions/update-docs/action.yml | 9 ++- README.md | 4 ++ .../kpops-runner/action.yaml => action.yaml | 0 actions/kpops-runner/README.md | 3 - docs/docs/user/migration-guide/v1-v2.md | 28 ++++----- docs/docs/user/migration-guide/v2-v3.md | 17 +++++ .../ci-integration/github-actions.md | 13 ++-- docs/mkdocs.yml | 62 +++++++++---------- 8 files changed, 75 insertions(+), 61 deletions(-) rename actions/kpops-runner/action.yaml => action.yaml (100%) delete mode 100644 actions/kpops-runner/README.md create mode 100644 docs/docs/user/migration-guide/v2-v3.md diff --git a/.github/actions/update-docs/action.yml b/.github/actions/update-docs/action.yml index bc7877497..fc9d17c96 100644 --- a/.github/actions/update-docs/action.yml +++ b/.github/actions/update-docs/action.yml @@ -15,10 +15,10 @@ inputs: required: true version: description: "Version name to be deployed by mike" - required: true + required: true release: description: "Determines if the set version is a stable and latest version, otherwise it is a dev version. (Default false)" - default: 'false' + default: "false" required: false runs: @@ -35,14 +35,13 @@ runs: run: | poetry install --with docs - - name: Update ${{ github.head_ref }} branch + - name: Update gh-pages branch shell: bash run: | git config --local user.name ${{ inputs.username }} git config --local user.email ${{ inputs.email }} git config --local user.password ${{ inputs.token }} - - git pull + git fetch origin gh-pages - name: Deploy ${{ inputs.version }} version of the documentation with mike shell: bash diff --git a/README.md b/README.md index 9d2aaca2e..1aeb656bc 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,10 @@ You can install it with [pip](https://github.com/pypa/pip): pip install kpops ``` +# GitHub action + +Please refer to the [GitHub Actions section](https://bakdata.github.io/kpops/latest/user/references/ci-integration/github-actions) for the documentation. + ## Contributing We are happy if you want to contribute to this project. diff --git a/actions/kpops-runner/action.yaml b/action.yaml similarity index 100% rename from actions/kpops-runner/action.yaml rename to action.yaml diff --git a/actions/kpops-runner/README.md b/actions/kpops-runner/README.md deleted file mode 100644 index 9ee35bfe1..000000000 --- a/actions/kpops-runner/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# KPOps runner - -Please refer to the [GitHub Actions section](https://bakdata.github.io/kpops/latest/user/references/ci-integration/github-actions) for the documentation. diff --git a/docs/docs/user/migration-guide/v1-v2.md b/docs/docs/user/migration-guide/v1-v2.md index c5936cbe5..e3edf9453 100644 --- a/docs/docs/user/migration-guide/v1-v2.md +++ b/docs/docs/user/migration-guide/v1-v2.md @@ -18,7 +18,7 @@ Because of this new convention `producer` has been renamed to `producer-app`. Th app: streams: outputTopic: output_topic - extraOutputTopics: + extraOutputTopics: output_role1: output_topic1 output_role2: output_topic2 ``` @@ -41,10 +41,10 @@ In the `to` section these have changed: role: "role-1" ... ${pipeline_name}-topic-2: -- type: output +- type: output ... ${pipeline_name}-topic-3: - type: error + type: error ... ``` @@ -68,11 +68,11 @@ In the `from` section these have changed: role: topic-role ... 
${pipeline_name}-input-pattern-topic:
-  type: input-pattern
+  type: pattern
   ...
   ${pipeline_name}-extra-pattern-topic:
-  type: extra-pattern
+  type: pattern
     role: some-role
   ...
@@ -86,15 +86,15 @@ All the internal KPOps models are now snake_case, and only Helm/Kubernetes value
 ...
     type: streams-app
     name: streams-app
-    namespace: namespace
-    app:
+    namespace: namespace
+    app:
       streams:
-        brokers: ${brokers}
+        brokers: ${brokers}
         schemaRegistryUrl: ${schema_registry_url}
       autoscaling:
         consumerGroup: consumer-group
         lagThreshold: 0
-        enabled: false
+        enabled: false
         pollingInterval: 30
 
   to:
@@ -117,8 +117,8 @@ type: streams-app
-  repositoryName: bakdata-streams-bootstrap
+  repository_name: bakdata-streams-bootstrap
   url: https://bakdata.github.io/streams-bootstrap/
-  repoAuthFlags:
+  repo_auth_flags:
     username: user
     password: pass
     ca_file: /home/user/path/to/ca-file
@@ -151,9 +151,9 @@ Since you can pass a comma separated string of broker address, the broker field
 environment: development
-broker: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
-kafka_connect_host: "http://localhost:8083"
-kafka_rest_host: "http://localhost:8082"
-schema_registry_url: "http://localhost:8081"
+kafka_connect_host: "http://localhost:8083"
+kafka_rest_host: "http://localhost:8082"
+schema_registry_url: "http://localhost:8081"
 ```
 
 #### pipeline.yaml and defaults.yaml
diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
new file mode 100644
index 000000000..e0d669859
--- /dev/null
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -0,0 +1,17 @@
+# Migrate from V2 to V3
+
+## [Move GitHub action to repository root](https://github.com/bakdata/kpops/pull/356)
+
+The location of the GitHub action has changed and it's now available directly as `bakdata/kpops`.
+
+You'll need to change it in your GitHub CI workflows.
+
+```diff
+steps:
+  - name: kpops deploy
+-   uses: bakdata/kpops/actions/kpops-runner@main
++   uses: bakdata/kpops@main
+    with:
+      command: deploy --execute
+      # ...
+```
diff --git a/docs/docs/user/references/ci-integration/github-actions.md b/docs/docs/user/references/ci-integration/github-actions.md
index 7284254cc..87f9098f1 100644
--- a/docs/docs/user/references/ci-integration/github-actions.md
+++ b/docs/docs/user/references/ci-integration/github-actions.md
@@ -1,8 +1,6 @@
 # GitHub Actions integration
 
-We provided a GitHub composite action called
-[kpops-runner](https://github.com/bakdata/kpops/tree/main/actions/kpops-runner/action.yaml)
-that installs all the necessary dependencies and runs KPOps commands with the given parameters.
+We provide a GitHub composite action `bakdata/kpops` that installs and executes KPOps commands with the given parameters.
 
 ## Input Parameters
 
@@ -28,7 +26,7 @@ steps:
   # ...
   # This step is useful for debugging reasons
   - name: Generate Kafka pipeline
-    uses: bakdata/kpops/actions/kpops-runner@main
+    uses: bakdata/kpops@main
     with:
       command: generate
       working-directory: home/my-kpops-root-dir
@@ -36,7 +34,7 @@
       kpops-version: 1.2.3
 
   - name: Deploy Kafka pipeline
-    uses: bakdata/kpops/actions/kpops-runner@main
+    uses: bakdata/kpops@main
     with:
      command: deploy --execute
      working-directory: home/my-kpops-root-dir
@@ -45,13 +43,12 @@
 # ...
``` -It is possible to execute the KPOps runner on -a dev version from the [test.pypi](https://test.pypi.org/project/kpops/#history). +It is possible to use a pre-release KPOps version from [TestPyPI](https://test.pypi.org/project/kpops/#history). ```yaml steps: - name: Deploy Kafka pipeline - uses: bakdata/kpops/actions/kpops-runner@main + uses: bakdata/kpops@main with: command: deploy --execute working-directory: home/my-kpops-root-dir diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 02aa207dc..79cf83374 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -55,7 +55,7 @@ markdown_extensions: - pymdownx.keys - pymdownx.details - pymdownx.snippets: - base_path: 'docs/' + base_path: "docs/" url_download: true url_timeout: false - pymdownx.caret @@ -78,36 +78,36 @@ nav: - Home: KPOps Documentation: index.md - User Guide: - - What is KPOps: user/what-is-kpops.md - - Getting Started: - - Setup: user/getting-started/setup.md - - Quick start: user/getting-started/quick-start.md - - Teardown: user/getting-started/teardown.md - - Examples: - - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md - - Core Concepts: - - Components: - - Overview: user/core-concepts/components/overview.md - - KubernetesApp: user/core-concepts/components/kubernetes-app.md - - KafkaApp: user/core-concepts/components/kafka-app.md - - StreamsApp: user/core-concepts/components/streams-app.md - - ProducerApp: user/core-concepts/components/producer-app.md - - KafkaConnector: user/core-concepts/components/kafka-connector.md - - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md - - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md - - Config: user/core-concepts/config.md - - Defaults: user/core-concepts/defaults.md - - Variables: - - Environment variables: user/core-concepts/variables/environment_variables.md - - Substitution: user/core-concepts/variables/substitution.md - - References: - - Migration guide: - - Migrate from v1 to v2: user/migration-guide/v1-v2.md - - CLI usage: user/references/cli-commands.md - - Editor integration: user/references/editor-integration.md - - CI integration: - - GitHub Actions: user/references/ci-integration/github-actions.md + - What is KPOps: user/what-is-kpops.md + - Getting Started: + - Setup: user/getting-started/setup.md + - Quick start: user/getting-started/quick-start.md + - Teardown: user/getting-started/teardown.md + - Examples: + - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md + - Core Concepts: + - Components: + - Overview: user/core-concepts/components/overview.md + - KubernetesApp: user/core-concepts/components/kubernetes-app.md + - KafkaApp: user/core-concepts/components/kafka-app.md + - StreamsApp: user/core-concepts/components/streams-app.md + - ProducerApp: user/core-concepts/components/producer-app.md + - KafkaConnector: user/core-concepts/components/kafka-connector.md + - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md + - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md + - Config: user/core-concepts/config.md + - Defaults: user/core-concepts/defaults.md + - Variables: + - Environment variables: user/core-concepts/variables/environment_variables.md + - Substitution: user/core-concepts/variables/substitution.md + - References: + - Migration guide: + - Migrate from v1 to v2: user/migration-guide/v1-v2.md + - Migrate from v2 to v3: user/migration-guide/v2-v3.md + - CLI usage: user/references/cli-commands.md + - Editor integration: 
user/references/editor-integration.md + - CI integration: + - GitHub Actions: user/references/ci-integration/github-actions.md - Developer Guide: - Auto generation: developer/auto-generation.md - Formatting: developer/formatting.md - From b800dcc5b29f235e96ae91960a19bb0f1cc468af Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Thu, 5 Oct 2023 14:28:52 +0200 Subject: [PATCH 02/34] Make Kafka REST Proxy & Kafka Connect hosts default and improve Schema Registry config (#354) --- config.yaml | 5 +- docs/docs/developer/auto-generation.md | 2 +- .../pipeline-components/kafka-app.yaml | 2 +- .../pipeline-components/pipeline.yaml | 6 +- .../pipeline-components/producer-app.yaml | 2 +- .../sections/app-kafka-app.yaml | 2 +- .../sections/app-producer-app.yaml | 2 +- .../sections/app-streams-app.yaml | 2 +- .../pipeline-components/streams-app.yaml | 2 +- .../pipeline-defaults/defaults-kafka-app.yaml | 2 +- .../defaults-producer-app.yaml | 2 +- .../defaults-streams-app.yaml | 2 +- .../resources/pipeline-defaults/defaults.yaml | 6 +- .../resources/variables/config_env_vars.env | 14 +- .../resources/variables/config_env_vars.md | 18 +- docs/docs/schema/config.json | 166 ++++++++++++++---- docs/docs/user/migration-guide/v2-v3.md | 50 +++++- .../bakdata/atm-fraud-detection/config.yaml | 12 +- .../bakdata/atm-fraud-detection/defaults.yaml | 2 +- hooks/gen_docs/gen_docs_env_vars.py | 6 +- kpops/cli/main.py | 48 ++--- .../kafka_connect/connect_wrapper.py | 31 ++-- .../kafka_connect/kafka_connect_handler.py | 14 +- .../schema_handler/schema_handler.py | 24 +-- kpops/component_handlers/topic/handler.py | 4 +- .../component_handlers/topic/proxy_wrapper.py | 35 ++-- .../base_defaults_component.py | 8 +- .../base_components/kafka_connector.py | 2 +- kpops/{cli/pipeline_config.py => config.py} | 76 +++++--- kpops/pipeline_generator/pipeline.py | 8 +- kpops/utils/gen_schema.py | 4 +- tests/cli/test_handlers.py | 25 ++- tests/cli/test_kpops_config.py | 67 +++++++ tests/compiler/test_pipeline_name.py | 4 +- .../kafka_connect/test_connect_wrapper.py | 58 +++--- .../schema_handler/test_schema_handler.py | 104 ++++++----- .../topic/test_proxy_wrapper.py | 46 ++--- .../test_base_defaults_component.py | 16 +- tests/components/test_kafka_app.py | 10 +- tests/components/test_kafka_connector.py | 10 +- tests/components/test_kafka_sink_connector.py | 16 +- .../components/test_kafka_source_connector.py | 10 +- tests/components/test_kubernetes_app.py | 14 +- tests/components/test_producer_app.py | 10 +- tests/components/test_streams_app.py | 20 +-- .../resources/custom-config/config.yaml | 12 +- tests/pipeline/resources/defaults.yaml | 2 +- .../kafka-connect-sink-config/config.yaml | 8 +- .../no-topics-defaults/defaults.yaml | 2 +- .../defaults.yaml | 2 +- .../pipeline-with-env-defaults/defaults.yaml | 2 +- .../pipeline-with-short-topics/defaults.yaml | 2 +- tests/pipeline/test_pipeline.py | 1 - 53 files changed, 620 insertions(+), 380 deletions(-) rename kpops/{cli/pipeline_config.py => config.py} (67%) create mode 100644 tests/cli/test_kpops_config.py diff --git a/config.yaml b/config.yaml index 46d0cf8b3..8fe8bb213 100644 --- a/config.yaml +++ b/config.yaml @@ -1,5 +1,2 @@ environment: development -brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" -kafka_connect_host: "http://localhost:8083" -kafka_rest_host: "http://localhost:8082" -schema_registry_url: "http://localhost:8081" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" diff --git 
a/docs/docs/developer/auto-generation.md b/docs/docs/developer/auto-generation.md
index 249f52b77..b87cbcad0 100644
--- a/docs/docs/developer/auto-generation.md
+++ b/docs/docs/developer/auto-generation.md
@@ -10,7 +10,7 @@ Auto generation happens mostly with [`pre-commit`](https://pre-commit.com/) hook
 
 - `cli_env_vars.env` -- All CLI environment variables in a `dotenv` file.
 - `cli_env_vars.md` -- All CLI environment variables in a table.
-- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`PipelineConfig`](https://github.com/bakdata/kpops/blob/main/kpops/cli/pipeline_config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it.
+- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`KpopsConfig`](https://github.com/bakdata/kpops/blob/main/kpops/config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it.
 - `config_env_vars.md` -- Almost all pipeline config environment variables in a table.
 - `variable_substitution.yaml` -- A copy of `./tests/pipeline/resources/component-type-substitution/pipeline.yaml` used as an example of substitution.
 
diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml
index 6d8045ad5..cdc49ef28 100644
--- a/docs/docs/resources/pipeline-components/kafka-app.yaml
+++ b/docs/docs/resources/pipeline-components/kafka-app.yaml
@@ -49,7 +49,7 @@
   # add the key-value pairs they need.
   app: # required
     streams: # required
-      brokers: ${brokers} # required
+      brokers: ${kafka_brokers} # required
       schemaRegistryUrl: ${schema_registry_url}
     nameOverride: override-with-this-name # kafka-app-specific
     imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app
diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml
index 27c5d45c1..eb7930376 100644
--- a/docs/docs/resources/pipeline-components/pipeline.yaml
+++ b/docs/docs/resources/pipeline-components/pipeline.yaml
@@ -49,7 +49,7 @@
   # add the key-value pairs they need.
app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app @@ -275,7 +275,7 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: @@ -346,7 +346,7 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 7a01ad24b..5be3551d8 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -32,7 +32,7 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: diff --git a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml index 991e862e0..73b70c59e 100644 --- a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml @@ -2,7 +2,7 @@ # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml index 5cd9b000b..0cbe04ded 100644 --- a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml @@ -2,7 +2,7 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: diff --git a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml index 44f6604aa..1c5f0849f 100644 --- a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml @@ -4,7 +4,7 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml index 0dde5be5c..f77edf80c 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -51,7 +51,7 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml index e0af3b7a7..bd6c9e2d9 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml @@ -7,7 +7,7 @@ kafka-app: # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml index 1d81f5ced..bfa5521c4 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml @@ -10,7 +10,7 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: diff --git a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml index 83ff13f14..ae1adab98 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml @@ -9,7 +9,7 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index e74272bdc..3a43d81e7 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -7,7 +7,7 @@ kafka-app: # add the key-value pairs they need. app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app @@ -170,7 +170,7 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: @@ -188,7 +188,7 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 308fb6334..00bef6a4c 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -9,18 +9,18 @@ # Suffix your environment files with this value (e.g. # defaults_development.yaml for environment=development). KPOPS_ENVIRONMENT # No default value, required -# brokers +# kafka_brokers # The comma separated Kafka brokers address. KPOPS_KAFKA_BROKERS # No default value, required -# schema_registry_url +# url # Address of the Schema Registry. -KPOPS_SCHEMA_REGISTRY_URL # No default value, not required -# kafka_rest_host +KPOPS_SCHEMA_REGISTRY_URL=http://localhost:8081 +# url # Address of the Kafka REST Proxy. 
-KPOPS_REST_PROXY_HOST # No default value, not required
-# kafka_connect_host
+KPOPS_KAFKA_REST_URL=http://localhost:8082
+# url
 # Address of Kafka Connect.
-KPOPS_CONNECT_HOST # No default value, not required
+KPOPS_KAFKA_CONNECT_URL=http://localhost:8083
 # timeout
 # The timeout in seconds that specifies when actions like deletion or
 # deploy timeout.
diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md
index 2928f2ccd..2419de11d 100644
--- a/docs/docs/resources/variables/config_env_vars.md
+++ b/docs/docs/resources/variables/config_env_vars.md
@@ -1,11 +1,11 @@
 These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config.
 
-| Name |Default Value|Required| Description | Setting name |
-|-------------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------|
-|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment |
-|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers |
-|KPOPS_SCHEMA_REGISTRY_URL| |False |Address of the Schema Registry. |schema_registry_url|
-|KPOPS_REST_PROXY_HOST | |False |Address of the Kafka REST Proxy. |kafka_rest_host |
-|KPOPS_CONNECT_HOST | |False |Address of Kafka Connect. |kafka_connect_host |
-|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout |
-|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. |retain_clean_jobs |
+| Name | Default Value |Required| Description | Setting name |
+|-------------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------|
+|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment |
+|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers |
+|KPOPS_SCHEMA_REGISTRY_URL|http://localhost:8081|False |Address of the Schema Registry. |url |
+|KPOPS_KAFKA_REST_URL |http://localhost:8082|False |Address of the Kafka REST Proxy. |url |
+|KPOPS_KAFKA_CONNECT_URL |http://localhost:8083|False |Address of Kafka Connect. |url |
+|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout |
+|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall them after completion. 
|retain_clean_jobs| diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index b77b4e850..a0841dae3 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -1,5 +1,5 @@ { - "$ref": "#/definitions/PipelineConfig", + "$ref": "#/definitions/KpopsConfig", "definitions": { "HelmConfig": { "description": "Global Helm configuration", @@ -41,25 +41,57 @@ "title": "HelmDiffConfig", "type": "object" }, - "PipelineConfig": { + "KafkaConnectConfig": { "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", + "description": "Configuration for Kafka Connect.", "properties": { - "brokers": { - "description": "The comma separated Kafka brokers address.", - "env": "KPOPS_KAFKA_BROKERS", + "url": { + "default": "http://localhost:8083", + "description": "Address of Kafka Connect.", + "env": "KPOPS_KAFKA_CONNECT_URL", "env_names": [ - "kpops_kafka_brokers" + "kpops_kafka_connect_url" ], - "example": "broker1:9092,broker2:9092,broker3:9092", - "title": "Brokers", + "format": "uri", + "maxLength": 65536, + "minLength": 1, + "title": "Url", "type": "string" - }, + } + }, + "title": "KafkaConnectConfig", + "type": "object" + }, + "KafkaRestConfig": { + "additionalProperties": false, + "description": "Configuration for Kafka REST Proxy.", + "properties": { + "url": { + "default": "http://localhost:8082", + "description": "Address of the Kafka REST Proxy.", + "env": "KPOPS_KAFKA_REST_URL", + "env_names": [ + "kpops_kafka_rest_url" + ], + "format": "uri", + "maxLength": 65536, + "minLength": 1, + "title": "Url", + "type": "string" + } + }, + "title": "KafkaRestConfig", + "type": "object" + }, + "KpopsConfig": { + "additionalProperties": false, + "description": "Pipeline configuration unrelated to the components.", + "properties": { "create_namespace": { "default": false, "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", "env_names": [ - "create_namespace" + "kpops_create_namespace" ], "title": "Create Namespace", "type": "boolean" @@ -68,7 +100,7 @@ "default": "defaults", "description": "The name of the defaults file and the prefix of the defaults environment file.", "env_names": [ - "defaults_filename_prefix" + "kpops_defaults_filename_prefix" ], "title": "Defaults Filename Prefix", "type": "string" @@ -77,7 +109,7 @@ "default": ".", "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. 
Paths can either be absolute or relative to `config.yaml`", "env_names": [ - "defaults_path" + "kpops_defaults_path" ], "example": "defaults", "format": "path", @@ -107,7 +139,7 @@ }, "description": "Global flags for Helm.", "env_names": [ - "helm_config" + "kpops_helm_config" ], "title": "Helm Config" }, @@ -122,29 +154,49 @@ }, "description": "Configure Helm Diff.", "env_names": [ - "helm_diff_config" + "kpops_helm_diff_config" ], "title": "Helm Diff Config" }, - "kafka_connect_host": { - "description": "Address of Kafka Connect.", - "env": "KPOPS_CONNECT_HOST", + "kafka_brokers": { + "description": "The comma separated Kafka brokers address.", + "env": "KPOPS_KAFKA_BROKERS", "env_names": [ - "kpops_connect_host" + "kpops_kafka_brokers" ], - "example": "http://localhost:8083", - "title": "Kafka Connect Host", + "example": "broker1:9092,broker2:9092,broker3:9092", + "title": "Kafka Brokers", "type": "string" }, - "kafka_rest_host": { - "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS_REST_PROXY_HOST", + "kafka_connect": { + "allOf": [ + { + "$ref": "#/definitions/KafkaConnectConfig" + } + ], + "default": { + "url": "http://localhost:8083" + }, + "description": "Configuration for Kafka Connect.", "env_names": [ - "kpops_rest_proxy_host" + "kpops_kafka_connect" ], - "example": "http://localhost:8082", - "title": "Kafka Rest Host", - "type": "string" + "title": "Kafka Connect" + }, + "kafka_rest": { + "allOf": [ + { + "$ref": "#/definitions/KafkaRestConfig" + } + ], + "default": { + "url": "http://localhost:8082" + }, + "description": "Configuration for Kafka REST Proxy.", + "env_names": [ + "kpops_kafka_rest" + ], + "title": "Kafka Rest" }, "retain_clean_jobs": { "default": false, @@ -156,15 +208,21 @@ "title": "Retain Clean Jobs", "type": "boolean" }, - "schema_registry_url": { - "description": "Address of the Schema Registry.", - "env": "KPOPS_SCHEMA_REGISTRY_URL", + "schema_registry": { + "allOf": [ + { + "$ref": "#/definitions/SchemaRegistryConfig" + } + ], + "default": { + "enabled": false, + "url": "http://localhost:8081" + }, + "description": "Configuration for Schema Registry.", "env_names": [ - "kpops_schema_registry_url" + "kpops_schema_registry" ], - "example": "http://localhost:8081", - "title": "Schema Registry Url", - "type": "string" + "title": "Schema Registry" }, "timeout": { "default": 300, @@ -188,21 +246,51 @@ }, "description": "Configure the topic name variables you can use in the pipeline definition.", "env_names": [ - "topic_name_config" + "kpops_topic_name_config" ], "title": "Topic Name Config" } }, "required": [ "environment", - "brokers" + "kafka_brokers" ], - "title": "PipelineConfig", + "title": "KpopsConfig", + "type": "object" + }, + "SchemaRegistryConfig": { + "additionalProperties": false, + "description": "Configuration for Schema Registry.", + "properties": { + "enabled": { + "default": false, + "description": "Whether the Schema Registry handler should be initialized.", + "env_names": [ + "enabled" + ], + "title": "Enabled", + "type": "boolean" + }, + "url": { + "default": "http://localhost:8081", + "description": "Address of the Schema Registry.", + "env": "KPOPS_SCHEMA_REGISTRY_URL", + "env_names": [ + "kpops_schema_registry_url" + ], + "format": "uri", + "maxLength": 65536, + "minLength": 1, + "title": "Url", + "type": "string" + } + }, + "title": "SchemaRegistryConfig", "type": "object" }, "TopicNameConfig": { "additionalProperties": false, - "description": "Configures topic names.", + "description": "Configure the topic name 
variables you can use in the pipeline definition.",
      "properties": {
        "default_error_topic_name": {
          "default": "${pipeline_name}-${component_name}-error",
diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
index e0d669859..def10c0f0 100644
--- a/docs/docs/user/migration-guide/v2-v3.md
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -1,8 +1,56 @@
 # Migrate from V2 to V3
 
+## [Make Kafka REST Proxy & Kafka Connect hosts default and improve Schema Registry config](https://github.com/bakdata/kpops/pull/354)
+
+The breaking changes target the `config.yaml` file:
+
+- The `schema_registry_url` is replaced with `schema_registry.url` (default `http://localhost:8081`) and `schema_registry.enabled` (default `false`).
+
+- `kafka_rest_host` is renamed to `kafka_rest.url` (default `http://localhost:8082`).
+
+- `kafka_connect_host` is replaced with `kafka_connect.url` (default `http://localhost:8083`).
+
+- `brokers` is renamed to `kafka_brokers`.
+
+The environment variable names of these config fields changed accordingly. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md) to see the newest changes.
+
+#### config.yaml
+
+```diff
+ environment: development
+- brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+- kafka_rest_host: "http://my-custom-rest.url:8082"
+- kafka_connect_host: "http://my-custom-connect.url:8083"
+- schema_registry_url: "http://my-custom-sr.url:8081"
++ kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
++ kafka_rest:
++   url: "http://my-custom-rest.url:8082"
++ kafka_connect:
++   url: "http://my-custom-connect.url:8083"
++ schema_registry:
++   enabled: true
++   url: "http://my-custom-sr.url:8081"
+```
+
+#### pipeline.yaml and defaults.yaml
+
+The variable is now called `kafka_brokers`.
+
+```diff
+...
+ app:
+   streams:
+-    brokers: ${brokers}
++    brokers: ${kafka_brokers}
+     schemaRegistryUrl: ${schema_registry_url}
+   nameOverride: override-with-this-name
+   imageTag: "1.0.0"
+...
+```
+
 ## [Move GitHub action to repository root](https://github.com/bakdata/kpops/pull/356)
 
-The location of the GitHub action has changed and it's now available directly as `bakdata/kpops`.
+The location of the GitHub action has changed, and it's now available directly as `bakdata/kpops`.
 
 You'll need to change it in your GitHub CI workflows.
 
diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml
index e3742ded9..d03a12c64 100644
--- a/examples/bakdata/atm-fraud-detection/config.yaml
+++ b/examples/bakdata/atm-fraud-detection/config.yaml
@@ -4,12 +4,16 @@ topic_name_config:
   default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic"
   default_output_topic_name: "${pipeline_name}-${component_name}-topic"
 
-brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
 
-schema_registry_url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081"
+schema_registry:
+  enabled: true
+  url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081"
 
-kafka_rest_host: "http://localhost:8082"
+kafka_rest:
+  url: "http://localhost:8082"
 
-kafka_connect_host: "http://localhost:8083"
+kafka_connect:
+  url: "http://localhost:8083"
 
 defaults_path: .
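The new nested config sections above are plain pydantic `BaseSettings` models, so each value can also come from the renamed `KPOPS_*` environment variables. Below is a minimal, runnable sketch of that behavior, assuming pydantic v1 (the version in use here); the class mirrors `SchemaRegistryConfig` from this patch, and the custom URL is illustrative only:

```python
import os

from pydantic import AnyHttpUrl, BaseSettings, Field, parse_obj_as


class SchemaRegistryConfig(BaseSettings):
    """Configuration for Schema Registry (mirrors the class added in this patch)."""

    enabled: bool = Field(
        default=False,
        description="Whether the Schema Registry handler should be initialized.",
    )
    url: AnyHttpUrl = Field(
        # parse_obj_as validates the default URL, see pydantic issue #1106
        default=parse_obj_as(AnyHttpUrl, "http://localhost:8081"),
        env="KPOPS_SCHEMA_REGISTRY_URL",
        description="Address of the Schema Registry.",
    )


# Without the env var, the new default applies:
assert SchemaRegistryConfig().url == "http://localhost:8081"

# The renamed environment variable overrides the default (URL value is illustrative):
os.environ["KPOPS_SCHEMA_REGISTRY_URL"] = "http://my-custom-sr.url:8081"
assert SchemaRegistryConfig().url == "http://my-custom-sr.url:8081"
```

Values passed explicitly, e.g. parsed from `config.yaml`, take priority over the environment variables, which in turn beat the defaults; this is standard pydantic v1 `BaseSettings` source ordering.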
diff --git a/examples/bakdata/atm-fraud-detection/defaults.yaml b/examples/bakdata/atm-fraud-detection/defaults.yaml index 609933f13..e3ba49c67 100644 --- a/examples/bakdata/atm-fraud-detection/defaults.yaml +++ b/examples/bakdata/atm-fraud-detection/defaults.yaml @@ -10,7 +10,7 @@ kafka-connector: kafka-app: app: streams: - brokers: ${brokers} + brokers: ${kafka_brokers} schemaRegistryUrl: ${schema_registry_url} optimizeLeaveGroupBehavior: false diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index ac88b82b6..24106e18f 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -21,7 +21,7 @@ from hooks import PATH_ROOT from hooks.gen_docs import IterableStrEnum from kpops.cli import main -from kpops.cli.pipeline_config import PipelineConfig +from kpops.config import KpopsConfig PATH_DOCS_RESOURCES = PATH_ROOT / "docs/docs/resources" PATH_DOCS_VARIABLES = PATH_DOCS_RESOURCES / "variables" @@ -254,8 +254,8 @@ def fill_csv_pipeline_config(target: Path) -> None: :param target: The path to the `.csv` file. Note that it must already contain the column names """ - for field in collect_fields(PipelineConfig): - field_info = PipelineConfig.Config.get_field_info(field.name) + for field in collect_fields(KpopsConfig): + field_info = KpopsConfig.Config.get_field_info(field.name) field_description: str = ( field.field_info.description or "No description available, please refer to the pipeline config documentation." diff --git a/kpops/cli/main.py b/kpops/cli/main.py index f58808cd2..f689231af 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -10,7 +10,6 @@ from kpops import __version__ from kpops.cli.custom_formatter import CustomFormatter -from kpops.cli.pipeline_config import ENV_PREFIX, PipelineConfig from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( @@ -19,6 +18,7 @@ from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper +from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema @@ -111,25 +111,25 @@ def setup_pipeline( pipeline_base_dir: Path, pipeline_path: Path, components_module: str | None, - pipeline_config: PipelineConfig, + kpops_config: KpopsConfig, ) -> Pipeline: registry = Registry() if components_module: registry.find_components(components_module) registry.find_components("kpops.components") - handlers = setup_handlers(components_module, pipeline_config) + handlers = setup_handlers(components_module, kpops_config) return Pipeline.load_from_yaml( - pipeline_base_dir, pipeline_path, registry, pipeline_config, handlers + pipeline_base_dir, pipeline_path, registry, kpops_config, handlers ) def setup_handlers( - components_module: str | None, config: PipelineConfig + components_module: str | None, config: KpopsConfig ) -> ComponentHandlers: schema_handler = SchemaHandler.load_schema_handler(components_module, config) - connector_handler = KafkaConnectHandler.from_pipeline_config(config) - proxy_wrapper = ProxyWrapper(config) + connector_handler = KafkaConnectHandler.from_kpops_config(config) + proxy_wrapper = ProxyWrapper(config.kafka_rest) topic_handler = TopicHandler(proxy_wrapper) return 
ComponentHandlers(schema_handler, connector_handler, topic_handler) @@ -191,17 +191,17 @@ def log_action(action: str, pipeline_component: PipelineComponent): log.info("\n") -def create_pipeline_config( +def create_kpops_config( config: Path, defaults: Optional[Path], verbose: bool -) -> PipelineConfig: +) -> KpopsConfig: setup_logging_level(verbose) - PipelineConfig.Config.config_path = config + KpopsConfig.Config.config_path = config if defaults: - pipeline_config = PipelineConfig(defaults_path=defaults) + kpops_config = KpopsConfig(defaults_path=defaults) else: - pipeline_config = PipelineConfig() - pipeline_config.defaults_path = config.parent / pipeline_config.defaults_path - return pipeline_config + kpops_config = KpopsConfig() + kpops_config.defaults_path = config.parent / kpops_config.defaults_path + return kpops_config @app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 @@ -248,9 +248,9 @@ def generate( filter_type: FilterType = FILTER_TYPE, verbose: bool = VERBOSE_OPTION, ) -> Pipeline: - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) if not template: @@ -283,9 +283,9 @@ def deploy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) @@ -308,9 +308,9 @@ def destroy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -332,9 +332,9 @@ def reset( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -357,9 +357,9 @@ def clean( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 9a3dd307e..aa1918a43 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py 
+++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -1,8 +1,12 @@ +from __future__ import annotations + import logging import time from time import sleep +from typing import TYPE_CHECKING import httpx +from pydantic import AnyHttpUrl from kpops.component_handlers.kafka_connect.exception import ( ConnectorNotFoundException, @@ -14,6 +18,9 @@ KafkaConnectResponse, ) +if TYPE_CHECKING: + from kpops.config import KafkaConnectConfig + HEADERS = {"Accept": "application/json", "Content-Type": "application/json"} log = logging.getLogger("KafkaConnectAPI") @@ -24,18 +31,12 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, host: str | None): - if not host: - error_message = ( - "The Kafka Connect host is not set. Please set the host in the config." - ) - log.error(error_message) - raise RuntimeError(error_message) - self._host: str = host + def __init__(self, config: KafkaConnectConfig) -> None: + self._config: KafkaConnectConfig = config @property - def host(self) -> str: - return self._host + def url(self) -> AnyHttpUrl: + return self._config.url def create_connector( self, connector_config: KafkaConnectorConfig @@ -49,7 +50,7 @@ def create_connector( config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self._host}/connectors", headers=HEADERS, json=connect_data + url=f"{self.url}/connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -71,7 +72,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: :return: Information about the connector """ response = httpx.get( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -99,7 +100,7 @@ def update_connector_config( connector_name = connector_config.name config_json = connector_config.dict() response = httpx.put( - url=f"{self._host}/connectors/{connector_name}/config", + url=f"{self.url}/connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) @@ -129,7 +130,7 @@ def validate_connector_config( :return: """ response = httpx.put( - url=f"{self._host}/connector-plugins/{connector_config.class_name}/config/validate", + url=f"{self.url}/connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, json=connector_config.dict(), ) @@ -156,7 +157,7 @@ def delete_connector(self, connector_name: str) -> None: API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)- """ response = httpx.delete( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 14f5af076..7e3d798fe 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -19,7 +19,7 @@ from typing_extensions import Self if TYPE_CHECKING: - from kpops.cli.pipeline_config import PipelineConfig + from kpops.config import KpopsConfig log = logging.getLogger("KafkaConnectHandler") @@ 
-100,14 +100,14 @@ def __dry_run_connector_creation( log.debug(connector_config.dict()) log.debug(f"PUT /connectors/{connector_name}/config HTTP/1.1") - log.debug(f"HOST: {self._connect_wrapper.host}") + log.debug(f"HOST: {self._connect_wrapper.url}") except ConnectorNotFoundException: diff = render_diff({}, connector_config.dict()) log.info( f"Connector Creation: connector {connector_name} does not exist. Creating connector with config:\n{diff}" ) log.debug("POST /connectors HTTP/1.1") - log.debug(f"HOST: {self._connect_wrapper.host}") + log.debug(f"HOST: {self._connect_wrapper.url}") errors = self._connect_wrapper.validate_connector_config(connector_config) if len(errors) > 0: @@ -129,15 +129,15 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: ) ) log.debug(f"DELETE /connectors/{connector_name} HTTP/1.1") - log.debug(f"HOST: {self._connect_wrapper.host}") + log.debug(f"HOST: {self._connect_wrapper.url}") except ConnectorNotFoundException: log.warning( f"Connector Destruction: connector {connector_name} does not exist and cannot be deleted. Skipping." ) @classmethod - def from_pipeline_config(cls, pipeline_config: PipelineConfig) -> Self: + def from_kpops_config(cls, config: KpopsConfig) -> Self: return cls( - connect_wrapper=ConnectWrapper(host=pipeline_config.kafka_connect_host), - timeout=pipeline_config.timeout, + connect_wrapper=ConnectWrapper(config.kafka_connect), + timeout=config.timeout, ) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index a053ccc62..4b21083de 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -8,21 +8,27 @@ from schema_registry.client.schema import AvroSchema from kpops.cli.exception import ClassNotFoundError -from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import find_class from kpops.component_handlers.schema_handler.schema_provider import ( Schema, SchemaProvider, ) from kpops.components.base_components.models.to_section import ToSection +from kpops.config import KpopsConfig from kpops.utils.colorify import greenify, magentaify log = logging.getLogger("SchemaHandler") class SchemaHandler: - def __init__(self, url: str, components_module: str | None): - self.schema_registry_client = SchemaRegistryClient(url) + def __init__( + self, + kpops_config: KpopsConfig, + components_module: str | None, + ) -> None: + self.schema_registry_client = SchemaRegistryClient( + kpops_config.schema_registry.url + ) self.components_module = components_module @cached_property @@ -42,15 +48,11 @@ def schema_provider(self) -> SchemaProvider: @classmethod def load_schema_handler( - cls, components_module: str | None, config: PipelineConfig + cls, components_module: str | None, config: KpopsConfig ) -> SchemaHandler | None: - if not config.schema_registry_url: - return None - - return cls( - url=config.schema_registry_url, - components_module=components_module, - ) + if config.schema_registry.enabled: + return cls(config, components_module) + return None def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: for topic_name, config in to_section.topics.items(): diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 1df0d106a..cef544ab9 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -129,7 +129,7 @@ def __dry_run_topic_creation( ) ) 
log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1") - log.debug(f"Host: {self.proxy_wrapper.host}") + log.debug(f"Host: {self.proxy_wrapper.url}") log.debug(HEADERS) log.debug(topic_spec.dict()) @@ -187,7 +187,7 @@ def __dry_run_topic_deletion(self, topic_name: str) -> None: log.warning( f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping." ) - log.debug(f"Host: {self.proxy_wrapper.host}") + log.debug(f"Host: {self.proxy_wrapper.url}") log.debug(HEADERS) log.debug("HTTP/1.1 404 Not Found") log.debug(HEADERS) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index af7914379..407dcfcd8 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -1,9 +1,12 @@ +from __future__ import annotations + import logging from functools import cached_property +from typing import TYPE_CHECKING import httpx +from pydantic import AnyHttpUrl -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers.topic.exception import ( KafkaRestProxyError, TopicNotFoundException, @@ -15,6 +18,9 @@ TopicSpec, ) +if TYPE_CHECKING: + from kpops.config import KafkaRestConfig + log = logging.getLogger("KafkaRestProxy") HEADERS = {"Content-Type": "application/json"} @@ -25,13 +31,8 @@ class ProxyWrapper: Wraps Kafka REST Proxy APIs """ - def __init__(self, pipeline_config: PipelineConfig) -> None: - if not pipeline_config.kafka_rest_host: - raise ValueError( - "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." - ) - - self._host = pipeline_config.kafka_rest_host + def __init__(self, config: KafkaRestConfig) -> None: + self._config: KafkaRestConfig = config @cached_property def cluster_id(self) -> str: @@ -44,7 +45,7 @@ def cluster_id(self) -> str: bootstrap.servers configuration. Therefore, only one Kafka cluster will be returned. :return: The Kafka cluster ID. """ - response = httpx.get(url=f"{self._host}/v3/clusters") + response = httpx.get(url=f"{self._config.url}/v3/clusters") if response.status_code == httpx.codes.OK: cluster_information = response.json() return cluster_information["data"][0]["cluster_id"] @@ -52,8 +53,8 @@ def cluster_id(self) -> str: raise KafkaRestProxyError(response) @property - def host(self) -> str: - return self._host + def url(self) -> AnyHttpUrl: + return self._config.url def create_topic(self, topic_spec: TopicSpec) -> None: """ @@ -62,7 +63,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :param topic_spec: The topic specification. 
""" response = httpx.post( - url=f"{self._host}/v3/clusters/{self.cluster_id}/topics", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics", headers=HEADERS, json=topic_spec.dict(exclude_none=True), ) @@ -80,7 +81,7 @@ def delete_topic(self, topic_name: str) -> None: :param topic_name: Name of the topic """ response = httpx.delete( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.NO_CONTENT: @@ -97,7 +98,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: :return: Response of the get topic API """ response = httpx.get( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.OK: @@ -123,7 +124,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: :return: The topic configuration. """ response = httpx.get( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", headers=HEADERS, ) @@ -150,7 +151,7 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No :param config_name: The configuration parameter name. """ response = httpx.post( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": json_body}, ) @@ -167,7 +168,7 @@ def get_broker_config(self) -> BrokerConfigResponse: :return: The broker configuration. """ response = httpx.get( - url=f"{self.host}/v3/clusters/{self.cluster_id}/brokers/-/configs", + url=f"{self.url}/v3/clusters/{self.cluster_id}/brokers/-/configs", headers=HEADERS, ) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 99dec42f2..545813f53 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -9,8 +9,8 @@ import typer from pydantic import BaseModel, Field -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers +from kpops.config import KpopsConfig from kpops.utils import cached_classproperty from kpops.utils.dict_ops import update_nested from kpops.utils.docstring import describe_attr @@ -45,7 +45,7 @@ class BaseDefaultsComponent(BaseModel): exclude=True, hidden_from_schema=True, ) - config: PipelineConfig = Field( + config: KpopsConfig = Field( default=..., description=describe_attr("config", __doc__), exclude=True, @@ -90,7 +90,7 @@ def extend_with_defaults(self, **kwargs) -> dict: :param kwargs: The init kwargs for pydantic :returns: Enriched kwargs with inheritted defaults """ - config: PipelineConfig = kwargs["config"] + config: KpopsConfig = kwargs["config"] log.debug( typer.style( "Enriching component of type ", fg=typer.colors.GREEN, bold=False @@ -177,7 +177,7 @@ def defaults_from_yaml(path: Path, key: str) -> dict: return value -def get_defaults_file_paths(config: PipelineConfig) -> tuple[Path, Path]: +def get_defaults_file_paths(config: KpopsConfig) -> tuple[Path, Path]: """Return the paths to the main and the environment defaults-files The files need not exist, this function will only check if the dir set in diff --git 
a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index e53886d68..bad11b6fc 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -241,7 +241,7 @@ def _get_kafka_connect_resetter_values( **KafkaConnectResetterValues( config=KafkaConnectResetterConfig( connector=self.full_name, - brokers=self.config.brokers, + brokers=self.config.kafka_brokers, **kwargs, ), connector_type=self._connector_type.value, diff --git a/kpops/cli/pipeline_config.py b/kpops/config.py similarity index 67% rename from kpops/cli/pipeline_config.py rename to kpops/config.py index 1400323f5..95193bd53 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/config.py @@ -3,9 +3,10 @@ from pathlib import Path from typing import TYPE_CHECKING, Any -from pydantic import BaseConfig, BaseSettings, Field +from pydantic import AnyHttpUrl, BaseConfig, BaseSettings, Field, parse_obj_as from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig +from kpops.utils.docstring import describe_object from kpops.utils.yaml_loading import load_yaml_file if TYPE_CHECKING: @@ -17,7 +18,7 @@ class TopicNameConfig(BaseSettings): - """Configures topic names.""" + """Configure the topic name variables you can use in the pipeline definition.""" default_output_topic_name: str = Field( default="${pipeline_name}-${component_name}", @@ -29,7 +30,43 @@ class TopicNameConfig(BaseSettings): ) -class PipelineConfig(BaseSettings): +class SchemaRegistryConfig(BaseSettings): + """Configuration for Schema Registry.""" + + enabled: bool = Field( + default=False, + description="Whether the Schema Registry handler should be initialized.", + ) + url: AnyHttpUrl = Field( + # For validating URLs use parse_obj_as + # https://github.com/pydantic/pydantic/issues/1106 + default=parse_obj_as(AnyHttpUrl, "http://localhost:8081"), + env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", + description="Address of the Schema Registry.", + ) + + +class KafkaRestConfig(BaseSettings): + """Configuration for Kafka REST Proxy.""" + + url: AnyHttpUrl = Field( + default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), + env=f"{ENV_PREFIX}KAFKA_REST_URL", + description="Address of the Kafka REST Proxy.", + ) + + +class KafkaConnectConfig(BaseSettings): + """Configuration for Kafka Connect.""" + + url: AnyHttpUrl = Field( + default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), + env=f"{ENV_PREFIX}KAFKA_CONNECT_URL", + description="Address of Kafka Connect.", + ) + + +class KpopsConfig(BaseSettings): """Pipeline configuration unrelated to the components.""" defaults_path: Path = Field( @@ -45,7 +82,7 @@ class PipelineConfig(BaseSettings): description="The environment you want to generate and deploy the pipeline to. " "Suffix your environment files with this value (e.g. 
defaults_development.yaml for environment=development).", ) - brokers: str = Field( + kafka_brokers: str = Field( default=..., env=f"{ENV_PREFIX}KAFKA_BROKERS", description="The comma separated Kafka brokers address.", @@ -57,25 +94,19 @@ class PipelineConfig(BaseSettings): ) topic_name_config: TopicNameConfig = Field( default=TopicNameConfig(), - description="Configure the topic name variables you can use in the pipeline definition.", + description=describe_object(TopicNameConfig.__doc__), ) - schema_registry_url: str | None = Field( - default=None, - example="http://localhost:8081", - env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", - description="Address of the Schema Registry.", + schema_registry: SchemaRegistryConfig = Field( + default=SchemaRegistryConfig(), + description=describe_object(SchemaRegistryConfig.__doc__), ) - kafka_rest_host: str | None = Field( - default=None, - env=f"{ENV_PREFIX}REST_PROXY_HOST", - example="http://localhost:8082", - description="Address of the Kafka REST Proxy.", + kafka_rest: KafkaRestConfig = Field( + default=KafkaRestConfig(), + description=describe_object(KafkaRestConfig.__doc__), ) - kafka_connect_host: str | None = Field( - default=None, - env=f"{ENV_PREFIX}CONNECT_HOST", - example="http://localhost:8083", - description="Address of Kafka Connect.", + kafka_connect: KafkaConnectConfig = Field( + default=KafkaConnectConfig(), + description=describe_object(KafkaConnectConfig.__doc__), ) timeout: int = Field( default=300, @@ -104,6 +135,7 @@ class Config(BaseConfig): config_path = Path("config.yaml") env_file = ".env" env_file_encoding = "utf-8" + env_prefix = ENV_PREFIX @classmethod def customise_sources( @@ -112,7 +144,7 @@ def customise_sources( env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> tuple[ - SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], ... + SettingsSourceCallable | Callable[[KpopsConfig], dict[str, Any]], ... 
]: return ( env_settings, @@ -122,7 +154,7 @@ def customise_sources( ) -def yaml_config_settings_source(settings: PipelineConfig) -> dict[str, Any]: +def yaml_config_settings_source(settings: KpopsConfig) -> dict[str, Any]: path_to_config = settings.Config.config_path if path_to_config.exists(): if isinstance(source := load_yaml_file(path_to_config), dict): diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 093a452ea..ce5b698cc 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -12,10 +12,10 @@ from rich.console import Console from rich.syntax import Syntax -from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.config import KpopsConfig from kpops.utils.dict_ops import generate_substitution, update_nested_pair from kpops.utils.environment import ENV from kpops.utils.yaml_loading import load_yaml_file, substitute, substitute_nested @@ -100,7 +100,7 @@ def __init__( component_list: list[dict], environment_components: list[dict], registry: Registry, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, ) -> None: self.components: PipelineComponents = PipelineComponents() @@ -117,7 +117,7 @@ def load_from_yaml( base_dir: Path, path: Path, registry: Registry, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, ) -> Pipeline: """Load pipeline definition from yaml @@ -310,7 +310,7 @@ def validate(self) -> None: self.components.validate_unique_names() @staticmethod - def pipeline_filename_environment(path: Path, config: PipelineConfig) -> Path: + def pipeline_filename_environment(path: Path, config: KpopsConfig) -> Path: """Add the environment name from the PipelineConfig to the pipeline.yaml path :param path: Path to pipeline.yaml file diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 470a1412d..571a82a7d 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -8,9 +8,9 @@ from pydantic.fields import FieldInfo, ModelField from pydantic.schema import SkipField -from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import _find_classes from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.config import KpopsConfig from kpops.utils.docstring import describe_object @@ -139,6 +139,6 @@ def gen_pipeline_schema( def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config""" schema = schema_json_of( - PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True + KpopsConfig, title="KPOps config schema", indent=4, sort_keys=True ) print(schema) diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index 509c5e0cc..40c496497 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -3,28 +3,27 @@ from pytest_mock import MockerFixture from kpops.cli.main import setup_handlers -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( KafkaConnectHandler, ) from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler +from kpops.config import KpopsConfig, SchemaRegistryConfig from 
tests.cli.resources.module import CustomSchemaProvider MODULE = CustomSchemaProvider.__module__ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): - config = PipelineConfig( + config = KpopsConfig( defaults_path=Path("fake"), environment="development", - kafka_rest_host="https://testhost:8082", - schema_registry_url=None, + kafka_brokers="broker:9092", ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") - connector_handler = KafkaConnectHandler.from_pipeline_config(pipeline_config=config) - connector_handler_mock.from_pipeline_config.return_value = connector_handler + connector_handler = KafkaConnectHandler.from_kpops_config(config) + connector_handler_mock.from_kpops_config.return_value = connector_handler topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler") wrapper = mocker.patch("kpops.cli.main.ProxyWrapper") @@ -39,7 +38,7 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): actual_handlers = setup_handlers(MODULE, config) - connector_handler_mock.from_pipeline_config.assert_called_once_with(config) + connector_handler_mock.from_kpops_config.assert_called_once_with(config) assert actual_handlers.schema_handler == expected.schema_handler assert actual_handlers.connector_handler == expected.connector_handler @@ -51,19 +50,19 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): - config = PipelineConfig( + config = KpopsConfig( defaults_path=Path("fake"), environment="development", - kafka_rest_host="https://testhost:8082", - schema_registry_url="https://testhost:8081", + schema_registry=SchemaRegistryConfig(enabled=True), + kafka_brokers="broker:9092", ) schema_handler_mock = mocker.patch("kpops.cli.main.SchemaHandler") schema_handler = SchemaHandler.load_schema_handler(MODULE, config) schema_handler_mock.load_schema_handler.return_value = schema_handler connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") - connector_handler = KafkaConnectHandler.from_pipeline_config(pipeline_config=config) - connector_handler_mock.from_pipeline_config.return_value = connector_handler + connector_handler = KafkaConnectHandler.from_kpops_config(config) + connector_handler_mock.from_kpops_config.return_value = connector_handler topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler") wrapper = mocker.patch("kpops.cli.main.ProxyWrapper") @@ -80,7 +79,7 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): schema_handler_mock.load_schema_handler.assert_called_once_with(MODULE, config) - connector_handler_mock.from_pipeline_config.assert_called_once_with(config) + connector_handler_mock.from_kpops_config.assert_called_once_with(config) assert actual_handlers.schema_handler == expected.schema_handler assert actual_handlers.connector_handler == expected.connector_handler diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py new file mode 100644 index 000000000..254a2d73a --- /dev/null +++ b/tests/cli/test_kpops_config.py @@ -0,0 +1,67 @@ +from pathlib import Path + +import pytest +from pydantic import AnyHttpUrl, ValidationError, parse_obj_as + +from kpops.config import ( + KafkaConnectConfig, + KafkaRestConfig, + KpopsConfig, + SchemaRegistryConfig, +) + + +def test_kpops_config_with_default_values(): + default_config = KpopsConfig( + environment="development", kafka_brokers="http://broker:9092" + ) + + assert default_config.defaults_path == Path(".") + assert 
default_config.defaults_filename_prefix == "defaults" + assert ( + default_config.topic_name_config.default_output_topic_name + == "${pipeline_name}-${component_name}" + ) + assert ( + default_config.topic_name_config.default_error_topic_name + == "${pipeline_name}-${component_name}-error" + ) + assert default_config.schema_registry.enabled is False + assert default_config.schema_registry.url == "http://localhost:8081" + assert default_config.kafka_rest.url == "http://localhost:8082" + assert default_config.kafka_connect.url == "http://localhost:8083" + assert default_config.timeout == 300 + assert default_config.create_namespace is False + assert default_config.helm_config.context is None + assert default_config.helm_config.debug is False + assert default_config.helm_config.api_version is None + assert default_config.helm_diff_config.ignore == set() + assert default_config.retain_clean_jobs is False + + +def test_kpops_config_with_different_invalid_urls(): + with pytest.raises(ValidationError): + KpopsConfig( + environment="development", + kafka_brokers="http://broker:9092", + kafka_connect=KafkaConnectConfig( + url=parse_obj_as(AnyHttpUrl, "invalid-host") + ), + ) + + with pytest.raises(ValidationError): + KpopsConfig( + environment="development", + kafka_brokers="http://broker:9092", + kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "invalid-host")), + ) + + with pytest.raises(ValidationError): + KpopsConfig( + environment="development", + kafka_brokers="http://broker:9092", + schema_registry=SchemaRegistryConfig( + enabled=True, + url=parse_obj_as(AnyHttpUrl, "invalid-host"), + ), + ) diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 7a07c1a12..9a44412dd 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -2,7 +2,7 @@ import pytest -from kpops.cli.pipeline_config import PipelineConfig +from kpops.config import KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.environment import ENV @@ -55,7 +55,7 @@ def test_should_not_set_pipeline_name_with_the_same_base_dir(): def test_pipeline_file_name_environment(): - config = PipelineConfig( + config = KpopsConfig( defaults_path=DEFAULTS_PATH, environment="some_environment", ) diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 3db9c090f..ca9d53313 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -6,7 +6,6 @@ import pytest from pytest_httpx import HTTPXMock -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers.kafka_connect.connect_wrapper import ConnectWrapper from kpops.component_handlers.kafka_connect.exception import ( ConnectorNotFoundException, @@ -17,22 +16,22 @@ KafkaConnectResponse, ) from kpops.component_handlers.kafka_connect.timeout import timeout +from kpops.config import KpopsConfig HEADERS = {"Accept": "application/json", "Content-Type": "application/json"} -HOST = "http://localhost:8083" +DEFAULT_HOST = "http://localhost:8083" DEFAULTS_PATH = Path(__file__).parent / "resources" class TestConnectorApiWrapper: @pytest.fixture(autouse=True) def setup(self): - config = PipelineConfig( + config = KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", - kafka_connect_host=HOST, ) - self.connect_wrapper = ConnectWrapper(host=config.kafka_connect_host) + self.connect_wrapper = 
ConnectWrapper(config.kafka_connect) @pytest.fixture def connector_config(self) -> KafkaConnectorConfig: @@ -43,19 +42,6 @@ def connector_config(self) -> KafkaConnectorConfig: } ) - def test_should_through_exception_when_host_is_not_set(self): - config = PipelineConfig( - defaults_path=DEFAULTS_PATH, - environment="development", - kafka_connect_host=None, - ) - with pytest.raises(RuntimeError) as run_time_error: - ConnectWrapper(host=config.kafka_connect_host) - assert ( - str(run_time_error.value) - == "The Kafka Connect host is not set. Please set the host in the config." - ) - @patch("httpx.post") def test_should_create_post_requests_for_given_connector_configuration( self, mock_post: MagicMock @@ -75,7 +61,7 @@ def test_should_create_post_requests_for_given_connector_configuration( self.connect_wrapper.create_connector(KafkaConnectorConfig(**configs)) mock_post.assert_called_with( - url=f"{HOST}/connectors", + url=f"{DEFAULT_HOST}/connectors", headers=HEADERS, json={ "name": "test-connector", @@ -107,7 +93,7 @@ def test_should_return_correct_response_when_connector_created( } httpx_mock.add_response( method="POST", - url=f"{HOST}/connectors", + url=f"{DEFAULT_HOST}/connectors", headers=HEADERS, json=actual_response, status_code=201, @@ -124,7 +110,7 @@ def test_should_raise_connector_exists_exception_when_connector_exists( ): httpx_mock.add_response( method="POST", - url=f"{HOST}/connectors", + url=f"{DEFAULT_HOST}/connectors", json={}, status_code=409, ) @@ -145,7 +131,7 @@ def test_should_create_correct_get_connector_request(self, mock_get: MagicMock): self.connect_wrapper.get_connector(connector_name) mock_get.assert_called_with( - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers={"Accept": "application/json", "Content-Type": "application/json"}, ) @@ -176,7 +162,7 @@ def test_should_return_correct_response_when_getting_connector( } httpx_mock.add_response( method="GET", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json=actual_response, status_code=200, @@ -193,7 +179,7 @@ def test_should_raise_connector_not_found_when_getting_connector( httpx_mock.add_response( method="GET", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=404, @@ -213,7 +199,7 @@ def test_should_raise_rebalance_in_progress_when_getting_connector( httpx_mock.add_response( method="GET", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=409, @@ -247,7 +233,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc ) mock_put.assert_called_with( - url=f"{HOST}/connectors/{connector_name}/config", + url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers={"Accept": "application/json", "Content-Type": "application/json"}, json=KafkaConnectorConfig(**configs).dict(), ) @@ -281,7 +267,7 @@ def test_should_return_correct_response_when_update_connector( } httpx_mock.add_response( method="PUT", - url=f"{HOST}/connectors/{connector_name}/config", + url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers=HEADERS, json=actual_response, status_code=200, @@ -323,7 +309,7 @@ def test_should_return_correct_response_when_update_connector_created( } httpx_mock.add_response( method="PUT", - url=f"{HOST}/connectors/{connector_name}/config", + 
url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers=HEADERS, json=actual_response, status_code=201, @@ -345,7 +331,7 @@ def test_should_raise_connector_exists_exception_when_update_connector( httpx_mock.add_response( method="PUT", - url=f"{HOST}/connectors/{connector_name}/config", + url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers=HEADERS, json={}, status_code=409, @@ -369,7 +355,7 @@ def test_should_create_correct_delete_connector_request( self.connect_wrapper.delete_connector(connector_name) mock_delete.assert_called_with( - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, ) @@ -399,7 +385,7 @@ def test_should_return_correct_response_when_deleting_connector( } httpx_mock.add_response( method="DELETE", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json=actual_response, status_code=204, @@ -416,7 +402,7 @@ def test_should_raise_connector_not_found_when_deleting_connector( httpx_mock.add_response( method="DELETE", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=404, @@ -436,7 +422,7 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector( httpx_mock.add_response( method="DELETE", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=409, @@ -467,7 +453,7 @@ def test_should_create_correct_validate_connector_config_request( self.connect_wrapper.validate_connector_config(connector_config) mock_put.assert_called_with( - url=f"{HOST}/connector-plugins/FileStreamSinkConnector/config/validate", + url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, json=connector_config.dict(), ) @@ -489,7 +475,7 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( ) mock_put.assert_called_with( - url=f"{HOST}/connector-plugins/{connector_name}/config/validate", + url=f"{DEFAULT_HOST}/connector-plugins/{connector_name}/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, json=KafkaConnectorConfig(**{"name": connector_name, **configs}).dict(), ) @@ -501,7 +487,7 @@ def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): actual_response = json.load(f) httpx_mock.add_response( method="PUT", - url=f"{HOST}/connector-plugins/FileStreamSinkConnector/config/validate", + url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", headers=HEADERS, json=actual_response, status_code=200, diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index ccea021c6..1ead99781 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -1,15 +1,13 @@ import json -from pathlib import Path from unittest import mock from unittest.mock import MagicMock import pytest -from pydantic import BaseModel +from pydantic import AnyHttpUrl, BaseModel, parse_obj_as from pytest_mock import MockerFixture from schema_registry.client.schema import AvroSchema from schema_registry.client.utils import SchemaVersion -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers.schema_handler.schema_handler 
import SchemaHandler from kpops.component_handlers.schema_handler.schema_provider import SchemaProvider from kpops.components.base_components.models import TopicName @@ -18,6 +16,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig, SchemaRegistryConfig from kpops.utils.colorify import greenify, magentaify from tests.pipeline.test_components import TestSchemaProvider @@ -69,34 +68,39 @@ def to_section(topic_config: TopicConfig) -> ToSection: return ToSection(topics={TopicName("topic-X"): topic_config}) -def test_load_schema_handler(): - config_enable = PipelineConfig( - defaults_path=Path("fake"), +@pytest.fixture() +def kpops_config_with_sr_enabled() -> KpopsConfig: + return KpopsConfig( environment="development", - schema_registry_url="http://localhost:8081", + kafka_brokers="broker:9092", + schema_registry=SchemaRegistryConfig( + enabled=True, url=parse_obj_as(AnyHttpUrl, "http://mock:8081") + ), ) - config_disable = config_enable.copy() - config_disable.schema_registry_url = None - assert ( - SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_disable) - is None - ) +def test_load_schema_handler(kpops_config_with_sr_enabled: KpopsConfig): assert isinstance( - SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_enable), + SchemaHandler.load_schema_handler( + TEST_SCHEMA_PROVIDER_MODULE, kpops_config_with_sr_enabled + ), SchemaHandler, ) + config_disable = kpops_config_with_sr_enabled.copy() + config_disable.schema_registry = SchemaRegistryConfig(enabled=False) -def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): - config_enable = PipelineConfig( - defaults_path=Path("fake"), - environment="development", - schema_registry_url="http://localhost:8081", + assert ( + SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_disable) + is None ) + + +def test_should_lazy_load_schema_provider( + find_class_mock: MagicMock, kpops_config_with_sr_enabled: KpopsConfig +): schema_handler = SchemaHandler.load_schema_handler( - TEST_SCHEMA_PROVIDER_MODULE, config_enable + TEST_SCHEMA_PROVIDER_MODULE, kpops_config_with_sr_enabled ) assert schema_handler is not None @@ -111,9 +115,12 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): find_class_mock.assert_called_once_with(TEST_SCHEMA_PROVIDER_MODULE, SchemaProvider) -def test_should_raise_value_error_if_schema_provider_class_not_found(): +def test_should_raise_value_error_if_schema_provider_class_not_found( + kpops_config_with_sr_enabled: KpopsConfig, +): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=NON_EXISTING_PROVIDER_MODULE, ) with pytest.raises(ValueError) as value_error: @@ -129,22 +136,22 @@ def test_should_raise_value_error_if_schema_provider_class_not_found(): ) -def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty(): - config_enable = PipelineConfig( - defaults_path=Path("fake"), - environment="development", - schema_registry_url="http://localhost:8081", - ) - +def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty( + kpops_config_with_sr_enabled: KpopsConfig, +): with pytest.raises(ValueError): - schema_handler = SchemaHandler.load_schema_handler(None, config_enable) + schema_handler = SchemaHandler.load_schema_handler( + None, kpops_config_with_sr_enabled + ) assert schema_handler is not None 
schema_handler.schema_provider.provide_schema( "com.bakdata.kpops.test.SchemaHandlerTest", {} ) with pytest.raises(ValueError) as value_error: - schema_handler = SchemaHandler.load_schema_handler("", config_enable) + schema_handler = SchemaHandler.load_schema_handler( + "", kpops_config_with_sr_enabled + ) assert schema_handler is not None schema_handler.schema_provider.provide_schema( "com.bakdata.kpops.test.SchemaHandlerTest", {} @@ -157,10 +164,14 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( - to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock + to_section: ToSection, + log_info_mock: MagicMock, + schema_registry_mock: MagicMock, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -178,9 +189,11 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [1, 2, 3] @@ -199,10 +212,12 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl topic_config: TopicConfig, to_section: ToSection, schema_registry_mock: MagicMock, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -239,10 +254,12 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve log_info_mock: MagicMock, log_debug_mock: MagicMock, schema_registry_mock: MagicMock, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) @@ -273,12 +290,14 @@ def test_should_submit_non_existing_schema_when_not_dry( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -300,9 +319,11 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = 
SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -317,10 +338,13 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( def test_should_delete_schemas_when_not_in_dry_run( - to_section: ToSection, schema_registry_mock: MagicMock + to_section: ToSection, + schema_registry_mock: MagicMock, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + kpops_config=kpops_config_with_sr_enabled, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index 7b587ecb3..e1ff9ae40 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -7,16 +7,16 @@ from pytest_httpx import HTTPXMock from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers.topic.exception import ( KafkaRestProxyError, TopicNotFoundException, ) from kpops.component_handlers.topic.model import TopicResponse, TopicSpec from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper +from kpops.config import KpopsConfig HEADERS = {"Content-Type": "application/json"} -HOST = "http://localhost:8082" +DEFAULT_HOST = "http://localhost:8082" DEFAULTS_PATH = Path(__file__).parent.parent / "resources" @@ -31,10 +31,8 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def setup(self, httpx_mock: HTTPXMock): - config = PipelineConfig( - defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST - ) - self.proxy_wrapper = ProxyWrapper(pipeline_config=config) + config = KpopsConfig(defaults_path=DEFAULTS_PATH, environment="development") + self.proxy_wrapper = ProxyWrapper(config.kafka_rest) with open( DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json" @@ -43,23 +41,13 @@ def setup(self, httpx_mock: HTTPXMock): httpx_mock.add_response( method="GET", - url=f"{HOST}/v3/clusters", + url=f"{DEFAULT_HOST}/v3/clusters", json=cluster_response, status_code=200, ) - assert self.proxy_wrapper.host == HOST + assert self.proxy_wrapper.url == DEFAULT_HOST assert self.proxy_wrapper.cluster_id == "cluster-1" - def test_should_raise_exception_when_host_is_not_set(self): - config = PipelineConfig(defaults_path=DEFAULTS_PATH, environment="development") - config.kafka_rest_host = None - with pytest.raises(ValueError) as exception: - ProxyWrapper(pipeline_config=config) - assert ( - str(exception.value) - == "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." 
- ) - @patch("httpx.post") def test_should_create_topic_with_all_topic_configuration( self, mock_post: MagicMock @@ -78,7 +66,7 @@ def test_should_create_topic_with_all_topic_configuration( self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) mock_post.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", headers=HEADERS, json=topic_spec, ) @@ -91,7 +79,7 @@ def test_should_create_topic_with_no_configuration(self, mock_post: MagicMock): self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) mock_post.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", headers=HEADERS, json=topic_spec, ) @@ -104,7 +92,7 @@ def test_should_call_get_topic(self, mock_get: MagicMock): self.proxy_wrapper.get_topic(topic_name=topic_name) mock_get.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", headers=HEADERS, ) @@ -122,7 +110,7 @@ def test_should_call_batch_alter_topic_config(self, mock_put: MagicMock): ) mock_put.assert_called_with( - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", headers=HEADERS, json={ "data": [ @@ -140,7 +128,7 @@ def test_should_call_delete_topic(self, mock_delete: MagicMock): self.proxy_wrapper.delete_topic(topic_name=topic_name) mock_delete.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", headers=HEADERS, ) @@ -150,7 +138,7 @@ def test_should_call_get_broker_config(self, mock_get: MagicMock): self.proxy_wrapper.get_broker_config() mock_get.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/brokers/-/configs", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/brokers/-/configs", headers=HEADERS, ) @@ -169,7 +157,7 @@ def test_should_log_topic_creation( httpx_mock.add_response( method="POST", - url=f"{HOST}/v3/clusters/cluster-1/topics", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics", json=topic_spec, headers=HEADERS, status_code=201, @@ -184,7 +172,7 @@ def test_should_log_topic_deletion( httpx_mock.add_response( method="DELETE", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}", headers=HEADERS, status_code=204, ) @@ -213,7 +201,7 @@ def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock httpx_mock.add_response( method="GET", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}", headers=HEADERS, status_code=200, json=res, @@ -231,7 +219,7 @@ def test_should_rais_topic_not_found_exception_get_topic( httpx_mock.add_response( method="GET", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}", headers=HEADERS, status_code=404, json={ @@ -251,7 +239,7 @@ def test_should_log_reset_default_topic_config_when_deleted( httpx_mock.add_response( method="POST", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", + 
url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": [{"name": config_name, "operation": "DELETE"}]}, status_code=204, diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 7b25e5f74..dd593f826 100644 --- a/tests/components/test_base_defaults_component.py +++ b/tests/components/test_base_defaults_component.py @@ -3,12 +3,12 @@ import pytest -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, load_defaults, ) +from kpops.config import KpopsConfig from kpops.utils.environment import ENV DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -38,8 +38,8 @@ class EnvVarTest(BaseDefaultsComponent): @pytest.fixture -def config() -> PipelineConfig: - return PipelineConfig( +def config() -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", ) @@ -116,9 +116,7 @@ def test_load_defaults_with_environment( == defaults ) - def test_inherit_defaults( - self, config: PipelineConfig, handlers: ComponentHandlers - ): + def test_inherit_defaults(self, config: KpopsConfig, handlers: ComponentHandlers): component = Child(config=config, handlers=handlers) assert ( @@ -137,7 +135,7 @@ def test_inherit_defaults( component.hard_coded == "hard_coded_value" ), "Defaults in code should be kept for parents" - def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_inherit(self, config: KpopsConfig, handlers: ComponentHandlers): component = Child( config=config, handlers=handlers, @@ -161,7 +159,7 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): ), "Defaults in code should be kept for parents" def test_multiple_generations( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): component = GrandChild(config=config, handlers=handlers) @@ -183,7 +181,7 @@ def test_multiple_generations( assert component.grand_child == "grand-child-value" def test_env_var_substitution( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): ENV["pipeline_name"] = str(DEFAULTS_PATH) component = EnvVarTest(config=config, handlers=handlers) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index c6527c00c..66d9daa31 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -4,7 +4,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -12,14 +11,15 @@ HelmUpgradeInstallFlags, ) from kpops.components.base_components import KafkaApp +from kpops.config import KpopsConfig DEFAULTS_PATH = Path(__file__).parent / "resources" class TestKafkaApp: @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", helm_diff_config=HelmDiffConfig(), @@ -33,7 +33,7 @@ def handlers(self) -> ComponentHandlers: topic_handler=MagicMock(), ) - def test_default_configs(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_default_configs(self, config: KpopsConfig, 
handlers: ComponentHandlers): kafka_app = KafkaApp( name="example-name", config=config, @@ -59,7 +59,7 @@ def test_default_configs(self, config: PipelineConfig, handlers: ComponentHandle def test_should_deploy_kafka_app( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, mocker: MockerFixture, ): diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 912f449fb..46616cd17 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -4,11 +4,11 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig from kpops.components.base_components.kafka_connector import KafkaConnector +from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" CONNECTOR_NAME = "test-connector-with-long-name-0123456789abcdefghijklmnop" @@ -19,15 +19,15 @@ class TestKafkaConnector: @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", topic_name_config=TopicNameConfig( default_error_topic_name="${component_type}-error-topic", default_output_topic_name="${component_type}-output-topic", ), - brokers="broker:9092", + kafka_brokers="broker:9092", helm_diff_config=HelmDiffConfig(), ) @@ -62,7 +62,7 @@ def connector_config(self) -> KafkaConnectorConfig: def test_connector_config_name_override( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 91760e90c..6861817bd 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -3,7 +3,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmUpgradeInstallFlags, @@ -25,6 +24,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig from kpops.utils.colorify import magentaify from tests.components.test_kafka_connector import ( CONNECTOR_CLEAN_FULL_NAME, @@ -42,7 +42,7 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture def connector( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ) -> KafkaSinkConnector: @@ -63,7 +63,7 @@ def connector( def test_connector_config_parsing( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -93,7 +93,7 @@ def test_connector_config_parsing( def test_from_section_parsing_input_topic( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -120,7 +120,7 @@ def test_from_section_parsing_input_topic( def test_from_section_parsing_input_pattern( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -256,7 +256,7 @@ def 
test_clean_when_dry_run_is_true( def test_clean_when_dry_run_is_false( self, connector: KafkaSinkConnector, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, log_info_mock: MagicMock, @@ -334,7 +334,7 @@ def test_clean_when_dry_run_is_false( def test_clean_without_to_when_dry_run_is_true( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, dry_run_handler: MagicMock, connector_config: KafkaConnectorConfig, @@ -353,7 +353,7 @@ def test_clean_without_to_when_dry_run_is_true( def test_clean_without_to_when_dry_run_is_false( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, dry_run_handler: MagicMock, diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index db9a2dd77..82b042d0c 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -3,7 +3,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmUpgradeInstallFlags, @@ -22,6 +21,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig from kpops.utils.environment import ENV from tests.components.test_kafka_connector import ( CONNECTOR_CLEAN_FULL_NAME, @@ -35,7 +35,7 @@ class TestKafkaSourceConnector(TestKafkaConnector): @pytest.fixture def connector( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ) -> KafkaSourceConnector: @@ -57,7 +57,7 @@ def connector( def test_from_section_raises_exception( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -266,7 +266,7 @@ def test_clean_when_dry_run_is_false( def test_clean_without_to_when_dry_run_is_false( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, dry_run_handler: MagicMock, @@ -342,7 +342,7 @@ def test_clean_without_to_when_dry_run_is_false( def test_clean_without_to_when_dry_run_is_true( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, dry_run_handler: MagicMock, connector_config: KafkaConnectorConfig, diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 46eb9795d..d89db64bd 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -5,7 +5,6 @@ from pytest_mock import MockerFixture from typing_extensions import override -from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -17,6 +16,7 @@ KubernetesApp, KubernetesAppConfig, ) +from kpops.config import KpopsConfig from kpops.utils.colorify import magentaify DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -28,8 +28,8 @@ class KubernetesTestValue(KubernetesAppConfig): class TestKubernetesApp: @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", helm_diff_config=HelmDiffConfig(), @@ -64,7 +64,7 @@ def repo_config(self) -> HelmRepoConfig: @pytest.fixture def kubernetes_app( self, - config: PipelineConfig, + 
config: KpopsConfig, handlers: ComponentHandlers, app_value: KubernetesTestValue, repo_config: HelmRepoConfig, @@ -106,7 +106,7 @@ def test_should_lazy_load_helm_wrapper_and_not_repo_add( def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, mocker: MockerFixture, @@ -152,7 +152,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( def test_should_deploy_app_with_local_helm_chart( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, app_value: KubernetesTestValue, @@ -218,7 +218,7 @@ def test_should_call_helm_uninstall_when_destroying_kubernetes_app( def test_should_raise_value_error_when_name_is_not_valid( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, app_value: KubernetesTestValue, repo_config: HelmRepoConfig, diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 56d52a68b..2c7853fb3 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -5,7 +5,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmUpgradeInstallFlags from kpops.components import ProducerApp @@ -13,6 +12,7 @@ OutputTopicTypes, TopicConfig, ) +from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -30,8 +30,8 @@ def handlers(self) -> ComponentHandlers: ) @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", topic_name_config=TopicNameConfig( @@ -42,7 +42,7 @@ def config(self) -> PipelineConfig: @pytest.fixture def producer_app( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ) -> ProducerApp: return ProducerApp( name=self.PRODUCER_APP_NAME, @@ -65,7 +65,7 @@ def producer_app( }, ) - def test_output_topics(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_output_topics(self, config: KpopsConfig, handlers: ComponentHandlers): producer_app = ProducerApp( name=self.PRODUCER_APP_NAME, config=config, diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index dce2c7e96..50ab2c332 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -4,7 +4,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -17,6 +16,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -34,8 +34,8 @@ def handlers(self) -> ComponentHandlers: ) @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", topic_name_config=TopicNameConfig( @@ -47,7 +47,7 @@ def config(self) -> PipelineConfig: @pytest.fixture def streams_app( - self, config: PipelineConfig, handlers: ComponentHandlers 
+ self, config: KpopsConfig, handlers: ComponentHandlers ) -> StreamsApp: return StreamsApp( name=self.STREAMS_APP_NAME, @@ -68,7 +68,7 @@ def streams_app( }, ) - def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_set_topics(self, config: KpopsConfig, handlers: ComponentHandlers): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, config=config, @@ -113,7 +113,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): assert "extraInputPatterns" in streams_config def test_no_empty_input_topic( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -143,7 +143,7 @@ def test_no_empty_input_topic( assert "inputPattern" in streams_config assert "extraInputPatterns" not in streams_config - def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_should_validate(self, config: KpopsConfig, handlers: ComponentHandlers): # An exception should be raised when both role and type are defined and type is input with pytest.raises(ValueError): StreamsApp( @@ -189,7 +189,7 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle ) def test_set_streams_output_from_to( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -228,7 +228,7 @@ def test_set_streams_output_from_to( assert streams_app.app.streams.error_topic == "${error_topic_name}" def test_weave_inputs_from_prev_component( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -265,7 +265,7 @@ def test_weave_inputs_from_prev_component( def test_deploy_order_when_dry_run_is_false( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, mocker: MockerFixture, ): diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index 2707ee0fa..8a9ca81c3 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ b/tests/pipeline/resources/custom-config/config.yaml @@ -3,9 +3,13 @@ defaults_path: ../no-topics-defaults topic_name_config: default_error_topic_name: "${component_name}-dead-letter-topic" default_output_topic_name: "${component_name}-test-topic" -brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" -kafka_connect_host: "http://localhost:8083" -kafka_rest_host: "http://localhost:8082" -schema_registry_url: "http://localhost:8081" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_connect: + url: "http://localhost:8083" +kafka_rest: + url: "http://localhost:8082" +schema_registry: + enabled: true + url: "http://localhost:8081" helm_config: api_version: "2.1.1" diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml index c4e2aa259..e1223203b 100644 --- a/tests/pipeline/resources/defaults.yaml +++ b/tests/pipeline/resources/defaults.yaml @@ -5,7 +5,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${brokers}" + brokers: "${kafka_brokers}" schema_registry_url: "${schema_registry_url}" version: "2.4.2" diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml index 6b7c754ab..14c488c5f 
100644 --- a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml +++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml @@ -1,10 +1,12 @@ environment: development defaults_path: .. -brokers: "broker:9092" +kafka_brokers: "broker:9092" topic_name_config: default_error_topic_name: ${component_type}-error-topic default_output_topic_name: ${component_type}-output-topic helm_diff_config: enable: false -kafka_connect_host: "kafka_connect_host:8083" -kafka_rest_host: "kafka_rest_host:8082" +kafka_connect: + url: "http://kafka_connect_url:8083" +kafka_rest: + url: "http://kafka_rest_url:8082" diff --git a/tests/pipeline/resources/no-topics-defaults/defaults.yaml b/tests/pipeline/resources/no-topics-defaults/defaults.yaml index 47de626e6..87d21d47d 100644 --- a/tests/pipeline/resources/no-topics-defaults/defaults.yaml +++ b/tests/pipeline/resources/no-topics-defaults/defaults.yaml @@ -1,7 +1,7 @@ kafka-app: app: streams: - brokers: "${brokers}" + brokers: "${kafka_brokers}" schemaRegistryUrl: "${schema_registry_url}" producer-app: diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml index dfbe23db9..c67f869d9 100644 --- a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml +++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml @@ -7,5 +7,5 @@ kubernetes-app: kafka-app: app: streams: - brokers: ${brokers} + brokers: ${kafka_brokers} schemaRegistryUrl: ${schema_registry_url} diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml index 2564e0012..77d666b1e 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml @@ -4,7 +4,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${brokers}" + brokers: "${kafka_brokers}" schemaRegistryUrl: "${schema_registry_url}" producer-app: {} # inherits from kafka-app diff --git a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml index 00b3b2673..3b9e93eb7 100644 --- a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml @@ -5,7 +5,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${broker}" + brokers: "${kafka_brokers}" schema_registry_url: "${schema_registry_url}" version: "2.4.2" diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index af9cde479..42992bbb7 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -462,7 +462,6 @@ def test_default_config(self, snapshot: SnapshotTest): def test_env_vars_precedence_over_config( self, monkeypatch: MonkeyPatch, - snapshot: SnapshotTest, ): monkeypatch.setenv(name="KPOPS_KAFKA_BROKERS", value="env_broker") From a04c98f4d926ef627139e11f1a1a52e5dcaa942e Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 24 Oct 2023 15:36:43 +0200 Subject: [PATCH 03/34] Create HelmApp component (#370) --- docs/docs/developer/auto-generation.md | 2 +- .../architecture/components-hierarchy.md | 22 +- ...aults_pipeline_component_dependencies.yaml | 5 +- .../dependencies/kpops_structure.yaml | 14 +- .../pipeline_component_dependencies.yaml | 16 +- .../pipeline-components/headers/helm-app.yaml | 3 + 
.../pipeline-components/helm-app.yaml | 63 +++++ .../pipeline-components/kubernetes-app.yaml | 11 - .../pipeline-components/pipeline.yaml | 74 +++++- .../sections/app-helm-app.yaml | 6 + ...tes-app.yaml => repo_config-helm-app.yaml} | 0 .../pipeline-defaults/defaults-helm-app.yaml | 21 ++ .../defaults-kubernetes-app.yaml | 13 +- .../resources/pipeline-defaults/defaults.yaml | 34 ++- .../headers/defaults-helm-app.yaml | 5 + .../headers/defaults-kubernetes-app.yaml | 2 +- docs/docs/schema/pipeline.json | 166 +++++++------- .../core-concepts/components/kafka-app.md | 4 +- .../components/kubernetes-app.md | 6 +- docs/docs/user/references/cli-commands.md | 2 +- docs/mkdocs.yml | 1 + hooks/gen_docs/gen_docs_env_vars.py | 4 +- kpops/cli/main.py | 2 +- kpops/components/__init__.py | 2 + kpops/components/base_components/__init__.py | 2 + .../base_defaults_component.py | 3 +- kpops/components/base_components/helm_app.py | 166 ++++++++++++++ kpops/components/base_components/kafka_app.py | 8 +- .../base_components/kubernetes_app.py | 157 +------------ tests/cli/test_registry.py | 3 +- tests/components/test_helm_app.py | 215 ++++++++++++++++++ tests/components/test_kubernetes_app.py | 172 +------------- 32 files changed, 716 insertions(+), 488 deletions(-) create mode 100644 docs/docs/resources/pipeline-components/headers/helm-app.yaml create mode 100644 docs/docs/resources/pipeline-components/helm-app.yaml create mode 100644 docs/docs/resources/pipeline-components/sections/app-helm-app.yaml rename docs/docs/resources/pipeline-components/sections/{repo_config-kubernetes-app.yaml => repo_config-helm-app.yaml} (100%) create mode 100644 docs/docs/resources/pipeline-defaults/defaults-helm-app.yaml create mode 100644 docs/docs/resources/pipeline-defaults/headers/defaults-helm-app.yaml create mode 100644 kpops/components/base_components/helm_app.py create mode 100644 tests/components/test_helm_app.py diff --git a/docs/docs/developer/auto-generation.md b/docs/docs/developer/auto-generation.md index b87cbcad0..a530e9f72 100644 --- a/docs/docs/developer/auto-generation.md +++ b/docs/docs/developer/auto-generation.md @@ -10,7 +10,7 @@ Auto generation happens mostly with [`pre-commit`](https://pre-commit.com/) hook - `cli_env_vars.env` -- All CLI environment variables in a `dotenv` file. - `cli_env_vars.md` -- All CLI environment variables in a table. -- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`PipelineConfig`](https://github.com/bakdata/kpops/blob/main/kpops/cli/kpops_config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it. +- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`KpopsConfig`](https://github.com/bakdata/kpops/blob/main/kpops/cli/kpops_config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it. - `config_env_vars.env` -- Almost all pipeline config environment variables in a table. - `variable_substitution.yaml` -- A copy of `./tests/pipeline/resources/component-type-substitution/pipeline.yaml` used as an example of substitution. 
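The `config_env_vars` generation described above keys off pydantic's `env` field metadata. A rough, self-contained sketch of that convention, using a stand-in model rather than the real `KpopsConfig` (and assuming pydantic v1 semantics, which keep the `env` keyword in `field_info.extra`):

```python
from pydantic import BaseSettings, Field


class DemoConfig(BaseSettings):
    """Stand-in for KpopsConfig, just to illustrate the convention."""

    # mirrors the KPOPS_KAFKA_BROKERS variable exercised in the tests
    kafka_brokers: str = Field("broker:9092", env="KPOPS_KAFKA_BROKERS")
    # no `env` declared -> invisible to the docs generator
    environment: str = "development"


for field in DemoConfig.__fields__.values():
    env_name = field.field_info.extra.get("env")  # pydantic v1 stores it here
    if env_name:
        print(f"{env_name}={field.default}")
```

Running this prints `KPOPS_KAFKA_BROKERS=broker:9092`, while `environment` is skipped because it declares no `env` name.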
diff --git a/docs/docs/resources/architecture/components-hierarchy.md b/docs/docs/resources/architecture/components-hierarchy.md index fec927ddf..190c44f82 100644 --- a/docs/docs/resources/architecture/components-hierarchy.md +++ b/docs/docs/resources/architecture/components-hierarchy.md @@ -1,20 +1,22 @@ ```mermaid flowchart BT KubernetesApp --> PipelineComponent - KafkaConnector --> PipelineComponent - KafkaApp --> KubernetesApp + HelmApp --> KubernetesApp + KafkaApp --> HelmApp StreamsApp --> KafkaApp ProducerApp --> KafkaApp + KafkaConnector --> PipelineComponent KafkaSourceConnector --> KafkaConnector KafkaSinkConnector --> KafkaConnector - - click KubernetesApp "../kubernetes-app" - click KafkaApp "../kafka-app" - click StreamsApp "../streams-app" - click ProducerApp "../producer-app" - click KafkaConnector "../kafka-connector" - click KafkaSourceConnector "../kafka-source-connector" - click KafkaSinkConnector "../kafka-sink-connector" + + click KubernetesApp "/kpops/user/core-concepts/components/kubernetes-app" + click HelmApp "/kpops/user/core-concepts/components/helm-app" + click KafkaApp "/kpops/user/core-concepts/components/kafka-app" + click StreamsApp "/kpops/user/core-concepts/components/streams-app" + click ProducerApp "/kpops/user/core-concepts/components/producer-app" + click KafkaConnector "/kpops/user/core-concepts/components/kafka-connector" + click KafkaSourceConnector "/kpops/user/core-concepts/components/kafka-source-connector" + click KafkaSinkConnector "/kpops/user/core-concepts/components/kafka-sink-connector" ```

KPOps component hierarchy

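Concretely, the reshuffled hierarchy means Helm-specific behaviour is now inherited by subclassing the new `HelmApp`, while `KubernetesApp` remains a Helm-free base. A minimal sketch of a custom component against the new tree; the class and chart names are invented for illustration:

```python
from typing_extensions import override

from kpops.components import HelmApp


class MyRestService(HelmApp):
    """Hypothetical REST service shipped as its own Helm chart."""

    @property
    @override
    def helm_chart(self) -> str:
        # HelmApp raises NotImplementedError unless a subclass provides a chart
        return "my-repo/my-rest-service"
```

Everything below `HelmApp` in the tree (`KafkaApp`, `StreamsApp`, `ProducerApp`) picks up the same deploy/destroy machinery unchanged.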
diff --git a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml index c431a71b9..4e12885af 100644 --- a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml @@ -1,3 +1,6 @@ +helm-app.yaml: +- app-helm-app.yaml +- repo_config-helm-app.yaml kafka-app.yaml: - app-kafka-app.yaml - version-kafka-app.yaml @@ -20,8 +23,6 @@ kubernetes-app.yaml: - to.yaml - namespace.yaml - app-kubernetes-app.yaml -- repo_config-kubernetes-app.yaml -- version.yaml producer-app.yaml: - from_-producer-app.yaml - app-producer-app.yaml diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 21af5971c..70dc43870 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -1,4 +1,13 @@ kpops_components_fields: + helm-app: + - name + - prefix + - from_ + - to + - namespace + - app + - repo_config + - version kafka-app: - name - prefix @@ -46,8 +55,6 @@ kpops_components_fields: - to - namespace - app - - repo_config - - version pipeline-component: - name - prefix @@ -72,7 +79,8 @@ kpops_components_fields: - repo_config - version kpops_components_inheritance_ref: - kafka-app: kubernetes-app + helm-app: kubernetes-app + kafka-app: helm-app kafka-connector: pipeline-component kafka-sink-connector: kafka-connector kafka-source-connector: kafka-connector diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml index 485c3c253..8504a0135 100644 --- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml @@ -1,10 +1,18 @@ +helm-app.yaml: +- prefix.yaml +- from_.yaml +- to.yaml +- namespace.yaml +- app-helm-app.yaml +- repo_config-helm-app.yaml +- version.yaml kafka-app.yaml: - prefix.yaml - from_.yaml - to.yaml - namespace.yaml - app-kafka-app.yaml -- repo_config-kubernetes-app.yaml +- repo_config-helm-app.yaml - version-kafka-app.yaml kafka-connector.yaml: - prefix.yaml @@ -40,15 +48,13 @@ kubernetes-app.yaml: - to.yaml - namespace.yaml - app-kubernetes-app.yaml -- repo_config-kubernetes-app.yaml -- version.yaml producer-app.yaml: - prefix.yaml - from_-producer-app.yaml - to.yaml - namespace.yaml - app-producer-app.yaml -- repo_config-kubernetes-app.yaml +- repo_config-helm-app.yaml - version-kafka-app.yaml streams-app.yaml: - prefix.yaml @@ -56,5 +62,5 @@ streams-app.yaml: - to.yaml - namespace.yaml - app-streams-app.yaml -- repo_config-kubernetes-app.yaml +- repo_config-helm-app.yaml - version-kafka-app.yaml diff --git a/docs/docs/resources/pipeline-components/headers/helm-app.yaml b/docs/docs/resources/pipeline-components/headers/helm-app.yaml new file mode 100644 index 000000000..2c8bf9fea --- /dev/null +++ b/docs/docs/resources/pipeline-components/headers/helm-app.yaml @@ -0,0 +1,3 @@ +# Kubernetes app managed through Helm with an associated Helm chart +- type: helm-app + name: helm-app # required diff --git 
a/docs/docs/resources/pipeline-components/helm-app.yaml b/docs/docs/resources/pipeline-components/helm-app.yaml new file mode 100644 index 000000000..8f0a59c86 --- /dev/null +++ b/docs/docs/resources/pipeline-components/helm-app.yaml @@ -0,0 +1,63 @@ +# Kubernetes app managed through Helm with an associated Helm chart +- type: helm-app + name: helm-app # required + # Pipeline prefix that will prefix every component name. If you wish to not + # have any prefix you can specify an empty string. + prefix: ${pipeline_name}- + from: # Must not be null + topics: # read from topic + ${pipeline_name}-input-topic: + type: input # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra + ${pipeline_name}-input-pattern-topic: + type: pattern # Implied to be an input pattern if `role` is undefined + ${pipeline_name}-extra-pattern-topic: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + components: # read from specific component + account-producer: + type: output # Implied when role is NOT specified + other-producer: + role: some-role # Implies `type` to be extra + component-as-input-pattern: + type: pattern # Implied to be an input pattern if `role` is undefined + component-as-extra-pattern: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + # Topic(s) into which the component will write output + to: + topics: + ${pipeline_name}-output-topic: + type: output # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined + ${pipeline_name}-error-topic: + type: error + # Currently KPOps supports Avro and JSON schemas. + key_schema: key-schema # must implement SchemaProvider to use + value_schema: value-schema + partitions_count: 1 + replication_factor: 1 + configs: # https://kafka.apache.org/documentation/#topicconfigs + cleanup.policy: compact + models: # SchemaProvider is initiated with the values given here + model: model + namespace: namespace # required + # `app` contains application-specific settings, hence it does not have a rigid + # structure. The fields below are just an example. + app: # required + image: exampleImage # Example + debug: false # Example + commandLine: {} # Example + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false + version: "1.0.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/kubernetes-app.yaml b/docs/docs/resources/pipeline-components/kubernetes-app.yaml index ffe41894c..5170768c2 100644 --- a/docs/docs/resources/pipeline-components/kubernetes-app.yaml +++ b/docs/docs/resources/pipeline-components/kubernetes-app.yaml @@ -50,14 +50,3 @@ image: exampleImage # Example debug: false # Example commandLine: {} # Example - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. 
Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index eb7930376..1c6350fbc 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -1,3 +1,66 @@ +# Kubernetes app managed through Helm with an associated Helm chart +- type: helm-app + name: helm-app # required + # Pipeline prefix that will prefix every component name. If you wish to not + # have any prefix you can specify an empty string. + prefix: ${pipeline_name}- + from: # Must not be null + topics: # read from topic + ${pipeline_name}-input-topic: + type: input # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra + ${pipeline_name}-input-pattern-topic: + type: pattern # Implied to be an input pattern if `role` is undefined + ${pipeline_name}-extra-pattern-topic: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + components: # read from specific component + account-producer: + type: output # Implied when role is NOT specified + other-producer: + role: some-role # Implies `type` to be extra + component-as-input-pattern: + type: pattern # Implied to be an input pattern if `role` is undefined + component-as-extra-pattern: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + # Topic(s) into which the component will write output + to: + topics: + ${pipeline_name}-output-topic: + type: output # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined + ${pipeline_name}-error-topic: + type: error + # Currently KPOps supports Avro and JSON schemas. + key_schema: key-schema # must implement SchemaProvider to use + value_schema: value-schema + partitions_count: 1 + replication_factor: 1 + configs: # https://kafka.apache.org/documentation/#topicconfigs + cleanup.policy: compact + models: # SchemaProvider is initiated with the values given here + model: model + namespace: namespace # required + # `app` contains application-specific settings, hence it does not have a rigid + # structure. The fields below are just an example. + app: # required + image: exampleImage # Example + debug: false # Example + commandLine: {} # Example + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false + version: "1.0.0" # Helm chart version # Base component for Kafka-based components. # Producer or streaming apps should inherit from this class. - type: kafka-app # required @@ -230,17 +293,6 @@ image: exampleImage # Example debug: false # Example commandLine: {} # Example - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. 
Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version # Holds configuration to use as values for the streams bootstrap producer-app Helm # chart. # More documentation on ProducerApp: diff --git a/docs/docs/resources/pipeline-components/sections/app-helm-app.yaml b/docs/docs/resources/pipeline-components/sections/app-helm-app.yaml new file mode 100644 index 000000000..e2b6cbae0 --- /dev/null +++ b/docs/docs/resources/pipeline-components/sections/app-helm-app.yaml @@ -0,0 +1,6 @@ + # `app` contains application-specific settings, hence it does not have a rigid + # structure. The fields below are just an example. + app: # required + image: exampleImage # Example + debug: false # Example + commandLine: {} # Example diff --git a/docs/docs/resources/pipeline-components/sections/repo_config-kubernetes-app.yaml b/docs/docs/resources/pipeline-components/sections/repo_config-helm-app.yaml similarity index 100% rename from docs/docs/resources/pipeline-components/sections/repo_config-kubernetes-app.yaml rename to docs/docs/resources/pipeline-components/sections/repo_config-helm-app.yaml diff --git a/docs/docs/resources/pipeline-defaults/defaults-helm-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-helm-app.yaml new file mode 100644 index 000000000..d08200203 --- /dev/null +++ b/docs/docs/resources/pipeline-defaults/defaults-helm-app.yaml @@ -0,0 +1,21 @@ +# Kubernetes app managed through Helm with an associated Helm chart +# +# Parent of: KafkaApp +# Child of: KubernetesApp +helm-app: + # `app` contains application-specific settings, hence it does not have a rigid + # structure. The fields below are just an example. + app: # required + image: exampleImage # Example + debug: false # Example + commandLine: {} # Example + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false diff --git a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml index d49764b8f..5dd85e9ce 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml @@ -1,6 +1,6 @@ # Base Kubernetes App # -# Parent of: KafkaApp +# Parent of: HelmApp # Child of: PipelineComponent kubernetes-app: # Pipeline prefix that will prefix every component name. If you wish to not @@ -52,14 +52,3 @@ kubernetes-app: image: exampleImage # Example debug: false # Example commandLine: {} # Example - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. 
Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index 3a43d81e7..58b22d3f3 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -1,3 +1,24 @@ +# Kubernetes app managed through Helm with an associated Helm chart +# +# Parent of: KafkaApp +# Child of: KubernetesApp +helm-app: + # `app` contains application-specific settings, hence it does not have a rigid + # structure. The fields below are just an example. + app: # required + image: exampleImage # Example + debug: false # Example + commandLine: {} # Example + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false # Base component for Kafka-based components. # # Parent of: ProducerApp, StreamsApp @@ -95,7 +116,7 @@ kafka-source-connector: offset_topic: offset_topic # Base Kubernetes App # -# Parent of: KafkaApp +# Parent of: HelmApp # Child of: PipelineComponent kubernetes-app: # Pipeline prefix that will prefix every component name. If you wish to not @@ -147,17 +168,6 @@ kubernetes-app: image: exampleImage # Example debug: false # Example commandLine: {} # Example - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version # Holds configuration to use as values for the streams bootstrap producer-app Helm # chart. 
# diff --git a/docs/docs/resources/pipeline-defaults/headers/defaults-helm-app.yaml b/docs/docs/resources/pipeline-defaults/headers/defaults-helm-app.yaml new file mode 100644 index 000000000..bbc3b5622 --- /dev/null +++ b/docs/docs/resources/pipeline-defaults/headers/defaults-helm-app.yaml @@ -0,0 +1,5 @@ +# Kubernetes app managed through Helm with an associated Helm chart +# +# Parent of: KafkaApp +# Child of: KubernetesApp +helm-app: diff --git a/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml b/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml index f99e42e6d..cc1175938 100644 --- a/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml +++ b/docs/docs/resources/pipeline-defaults/headers/defaults-kubernetes-app.yaml @@ -1,5 +1,5 @@ # Base Kubernetes App # -# Parent of: KafkaApp +# Parent of: HelmApp # Child of: PipelineComponent kubernetes-app: diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 7e77b0ddd..2fe9aeeac 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -47,6 +47,84 @@ "title": "FromTopic", "type": "object" }, + "HelmApp": { + "description": "Kubernetes app managed through Helm with an associated Helm chart.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/definitions/KubernetesAppConfig" + } + ], + "description": "Application-specific settings", + "title": "App" + }, + "from": { + "allOf": [ + { + "$ref": "#/definitions/FromSection" + } + ], + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/definitions/HelmRepoConfig" + } + ], + "description": "Configuration of the Helm chart repo to be used for deploying the component", + "title": "Repo Config" + }, + "to": { + "allOf": [ + { + "$ref": "#/definitions/ToSection" + } + ], + "description": "Topic(s) into which the component will write output", + "title": "To" + }, + "type": { + "default": "helm-app", + "description": "Kubernetes app managed through Helm with an associated Helm chart.", + "enum": [ + "helm-app" + ], + "title": "Component type", + "type": "string" + }, + "version": { + "description": "Helm chart version", + "title": "Version", + "type": "string" + } + }, + "required": [ + "name", + "namespace", + "app" + ], + "title": "HelmApp", + "type": "object" + }, "HelmRepoConfig": { "description": "Helm repository configuration.", "properties": { @@ -305,86 +383,8 @@ "title": "KafkaSourceConnector", "type": "object" }, - "KubernetesApp": { - "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.", - "properties": { - "app": { - "allOf": [ - { - "$ref": "#/definitions/KubernetesAppConfig" - } - ], - "description": "Application-specific settings", - "title": "App" - }, - "from": { - "allOf": [ - { - "$ref": "#/definitions/FromSection" - } - ], - "description": "Topic(s) and/or components from which the component will read input", - "title": "From" - }, - "name": { - "description": "Component name", - "title": "Name", - "type": "string" - }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, - "prefix": { - "default": "${pipeline_name}-", - "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", - "title": "Prefix", - "type": "string" - }, - "repo_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmRepoConfig" - } - ], - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" - }, - "to": { - "allOf": [ - { - "$ref": "#/definitions/ToSection" - } - ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kubernetes-app", - "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.", - "enum": [ - "kubernetes-app" - ], - "title": "Component type", - "type": "string" - }, - "version": { - "description": "Helm chart version", - "title": "Version", - "type": "string" - } - }, - "required": [ - "name", - "namespace", - "app" - ], - "title": "KubernetesApp", - "type": "object" - }, "KubernetesAppConfig": { - "description": "Settings specific to Kubernetes Apps.", + "description": "Settings specific to Kubernetes apps.", "properties": {}, "title": "KubernetesAppConfig", "type": "object" @@ -940,9 +940,9 @@ "items": { "discriminator": { "mapping": { + "helm-app": "#/definitions/HelmApp", "kafka-sink-connector": "#/definitions/KafkaSinkConnector", "kafka-source-connector": "#/definitions/KafkaSourceConnector", - "kubernetes-app": "#/definitions/KubernetesApp", "producer-app": "#/definitions/ProducerApp", "streams-app": "#/definitions/StreamsApp" }, @@ -950,13 +950,13 @@ }, "oneOf": [ { - "$ref": "#/definitions/KafkaSinkConnector" + "$ref": "#/definitions/HelmApp" }, { - "$ref": "#/definitions/KafkaSourceConnector" + "$ref": "#/definitions/KafkaSinkConnector" }, { - "$ref": "#/definitions/KubernetesApp" + "$ref": "#/definitions/KafkaSourceConnector" }, { "$ref": "#/definitions/ProducerApp" diff --git a/docs/docs/user/core-concepts/components/kafka-app.md b/docs/docs/user/core-concepts/components/kafka-app.md index e69153c5e..acbe86742 100644 --- a/docs/docs/user/core-concepts/components/kafka-app.md +++ b/docs/docs/user/core-concepts/components/kafka-app.md @@ -1,6 +1,6 @@ # KafkaApp -Subclass of [_KubernetesApp_](kubernetes-app.md). +Subclass of [_HelmApp_](helm-app.md). ### Usage @@ -26,7 +26,7 @@ Subclass of [_KubernetesApp_](kubernetes-app.md). #### deploy -In addition to [KubernetesApp's `deploy`](kubernetes-app.md#deploy): +In addition to [HelmApp's `deploy`](helm-app.md#deploy): - Create topics if provided (optional) - Submit Avro schemas to the registry if provided (optional) diff --git a/docs/docs/user/core-concepts/components/kubernetes-app.md b/docs/docs/user/core-concepts/components/kubernetes-app.md index 4a28dbe0e..80a0c3467 100644 --- a/docs/docs/user/core-concepts/components/kubernetes-app.md +++ b/docs/docs/user/core-concepts/components/kubernetes-app.md @@ -2,7 +2,7 @@ ### Usage -Can be used to deploy any app in Kubernetes using Helm, for example, a REST service that serves Kafka data. +Can be used to create components for any Kubernetes app. ### Configuration @@ -22,11 +22,11 @@ Can be used to deploy any app in Kubernetes using Helm, for example, a REST serv #### deploy -Deploy using Helm. +Do nothing. #### destroy -Uninstall Helm release. +Do nothing. 
#### reset diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index 952ae5a35..100f05c4a 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -177,7 +177,7 @@ $ kpops schema [OPTIONS] SCOPE:{pipeline|config} [COMPONENTS_MODULE] - config: Schema of PipelineConfig. [required] + config: Schema of KpopsConfig. [required] * `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components **Options**: diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 79cf83374..132bc79e3 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -89,6 +89,7 @@ nav: - Components: - Overview: user/core-concepts/components/overview.md - KubernetesApp: user/core-concepts/components/kubernetes-app.md + - HelmApp: user/core-concepts/components/helm-app.md - KafkaApp: user/core-concepts/components/kafka-app.md - StreamsApp: user/core-concepts/components/streams-app.md - ProducerApp: user/core-concepts/components/producer-app.md diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 24106e18f..3ab34c1df 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -246,9 +246,9 @@ def write_csv_to_md_file( def fill_csv_pipeline_config(target: Path) -> None: - """Append all ``PipelineConfig``-related env vars to a ``.csv`` file. + """Append all ``KpopsConfig``-related env vars to a ``.csv`` file. - Finds all ``PipelineConfig``-related env vars and appends them to + Finds all ``KpopsConfig``-related env vars and appends them to a ``.csv`` file. :param target: The path to the `.csv` file. Note that it must already diff --git a/kpops/cli/main.py b/kpops/cli/main.py index a0b68ee11..e958f4535 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -222,7 +222,7 @@ def schema( \n\n\n pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide [COMPONENTS_MODULES]. 
\n\n\n - config: Schema of PipelineConfig.""", + config: Schema of KpopsConfig.""", ), components_module: Optional[str] = COMPONENTS_MODULES, include_stock_components: bool = typer.Option( diff --git a/kpops/components/__init__.py b/kpops/components/__init__.py index 77b8c69fb..98e1d3530 100644 --- a/kpops/components/__init__.py +++ b/kpops/components/__init__.py @@ -1,4 +1,5 @@ from kpops.components.base_components import ( + HelmApp, KafkaApp, KafkaSinkConnector, KafkaSourceConnector, @@ -9,6 +10,7 @@ from kpops.components.streams_bootstrap import ProducerApp, StreamsApp __all__ = ( + "HelmApp", "KafkaApp", "KafkaConnector", "KafkaSinkConnector", diff --git a/kpops/components/base_components/__init__.py b/kpops/components/base_components/__init__.py index 37aca3c70..cde4c13bb 100644 --- a/kpops/components/base_components/__init__.py +++ b/kpops/components/base_components/__init__.py @@ -1,3 +1,4 @@ +from kpops.components.base_components.helm_app import HelmApp from kpops.components.base_components.kafka_app import KafkaApp from kpops.components.base_components.kafka_connector import ( KafkaSinkConnector, @@ -7,6 +8,7 @@ from kpops.components.base_components.pipeline_component import PipelineComponent __all__ = ( + "HelmApp", "KafkaApp", "KafkaSinkConnector", "KafkaSourceConnector", diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 2b97d997a..73bf54c7c 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -1,5 +1,6 @@ import inspect import logging +from abc import ABC from collections import deque from collections.abc import Sequence from functools import cached_property @@ -26,7 +27,7 @@ log = logging.getLogger("BaseDefaultsComponent") -class BaseDefaultsComponent(BaseModel): +class BaseDefaultsComponent(BaseModel, ABC): """Base for all components, handles defaults. Component defaults are usually provided in a yaml file called diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py new file mode 100644 index 000000000..f98abd648 --- /dev/null +++ b/kpops/components/base_components/helm_app.py @@ -0,0 +1,166 @@ +from __future__ import annotations + +import logging +from functools import cached_property +from typing import Any + +from pydantic import Field +from typing_extensions import override + +from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler +from kpops.component_handlers.helm_wrapper.helm import Helm +from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff +from kpops.component_handlers.helm_wrapper.model import ( + HelmFlags, + HelmRepoConfig, + HelmTemplateFlags, + HelmUpgradeInstallFlags, +) +from kpops.components.base_components.kubernetes_app import KubernetesApp +from kpops.utils.colorify import magentaify +from kpops.utils.docstring import describe_attr + +log = logging.getLogger("HelmApp") + + +class HelmApp(KubernetesApp): + """Kubernetes app managed through Helm with an associated Helm chart. + + :param repo_config: Configuration of the Helm chart repo to be used for + deploying the component, defaults to None this means that the command "helm repo add" is not called and Helm + expects a path to local Helm chart. 
+ :param version: Helm chart version, defaults to None + """ + + repo_config: HelmRepoConfig | None = Field( + default=None, + description=describe_attr("repo_config", __doc__), + ) + version: str | None = Field( + default=None, + description=describe_attr("version", __doc__), + ) + + @cached_property + def helm(self) -> Helm: + """Helm object that contains component-specific config such as repo.""" + helm = Helm(self.config.helm_config) + if self.repo_config is not None: + helm.add_repo( + self.repo_config.repository_name, + self.repo_config.url, + self.repo_config.repo_auth_flags, + ) + return helm + + @cached_property + def helm_diff(self) -> HelmDiff: + """Helm diff object of last and current release of this component.""" + return HelmDiff(self.config.helm_diff_config) + + @cached_property + def dry_run_handler(self) -> DryRunHandler: + helm_diff = HelmDiff(self.config.helm_diff_config) + return DryRunHandler(self.helm, helm_diff, self.namespace) + + @property + def helm_release_name(self) -> str: + """The name for the Helm release. Can be overridden.""" + return self.full_name + + @property + def helm_chart(self) -> str: + """Return component's Helm chart.""" + msg = ( + f"Please implement the helm_chart property of the {self.__module__} module." + ) + raise NotImplementedError(msg) + + @property + def helm_flags(self) -> HelmFlags: + """Return shared flags for Helm commands.""" + auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {} + return HelmFlags( + **auth_flags, + version=self.version, + create_namespace=self.config.create_namespace, + ) + + @property + def template_flags(self) -> HelmTemplateFlags: + """Return flags for Helm template command.""" + return HelmTemplateFlags( + **self.helm_flags.dict(), + api_version=self.config.helm_config.api_version, + ) + + @override + def template(self) -> None: + stdout = self.helm.template( + self.helm_release_name, + self.helm_chart, + self.namespace, + self.to_helm_values(), + self.template_flags, + ) + print(stdout) + + @property + def deploy_flags(self) -> HelmUpgradeInstallFlags: + """Return flags for Helm upgrade install command.""" + return HelmUpgradeInstallFlags(**self.helm_flags.dict()) + + @override + def deploy(self, dry_run: bool) -> None: + stdout = self.helm.upgrade_install( + self.helm_release_name, + self.helm_chart, + dry_run, + self.namespace, + self.to_helm_values(), + self.deploy_flags, + ) + if dry_run: + self.dry_run_handler.print_helm_diff(stdout, self.helm_release_name, log) + + @override + def destroy(self, dry_run: bool) -> None: + stdout = self.helm.uninstall( + self.namespace, + self.helm_release_name, + dry_run, + ) + + if stdout: + log.info(magentaify(stdout)) + + def to_helm_values(self) -> dict: + """Generate a dictionary of values readable by Helm from `self.app`. + + :returns: The values to be used by Helm + """ + return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True) + + def print_helm_diff(self, stdout: str) -> None: + """Print the diff of the last and current release of this component.
+ + :param stdout: The output of a Helm command that installs or upgrades the release + """ + current_release = list( + self.helm.get_manifest(self.helm_release_name, self.namespace) + ) + if current_release: + log.info(f"Helm release {self.helm_release_name} already exists") + else: + log.info(f"Helm release {self.helm_release_name} does not exist") + new_release = Helm.load_manifest(stdout) + self.helm_diff.log_helm_diff(log, current_release, new_release) + + @override + def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]: + # HACK: workaround for Pydantic to exclude cached properties during model export + if exclude is None: + exclude = set() + exclude.add("helm") + exclude.add("helm_diff") + return super().dict(exclude=exclude, **kwargs) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index a13dc7a7d..cf8e5f4ef 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -11,10 +11,8 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import trim_release_name -from kpops.components.base_components.kubernetes_app import ( - KubernetesApp, - KubernetesAppConfig, -) +from kpops.components.base_components.helm_app import HelmApp +from kpops.components.base_components.kubernetes_app import KubernetesAppConfig from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfig, DescConfig @@ -52,7 +50,7 @@ class KafkaAppConfig(KubernetesAppConfig): ) -class KafkaApp(KubernetesApp, ABC): +class KafkaApp(HelmApp, ABC): """Base component for Kafka-based components. Producer or streaming apps should inherit from this class. diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index ff35459c3..4b4e24c1a 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -2,27 +2,16 @@ import logging import re -from functools import cached_property -from typing import Any +from abc import ABC from pydantic import BaseModel, Extra, Field from typing_extensions import override -from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler -from kpops.component_handlers.helm_wrapper.helm import Helm -from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff -from kpops.component_handlers.helm_wrapper.model import ( - HelmFlags, - HelmRepoConfig, - HelmTemplateFlags, - HelmUpgradeInstallFlags, -) from kpops.components.base_components.pipeline_component import PipelineComponent -from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfig, DescConfig -log = logging.getLogger("KubernetesAppComponent") +log = logging.getLogger("KubernetesApp") KUBERNETES_NAME_CHECK_PATTERN = re.compile( r"^(?![0-9]+$)(?!.*-$)(?!-)[a-z0-9-.]{1,253}(? 
Helm: - """Helm object that contains component-specific config such as repo.""" - helm = Helm(self.config.helm_config) - if self.repo_config is not None: - helm.add_repo( - self.repo_config.repository_name, - self.repo_config.url, - self.repo_config.repo_auth_flags, - ) - return helm - - @cached_property - def helm_diff(self) -> HelmDiff: - """Helm diff object of last and current release of this component.""" - return HelmDiff(self.config.helm_diff_config) - - @cached_property - def dry_run_handler(self) -> DryRunHandler: - helm_diff = HelmDiff(self.config.helm_diff_config) - return DryRunHandler(self.helm, helm_diff, self.namespace) - - @property - def helm_release_name(self) -> str: - """The name for the Helm release. Can be overridden.""" - return self.full_name - - @property - def helm_chart(self) -> str: - """Return component's Helm chart.""" - msg = ( - f"Please implement the helm_chart property of the {self.__module__} module." - ) - raise NotImplementedError(msg) - - @property - def helm_flags(self) -> HelmFlags: - """Return shared flags for Helm commands.""" - auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {} - return HelmFlags( - **auth_flags, - version=self.version, - create_namespace=self.config.create_namespace, - ) - - @property - def template_flags(self) -> HelmTemplateFlags: - """Return flags for Helm template command.""" - return HelmTemplateFlags( - **self.helm_flags.dict(), - api_version=self.config.helm_config.api_version, - ) - - @override - def template(self) -> None: - stdout = self.helm.template( - self.helm_release_name, - self.helm_chart, - self.namespace, - self.to_helm_values(), - self.template_flags, - ) - print(stdout) - - @property - def deploy_flags(self) -> HelmUpgradeInstallFlags: - """Return flags for Helm upgrade install command.""" - return HelmUpgradeInstallFlags(**self.helm_flags.dict()) - - @override - def deploy(self, dry_run: bool) -> None: - stdout = self.helm.upgrade_install( - self.helm_release_name, - self.helm_chart, - dry_run, - self.namespace, - self.to_helm_values(), - self.deploy_flags, - ) - if dry_run: - self.dry_run_handler.print_helm_diff(stdout, self.helm_release_name, log) - - @override - def destroy(self, dry_run: bool) -> None: - stdout = self.helm.uninstall( - self.namespace, - self.helm_release_name, - dry_run, - ) - - if stdout: - log.info(magentaify(stdout)) - - def to_helm_values(self) -> dict: - """Generate a dictionary of values readable by Helm from `self.app`. - - :returns: Thte values to be used by Helm - """ - return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True) - - def print_helm_diff(self, stdout: str) -> None: - """Print the diff of the last and current release of this component. - - :param stdout: The output of a Helm command that installs or upgrades the release - """ - current_release = list( - self.helm.get_manifest(self.helm_release_name, self.namespace) - ) - if current_release: - log.info(f"Helm release {self.helm_release_name} already exists") - else: - log.info(f"Helm release {self.helm_release_name} does not exist") - new_release = Helm.load_manifest(stdout) - self.helm_diff.log_helm_diff(log, current_release, new_release) @override def _validate_custom(self, **kwargs) -> None: @@ -196,12 +58,3 @@ def validate_kubernetes_name(name: str) -> None: if not bool(KUBERNETES_NAME_CHECK_PATTERN.match(name)): msg = f"The component name {name} is invalid for Kubernetes." 
raise ValueError(msg) - - @override - def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]: - # HACK: workaround for Pydantic to exclude cached properties during model export - if exclude is None: - exclude = set() - exclude.add("helm") - exclude.add("helm_diff") - return super().dict(exclude=exclude, **kwargs) diff --git a/tests/cli/test_registry.py b/tests/cli/test_registry.py index 0e6722eeb..13a9c854b 100644 --- a/tests/cli/test_registry.py +++ b/tests/cli/test_registry.py @@ -36,8 +36,9 @@ def test_find_builtin_classes(): class_.__name__ for class_ in _find_classes("kpops.components", PipelineComponent) ] - assert len(components) == 8 + assert len(components) == 9 assert components == [ + "HelmApp", "KafkaApp", "KafkaConnector", "KafkaSinkConnector", diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py new file mode 100644 index 000000000..946739da0 --- /dev/null +++ b/tests/components/test_helm_app.py @@ -0,0 +1,215 @@ +from pathlib import Path +from unittest.mock import MagicMock + +import pytest +from pytest_mock import MockerFixture +from typing_extensions import override + +from kpops.component_handlers import ComponentHandlers +from kpops.component_handlers.helm_wrapper.model import ( + HelmDiffConfig, + HelmRepoConfig, + HelmUpgradeInstallFlags, + RepoAuthFlags, +) +from kpops.components.base_components.helm_app import HelmApp +from kpops.components.base_components.kubernetes_app import KubernetesAppConfig +from kpops.config import KpopsConfig +from kpops.utils.colorify import magentaify + +DEFAULTS_PATH = Path(__file__).parent / "resources" + + +class HelmTestValue(KubernetesAppConfig): + name_override: str + + +class TestHelmApp: + @pytest.fixture() + def config(self) -> KpopsConfig: + return KpopsConfig( + defaults_path=DEFAULTS_PATH, + environment="development", + helm_diff_config=HelmDiffConfig(), + ) + + @pytest.fixture() + def handlers(self) -> ComponentHandlers: + return ComponentHandlers( + schema_handler=MagicMock(), + connector_handler=MagicMock(), + topic_handler=MagicMock(), + ) + + @pytest.fixture() + def helm_mock(self, mocker: MockerFixture) -> MagicMock: + return mocker.patch( + "kpops.components.base_components.helm_app.Helm" + ).return_value + + @pytest.fixture() + def log_info_mock(self, mocker: MockerFixture) -> MagicMock: + return mocker.patch("kpops.components.base_components.helm_app.log.info") + + @pytest.fixture() + def app_value(self) -> HelmTestValue: + return HelmTestValue(name_override="test-value") + + @pytest.fixture() + def repo_config(self) -> HelmRepoConfig: + return HelmRepoConfig(repository_name="test", url="https://bakdata.com") + + @pytest.fixture() + def helm_app( + self, + config: KpopsConfig, + handlers: ComponentHandlers, + app_value: HelmTestValue, + repo_config: HelmRepoConfig, + ) -> HelmApp: + return HelmApp( + name="test-helm-app", + config=config, + handlers=handlers, + app=app_value, + namespace="test-namespace", + repo_config=repo_config, + ) + + def test_should_lazy_load_helm_wrapper_and_not_repo_add( + self, + helm_app: HelmApp, + mocker: MockerFixture, + helm_mock: MagicMock, + ): + helm_mock.add_repo.assert_not_called() + + mocker.patch.object( + HelmApp, + "helm_chart", + return_value="test/test-chart", + new_callable=mocker.PropertyMock, + ) + + helm_app.deploy(False) + + helm_mock.upgrade_install.assert_called_once_with( + "${pipeline_name}-test-helm-app", + "test/test-chart", + False, + "test-namespace", + {"nameOverride": "test-value"}, + HelmUpgradeInstallFlags(), + ) + + def 
test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( + self, + config: KpopsConfig, + handlers: ComponentHandlers, + helm_mock: MagicMock, + mocker: MockerFixture, + app_value: HelmTestValue, + ): + repo_config = HelmRepoConfig( + repository_name="test-repo", url="https://test.com/charts/" + ) + helm_app = HelmApp( + name="test-helm-app", + config=config, + handlers=handlers, + app=app_value, + namespace="test-namespace", + repo_config=repo_config, + version="3.4.5", + ) + + mocker.patch.object( + HelmApp, + "helm_chart", + return_value="test/test-chart", + new_callable=mocker.PropertyMock, + ) + + helm_app.deploy(dry_run=False) + + assert helm_mock.mock_calls == [ + mocker.call.add_repo( + "test-repo", + "https://test.com/charts/", + RepoAuthFlags(), + ), + mocker.call.upgrade_install( + "${pipeline_name}-test-helm-app", + "test/test-chart", + False, + "test-namespace", + {"nameOverride": "test-value"}, + HelmUpgradeInstallFlags(version="3.4.5"), + ), + ] + + def test_should_deploy_app_with_local_helm_chart( + self, + config: KpopsConfig, + handlers: ComponentHandlers, + helm_mock: MagicMock, + app_value: HelmTestValue, + ): + class AppWithLocalChart(HelmApp): + repo_config: None = None + + @property + @override + def helm_chart(self) -> str: + return "path/to/helm/charts/" + + app_with_local_chart = AppWithLocalChart( + name="test-app-with-local-chart", + config=config, + handlers=handlers, + app=app_value, + namespace="test-namespace", + ) + + app_with_local_chart.deploy(dry_run=False) + + helm_mock.add_repo.assert_not_called() + + helm_mock.upgrade_install.assert_called_once_with( + "${pipeline_name}-test-app-with-local-chart", + "path/to/helm/charts/", + False, + "test-namespace", + {"nameOverride": "test-value"}, + HelmUpgradeInstallFlags(), + ) + + def test_should_raise_not_implemented_error_when_helm_chart_is_not_set( + self, + helm_app: HelmApp, + helm_mock: MagicMock, + ): + with pytest.raises(NotImplementedError) as error: + helm_app.deploy(True) + helm_mock.add_repo.assert_called() + assert ( + str(error.value) + == "Please implement the helm_chart property of the kpops.components.base_components.helm_app module." 
+ ) + + def test_should_call_helm_uninstall_when_destroying_helm_app( + self, + helm_app: HelmApp, + helm_mock: MagicMock, + log_info_mock: MagicMock, + ): + stdout = 'HelmApp - release "test-helm-app" uninstalled' + helm_mock.uninstall.return_value = stdout + + helm_app.destroy(True) + + helm_mock.uninstall.assert_called_once_with( + "test-namespace", "${pipeline_name}-test-helm-app", True + ) + + log_info_mock.assert_called_once_with(magentaify(stdout)) diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 66fb432d6..87f9527ee 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -3,37 +3,25 @@ import pytest from pytest_mock import MockerFixture -from typing_extensions import override from kpops.component_handlers import ComponentHandlers -from kpops.component_handlers.helm_wrapper.model import ( - HelmDiffConfig, - HelmRepoConfig, - HelmUpgradeInstallFlags, - RepoAuthFlags, -) from kpops.components.base_components.kubernetes_app import ( KubernetesApp, KubernetesAppConfig, ) from kpops.config import KpopsConfig -from kpops.utils.colorify import magentaify DEFAULTS_PATH = Path(__file__).parent / "resources" class KubernetesTestValue(KubernetesAppConfig): - name_override: str + foo: str class TestKubernetesApp: @pytest.fixture() def config(self) -> KpopsConfig: - return KpopsConfig( - defaults_path=DEFAULTS_PATH, - environment="development", - helm_diff_config=HelmDiffConfig(), - ) + return KpopsConfig(defaults_path=DEFAULTS_PATH, environment="development") @pytest.fixture() def handlers(self) -> ComponentHandlers: @@ -43,23 +31,13 @@ def handlers(self) -> ComponentHandlers: topic_handler=MagicMock(), ) - @pytest.fixture() - def helm_mock(self, mocker: MockerFixture) -> MagicMock: - return mocker.patch( - "kpops.components.base_components.kubernetes_app.Helm" - ).return_value - @pytest.fixture() def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.kubernetes_app.log.info") @pytest.fixture() def app_value(self) -> KubernetesTestValue: - return KubernetesTestValue(**{"name_override": "test-value"}) - - @pytest.fixture() - def repo_config(self) -> HelmRepoConfig: - return HelmRepoConfig(repository_name="test", url="https://bakdata.com") + return KubernetesTestValue(foo="foo") @pytest.fixture() def kubernetes_app( @@ -67,7 +45,6 @@ def kubernetes_app( config: KpopsConfig, handlers: ComponentHandlers, app_value: KubernetesTestValue, - repo_config: HelmRepoConfig, ) -> KubernetesApp: return KubernetesApp( name="test-kubernetes-app", @@ -75,153 +52,13 @@ def kubernetes_app( handlers=handlers, app=app_value, namespace="test-namespace", - repo_config=repo_config, - ) - - def test_should_lazy_load_helm_wrapper_and_not_repo_add( - self, - kubernetes_app: KubernetesApp, - mocker: MockerFixture, - helm_mock: MagicMock, - ): - helm_mock.add_repo.assert_not_called() - - mocker.patch.object( - KubernetesApp, - "helm_chart", - return_value="test/test-chart", - new_callable=mocker.PropertyMock, - ) - - kubernetes_app.deploy(False) - - helm_mock.upgrade_install.assert_called_once_with( - "${pipeline_name}-test-kubernetes-app", - "test/test-chart", - False, - "test-namespace", - {"nameOverride": "test-value"}, - HelmUpgradeInstallFlags(), - ) - - def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( - self, - config: KpopsConfig, - handlers: ComponentHandlers, - helm_mock: MagicMock, - mocker: MockerFixture, - app_value: 
KubernetesTestValue, - ): - repo_config = HelmRepoConfig( - repository_name="test-repo", url="https://test.com/charts/" - ) - kubernetes_app = KubernetesApp( - name="test-kubernetes-app", - config=config, - handlers=handlers, - app=app_value, - namespace="test-namespace", - repo_config=repo_config, - version="3.4.5", ) - mocker.patch.object( - KubernetesApp, - "helm_chart", - return_value="test/test-chart", - new_callable=mocker.PropertyMock, - ) - - kubernetes_app.deploy(dry_run=False) - - assert helm_mock.mock_calls == [ - mocker.call.add_repo( - "test-repo", - "https://test.com/charts/", - RepoAuthFlags(), - ), - mocker.call.upgrade_install( - "${pipeline_name}-test-kubernetes-app", - "test/test-chart", - False, - "test-namespace", - {"nameOverride": "test-value"}, - HelmUpgradeInstallFlags(version="3.4.5"), - ), - ] - - def test_should_deploy_app_with_local_helm_chart( - self, - config: KpopsConfig, - handlers: ComponentHandlers, - helm_mock: MagicMock, - app_value: KubernetesTestValue, - ): - class AppWithLocalChart(KubernetesApp): - repo_config: None = None - - @property - @override - def helm_chart(self) -> str: - return "path/to/helm/charts/" - - app_with_local_chart = AppWithLocalChart( - name="test-app-with-local-chart", - config=config, - handlers=handlers, - app=app_value, - namespace="test-namespace", - ) - - app_with_local_chart.deploy(dry_run=False) - - helm_mock.add_repo.assert_not_called() - - helm_mock.upgrade_install.assert_called_once_with( - "${pipeline_name}-test-app-with-local-chart", - "path/to/helm/charts/", - False, - "test-namespace", - {"nameOverride": "test-value"}, - HelmUpgradeInstallFlags(), - ) - - def test_should_raise_not_implemented_error_when_helm_chart_is_not_set( - self, - kubernetes_app: KubernetesApp, - helm_mock: MagicMock, - ): - with pytest.raises(NotImplementedError) as error: - kubernetes_app.deploy(True) - helm_mock.add_repo.assert_called() - assert ( - str(error.value) - == "Please implement the helm_chart property of the kpops.components.base_components.kubernetes_app module." - ) - - def test_should_call_helm_uninstall_when_destroying_kubernetes_app( - self, - kubernetes_app: KubernetesApp, - helm_mock: MagicMock, - log_info_mock: MagicMock, - ): - stdout = 'KubernetesAppComponent - release "test-kubernetes-app" uninstalled' - helm_mock.uninstall.return_value = stdout - - kubernetes_app.destroy(True) - - helm_mock.uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-test-kubernetes-app", True - ) - - log_info_mock.assert_called_once_with(magentaify(stdout)) - def test_should_raise_value_error_when_name_is_not_valid( self, config: KpopsConfig, handlers: ComponentHandlers, app_value: KubernetesTestValue, - repo_config: HelmRepoConfig, ): with pytest.raises( ValueError, match=r"The component name .* is invalid for Kubernetes." 
@@ -232,7 +69,6 @@ def test_should_raise_value_error_when_name_is_not_valid( handlers=handlers, app=app_value, namespace="test-namespace", - repo_config=repo_config, ) with pytest.raises( @@ -244,7 +80,6 @@ def test_should_raise_value_error_when_name_is_not_valid( handlers=handlers, app=app_value, namespace="test-namespace", - repo_config=repo_config, ) assert KubernetesApp( @@ -253,5 +88,4 @@ def test_should_raise_value_error_when_name_is_not_valid( handlers=handlers, app=app_value, namespace="test-namespace", - repo_config=repo_config, ) From c1bf854dd3f690d9323b5ae5241a77b47ec3d3d0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 23 Oct 2023 11:56:29 +0300 Subject: [PATCH 04/34] Fix early exit upon Helm exit code 1 (#376)

fixes #373:

> I am getting this error when I try to use dry-run: subprocess.CalledProcessError: Command '['helm', 'get', 'manifest', 'account-producer', '--namespace', 'my-namespace']' returned non-zero exit status 1. why? does he launch it first in dry-run ? when I do that in my commandline I get To learn more, consult https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke Error: release: not found

The [problem came from](https://github.com/bakdata/kpops/compare/2.0.9...2.0.10#diff-1ca7465daf9238dfe221304df636d96704ebf4935288b854a5c4c6b3ea9e3162R215) setting `subprocess.run(..., check=True)` without handling the raised exception.

I also took some imports out of `TYPE_CHECKING` blocks to avoid problems with `pydantic`, as it uses type hints at runtime.

Ruff was updated to the latest version (`0.1.1`): fixes deemed unsafe are no longer carried out automatically even when possible; they can be opted into with the `--unsafe-fixes` flag.

While testing I noticed that the namespace env var could be used in some places in the example, so I did that and adjusted the tests.
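The gist of the fix, reduced to a standalone sketch of the pattern now used in `Helm.__execute` (the surrounding error handling is only partially visible in this patch, so the handling below is a placeholder):

```python
import subprocess


def execute(command: list[str]) -> str:
    process = subprocess.run(
        command,
        check=False,  # e.g. `helm get manifest` exits 1 for "release: not found"
        capture_output=True,
        text=True,
    )
    if process.returncode != 0:
        # how a non-zero exit is classified is up to the caller; this print
        # stands in for KPOps' actual error handling
        print(process.stderr.strip())
    return process.stdout
```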
--- .../bakdata/atm-fraud-detection/config.yaml | 4 +- .../bakdata/atm-fraud-detection/pipeline.yaml | 4 +- kpops/component_handlers/helm_wrapper/helm.py | 2 +- .../base_components/kafka_connector.py | 6 +-- kpops/config.py | 9 ++--- poetry.lock | 38 +++++++++---------- pyproject.toml | 3 +- tests/conftest.py | 16 ++++++++ tests/pipeline/snapshots/snap_test_example.py | 24 ++++++------ tests/pipeline/test_example.py | 2 + tests/pipeline/test_pipeline.py | 1 + 11 files changed, 61 insertions(+), 48 deletions(-) create mode 100644 tests/conftest.py diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index d03a12c64..7ef3e6575 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -4,11 +4,11 @@ topic_name_config: default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic" default_output_topic_name: "${pipeline_name}-${component_name}-topic" -kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_brokers: "http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092" schema_registry: enabled: true - url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081" + url: "http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081" kafka_rest: url: "http://localhost:8082" diff --git a/examples/bakdata/atm-fraud-detection/pipeline.yaml b/examples/bakdata/atm-fraud-detection/pipeline.yaml index 9cf3610a2..9982aa0a7 100644 --- a/examples/bakdata/atm-fraud-detection/pipeline.yaml +++ b/examples/bakdata/atm-fraud-detection/pipeline.yaml @@ -84,14 +84,14 @@ connector.class: io.confluent.connect.jdbc.JdbcSinkConnector tasks.max: 1 topics: ${pipeline_name}-account-linker-topic - connection.url: jdbc:postgresql://postgresql-dev.kpops.svc.cluster.local:5432/app_db + connection.url: jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db connection.user: app1 connection.password: AppPassword connection.ds.pool.size: 5 insert.mode: insert insert.mode.databaselevel: true value.converter: io.confluent.connect.avro.AvroConverter - value.converter.schema.registry.url: http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081 + value.converter.schema.registry.url: http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081 key.converter: org.apache.kafka.connect.storage.StringConverter transforms: flatten transforms.flatten.type: org.apache.kafka.connect.transforms.Flatten$Value diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index b1b101b41..5e4d758db 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -212,7 +212,7 @@ def __execute(self, command: list[str]) -> str: log.debug(f"Executing {' '.join(command)}") process = subprocess.run( command, - check=True, + check=False, capture_output=True, text=True, ) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 1843fb8fb..8f0163025 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -3,7 +3,7 @@ import logging from abc import ABC from functools import cached_property -from typing import TYPE_CHECKING, Any, NoReturn +from typing import Any, NoReturn from pydantic import Field, validator from typing_extensions import override @@ -25,13 +25,11 @@ 
KafkaConnectResetterValues, ) from kpops.components.base_components.base_defaults_component import deduplicate +from kpops.components.base_components.models.from_section import FromTopic from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr -if TYPE_CHECKING: - from kpops.components.base_components.models.from_section import FromTopic - log = logging.getLogger("KafkaConnector") diff --git a/kpops/config.py b/kpops/config.py index 95193bd53..718568fb6 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -1,19 +1,16 @@ from __future__ import annotations +from collections.abc import Callable from pathlib import Path -from typing import TYPE_CHECKING, Any +from typing import Any from pydantic import AnyHttpUrl, BaseConfig, BaseSettings, Field, parse_obj_as +from pydantic.env_settings import SettingsSourceCallable from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object from kpops.utils.yaml_loading import load_yaml_file -if TYPE_CHECKING: - from collections.abc import Callable - - from pydantic.env_settings import SettingsSourceCallable - ENV_PREFIX = "KPOPS_" diff --git a/poetry.lock b/poetry.lock index 9a50b9ae1..d901baf75 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1385,28 +1385,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.0.292" +version = "0.1.1" description = "An extremely fast Python linter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, - {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, - {file = 
"ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, - {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, - {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, - {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, - {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, + {file = "ruff-0.1.1-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b7cdc893aef23ccc14c54bd79a8109a82a2c527e11d030b62201d86f6c2b81c5"}, + {file = "ruff-0.1.1-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:620d4b34302538dbd8bbbe8fdb8e8f98d72d29bd47e972e2b59ce6c1e8862257"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a909d3930afdbc2e9fd893b0034479e90e7981791879aab50ce3d9f55205bd6"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3305d1cb4eb8ff6d3e63a48d1659d20aab43b49fe987b3ca4900528342367145"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c34ae501d0ec71acf19ee5d4d889e379863dcc4b796bf8ce2934a9357dc31db7"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6aa7e63c3852cf8fe62698aef31e563e97143a4b801b57f920012d0e07049a8d"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d68367d1379a6b47e61bc9de144a47bcdb1aad7903bbf256e4c3d31f11a87ae"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc11955f6ce3398d2afe81ad7e49d0ebf0a581d8bcb27b8c300281737735e3a3"}, + {file = "ruff-0.1.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd8eead88ea83a250499074e2a8e9d80975f0b324b1e2e679e4594da318c25"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f4780e2bb52f3863a565ec3f699319d3493b83ff95ebbb4993e59c62aaf6e75e"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8f5b24daddf35b6c207619301170cae5d2699955829cda77b6ce1e5fc69340df"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d3f9ac658ba29e07b95c80fa742b059a55aefffa8b1e078bc3c08768bdd4b11a"}, + {file = "ruff-0.1.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3521bf910104bf781e6753282282acc145cbe3eff79a1ce6b920404cd756075a"}, + {file = "ruff-0.1.1-py3-none-win32.whl", hash = "sha256:ba3208543ab91d3e4032db2652dcb6c22a25787b85b8dc3aeff084afdc612e5c"}, + {file = "ruff-0.1.1-py3-none-win_amd64.whl", hash = "sha256:3ff3006c97d9dc396b87fb46bb65818e614ad0181f059322df82bbfe6944e264"}, + {file = "ruff-0.1.1-py3-none-win_arm64.whl", hash = "sha256:e140bd717c49164c8feb4f65c644046fe929c46f42493672853e3213d7bdbce2"}, + {file = "ruff-0.1.1.tar.gz", hash = "sha256:c90461ae4abec261609e5ea436de4a4b5f2822921cf04c16d2cc9327182dbbcc"}, ] [[package]] @@ -1769,4 +1769,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "056b014fc985bda3ac9d33518eae39b228f776e84307ee4ddd28bd330a1c36e6" +content-hash = "c90b6d135b493146da2d248ed7c20e717051bed7ddf222087fe7aa52633f8682" diff --git a/pyproject.toml b/pyproject.toml index 64b573128..84ff5d74b 100644 --- a/pyproject.toml 
+++ b/pyproject.toml @@ -45,7 +45,7 @@ pytest-mock = "^3.10.0" pytest-timeout = "^2.1.0" snapshottest = "^0.6.0" pre-commit = "^2.19.0" -ruff = "^0.0.292" +ruff = "^0.1.1" black = "^23.7.0" typer-cli = "^0.0.13" pyright = "^1.1.314" @@ -135,7 +135,6 @@ select = [ "RET", # flake8-return "SLOT", # flake8-slots "SIM", # flake8-simplify - "TCH", # flake8-type-checking, configure correctly and add "PTH", # flake8-use-pathlib "PGH", # pygrep-hooks "PL", # Pylint diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..cb88c2294 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,16 @@ +import os +from collections.abc import Iterator +from unittest import mock + +import pytest + + +@pytest.fixture() +def mock_env() -> Iterator[os._Environ[str]]: + """Clear ``os.environ``. + + :yield: ``os.environ``. Prevents the function and the mock + context from exiting. + """ + with mock.patch.dict(os.environ, clear=True): + yield os.environ diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index cff924b5f..95d63ab70 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -23,12 +23,12 @@ 'replicaCount': 1, 'schedule': '0 12 * * *', 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', 'extraOutputTopics': { }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' }, 'suspend': True }, @@ -74,12 +74,12 @@ 'replicaCount': 1, 'schedule': '0 12 * * *', 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', 'extraOutputTopics': { }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' }, 'suspend': True }, @@ -129,14 +129,14 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic', 'inputTopics': [ 'bakdata-atm-fraud-detection-transaction-avro-producer-topic' ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' } }, 'name': 'transaction-joiner', @@ -191,14 +191,14 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic', 'inputTopics': [ 'bakdata-atm-fraud-detection-transaction-joiner-topic' ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 
'bakdata-atm-fraud-detection-fraud-detector-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' } }, 'name': 'fraud-detector', @@ -253,7 +253,7 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic', 'extraInputTopics': { 'accounts': [ @@ -265,7 +265,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' } }, 'from': { @@ -315,7 +315,7 @@ 'auto.create': True, 'connection.ds.pool.size': 5, 'connection.password': 'AppPassword', - 'connection.url': 'jdbc:postgresql://postgresql-dev.kpops.svc.cluster.local:5432/app_db', + 'connection.url': 'jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db', 'connection.user': 'app1', 'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector', 'errors.deadletterqueue.context.headers.enable': True, @@ -333,7 +333,7 @@ 'transforms': 'flatten', 'transforms.flatten.type': 'org.apache.kafka.connect.transforms.Flatten$Value', 'value.converter': 'io.confluent.connect.avro.AvroConverter', - 'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' }, 'name': 'postgresql-connector', 'namespace': '${NAMESPACE}', diff --git a/tests/pipeline/test_example.py b/tests/pipeline/test_example.py index ea2b27c43..e3b3e5286 100644 --- a/tests/pipeline/test_example.py +++ b/tests/pipeline/test_example.py @@ -1,3 +1,4 @@ +import pytest import yaml from snapshottest.module import SnapshotTest from typer.testing import CliRunner @@ -7,6 +8,7 @@ runner = CliRunner() +@pytest.mark.usefixtures("mock_env") class TestExample: def test_atm_fraud(self, snapshot: SnapshotTest): result = runner.invoke( diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index debe9e5c2..d09ae6795 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -16,6 +16,7 @@ PIPELINE_BASE_DIR_PATH = RESOURCE_PATH.parent +@pytest.mark.usefixtures("mock_env") class TestPipeline: def test_python_api(self): pipeline = kpops.generate( From 2b6710bd2e7c2f29073a90dd776f62d156d52d9b Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 23 Oct 2023 14:16:20 +0200 Subject: [PATCH 05/34] Migrate deprecated mkdocs-material-extensions (#378) --- docs/mkdocs.yml | 65 ++++++++++++++------------- poetry.lock | 114 +++++++++++++++++++++++++++++++----------------- pyproject.toml | 1 - 3 files changed, 106 insertions(+), 74 deletions(-) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 132bc79e3..1ed7a99b0 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -65,8 +65,8 @@ markdown_extensions: - pymdownx.tabbed: alternate_style: true - pymdownx.emoji: - emoji_index: !!python/name:materialx.emoji.twemoji - emoji_generator: !!python/name:materialx.emoji.to_svg + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg - 
pymdownx.superfences: preserve_tabs: true custom_fences: @@ -78,37 +78,36 @@ nav: - Home: KPOps Documentation: index.md - User Guide: - - What is KPOps: user/what-is-kpops.md - - Getting Started: - - Setup: user/getting-started/setup.md - - Quick start: user/getting-started/quick-start.md - - Teardown: user/getting-started/teardown.md - - Examples: - - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md - - Core Concepts: - - Components: - - Overview: user/core-concepts/components/overview.md - - KubernetesApp: user/core-concepts/components/kubernetes-app.md - - HelmApp: user/core-concepts/components/helm-app.md - - KafkaApp: user/core-concepts/components/kafka-app.md - - StreamsApp: user/core-concepts/components/streams-app.md - - ProducerApp: user/core-concepts/components/producer-app.md - - KafkaConnector: user/core-concepts/components/kafka-connector.md - - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md - - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md - - Config: user/core-concepts/config.md - - Defaults: user/core-concepts/defaults.md - - Variables: - - Environment variables: user/core-concepts/variables/environment_variables.md - - Substitution: user/core-concepts/variables/substitution.md - - References: - - Migration guide: - - Migrate from v1 to v2: user/migration-guide/v1-v2.md - - Migrate from v2 to v3: user/migration-guide/v2-v3.md - - CLI usage: user/references/cli-commands.md - - Editor integration: user/references/editor-integration.md - - CI integration: - - GitHub Actions: user/references/ci-integration/github-actions.md + - What is KPOps: user/what-is-kpops.md + - Getting Started: + - Setup: user/getting-started/setup.md + - Quick start: user/getting-started/quick-start.md + - Teardown: user/getting-started/teardown.md + - Examples: + - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md + - Core Concepts: + - Components: + - Overview: user/core-concepts/components/overview.md + - KubernetesApp: user/core-concepts/components/kubernetes-app.md + - KafkaApp: user/core-concepts/components/kafka-app.md + - StreamsApp: user/core-concepts/components/streams-app.md + - ProducerApp: user/core-concepts/components/producer-app.md + - KafkaConnector: user/core-concepts/components/kafka-connector.md + - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md + - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md + - Config: user/core-concepts/config.md + - Defaults: user/core-concepts/defaults.md + - Variables: + - Environment variables: user/core-concepts/variables/environment_variables.md + - Substitution: user/core-concepts/variables/substitution.md + - References: + - Migration guide: + - Migrate from v1 to v2: user/migration-guide/v1-v2.md + - Migrate from v2 to v3: user/migration-guide/v2-v3.md + - CLI usage: user/references/cli-commands.md + - Editor integration: user/references/editor-integration.md + - CI integration: + - GitHub Actions: user/references/ci-integration/github-actions.md - Developer Guide: - Auto generation: developer/auto-generation.md - Formatting: developer/formatting.md diff --git a/poetry.lock b/poetry.lock index d901baf75..ded8b706c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -48,6 +48,20 @@ docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] 
tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +[[package]] +name = "babel" +version = "2.13.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, + {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "beautifulsoup4" version = "4.11.1" @@ -607,13 +621,13 @@ test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" -version = "1.4.2" +version = "1.5.3" description = "Project documentation with Markdown." optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs-1.4.2-py3-none-any.whl", hash = "sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c"}, - {file = "mkdocs-1.4.2.tar.gz", hash = "sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5"}, + {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, + {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, ] [package.dependencies] @@ -621,31 +635,31 @@ click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" jinja2 = ">=2.11.1" -markdown = ">=3.2.1,<3.4" +markdown = ">=3.2.1" +markupsafe = ">=2.0.1" mergedeep = ">=1.3.4" packaging = ">=20.5" +pathspec = ">=0.11.1" +platformdirs = ">=2.2.0" pyyaml = ">=5.1" pyyaml-env-tag = ">=0.1" watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] [[package]] name = "mkdocs-glightbox" -version = "0.3.1" +version = "0.3.4" description = "MkDocs plugin supports image lightbox with GLightbox." 
optional = false python-versions = "*" files = [ - {file = "mkdocs-glightbox-0.3.1.tar.gz", hash = "sha256:ac85e2d4d422cc4a670fa276840f0aa3064a1ec4ad25ccb6d6e82d11bb11e513"}, - {file = "mkdocs_glightbox-0.3.1-py3-none-any.whl", hash = "sha256:1974f505e3272b617b5e7552fd09d8d918d267631ed991772b4bd103dc74bea2"}, + {file = "mkdocs-glightbox-0.3.4.tar.gz", hash = "sha256:96aaf98216f83c0d0fad2e42a8d805cfa6329d6ab25b54265012ccb2154010d8"}, + {file = "mkdocs_glightbox-0.3.4-py3-none-any.whl", hash = "sha256:8f894435b4f75231164e5d9fb023c01e922e6769e74a121e822c4914f310a41d"}, ] -[package.dependencies] -beautifulsoup4 = ">=4.11.1" - [[package]] name = "mkdocs-macros-plugin" version = "0.7.0" @@ -669,35 +683,42 @@ test = ["mkdocs-include-markdown-plugin", "mkdocs-macros-test", "mkdocs-material [[package]] name = "mkdocs-material" -version = "9.1.1" +version = "9.4.6" description = "Documentation that simply works" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.1.1-py3-none-any.whl", hash = "sha256:3b20d4e9ee28b2c276d391eb2c4e599ff8865e6c7dcab8146a7fd9805ca59263"}, - {file = "mkdocs_material-9.1.1.tar.gz", hash = "sha256:836f0066c9346afc05b1962c146ea097025512bbb607c5f04a38248d7415f165"}, + {file = "mkdocs_material-9.4.6-py3-none-any.whl", hash = "sha256:78802035d5768a78139c84ad7dce0c6493e8f7dc4861727d36ed91d1520a54da"}, + {file = "mkdocs_material-9.4.6.tar.gz", hash = "sha256:09665e60df7ee9e5ff3a54af173f6d45be718b1ee7dd962bcff3102b81fb0c14"}, ] [package.dependencies] -colorama = ">=0.4" -jinja2 = ">=3.0" -markdown = ">=3.2" -mkdocs = ">=1.4.2" -mkdocs-material-extensions = ">=1.1" -pygments = ">=2.14" -pymdown-extensions = ">=9.9.1" -regex = ">=2022.4.24" -requests = ">=2.26" +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.5.3,<2.0" +mkdocs-material-extensions = ">=1.2,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] name = "mkdocs-material-extensions" -version = "1.1.1" +version = "1.3" description = "Extension pack for Python Markdown and MkDocs Material." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, - {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, + {file = "mkdocs_material_extensions-1.3-py3-none-any.whl", hash = "sha256:0297cc48ba68a9fdd1ef3780a3b41b534b0d0df1d1181a44676fda5f464eeadc"}, + {file = "mkdocs_material_extensions-1.3.tar.gz", hash = "sha256:f0446091503acb110a7cab9349cbc90eeac51b58d1caa92a704a81ca1e24ddbd"}, ] [[package]] @@ -736,6 +757,16 @@ files = [ {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "path" version = "16.7.1" @@ -753,13 +784,13 @@ testing = ["appdirs", "packaging", "pygments", "pytest (>=6)", "pytest-black (>= [[package]] name = "pathspec" -version = "0.10.3" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, - {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -880,13 +911,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygments" -version = "2.14.0" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, - {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -905,19 +936,22 @@ files = [ [[package]] name = "pymdown-extensions" -version = "9.10" +version = "10.3.1" description = "Extension pack for Python Markdown." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-9.10-py3-none-any.whl", hash = "sha256:31eaa76ce6f96aabfcea98787c2fff2c5c0611b20a53a94213970cfbf05f02b8"}, - {file = "pymdown_extensions-9.10.tar.gz", hash = "sha256:562c38eee4ce3f101ce631b804bfc2177a8a76c7e4dc908871fb6741a90257a7"}, + {file = "pymdown_extensions-10.3.1-py3-none-any.whl", hash = "sha256:8cba67beb2a1318cdaf742d09dff7c0fc4cafcc290147ade0f8fb7b71522711a"}, + {file = "pymdown_extensions-10.3.1.tar.gz", hash = "sha256:f6c79941498a458852853872e379e7bab63888361ba20992fc8b4f8a9b61735e"}, ] [package.dependencies] markdown = ">=3.2" pyyaml = "*" +[package.extras] +extra = ["pygments (>=2.12)"] + [[package]] name = "pyright" version = "1.1.314" @@ -1769,4 +1803,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "c90b6d135b493146da2d248ed7c20e717051bed7ddf222087fe7aa52633f8682" +content-hash = "d46da53e55dc8260864458cc9eef32f71eae0799836806dbc766c751bdac3b80" diff --git a/pyproject.toml b/pyproject.toml index 84ff5d74b..1a7408403 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,6 @@ optional = true mkdocs-macros-plugin = "^0.7.0" mkdocs-material = "^9.0.0" mkdocs = "^1.4.2" -mkdocs-material-extensions = "^1.1.1" mkdocs-glightbox = "^0.3.1" mike = "^1.1.2" From ec55524a34f8364324ec5043230f96719dfc9c20 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 24 Oct 2023 09:02:36 +0300 Subject: [PATCH 06/34] Fix docs setup page list indentation (#377) closes #375 --- docs/docs/user/examples/atm-fraud-pipeline.md | 29 +++--- docs/docs/user/getting-started/quick-start.md | 94 ++++++++++--------- docs/docs/user/getting-started/setup.md | 79 ++++++++-------- docs/docs/user/getting-started/teardown.md | 32 ++++--- 4 files changed, 123 insertions(+), 111 deletions(-) diff --git a/docs/docs/user/examples/atm-fraud-pipeline.md b/docs/docs/user/examples/atm-fraud-pipeline.md index 06c249f3d..cfc811c70 100644 --- a/docs/docs/user/examples/atm-fraud-pipeline.md +++ b/docs/docs/user/examples/atm-fraud-pipeline.md @@ -72,23 +72,23 @@ kubectl port-forward --namespace kpops service/k8kafka-cp-kafka-connect 8083:808 ### Deploying the ATM fraud detection pipeline + + 1. Export environment variables in your terminal: - ```shell - export DOCKER_REGISTRY=bakdata && \ - export NAMESPACE=kpops - ``` + ```shell + export DOCKER_REGISTRY=bakdata && \ + export NAMESPACE=kpops + ``` 2. Deploy the pipeline - ```shell - poetry run kpops deploy ./examples/bakdata/atm-fraud-detection/pipeline.yaml \ - --pipeline-base-dir ./examples \ - --config ./examples/bakdata/atm-fraud-detection/config.yaml \ - --execute - ``` - - + ```shell + poetry run kpops deploy ./examples/bakdata/atm-fraud-detection/pipeline.yaml \ + --pipeline-base-dir ./examples \ + --config ./examples/bakdata/atm-fraud-detection/config.yaml \ + --execute + ``` !!! Note You can use the `--dry-run` flag instead of the `--execute` flag and check the logs if your pipeline will be @@ -120,6 +120,8 @@ You should be able to see pipeline shown in the image below: Moreover, Streams Explorer needs a while to scrape the information from Kafka connect. Therefore, it might take a bit until you see the whole graph. + + ## Teardown resources ### PostrgreSQL @@ -132,6 +134,8 @@ helm --namespace kpops uninstall postgresql ### ATM fraud pipeline + + 1. Export environment variables in your terminal. ```shell @@ -149,7 +153,6 @@ helm --namespace kpops uninstall postgresql --execute ``` - !!! 
Note You can use the `--dry-run` flag instead of the `--execute` flag and check the logs if your pipeline will be destroyed correctly. diff --git a/docs/docs/user/getting-started/quick-start.md b/docs/docs/user/getting-started/quick-start.md index 3727f8ca8..c97cf0e1f 100644 --- a/docs/docs/user/getting-started/quick-start.md +++ b/docs/docs/user/getting-started/quick-start.md @@ -67,37 +67,37 @@ kubectl port-forward --namespace kpops service/k8kafka-cp-kafka-connect 8083:808 ### Deploying the Word-count pipeline + + 1. Copy the [configuration](https://github.com/bakdata/kpops-examples/tree/main/word-count/deployment/kpops){target=_blank} from the [kpops-examples repository](https://github.com/bakdata/kpops-examples/tree/main/word-count){target=_blank} into `kpops>examples>bakdata>word-count` like so: - ``` - kpops - ├── examples - | ├── bakdata - | | ├── word-count - | | | ├── config.yaml - | | | ├── defaults - | | | │   └── defaults.yaml - | | | └── pipeline.yaml - | | | - ``` + ``` + kpops + ├── examples + | ├── bakdata + | | ├── word-count + | | | ├── config.yaml + | | | ├── defaults + | | | │   └── defaults.yaml + | | | └── pipeline.yaml + | | | + ``` 2. Export environment variables in your terminal: - ```shell - export DOCKER_REGISTRY=bakdata && \ - export NAMESPACE=kpops - ``` + ```shell + export DOCKER_REGISTRY=bakdata && \ + export NAMESPACE=kpops + ``` 3. Deploy the pipeline - ```shell - kpops deploy ./examples/bakdata/word-count/pipeline.yaml \ - --pipeline-base-dir ./examples \ - --config ./examples/bakdata/word-count/config.yaml \ - --execute - ``` - - + ```shell + kpops deploy ./examples/bakdata/word-count/pipeline.yaml \ + --pipeline-base-dir ./examples \ + --config ./examples/bakdata/word-count/config.yaml \ + --execute + ``` !!! Note You can use the `--dry-run` flag instead of the `--execute` flag and check the logs if your pipeline will be @@ -144,24 +144,24 @@ helm --namespace kpops uninstall redis ### Word-count pipeline + + 1. Export environment variables in your terminal. - ```shell - export DOCKER_REGISTRY=bakdata && \ - export NAMESPACE=kpops - ``` + ```shell + export DOCKER_REGISTRY=bakdata && \ + export NAMESPACE=kpops + ``` 2. Remove the pipeline - ```shell - kpops clean ./examples/bakdata/word-count/pipeline.yaml \ - --pipeline-base-dir ./examples \ - --config ./examples/bakdata/word-count/config.yaml \ - --verbose \ - --execute - ``` - - + ```shell + kpops clean ./examples/bakdata/word-count/pipeline.yaml \ + --pipeline-base-dir ./examples \ + --config ./examples/bakdata/word-count/config.yaml \ + --verbose \ + --execute + ``` !!! Note You can use the `--dry-run` flag instead of the `--execute` flag and check the logs if your pipeline will be @@ -174,14 +174,18 @@ helm --namespace kpops uninstall redis ## Common errors + + - `deploy` fails: - 1. Read the error message. - 2. Try to correct the mistakes if there were any. Likely the configuration is not correct or the port-forwarding is not working as intended. - 3. Run `clean`. - 4. Run `deploy --dry-run` to avoid having to `clean` again. If an error is dropped, start over from step 1. - 5. If the dry-run is successful, run `deploy`. + 1. Read the error message. + 2. Try to correct the mistakes if there were any. Likely the configuration is not correct or the port-forwarding is not working as intended. + 3. Run `clean`. + 4. Run `deploy --dry-run` to avoid having to `clean` again. If an error is dropped, start over from step 1. + 5. If the dry-run is successful, run `deploy`. - `clean` fails: - 1. 
Read the error message. - 2. Try to correct the indicated mistakes if there were any. Likely the configuration is not correct or the port-forwarding is not working as intended. - 3. Run `clean`. - 4. If `clean` fails, follow the steps in [teardown](../teardown). + 1. Read the error message. + 2. Try to correct the indicated mistakes if there were any. Likely the configuration is not correct or the port-forwarding is not working as intended. + 3. Run `clean`. + 4. If `clean` fails, follow the steps in [teardown](../teardown). + + diff --git a/docs/docs/user/getting-started/setup.md b/docs/docs/user/getting-started/setup.md index 1c608b77e..3e2fef676 100644 --- a/docs/docs/user/getting-started/setup.md +++ b/docs/docs/user/getting-started/setup.md @@ -14,29 +14,30 @@ In this part, you will set up KPOps. This includes: ## Setup Kubernetes with k3d + + If you don't have access to an existing Kubernetes cluster, this section will guide you through creating a local cluster. We recommend the lightweight Kubernetes distribution [k3s](https://k3s.io/){target=_blank} for this. [k3d](https://k3d.io/){target=_blank} is a wrapper around k3s in Docker that lets you get started fast. + 1. You can install k3d with its installation script: - ```shell - wget -q -O - https://raw.githubusercontent.com/k3d-io/k3d/v5.4.6/install.sh | bash - ``` + ```shell + wget -q -O - https://raw.githubusercontent.com/k3d-io/k3d/v5.4.6/install.sh | bash + ``` - For other ways of installing k3d, you can have a look at their [installation guide](https://k3d.io/v5.4.6/#installation){target=_blank}. + For other ways of installing k3d, you can have a look at their [installation guide](https://k3d.io/v5.4.6/#installation){target=_blank}. 2. The [Kafka deployment](#deploy-kafka) needs a modified Docker image. In that case the image is built and pushed to a Docker registry that holds it. If you do not have access to an existing Docker registry, you can use k3d's Docker registry: - ```shell - k3d registry create kpops-registry.localhost --port 12345 - ``` + ```shell + k3d registry create kpops-registry.localhost --port 12345 + ``` 3. Now you can create a new cluster called `kpops` that uses the previously created Docker registry: - ```shell - k3d cluster create kpops --k3s-arg "--no-deploy=traefik@server:*" --registry-use k3d-kpops-registry.localhost:12345 - ``` - - + ```shell + k3d cluster create kpops --k3s-arg "--no-deploy=traefik@server:*" --registry-use k3d-kpops-registry.localhost:12345 + ``` !!! Note Creating a new k3d cluster automatically configures `kubectl` to connect to the local cluster by modifying your `~/.kube/config`. In case you manually set the `KUBECONFIG` variable or don't want k3d to modify your config, k3d offers [many other options](https://k3d.io/v5.4.6/usage/kubeconfig/#handling-kubeconfigs){target=_blank}. @@ -47,48 +48,48 @@ You can check the cluster status with `kubectl get pods -n kube-system`. If all ## Deploy Kafka + + [Kafka](https://kafka.apache.org/){target=_blank} is an open-source data streaming platform. More information about Kafka can be found in the [documentation](https://kafka.apache.org/documentation/){target=_blank}. To deploy Kafka, this guide uses Confluent's [Helm chart](https://github.com/confluentinc/cp-helm-charts){target=_blank}. 1. To allow connectivity to other systems [Kafka Connect](https://docs.confluent.io/platform/current/connect/index.html#kafka-connect){target=_blank} needs to be extended with drivers. 
You can install a [JDBC driver](https://docs.confluent.io/kafka-connectors/jdbc/current/jdbc-drivers.html){target=_blank} for Kafka Connect by creating a new Docker image: - 1. Create a `Dockerfile` with the following content: + 1. Create a `Dockerfile` with the following content: - ```dockerfile - FROM confluentinc/cp-kafka-connect:7.1.3 + ```dockerfile + FROM confluentinc/cp-kafka-connect:7.1.3 - RUN confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.6.0 - ``` + RUN confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.6.0 + ``` - 2. Build and push the modified image to your private Docker registry: + 2. Build and push the modified image to your private Docker registry: - ```shell - docker build . --tag localhost:12345/kafka-connect-jdbc:7.1.3 && \ - docker push localhost:12345/kafka-connect-jdbc:7.1.3 - ``` + ```shell + docker build . --tag localhost:12345/kafka-connect-jdbc:7.1.3 && \ + docker push localhost:12345/kafka-connect-jdbc:7.1.3 + ``` - Detailed instructions on building, tagging and pushing a docker image can be found in [Docker docs](https://docs.docker.com/){target=_blank}. + Detailed instructions on building, tagging and pushing a docker image can be found in [Docker docs](https://docs.docker.com/){target=_blank}. 2. Add Confluent's Helm chart repository and update the index: - ```shell - helm repo add confluentinc https://confluentinc.github.io/cp-helm-charts/ && - helm repo update - ``` + ```shell + helm repo add confluentinc https://confluentinc.github.io/cp-helm-charts/ && + helm repo update + ``` 3. Install Kafka, Zookeeper, Confluent's Schema Registry, Kafka Rest Proxy, and Kafka Connect. A single Helm chart installs all five components. Below you can find an example for the `--values ./kafka.yaml` file configuring the deployment accordingly. Deploy the services: - ```shell - helm upgrade \ - --install \ - --version 0.6.1 \ - --values ./kafka.yaml \ - --namespace kpops \ - --create-namespace \ - --wait \ - k8kafka confluentinc/cp-helm-charts - ``` - - + ```shell + helm upgrade \ + --install \ + --version 0.6.1 \ + --values ./kafka.yaml \ + --namespace kpops \ + --create-namespace \ + --wait \ + k8kafka confluentinc/cp-helm-charts + ``` ??? example "Kafka Helm chart values (`kafka.yaml`)" An example value configuration for Confluent's Helm chart. This configuration deploys a single Kafka Broker, a Schema Registry, Zookeeper, Kafka Rest Proxy, and Kafka Connect with minimal resources. diff --git a/docs/docs/user/getting-started/teardown.md b/docs/docs/user/getting-started/teardown.md index 47c839a18..c006c2c45 100644 --- a/docs/docs/user/getting-started/teardown.md +++ b/docs/docs/user/getting-started/teardown.md @@ -11,28 +11,32 @@ The [`kpops` CLI](../references/cli-commands.md) can be used to destroy a pipeline that was previously deployed with KPOps. In case that doesn't work, the pipeline can always be taken down manually with `helm` (see section [Infrastructure](#infrastructure)). + + 1. Export environment variables. - ```shell - export DOCKER_REGISTRY=bakdata && \ - export NAMESPACE=kpops - ``` + ```shell + export DOCKER_REGISTRY=bakdata && \ + export NAMESPACE=kpops + ``` 2. Navigate to the `examples` folder. - Replace the `` with the example you want to tear down. - For example the `atm-fraud-detection`. + Replace the `` with the example you want to tear down. + For example the `atm-fraud-detection`. 3. Remove the pipeline - ```shell - # Uncomment 1 line to either destroy, reset or clean. 
+ ```shell + # Uncomment 1 line to either destroy, reset or clean. - # poetry run kpops destroy /pipeline.yaml \ - # poetry run kpops reset /pipeline.yaml \ - # poetry run kpops clean /pipeline.yaml \ - --config /config.yaml \ - --execute - ``` + # poetry run kpops destroy /pipeline.yaml \ + # poetry run kpops reset /pipeline.yaml \ + # poetry run kpops clean /pipeline.yaml \ + --config /config.yaml \ + --execute + ``` + + ## Infrastructure From dfb2881ef77e137059e89192ac501a7f6a88fe0d Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 24 Oct 2023 10:14:34 +0200 Subject: [PATCH 07/34] Exclude resources from docs search (#371) --- docs/mkdocs.yml | 3 +++ poetry.lock | 16 +++++++++++++++- pyproject.toml | 3 ++- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 1ed7a99b0..14750ff6c 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -32,6 +32,9 @@ theme: plugins: - macros - search + - exclude-search: + exclude: + - resources/* - glightbox extra: diff --git a/poetry.lock b/poetry.lock index ded8b706c..e0cabdcba 100644 --- a/poetry.lock +++ b/poetry.lock @@ -649,6 +649,20 @@ watchdog = ">=2.0" i18n = ["babel (>=2.9.0)"] min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] +[[package]] +name = "mkdocs-exclude-search" +version = "0.6.5" +description = "A mkdocs plugin that lets you exclude selected files or sections from the search index." +optional = false +python-versions = ">=3.6" +files = [ + {file = "mkdocs-exclude-search-0.6.5.tar.gz", hash = "sha256:6a126367653caf946c56e446ff30ffdec931438a3e2a8740feff3f8682d52a54"}, + {file = "mkdocs_exclude_search-0.6.5-py3-none-any.whl", hash = "sha256:475e372dd17195700acd00213bcc804280d63f39cb7a0c76a801aceefd2807a6"}, +] + +[package.dependencies] +mkdocs = ">=1.0.4" + [[package]] name = "mkdocs-glightbox" version = "0.3.4" @@ -1803,4 +1817,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "d46da53e55dc8260864458cc9eef32f71eae0799836806dbc766c751bdac3b80" +content-hash = "dfdbdd18867682898d908dd3a3e19741993478eb7b722d3dd14a5c2acd0b9826" diff --git a/pyproject.toml b/pyproject.toml index 1a7408403..6fc6839f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,10 +57,11 @@ pytablewriter = { extras = ["from"], version = "^1.0.0" } optional = true [tool.poetry.group.docs.dependencies] +mkdocs = "^1.4.2" mkdocs-macros-plugin = "^0.7.0" mkdocs-material = "^9.0.0" -mkdocs = "^1.4.2" mkdocs-glightbox = "^0.3.1" +mkdocs-exclude-search = "^0.6.5" mike = "^1.1.2" [tool.poetry_bumpversion.file."kpops/__init__.py"] From bbaf5bd95739aa4a6cea72c7958c1a9f4f97d64a Mon Sep 17 00:00:00 2001 From: bakdata-bots Date: Tue, 24 Oct 2023 14:50:48 +0000 Subject: [PATCH 08/34] =?UTF-8?q?Bump=20version=202.0.10=20=E2=86=92=202.0?= =?UTF-8?q?.11?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 22 ++++++++++++++++++++++ kpops/__init__.py | 2 +- pyproject.toml | 2 +- 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 402a6fe7e..b964b7de2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,26 @@ # Changelog +## 
[2.0.11](https://github.com/bakdata/kpops/releases/tag/2.0.11) - Release Date: [2023-10-24] + +### 🐛 Fixes + +- Fix early exit upon Helm exit code 1 - [#376](https://github.com/bakdata/kpops/pull/376) + +- Fix docs setup page list indentation - [#377](https://github.com/bakdata/kpops/pull/377) + + +### 📝 Documentation + +- Migrate deprecated mkdocs-material-extensions - [#378](https://github.com/bakdata/kpops/pull/378) + +- Fix docs setup page list indentation - [#377](https://github.com/bakdata/kpops/pull/377) + +- Exclude resources from docs search - [#371](https://github.com/bakdata/kpops/pull/371) + + + + + + ## [2.0.10](https://github.com/bakdata/kpops/releases/tag/2.0.10) - Release Date: [2023-10-12] ### 🌀 Miscellaneous diff --git a/kpops/__init__.py b/kpops/__init__.py index 70fbe94a4..8fea6bcdf 100644 --- a/kpops/__init__.py +++ b/kpops/__init__.py @@ -1,4 +1,4 @@ -__version__ = "2.0.10" +__version__ = "2.0.11" # export public API functions from kpops.cli.main import clean, deploy, destroy, generate, reset diff --git a/pyproject.toml b/pyproject.toml index 6fc6839f9..e65abe824 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "kpops" -version = "2.0.10" +version = "2.0.11" description = "KPOps is a tool to deploy Kafka pipelines to Kubernetes" authors = ["bakdata "] license = "MIT" From 52d9b1355f82be27e6f5d87eb4ed169050010f51 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 25 Oct 2023 17:24:25 +0200 Subject: [PATCH 09/34] Fix test --- .../bakdata/atm-fraud-detection/config.yaml | 4 ++-- tests/pipeline/snapshots/snap_test_example.py | 20 +++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index 7ef3e6575..d03a12c64 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -4,11 +4,11 @@ topic_name_config: default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic" default_output_topic_name: "${pipeline_name}-${component_name}-topic" -kafka_brokers: "http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" schema_registry: enabled: true - url: "http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081" + url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081" kafka_rest: url: "http://localhost:8082" diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index 95d63ab70..2ef44b969 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -23,12 +23,12 @@ 'replicaCount': 1, 'schedule': '0 12 * * *', 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' }, 'suspend': True }, @@ -74,12 +74,12 @@ 'replicaCount': 1, 'schedule': '0 12 * * *', 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 
'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' }, 'suspend': True }, @@ -129,14 +129,14 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic', 'inputTopics': [ 'bakdata-atm-fraud-detection-transaction-avro-producer-topic' ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' } }, 'name': 'transaction-joiner', @@ -191,14 +191,14 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic', 'inputTopics': [ 'bakdata-atm-fraud-detection-transaction-joiner-topic' ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' } }, 'name': 'fraud-detector', @@ -253,7 +253,7 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic', 'extraInputTopics': { 'accounts': [ @@ -265,7 +265,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' } }, 'from': { From 919c0d4695c4ca5b6bf2129b2ffe4ca14d3aa6f2 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 31 Oct 2023 12:32:32 +0100 Subject: [PATCH 10/34] Add missing HelmApp docs --- .../user/core-concepts/components/helm-app.md | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 docs/docs/user/core-concepts/components/helm-app.md diff --git a/docs/docs/user/core-concepts/components/helm-app.md b/docs/docs/user/core-concepts/components/helm-app.md new file mode 100644 index 000000000..4a7af609b --- /dev/null +++ b/docs/docs/user/core-concepts/components/helm-app.md @@ -0,0 +1,37 @@ +# HelmApp + +### Usage + +Can be used to deploy any app in Kubernetes using Helm, for example, a REST service that serves Kafka data. + +### Configuration + + + +??? example "`pipeline.yaml`" + + ```yaml + --8<-- + ./docs/resources/pipeline-components/helm-app.yaml + --8<-- + ``` + + + +### Operations + +#### deploy + +Deploy using Helm. + +#### destroy + +Uninstall Helm release. + +#### reset + +Do nothing. + +#### clean + +Do nothing. 
From 74ffd6e84013d9e5cc324afe6b39ae634cac0bbd Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 12 Dec 2023 14:24:50 +0200 Subject: [PATCH 11/34] Migrate to Pydantic v2 (#347) closes #341 --------- Co-authored-by: Salomon Popp --- .../docs/resources/variables/cli_env_vars.env | 3 + docs/docs/resources/variables/cli_env_vars.md | 15 +- .../resources/variables/config_env_vars.env | 46 +- .../resources/variables/config_env_vars.md | 28 +- docs/docs/schema/config.json | 373 +++++------ docs/docs/schema/pipeline.json | 580 ++++++++++++------ .../variables/environment_variables.md | 5 +- docs/docs/user/references/cli-commands.md | 5 + docs/mkdocs.yml | 71 +-- hooks/gen_docs/gen_docs_cli_usage.py | 2 +- hooks/gen_docs/gen_docs_components.py | 5 +- hooks/gen_docs/gen_docs_env_vars.py | 78 ++- kpops/cli/main.py | 38 +- .../component_handlers/helm_wrapper/model.py | 28 +- .../kafka_connect/connect_wrapper.py | 20 +- .../kafka_connect/kafka_connect_handler.py | 6 +- .../component_handlers/kafka_connect/model.py | 77 ++- .../schema_handler/schema_handler.py | 2 +- kpops/component_handlers/topic/handler.py | 4 +- kpops/component_handlers/topic/model.py | 39 +- .../component_handlers/topic/proxy_wrapper.py | 16 +- .../base_defaults_component.py | 30 +- kpops/components/base_components/helm_app.py | 28 +- kpops/components/base_components/kafka_app.py | 11 +- .../base_components/kafka_connector.py | 31 +- .../base_components/kubernetes_app.py | 11 +- .../base_components/models/from_section.py | 28 +- .../base_components/models/to_section.py | 30 +- .../base_components/pipeline_component.py | 11 +- .../streams_bootstrap/producer/model.py | 5 +- .../streams_bootstrap/streams/model.py | 61 +- kpops/config.py | 89 ++- kpops/pipeline_generator/pipeline.py | 16 +- kpops/utils/dict_ops.py | 20 +- kpops/utils/gen_schema.py | 92 +-- kpops/utils/pydantic.py | 135 +++- poetry.lock | 241 ++++++-- pyproject.toml | 5 +- .../snapshots/snap_test_schema_generation.py | 260 +++++--- tests/cli/test_kpops_config.py | 16 +- tests/cli/test_schema_generation.py | 2 +- .../kafka_connect/test_connect_wrapper.py | 10 +- .../schema_handler/test_schema_handler.py | 7 +- .../topic/test_proxy_wrapper.py | 3 +- tests/components/test_kafka_sink_connector.py | 4 +- tests/defaults.yaml | 2 + tests/pipeline/resources/dotenv/.env | 3 + tests/pipeline/resources/dotenv/config.yaml | 12 + tests/pipeline/resources/dotenv/custom.env | 3 + .../temp-trim-release-name/defaults.yaml | 23 + .../temp-trim-release-name/pipeline.yaml | 6 + tests/pipeline/snapshots/snap_test_example.py | 10 +- .../pipeline/snapshots/snap_test_pipeline.py | 74 +-- tests/pipeline/test_components/components.py | 2 +- tests/pipeline/test_pipeline.py | 71 +++ tests/utils/resources/nested_base_settings.py | 5 +- tests/utils/test_dict_ops.py | 2 +- tests/utils/test_doc_gen.py | 12 - 58 files changed, 1723 insertions(+), 1089 deletions(-) create mode 100644 tests/defaults.yaml create mode 100644 tests/pipeline/resources/dotenv/.env create mode 100644 tests/pipeline/resources/dotenv/config.yaml create mode 100644 tests/pipeline/resources/dotenv/custom.env create mode 100644 tests/pipeline/resources/temp-trim-release-name/defaults.yaml create mode 100644 tests/pipeline/resources/temp-trim-release-name/pipeline.yaml diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env index dec1d8b3a..dc44ac3a6 100644 --- a/docs/docs/resources/variables/cli_env_vars.env +++ b/docs/docs/resources/variables/cli_env_vars.env @@ -12,6 +12,9 
@@ KPOPS_PIPELINE_BASE_DIR=. KPOPS_CONFIG_PATH=config.yaml # Path to defaults folder KPOPS_DEFAULT_PATH # No default value, not required +# Path to dotenv file. Multiple files can be provided. The files will +# be loaded in order, with each file overriding the previous one. +KPOPS_DOTENV_PATH # No default value, not required # Path to YAML with pipeline definition KPOPS_PIPELINE_PATH # No default value, required # Comma separated list of steps to apply the command on diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md index 763cb936e..ed0880bee 100644 --- a/docs/docs/resources/variables/cli_env_vars.md +++ b/docs/docs/resources/variables/cli_env_vars.md @@ -1,9 +1,10 @@ These variables are a lower priority alternative to the commands' flags. If a variable is set, the corresponding flag does not have to be specified in commands. Variables marked as required can instead be set as flags. -| Name |Default Value|Required| Description | -|-----------------------|-------------|--------|----------------------------------------------------------------------| -|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory)| -|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | -|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | -|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | -|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | +| Name |Default Value|Required| Description | +|-----------------------|-------------|--------|-----------------------------------------------------------------------------------------------------------------------------------| +|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory) | +|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | +|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | +|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one.| +|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | +|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 00bef6a4c..2b99d2172 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -4,6 +4,11 @@ # alternative to the settings in `config.yaml`. Variables marked as # required can instead be set in the pipeline config. # +# defaults_path +# The path to the folder containing the defaults.yaml file and the +# environment defaults files. Paths can either be absolute or relative +# to `config.yaml` +KPOPS_DEFAULTS_PATH=. # environment # The environment you want to generate and deploy the pipeline to. # Suffix your environment files with this value (e.g. @@ -12,19 +17,48 @@ KPOPS_ENVIRONMENT # No default value, required # kafka_brokers # The comma separated Kafka brokers address. KPOPS_KAFKA_BROKERS # No default value, required -# url +# defaults_filename_prefix +# The name of the defaults file and the prefix of the defaults +# environment file. 
+KPOPS_DEFAULTS_FILENAME_PREFIX=defaults +# topic_name_config.default_output_topic_name +# Configures the value for the variable ${output_topic_name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline_name}-${component_name} +# topic_name_config.default_error_topic_name +# Configures the value for the variable ${error_topic_name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline_name}-${component_name}-error +# schema_registry.enabled +# Whether the Schema Registry handler should be initialized. +KPOPS_SCHEMA_REGISTRY__ENABLED=False +# schema_registry.url # Address of the Schema Registry. -KPOPS_SCHEMA_REGISTRY_URL=http://localhost:8081 -# url +KPOPS_SCHEMA_REGISTRY__URL=http://localhost:8081/ +# kafka_rest.url # Address of the Kafka REST Proxy. -KPOPS_KAFKA_REST_URL=http://localhost:8082 -# url +KPOPS_KAFKA_REST__URL=http://localhost:8082/ +# kafka_connect.url # Address of Kafka Connect. -KPOPS_KAFKA_CONNECT_URL=http://localhost:8083 +KPOPS_KAFKA_CONNECT__URL=http://localhost:8083/ # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. KPOPS_TIMEOUT=300 +# create_namespace +# Flag for `helm upgrade --install`. Create the release namespace if +# not present. +KPOPS_CREATE_NAMESPACE=False +# helm_config.context +# Name of kubeconfig context (`--kube-context`) +KPOPS_HELM_CONFIG__CONTEXT # No default value, not required +# helm_config.debug +# Run Helm in Debug mode +KPOPS_HELM_CONFIG__DEBUG=False +# helm_config.api_version +# Kubernetes API version used for Capabilities.APIVersions +KPOPS_HELM_CONFIG__API_VERSION # No default value, not required +# helm_diff_config.ignore +# Set of keys that should not be checked. +KPOPS_HELM_DIFF_CONFIG__IGNORE # No default value, required # retain_clean_jobs # Whether to retain clean up jobs in the cluster or uninstall the, # after completion. diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 2419de11d..f81eb8f56 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,11 +1,21 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name | Default Value |Required| Description | Setting name | -|-------------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| -|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | -|KPOPS_SCHEMA_REGISTRY_URL|http://localhost:8081|False |Address of the Schema Registry. |url | -|KPOPS_KAFKA_REST_URL |http://localhost:8082|False |Address of the Kafka REST Proxy. |url | -|KPOPS_KAFKA_CONNECT_URL |http://localhost:8083|False |Address of Kafka Connect. |url | -|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | -|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs|
+| Name                                             |             Default Value              |Required|                                                                          Description                                                                          |                Setting name               |
+|--------------------------------------------------|----------------------------------------|--------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------|
+|KPOPS_DEFAULTS_PATH                               |.                                       |False   |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`        |defaults_path                              |
+|KPOPS_ENVIRONMENT                                 |                                        |True    |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment                                |
+|KPOPS_KAFKA_BROKERS                               |                                        |True    |The comma separated Kafka brokers address.                                                                                                                     |kafka_brokers                              |
+|KPOPS_DEFAULTS_FILENAME_PREFIX                    |defaults                                |False   |The name of the defaults file and the prefix of the defaults environment file.                                                                                |defaults_filename_prefix                   |
+|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component_name}      |False   |Configures the value for the variable ${output_topic_name}                                                                                                    |topic_name_config.default_output_topic_name|
+|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component_name}-error|False   |Configures the value for the variable ${error_topic_name}                                                                                                     |topic_name_config.default_error_topic_name |
+|KPOPS_SCHEMA_REGISTRY__ENABLED                    |False                                   |False   |Whether the Schema Registry handler should be initialized.                                                                                                    |schema_registry.enabled                    |
+|KPOPS_SCHEMA_REGISTRY__URL                        |http://localhost:8081/                  |False   |Address of the Schema Registry.                                                                                                                                |schema_registry.url                        |
+|KPOPS_KAFKA_REST__URL                             |http://localhost:8082/                  |False   |Address of the Kafka REST Proxy.                                                                                                                               |kafka_rest.url                             |
+|KPOPS_KAFKA_CONNECT__URL                          |http://localhost:8083/                  |False   |Address of Kafka Connect.                                                                                                                                      |kafka_connect.url                          |
+|KPOPS_TIMEOUT                                     |300                                     |False   |The timeout in seconds that specifies when actions like deletion or deploy timeout.                                                                           |timeout                                    |
+|KPOPS_CREATE_NAMESPACE                            |False                                   |False   |Flag for `helm upgrade --install`. Create the release namespace if not present.                                                                               |create_namespace                           |
+|KPOPS_HELM_CONFIG__CONTEXT                        |                                        |False   |Name of kubeconfig context (`--kube-context`)                                                                                                                  |helm_config.context                        |
+|KPOPS_HELM_CONFIG__DEBUG                          |False                                   |False   |Run Helm in Debug mode                                                                                                                                         |helm_config.debug                          |
+|KPOPS_HELM_CONFIG__API_VERSION                    |                                        |False   |Kubernetes API version used for Capabilities.APIVersions                                                                                                      |helm_config.api_version                    |
+|KPOPS_HELM_DIFF_CONFIG__IGNORE                    |                                        |True    |Set of keys that should not be checked.                                                                                                                        |helm_diff_config.ignore                    |
+|KPOPS_RETAIN_CLEAN_JOBS                           |False                                   |False   |Whether to retain clean up jobs in the cluster or uninstall them after completion.
|retain_clean_jobs | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 391a0f2b5..09a848235 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -1,19 +1,36 @@ { - "$ref": "#/definitions/KpopsConfig", - "definitions": { + "$defs": { "HelmConfig": { "description": "Global Helm configuration.", "properties": { "api_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Kubernetes API version used for Capabilities.APIVersions", - "title": "API version", - "type": "string" + "title": "API version" }, "context": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Name of kubeconfig context (`--kube-context`)", - "example": "dev-storage", - "title": "Context", - "type": "string" + "examples": [ + "dev-storage" + ], + "title": "Context" }, "debug": { "default": false, @@ -29,7 +46,9 @@ "properties": { "ignore": { "description": "Set of keys that should not be checked.", - "example": "- name\n- imageTag", + "examples": [ + "- name\n- imageTag" + ], "items": { "type": "string" }, @@ -46,14 +65,9 @@ "description": "Configuration for Kafka Connect.", "properties": { "url": { - "default": "http://localhost:8083", + "default": "http://localhost:8083/", "description": "Address of Kafka Connect.", - "env": "KPOPS_KAFKA_CONNECT_URL", - "env_names": [ - "kpops_kafka_connect_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", "type": "string" @@ -67,14 +81,9 @@ "description": "Configuration for Kafka REST Proxy.", "properties": { "url": { - "default": "http://localhost:8082", + "default": "http://localhost:8082/", "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS_KAFKA_REST_URL", - "env_names": [ - "kpops_kafka_rest_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", "type": "string" @@ -83,181 +92,6 @@ "title": "KafkaRestConfig", "type": "object" }, - "KpopsConfig": { - "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", - "properties": { - "create_namespace": { - "default": false, - "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", - "env_names": [ - "kpops_create_namespace" - ], - "title": "Create Namespace", - "type": "boolean" - }, - "defaults_filename_prefix": { - "default": "defaults", - "description": "The name of the defaults file and the prefix of the defaults environment file.", - "env_names": [ - "kpops_defaults_filename_prefix" - ], - "title": "Defaults Filename Prefix", - "type": "string" - }, - "defaults_path": { - "default": ".", - "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", - "env_names": [ - "kpops_defaults_path" - ], - "example": "defaults", - "format": "path", - "title": "Defaults Path", - "type": "string" - }, - "environment": { - "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. 
defaults_development.yaml for environment=development).", - "env": "KPOPS_ENVIRONMENT", - "env_names": [ - "kpops_environment" - ], - "example": "development", - "title": "Environment", - "type": "string" - }, - "helm_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmConfig" - } - ], - "default": { - "api_version": null, - "context": null, - "debug": false - }, - "description": "Global flags for Helm.", - "env_names": [ - "kpops_helm_config" - ], - "title": "Helm Config" - }, - "helm_diff_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmDiffConfig" - } - ], - "default": { - "ignore": [] - }, - "description": "Configure Helm Diff.", - "env_names": [ - "kpops_helm_diff_config" - ], - "title": "Helm Diff Config" - }, - "kafka_brokers": { - "description": "The comma separated Kafka brokers address.", - "env": "KPOPS_KAFKA_BROKERS", - "env_names": [ - "kpops_kafka_brokers" - ], - "example": "broker1:9092,broker2:9092,broker3:9092", - "title": "Kafka Brokers", - "type": "string" - }, - "kafka_connect": { - "allOf": [ - { - "$ref": "#/definitions/KafkaConnectConfig" - } - ], - "default": { - "url": "http://localhost:8083" - }, - "description": "Configuration for Kafka Connect.", - "env_names": [ - "kpops_kafka_connect" - ], - "title": "Kafka Connect" - }, - "kafka_rest": { - "allOf": [ - { - "$ref": "#/definitions/KafkaRestConfig" - } - ], - "default": { - "url": "http://localhost:8082" - }, - "description": "Configuration for Kafka REST Proxy.", - "env_names": [ - "kpops_kafka_rest" - ], - "title": "Kafka Rest" - }, - "retain_clean_jobs": { - "default": false, - "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", - "env": "KPOPS_RETAIN_CLEAN_JOBS", - "env_names": [ - "kpops_retain_clean_jobs" - ], - "title": "Retain Clean Jobs", - "type": "boolean" - }, - "schema_registry": { - "allOf": [ - { - "$ref": "#/definitions/SchemaRegistryConfig" - } - ], - "default": { - "enabled": false, - "url": "http://localhost:8081" - }, - "description": "Configuration for Schema Registry.", - "env_names": [ - "kpops_schema_registry" - ], - "title": "Schema Registry" - }, - "timeout": { - "default": 300, - "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", - "env": "KPOPS_TIMEOUT", - "env_names": [ - "kpops_timeout" - ], - "title": "Timeout", - "type": "integer" - }, - "topic_name_config": { - "allOf": [ - { - "$ref": "#/definitions/TopicNameConfig" - } - ], - "default": { - "default_error_topic_name": "${pipeline_name}-${component_name}-error", - "default_output_topic_name": "${pipeline_name}-${component_name}" - }, - "description": "Configure the topic name variables you can use in the pipeline definition.", - "env_names": [ - "kpops_topic_name_config" - ], - "title": "Topic Name Config" - } - }, - "required": [ - "environment", - "kafka_brokers" - ], - "title": "KpopsConfig", - "type": "object" - }, "SchemaRegistryConfig": { "additionalProperties": false, "description": "Configuration for Schema Registry.", @@ -265,21 +99,13 @@ "enabled": { "default": false, "description": "Whether the Schema Registry handler should be initialized.", - "env_names": [ - "enabled" - ], "title": "Enabled", "type": "boolean" }, "url": { - "default": "http://localhost:8081", + "default": "http://localhost:8081/", "description": "Address of the Schema Registry.", - "env": "KPOPS_SCHEMA_REGISTRY_URL", - "env_names": [ - "kpops_schema_registry_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", 
"type": "string" @@ -295,18 +121,12 @@ "default_error_topic_name": { "default": "${pipeline_name}-${component_name}-error", "description": "Configures the value for the variable ${error_topic_name}", - "env_names": [ - "default_error_topic_name" - ], "title": "Default Error Topic Name", "type": "string" }, "default_output_topic_name": { "default": "${pipeline_name}-${component_name}", "description": "Configures the value for the variable ${output_topic_name}", - "env_names": [ - "default_output_topic_name" - ], "title": "Default Output Topic Name", "type": "string" } @@ -315,5 +135,136 @@ "type": "object" } }, - "title": "KPOps config schema" + "additionalProperties": false, + "description": "Pipeline configuration unrelated to the components.", + "properties": { + "create_namespace": { + "default": false, + "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", + "title": "Create Namespace", + "type": "boolean" + }, + "defaults_filename_prefix": { + "default": "defaults", + "description": "The name of the defaults file and the prefix of the defaults environment file.", + "title": "Defaults Filename Prefix", + "type": "string" + }, + "defaults_path": { + "default": ".", + "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", + "examples": [ + "defaults", + "." + ], + "format": "path", + "title": "Defaults Path", + "type": "string" + }, + "environment": { + "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", + "examples": [ + "development", + "production" + ], + "title": "Environment", + "type": "string" + }, + "helm_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmConfig" + } + ], + "default": { + "api_version": null, + "context": null, + "debug": false + }, + "description": "Global flags for Helm." + }, + "helm_diff_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmDiffConfig" + } + ], + "default": { + "ignore": [] + }, + "description": "Configure Helm Diff." + }, + "kafka_brokers": { + "description": "The comma separated Kafka brokers address.", + "examples": [ + "broker1:9092,broker2:9092,broker3:9092" + ], + "title": "Kafka Brokers", + "type": "string" + }, + "kafka_connect": { + "allOf": [ + { + "$ref": "#/$defs/KafkaConnectConfig" + } + ], + "default": { + "url": "http://localhost:8083/" + }, + "description": "Configuration for Kafka Connect." + }, + "kafka_rest": { + "allOf": [ + { + "$ref": "#/$defs/KafkaRestConfig" + } + ], + "default": { + "url": "http://localhost:8082/" + }, + "description": "Configuration for Kafka REST Proxy." + }, + "retain_clean_jobs": { + "default": false, + "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", + "title": "Retain Clean Jobs", + "type": "boolean" + }, + "schema_registry": { + "allOf": [ + { + "$ref": "#/$defs/SchemaRegistryConfig" + } + ], + "default": { + "enabled": false, + "url": "http://localhost:8081/" + }, + "description": "Configuration for Schema Registry." 
+ }, + "timeout": { + "default": 300, + "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", + "title": "Timeout", + "type": "integer" + }, + "topic_name_config": { + "allOf": [ + { + "$ref": "#/$defs/TopicNameConfig" + } + ], + "default": { + "default_error_topic_name": "${pipeline_name}-${component_name}-error", + "default_output_topic_name": "${pipeline_name}-${component_name}" + }, + "description": "Configure the topic name variables you can use in the pipeline definition." + } + }, + "required": [ + "environment", + "kafka_brokers" + ], + "title": "KpopsConfig", + "type": "object" } diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 2fe9aeeac..0882ccfa5 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -1,12 +1,12 @@ { - "definitions": { + "$defs": { "FromSection": { "additionalProperties": false, "description": "Holds multiple input topics.", "properties": { "components": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Components to read from", @@ -15,7 +15,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Input topics", @@ -31,16 +31,28 @@ "description": "Input topic.", "properties": { "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/InputTopicTypes" + "$ref": "#/$defs/InputTopicTypes" + }, + { + "type": "null" } ], + "default": null, "description": "Topic type" } }, @@ -48,23 +60,27 @@ "type": "object" }, "HelmApp": { + "additionalProperties": true, "description": "Kubernetes app managed through Helm with an associated Helm chart.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KubernetesAppConfig" + "$ref": "#/$defs/KubernetesAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -85,36 +101,41 @@ "type": "string" }, "repo_config": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + }, { - "$ref": "#/definitions/HelmRepoConfig" + "type": "null" } ], - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "default": null, + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "helm-app", - "description": "Kubernetes app managed through Helm with an associated Helm chart.", - "enum": [ - "helm-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + 
"type": "null" + } + ], + "default": null, "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -131,7 +152,7 @@ "repo_auth_flags": { "allOf": [ { - "$ref": "#/definitions/RepoAuthFlags" + "$ref": "#/$defs/RepoAuthFlags" } ], "default": { @@ -141,8 +162,7 @@ "password": null, "username": null }, - "description": "Authorisation-related flags", - "title": "Repo Auth Flags" + "description": "Authorisation-related flags" }, "repository_name": { "description": "Name of the Helm repository", @@ -172,7 +192,8 @@ "type": "string" }, "KafkaConnectorConfig": { - "additionalProperties": { + "additionalProperties": true, + "additional_properties": { "type": "string" }, "description": "Settings specific to Kafka Connectors.", @@ -189,23 +210,27 @@ "type": "object" }, "KafkaSinkConnector": { + "additionalProperties": true, "description": "Kafka sink connector model.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -228,7 +253,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -242,8 +267,7 @@ "repository_name": "bakdata-kafka-connect-resetter", "url": "https://bakdata.github.io/kafka-connect-resetter/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "resetter_values": { "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", @@ -251,28 +275,29 @@ "type": "object" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-sink-connector", - "description": "Kafka sink connector model.", - "enum": [ - "kafka-sink-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -284,23 +309,27 @@ "type": "object" }, "KafkaSourceConnector": { + "additionalProperties": true, "description": "Kafka source connector model.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -315,9 +344,17 @@ "type": "string" }, "offset_topic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running", - "title": "Offset Topic", - "type": "string" + "title": "Offset Topic" }, "prefix": { "default": "${pipeline_name}-", @@ -328,7 +365,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -342,8 +379,7 @@ "repository_name": "bakdata-kafka-connect-resetter", "url": "https://bakdata.github.io/kafka-connect-resetter/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "resetter_values": { "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", @@ -351,28 +387,29 @@ "type": "object" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-source-connector", - "description": "Kafka source connector model.", - "enum": [ - "kafka-source-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -384,6 +421,7 @@ "type": "object" }, "KubernetesAppConfig": { + "additionalProperties": true, "description": "Settings specific to Kubernetes apps.", "properties": {}, "title": "KubernetesAppConfig", @@ -399,18 +437,19 @@ "type": "string" }, "ProducerApp": { + "additionalProperties": true, "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/ProducerValues" + "$ref": "#/$defs/ProducerValues" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { + "default": null, "description": "Producer doesn't support FromSection", "title": "From", "type": "null" @@ -434,7 +473,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -448,32 +487,32 @@ "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "producer-app", - "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", - "enum": [ - "producer-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -485,6 +524,7 @@ "type": "object" }, "ProducerStreamsConfig": { + "additionalProperties": true, "description": "Kafka Streams settings specific to Producer.", "properties": { "brokers": { @@ -502,14 +542,30 @@ "type": "object" }, "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Output topic", - "title": "Outputtopic", - "type": "string" + "title": "Outputtopic" }, "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "URL of the schema registry", - "title": "Schemaregistryurl", - "type": "string" + "title": "Schemaregistryurl" } }, "required": [ @@ -519,21 +575,29 @@ "type": "object" }, "ProducerValues": { + "additionalProperties": true, "description": "Settings specific to producers.", "properties": { "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Override name with this value", - "title": "Nameoverride", - "type": "string" + "title": "Nameoverride" }, "streams": { "allOf": [ { - "$ref": "#/definitions/ProducerStreamsConfig" + "$ref": "#/$defs/ProducerStreamsConfig" } ], - "description": "Kafka Streams settings", - "title": "Streams" + "description": "Kafka Streams settings" } }, "required": [ @@ -546,16 +610,32 @@ "description": "Authorisation-related flags for `helm repo`.", "properties": { "ca_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Path to CA bundle file to verify certificates of HTTPS-enabled servers", - "format": "path", - "title": "Ca File", - "type": "string" + "title": "Ca File" }, "cert_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Path to SSL certificate file to identify HTTPS client", - "format": "path", - "title": "Cert File", - "type": "string" + "title": "Cert File" }, "insecure_skip_tls_verify": { "default": false, @@ -564,37 +644,57 @@ "type": "boolean" }, "password": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Password", - "title": "Password", - "type": "string" + "title": "Password" }, "username": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Username", - "title": "Username", - "type": "string" + "title": "Username" } }, "title": "RepoAuthFlags", "type": "object" }, "StreamsApp": { + "additionalProperties": true, "description": "StreamsApp component that configures a streams bootstrap app.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/StreamsAppConfig" + "$ref": "#/$defs/StreamsAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": 
"Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -617,7 +717,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -631,32 +731,32 @@ "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "streams-app", - "description": "StreamsApp component that configures a streams bootstrap app.", - "enum": [ - "streams-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -668,6 +768,7 @@ "type": "object" }, "StreamsAppAutoScaling": { + "additionalProperties": true, "description": "Kubernetes Event-driven Autoscaling config.", "properties": { "consumerGroup": { @@ -683,13 +784,22 @@ }, "enabled": { "default": false, + "description": "", "title": "Enabled", "type": "boolean" }, "idleReplicas": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "If this property is set, KEDA will scale the resource down to this number of replicas. 
https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", - "title": "Idle replica count", - "type": "integer" + "title": "Idle replica count" }, "lagThreshold": { "description": "Average target value to trigger scaling actions.", @@ -738,30 +848,41 @@ "type": "object" }, "StreamsAppConfig": { + "additionalProperties": true, "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/StreamsAppAutoScaling" + }, { - "$ref": "#/definitions/StreamsAppAutoScaling" + "type": "null" } ], - "description": "Kubernetes Event-driven Autoscaling config", - "title": "Autoscaling" + "default": null, + "description": "Kubernetes Event-driven Autoscaling config" }, "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Override name with this value", - "title": "Nameoverride", - "type": "string" + "title": "Nameoverride" }, "streams": { "allOf": [ { - "$ref": "#/definitions/StreamsConfig" + "$ref": "#/$defs/StreamsConfig" } ], - "description": "Streams Bootstrap streams section", - "title": "Streams" + "description": "Streams Bootstrap streams section" } }, "required": [ @@ -771,6 +892,7 @@ "type": "object" }, "StreamsConfig": { + "additionalProperties": true, "description": "Streams Bootstrap streams section.", "properties": { "brokers": { @@ -779,18 +901,23 @@ "type": "string" }, "config": { - "additionalProperties": { - "type": "string" - }, "default": {}, "description": "Configuration", "title": "Config", "type": "object" }, "errorTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Error topic", - "title": "Errortopic", - "type": "string" + "title": "Errortopic" }, "extraInputPatterns": { "additionalProperties": { @@ -823,9 +950,17 @@ "type": "object" }, "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Input pattern", - "title": "Inputpattern", - "type": "string" + "title": "Inputpattern" }, "inputTopics": { "default": [], @@ -837,14 +972,30 @@ "type": "array" }, "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Output topic", - "title": "Outputtopic", - "type": "string" + "title": "Outputtopic" }, "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "URL of the schema registry", - "title": "Schemaregistryurl", - "type": "string" + "title": "Schemaregistryurl" } }, "required": [ @@ -854,6 +1005,7 @@ "type": "object" }, "ToSection": { + "additionalProperties": false, "description": "Holds multiple output topics.", "properties": { "models": { @@ -867,7 +1019,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/TopicConfig" + "$ref": "#/$defs/TopicConfig" }, "default": {}, "description": "Output topics", @@ -899,38 +1051,82 @@ "type": "object" }, "key_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Key schema class name", - "title": "Key schema", - "type": "string" + "title": "Key schema" }, "partitions_count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Number of partitions into which the topic is divided", 
- "title": "Partitions count", - "type": "integer" + "title": "Partitions count" }, "replication_factor": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Replication factor of the topic", - "title": "Replication factor", - "type": "integer" + "title": "Replication factor" }, "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/OutputTopicTypes" + }, { - "$ref": "#/definitions/OutputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type", "title": "Topic type" }, "value_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Value schema class name", - "title": "Value schema", - "type": "string" + "title": "Value schema" } }, "title": "TopicConfig", @@ -940,32 +1136,32 @@ "items": { "discriminator": { "mapping": { - "helm-app": "#/definitions/HelmApp", - "kafka-sink-connector": "#/definitions/KafkaSinkConnector", - "kafka-source-connector": "#/definitions/KafkaSourceConnector", - "producer-app": "#/definitions/ProducerApp", - "streams-app": "#/definitions/StreamsApp" + "helm-app": "#/$defs/HelmApp", + "kafka-sink-connector": "#/$defs/KafkaSinkConnector", + "kafka-source-connector": "#/$defs/KafkaSourceConnector", + "producer-app": "#/$defs/ProducerApp", + "streams-app": "#/$defs/StreamsApp" }, "propertyName": "type" }, "oneOf": [ { - "$ref": "#/definitions/HelmApp" + "$ref": "#/$defs/HelmApp" }, { - "$ref": "#/definitions/KafkaSinkConnector" + "$ref": "#/$defs/KafkaSinkConnector" }, { - "$ref": "#/definitions/KafkaSourceConnector" + "$ref": "#/$defs/KafkaSourceConnector" }, { - "$ref": "#/definitions/ProducerApp" + "$ref": "#/$defs/ProducerApp" }, { - "$ref": "#/definitions/StreamsApp" + "$ref": "#/$defs/StreamsApp" } ] }, - "title": "KPOps pipeline schema", + "title": "PipelineSchema", "type": "array" } diff --git a/docs/docs/user/core-concepts/variables/environment_variables.md b/docs/docs/user/core-concepts/variables/environment_variables.md index 2a57aabea..35ca235d7 100644 --- a/docs/docs/user/core-concepts/variables/environment_variables.md +++ b/docs/docs/user/core-concepts/variables/environment_variables.md @@ -6,10 +6,7 @@ Environment variables can be set by using the [export](https://www.unix.com/man- !!! tip "dotenv files" - Support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20), - but not implemented in KPOps yet. One of the possible ways to still - use one and export the contents manually is with the following command: `#!sh export $(xargs < .env)`. - This would work in `bash` suppose there are no spaces inside the values. + KPOps currently supports `.env` files only for variables related to the [config](../config.md). Full support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20). One of the possible ways to use one and export the contents manually is with the following command: `#!sh export $(xargs < .env)`. This would work in `bash` suppose there are no spaces inside the values. 
diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index 100f05c4a..cb9b2ff5b 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -40,6 +40,7 @@ $ kpops clean [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -66,6 +67,7 @@ $ kpops deploy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -92,6 +94,7 @@ $ kpops destroy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -118,6 +121,7 @@ $ kpops generate [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--template / --no-template`: Run Helm template [default: no-template] @@ -144,6 +148,7 @@ $ kpops reset [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. 
[env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index c288a66b9..c6ef09c16 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -82,39 +82,40 @@ nav: - Home: KPOps Documentation: index.md - User Guide: - - What is KPOps: user/what-is-kpops.md - - Changelog: user/changelog.md - - Getting Started: - - Setup: user/getting-started/setup.md - - Quick start: user/getting-started/quick-start.md - - Teardown: user/getting-started/teardown.md - - Examples: - - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md - - Core Concepts: - - Components: - - Overview: user/core-concepts/components/overview.md - - KubernetesApp: user/core-concepts/components/kubernetes-app.md - - KafkaApp: user/core-concepts/components/kafka-app.md - - StreamsApp: user/core-concepts/components/streams-app.md - - ProducerApp: user/core-concepts/components/producer-app.md - - KafkaConnector: user/core-concepts/components/kafka-connector.md - - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md - - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md - - Config: user/core-concepts/config.md - - Defaults: user/core-concepts/defaults.md - - Variables: - - Environment variables: user/core-concepts/variables/environment_variables.md - - Substitution: user/core-concepts/variables/substitution.md - - References: - - Migration guide: - - Migrate from v1 to v2: user/migration-guide/v1-v2.md - - Migrate from v2 to v3: user/migration-guide/v2-v3.md - - CLI usage: user/references/cli-commands.md - - Editor integration: user/references/editor-integration.md - - CI integration: - - GitHub Actions: user/references/ci-integration/github-actions.md + - What is KPOps: user/what-is-kpops.md + - Changelog: user/changelog.md + - Getting Started: + - Setup: user/getting-started/setup.md + - Quick start: user/getting-started/quick-start.md + - Teardown: user/getting-started/teardown.md + - Examples: + - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md + - Core Concepts: + - Components: + - Overview: user/core-concepts/components/overview.md + - KubernetesApp: user/core-concepts/components/kubernetes-app.md + - HelmApp: user/core-concepts/components/helm-app.md + - KafkaApp: user/core-concepts/components/kafka-app.md + - StreamsApp: user/core-concepts/components/streams-app.md + - ProducerApp: user/core-concepts/components/producer-app.md + - KafkaConnector: user/core-concepts/components/kafka-connector.md + - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md + - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md + - Config: user/core-concepts/config.md + - Defaults: user/core-concepts/defaults.md + - Variables: + - Environment variables: user/core-concepts/variables/environment_variables.md + - Substitution: user/core-concepts/variables/substitution.md + - References: + - Migration guide: + - Migrate from v1 to v2: user/migration-guide/v1-v2.md + - Migrate from v2 to v3: user/migration-guide/v2-v3.md + - CLI usage: user/references/cli-commands.md + - Editor integration: user/references/editor-integration.md + - CI integration: + - GitHub Actions: user/references/ci-integration/github-actions.md - Developer Guide: - 
- Getting Started: developer/getting-started.md - - Contributing: developer/contributing.md - - Code base: - - Auto generation: developer/auto-generation.md + - Getting Started: developer/getting-started.md + - Contributing: developer/contributing.md + - Code base: + - Auto generation: developer/auto-generation.md diff --git a/hooks/gen_docs/gen_docs_cli_usage.py b/hooks/gen_docs/gen_docs_cli_usage.py index 25f7ecd8c..84476e69f 100644 --- a/hooks/gen_docs/gen_docs_cli_usage.py +++ b/hooks/gen_docs/gen_docs_cli_usage.py @@ -7,7 +7,7 @@ PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py" PATH_CLI_COMMANDS_DOC = ROOT / "docs/docs/user/references/cli-commands.md" -# TODO(@sujuka99): try to use typer_cli.main.docs here instead +# TODO(Ivan Yordanov): try to use typer_cli.main.docs here instead # https://github.com/bakdata/kpops/issues/297 if __name__ == "__main__": diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 6fb78f767..203294c05 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -40,11 +40,12 @@ ).type for component in KPOPS_COMPONENTS } + KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name - for field_name, model in component.__fields__.items() - if not model.field_info.exclude + for field_name, field_info in component.model_fields.items() + if not field_info.exclude ] for component in KPOPS_COMPONENTS } diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 3a2bd5587..30a7e15bf 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -2,17 +2,20 @@ import csv import shutil -from collections.abc import Callable, Iterator +from collections.abc import Callable +from contextlib import suppress from dataclasses import dataclass from pathlib import Path from textwrap import fill from typing import Any -from pydantic import BaseSettings -from pydantic.fields import ModelField +from pydantic import BaseModel +from pydantic_core import PydanticUndefined from pytablewriter import MarkdownTableWriter from typer.models import ArgumentInfo, OptionInfo +from kpops.utils.dict_ops import generate_substitution + try: from typing import Self except ImportError: @@ -127,7 +130,7 @@ def csv_append_env_var( width=68, ) required = False - if default_value == Ellipsis: + if default_value in [Ellipsis, PydanticUndefined]: required = True default_value = "" elif default_value is None: @@ -254,35 +257,60 @@ def fill_csv_pipeline_config(target: Path) -> None: :param target: The path to the `.csv` file. Note that it must already contain the column names """ - for field in collect_fields(KpopsConfig): - field_info = KpopsConfig.Config.get_field_info(field.name) + for (field_name, field_value), env_var_name in zip( + generate_substitution(collect_fields(KpopsConfig), separator=".").items(), + generate_substitution(collect_fields(KpopsConfig), separator="__").keys(), + strict=True, + ): + with suppress(KeyError): # In case the prefix is ever removed from KpopsConfig + env_var_name = KpopsConfig.model_config["env_prefix"] + env_var_name field_description: str = ( - field.field_info.description + field_value.description or "No description available, please refer to the pipeline config documentation." 
) - field_default = field.field_info.default - if config_env_var := field_info.get( - "env", - ) or field.field_info.extra.get("env"): - csv_append_env_var( - target, - config_env_var, - field_default, - field_description, - field.name, - ) + field_default = field_value.default + csv_append_env_var( + target, + env_var_name.upper(), + field_default, + field_description, + field_name, + ) -def collect_fields(settings: type[BaseSettings]) -> Iterator[ModelField]: - """Collect and yield all fields in a settings class. +def collect_fields(model: type[BaseModel]) -> dict[str, Any]: + """Collect and return a ``dict`` of all fields in a settings class. :param model: settings class - :yield: all settings including nested ones in settings classes + :return: ``dict`` of all fields in a settings class """ - for field in settings.__fields__.values(): - if issubclass(field_type := field.type_, BaseSettings): - yield from collect_fields(field_type) - yield field + + def patched_issubclass_of_basemodel(cls): + """Pydantic breaks issubclass. + + ``issubclass(set[str], set) # True`` + ``issubclass(BaseSettings, BaseModel) # True`` + ``issubclass(set[str], BaseModel) # raises exception`` + + :param cls: class to check + :return: Whether cls is subclass of ``BaseModel`` + """ + try: + return issubclass(cls, BaseModel) + except TypeError as e: + if str(e) == "issubclass() arg 1 must be a class": + return False + raise + + seen_fields = {} + for field_name, field_value in model.model_fields.items(): + if field_value.annotation and patched_issubclass_of_basemodel( + field_value.annotation + ): + seen_fields[field_name] = collect_fields(field_value.annotation) + else: + seen_fields[field_name] = field_value + return seen_fields def fill_csv_cli(target: Path) -> None: diff --git a/kpops/cli/main.py b/kpops/cli/main.py index e4331113e..0aa0d5f67 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -21,6 +21,7 @@ from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema +from kpops.utils.pydantic import YamlConfigSettingsSource if TYPE_CHECKING: from collections.abc import Iterator @@ -31,6 +32,18 @@ app = dtyper.Typer(pretty_exceptions_enable=False) +DOTENV_PATH_OPTION: Optional[list[Path]] = typer.Option( + default=None, + exists=True, + dir_okay=False, + file_okay=True, + envvar=f"{ENV_PREFIX}DOTENV_PATH", + help=( + "Path to dotenv file. Multiple files can be provided. " + "The files will be loaded in order, with each file overriding the previous one." 
+ ), +) + BASE_DIR_PATH_OPTION: Path = typer.Option( default=Path(), exists=True, @@ -194,14 +207,16 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_kpops_config( - config: Path, defaults: Optional[Path], verbose: bool + config: Path, defaults: Optional[Path], verbose: bool, dotenv: Optional[list[Path]] ) -> KpopsConfig: setup_logging_level(verbose) - KpopsConfig.Config.config_path = config + YamlConfigSettingsSource.path_to_config = config + kpops_config = KpopsConfig( + _env_file=dotenv # pyright: ignore[reportGeneralTypeIssues] + ) if defaults: - kpops_config = KpopsConfig(defaults_path=defaults) + kpops_config.defaults_path = defaults else: - kpops_config = KpopsConfig() kpops_config.defaults_path = config.parent / kpops_config.defaults_path return kpops_config @@ -243,6 +258,7 @@ def generate( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, template: bool = typer.Option(False, help="Run Helm template"), @@ -250,7 +266,7 @@ def generate( filter_type: FilterType = FILTER_TYPE, verbose: bool = VERBOSE_OPTION, ) -> Pipeline: - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -276,6 +292,7 @@ def deploy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -283,7 +300,7 @@ def deploy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -299,6 +316,7 @@ def destroy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -306,7 +324,7 @@ def destroy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -321,6 +339,7 @@ def reset( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -328,7 +347,7 @@ def reset( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, 
kpops_config ) @@ -344,6 +363,7 @@ def clean( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -351,7 +371,7 @@ def clean( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index af21abb3f..0a155bb0d 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -3,23 +3,23 @@ from pathlib import Path import yaml -from pydantic import BaseConfig, BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.exception import ParseError from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class HelmDiffConfig(BaseModel): ignore: set[str] = Field( default_factory=set, description="Set of keys that should not be checked.", - example="- name\n- imageTag", + examples=["- name\n- imageTag"], ) -class RepoAuthFlags(BaseModel): +class RepoAuthFlags(DescConfigModel): """Authorisation-related flags for `helm repo`. :param username: Username, defaults to None @@ -46,9 +46,6 @@ class RepoAuthFlags(BaseModel): default=False, description=describe_attr("insecure_skip_tls_verify", __doc__) ) - class Config(DescConfig): - pass - def to_command(self) -> list[str]: command: list[str] = [] if self.username: @@ -64,7 +61,7 @@ def to_command(self) -> list[str]: return command -class HelmRepoConfig(BaseModel): +class HelmRepoConfig(DescConfigModel): """Helm repository configuration. :param repository_name: Name of the Helm repository @@ -80,11 +77,8 @@ class HelmRepoConfig(BaseModel): default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__) ) - class Config(DescConfig): - pass - -class HelmConfig(BaseModel): +class HelmConfig(DescConfigModel): """Global Helm configuration. 
    :param context: Name of kubeconfig context (`--kube-context`)
@@ -95,7 +89,7 @@ class HelmConfig(BaseModel):
     context: str | None = Field(
         default=None,
         description=describe_attr("context", __doc__),
-        example="dev-storage",
+        examples=["dev-storage"],
     )
     debug: bool = Field(
         default=False,
@@ -107,9 +101,6 @@ class HelmConfig(BaseModel):
         description=describe_attr("api_version", __doc__),
     )
 
-    class Config(DescConfig):
-        pass
-
 
 class HelmFlags(RepoAuthFlags):
     set_file: dict[str, Path] = Field(default_factory=dict)
@@ -120,8 +111,9 @@ class HelmFlags(RepoAuthFlags):
     wait: bool = True
     wait_for_jobs: bool = False
 
-    class Config(BaseConfig):
-        extra = Extra.allow
+    model_config = ConfigDict(
+        extra="allow",
+    )
 
     @override
     def to_command(self) -> list[str]:
diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py
index 07bbf38e6..06f21eff2 100644
--- a/kpops/component_handlers/kafka_connect/connect_wrapper.py
+++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py
@@ -46,10 +46,10 @@ def create_connector(
         :param connector_config: The config of the connector
         :return: The current connector info if successful.
         """
-        config_json = connector_config.dict()
+        config_json = connector_config.model_dump()
         connect_data = {"name": connector_config.name, "config": config_json}
         response = httpx.post(
-            url=f"{self.url}/connectors", headers=HEADERS, json=connect_data
+            url=f"{self.url}connectors", headers=HEADERS, json=connect_data
         )
         if response.status_code == httpx.codes.CREATED:
             log.info(f"Connector {connector_config.name} created.")
@@ -66,14 +66,12 @@ def create_connector(
     def get_connector(self, connector_name: str) -> KafkaConnectResponse:
         """Get information about the connector.
 
-        API Reference:
-        https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name)
-
+        API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name)
         :param connector_name: Name of the created connector
         :return: Information about the connector.
         """
         response = httpx.get(
-            url=f"{self.url}/connectors/{connector_name}", headers=HEADERS
+            url=f"{self.url}connectors/{connector_name}", headers=HEADERS
         )
         if response.status_code == httpx.codes.OK:
             log.info(f"Connector {connector_name} exists.")
@@ -102,9 +100,9 @@ def update_connector_config(
         :return: Information about the connector after the change has been made.
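# Why the "/" separators were dropped from the f-strings above: Pydantic v2's
# AnyHttpUrl normalizes a URL with an empty path by appending a trailing
# slash, so `self.url` already ends with "/". A minimal sketch (the URL value
# is invented):
from pydantic import AnyHttpUrl, TypeAdapter

url = TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8083")
assert str(url) == "http://localhost:8083/"
assert f"{url!s}connectors" == "http://localhost:8083/connectors"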
""" connector_name = connector_config.name - config_json = connector_config.dict() + config_json = connector_config.model_dump() response = httpx.put( - url=f"{self.url}/connectors/{connector_name}/config", + url=f"{self.url}connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) @@ -135,9 +133,9 @@ def validate_connector_config( :return: List of all found errors """ response = httpx.put( - url=f"{self.url}/connector-plugins/{connector_config.class_name}/config/validate", + url=f"{self.url}connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, - json=connector_config.dict(), + json=connector_config.model_dump(), ) if response.status_code == httpx.codes.OK: @@ -165,7 +163,7 @@ def delete_connector(self, connector_name: str) -> None: :raises ConnectorNotFoundException: Connector not found """ response = httpx.delete( - url=f"{self.url}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 744662796..fb644dd7a 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -97,14 +97,14 @@ def __dry_run_connector_creation( connector = self._connect_wrapper.get_connector(connector_name) log.info(f"Connector Creation: connector {connector_name} already exists.") - if diff := render_diff(connector.config, connector_config.dict()): + if diff := render_diff(connector.config, connector_config.model_dump()): log.info(f"Updating config:\n{diff}") - log.debug(connector_config.dict()) + log.debug(connector_config.model_dump()) log.debug(f"PUT /connectors/{connector_name}/config HTTP/1.1") log.debug(f"HOST: {self._connect_wrapper.url}") except ConnectorNotFoundException: - diff = render_diff({}, connector_config.dict()) + diff = render_diff({}, connector_config.model_dump()) log.info( f"Connector Creation: connector {connector_name} does not exist. 
Creating connector with config:\n{diff}"
         )
diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py
index e83e33e5d..a7ec45af9 100644
--- a/kpops/component_handlers/kafka_connect/model.py
+++ b/kpops/component_handlers/kafka_connect/model.py
@@ -1,10 +1,23 @@
 from enum import Enum
 from typing import Any, Literal
 
-from pydantic import BaseConfig, BaseModel, Extra, Field, validator
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    SerializationInfo,
+    field_validator,
+    model_serializer,
+)
+from pydantic.json_schema import SkipJsonSchema
 from typing_extensions import override
 
-from kpops.utils.pydantic import CamelCaseConfig, DescConfig, to_dot
+from kpops.utils.pydantic import (
+    CamelCaseConfigModel,
+    DescConfigModel,
+    by_alias,
+    exclude_by_value,
+    to_dot,
+)
 
 
 class KafkaConnectorType(str, Enum):
@@ -12,23 +25,27 @@ class KafkaConnectorType(str, Enum):
     SOURCE = "source"
 
 
-class KafkaConnectorConfig(BaseModel):
+class KafkaConnectorConfig(DescConfigModel):
     """Settings specific to Kafka Connectors."""
 
     connector_class: str
-    name: str = Field(default=..., hidden_from_schema=True)
+    name: SkipJsonSchema[str]
 
-    class Config(DescConfig):
-        extra = Extra.allow
-        alias_generator = to_dot
-
-        @override
-        @classmethod
-        def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None:
-            super().schema_extra(schema, model)
-            schema["additionalProperties"] = {"type": "string"}
-
-    @validator("connector_class")
+    @override
+    @staticmethod
+    def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None:
+        super(KafkaConnectorConfig, KafkaConnectorConfig).json_schema_extra(
+            schema, model
+        )
+        schema["additionalProperties"] = {"type": "string"}
+
+    model_config = ConfigDict(
+        extra="allow",
+        alias_generator=to_dot,
+        json_schema_extra=json_schema_extra,
+    )
+
+    @field_validator("connector_class")
     def connector_class_must_contain_dot(cls, connector_class: str) -> str:
         if "." not in connector_class:
             msg = f"Invalid connector class {connector_class}"
@@ -39,9 +56,11 @@ def connector_class_must_contain_dot(cls, connector_class: str) -> str:
     def class_name(self) -> str:
         return self.connector_class.split(".")[-1]
 
-    @override
-    def dict(self, **_) -> dict[str, Any]:
-        return super().dict(by_alias=True, exclude_none=True)
+    # TODO(Ivan Yordanov): Currently hacky and potentially unsafe.
Find cleaner solution + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + result = exclude_by_value(handler(self), None) + return {by_alias(self, name): value for name, value in result.items()} class ConnectorTask(BaseModel): @@ -53,10 +72,9 @@ class KafkaConnectResponse(BaseModel): name: str config: dict[str, str] tasks: list[ConnectorTask] - type: str | None + type: str | None = None - class Config(BaseConfig): - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class KafkaConnectConfigError(BaseModel): @@ -74,24 +92,21 @@ class KafkaConnectConfigErrorResponse(BaseModel): configs: list[KafkaConnectConfigDescription] -class KafkaConnectResetterConfig(BaseModel): +class KafkaConnectResetterConfig(CamelCaseConfigModel): brokers: str connector: str delete_consumer_group: bool | None = None offset_topic: str | None = None - class Config(CamelCaseConfig): - pass - -class KafkaConnectResetterValues(BaseModel): +class KafkaConnectResetterValues(CamelCaseConfigModel): connector_type: Literal["source", "sink"] config: KafkaConnectResetterConfig name_override: str - class Config(CamelCaseConfig): - pass - + # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer` + # BEWARE! All default values are enforced, hard to replicate without + # access to ``model_dump`` @override - def dict(self, **_) -> dict[str, Any]: - return super().dict(by_alias=True, exclude_none=True) + def model_dump(self, **_) -> dict[str, Any]: + return super().model_dump(by_alias=True, exclude_none=True) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index e4eba9931..fae2da0e7 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -30,7 +30,7 @@ def __init__( components_module: str | None, ) -> None: self.schema_registry_client = SchemaRegistryClient( - kpops_config.schema_registry.url + str(kpops_config.schema_registry.url) ) self.components_module = components_module diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 8f6b198d3..9a08e5512 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -131,7 +131,7 @@ def __dry_run_topic_creation( log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1") log.debug(f"Host: {self.proxy_wrapper.url}") log.debug(HEADERS) - log.debug(topic_spec.dict()) + log.debug(topic_spec.model_dump()) @staticmethod def __check_partition_count( @@ -203,7 +203,7 @@ def __prepare_body(cls, topic_name: str, topic_config: TopicConfig) -> TopicSpec :param topic_config: The topic config :return: Topic specification """ - topic_spec_json: dict = topic_config.dict( + topic_spec_json: dict = topic_config.model_dump( include={ "partitions_count": True, "replication_factor": True, diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index b58445f81..5c0cf024d 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -1,13 +1,14 @@ from enum import Enum +from typing import Any -from pydantic import BaseConfig, BaseModel, Extra +from pydantic import BaseModel, ConfigDict class TopicSpec(BaseModel): topic_name: str - partitions_count: int | None - replication_factor: int | None - configs: list[dict[str, str]] | None + 
partitions_count: int | None = None + replication_factor: int | None = None + configs: list[dict[str, Any]] | None = None class TopicResponse(BaseModel): @@ -43,8 +44,9 @@ class KafkaTopicConfigSynonyms(BaseModel): value: str source: KafkaTopicConfigSource - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaTopicConfig(BaseModel): @@ -53,15 +55,17 @@ class KafkaTopicConfig(BaseModel): value: str name: str - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class TopicConfigResponse(BaseModel): data: list[KafkaTopicConfig] - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaBrokerConfigSource(str, Enum): @@ -75,8 +79,9 @@ class KafkaBrokerConfigSynonyms(BaseModel): value: str | None source: KafkaBrokerConfigSource - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaBrokerConfig(BaseModel): @@ -85,12 +90,14 @@ class KafkaBrokerConfig(BaseModel): value: str | None name: str - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class BrokerConfigResponse(BaseModel): data: list[KafkaBrokerConfig] - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index a80205506..aa1db6283 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -46,7 +46,7 @@ def cluster_id(self) -> str: :raises KafkaRestProxyError: Kafka REST proxy error :return: The Kafka cluster ID. """ - response = httpx.get(url=f"{self._config.url}/v3/clusters") + response = httpx.get(url=f"{self._config.url!s}v3/clusters") if response.status_code == httpx.codes.OK: cluster_information = response.json() return cluster_information["data"][0]["cluster_id"] @@ -67,9 +67,9 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics", headers=HEADERS, - json=topic_spec.dict(exclude_none=True), + json=topic_spec.model_dump(exclude_none=True), ) if response.status_code == httpx.codes.CREATED: log.info(f"Topic {topic_spec.topic_name} created.") @@ -88,7 +88,7 @@ def delete_topic(self, topic_name: str) -> None: :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.delete( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.NO_CONTENT: @@ -109,7 +109,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: :return: Response of the get topic API. """ response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.OK: @@ -139,7 +139,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: :return: The topic configuration. 
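# Why `= None` was added to the optional TopicSpec fields above: unlike
# Pydantic v1, v2 treats `int | None` without a default as a *required*
# field. A minimal sketch (the model name is invented):
from pydantic import BaseModel, ValidationError

class OptionalDemo(BaseModel):
    partitions_count: int | None

try:
    OptionalDemo()
except ValidationError:
    pass  # v2 raises: optional in type, but still required without a default
OptionalDemo(partitions_count=None)  # explicit None is fine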
""" response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", headers=HEADERS, ) @@ -169,7 +169,7 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": json_body}, ) @@ -189,7 +189,7 @@ def get_broker_config(self) -> BrokerConfigResponse: :return: The broker configuration. """ response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/brokers/-/configs", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/brokers/-/configs", headers=HEADERS, ) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 73bf54c7c..293d17dcc 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -8,7 +8,8 @@ from typing import TypeVar import typer -from pydantic import BaseModel, Field +from pydantic import AliasChoices, ConfigDict, Field +from pydantic.json_schema import SkipJsonSchema from kpops.component_handlers import ComponentHandlers from kpops.config import KpopsConfig @@ -16,7 +17,7 @@ from kpops.utils.dict_ops import update_nested from kpops.utils.docstring import describe_attr from kpops.utils.environment import ENV -from kpops.utils.pydantic import DescConfig, to_dash +from kpops.utils.pydantic import DescConfigModel, to_dash from kpops.utils.yaml_loading import load_yaml_file try: @@ -27,7 +28,7 @@ log = logging.getLogger("BaseDefaultsComponent") -class BaseDefaultsComponent(BaseModel, ABC): +class BaseDefaultsComponent(DescConfigModel, ABC): """Base for all components, handles defaults. 
    Component defaults are usually provided in a yaml file called
@@ -40,36 +41,33 @@ class BaseDefaultsComponent(BaseModel, ABC):
     :param validate: Whether to run custom validation on the component, defaults to True
     """
 
-    enrich: bool = Field(
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        ignored_types=(cached_property, cached_classproperty),
+    )
+
+    enrich: SkipJsonSchema[bool] = Field(
         default=False,
         description=describe_attr("enrich", __doc__),
         exclude=True,
-        hidden_from_schema=True,
     )
-    config: KpopsConfig = Field(
+    config: SkipJsonSchema[KpopsConfig] = Field(
         default=...,
         description=describe_attr("config", __doc__),
         exclude=True,
-        hidden_from_schema=True,
     )
-    handlers: ComponentHandlers = Field(
+    handlers: SkipJsonSchema[ComponentHandlers] = Field(
         default=...,
         description=describe_attr("handlers", __doc__),
         exclude=True,
-        hidden_from_schema=True,
     )
-    validate_: bool = Field(
-        alias="validate",
+    validate_: SkipJsonSchema[bool] = Field(
+        validation_alias=AliasChoices("validate", "validate_"),
         default=True,
         description=describe_attr("validate", __doc__),
         exclude=True,
-        hidden_from_schema=True,
     )
 
-    class Config(DescConfig):
-        arbitrary_types_allowed = True
-        keep_untouched = (cached_property, cached_classproperty)
-
     def __init__(self, **kwargs) -> None:
         if kwargs.get("enrich", True):
             kwargs = self.extend_with_defaults(**kwargs)
diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py
index f98abd648..5d70bacfd 100644
--- a/kpops/components/base_components/helm_app.py
+++ b/kpops/components/base_components/helm_app.py
@@ -4,7 +4,7 @@
 from functools import cached_property
 from typing import Any
 
-from pydantic import Field
+from pydantic import Field, SerializationInfo, model_serializer
 from typing_extensions import override
 
 from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler
@@ -19,6 +19,7 @@
 from kpops.components.base_components.kubernetes_app import KubernetesApp
 from kpops.utils.colorify import magentaify
 from kpops.utils.docstring import describe_attr
+from kpops.utils.pydantic import exclude_by_name
 
 log = logging.getLogger("HelmApp")
 
@@ -79,7 +80,9 @@ def helm_chart(self) -> str:
     @property
     def helm_flags(self) -> HelmFlags:
         """Return shared flags for Helm commands."""
-        auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {}
+        auth_flags = (
+            self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {}
+        )
         return HelmFlags(
             **auth_flags,
             version=self.version,
@@ -90,7 +93,7 @@ def helm_flags(self) -> HelmFlags:
     def template_flags(self) -> HelmTemplateFlags:
         """Return flags for Helm template command."""
         return HelmTemplateFlags(
-            **self.helm_flags.dict(),
+            **self.helm_flags.model_dump(),
             api_version=self.config.helm_config.api_version,
         )
 
@@ -108,7 +111,7 @@ def template(self) -> None:
     @property
     def deploy_flags(self) -> HelmUpgradeInstallFlags:
         """Return flags for Helm upgrade install command."""
-        return HelmUpgradeInstallFlags(**self.helm_flags.dict())
+        return HelmUpgradeInstallFlags(**self.helm_flags.model_dump())
 
     @override
     def deploy(self, dry_run: bool) -> None:
@@ -139,7 +142,9 @@ def to_helm_values(self) -> dict:
 
         :returns: The values to be used by Helm
         """
-        return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True)
+        return self.app.model_dump(
+            by_alias=True, exclude_none=True, exclude_defaults=True
+        )
 
     def print_helm_diff(self, stdout: str) -> None:
         """Print the diff of the last and current release of this component.
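# The BaseDefaultsComponent hunk above swaps v1's `hidden_from_schema` and
# `alias` for v2's `SkipJsonSchema` and `AliasChoices`. A self-contained
# sketch of both features (the `Demo` model is invented):
from pydantic import AliasChoices, BaseModel, Field
from pydantic.json_schema import SkipJsonSchema

class Demo(BaseModel):
    validate_: SkipJsonSchema[bool] = Field(
        default=True, validation_alias=AliasChoices("validate", "validate_")
    )

assert Demo(validate=False).validate_ is False    # alias accepted
assert Demo(validate_=False).validate_ is False   # field name accepted too
assert Demo.model_json_schema().get("properties", {}) == {}  # hidden from schema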
@@ -156,11 +161,8 @@ def print_helm_diff(self, stdout: str) -> None: new_release = Helm.load_manifest(stdout) self.helm_diff.log_helm_diff(log, current_release, new_release) - @override - def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]: - # HACK: workaround for Pydantic to exclude cached properties during model export - if exclude is None: - exclude = set() - exclude.add("helm") - exclude.add("helm_diff") - return super().dict(exclude=exclude, **kwargs) + # HACK: workaround for Pydantic to exclude cached properties during model export + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + return exclude_by_name(handler(self), "helm", "helm_diff") diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index cf8e5f4ef..b62e54bab 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -3,7 +3,7 @@ import logging from abc import ABC -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( @@ -14,12 +14,12 @@ from kpops.components.base_components.helm_app import HelmApp from kpops.components.base_components.kubernetes_app import KubernetesAppConfig from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel log = logging.getLogger("KafkaApp") -class KafkaStreamsConfig(BaseModel): +class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): """Kafka Streams config. :param brokers: Brokers @@ -31,8 +31,9 @@ class KafkaStreamsConfig(BaseModel): default=None, description=describe_attr("schema_registry_url", __doc__) ) - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaAppConfig(KubernetesAppConfig): diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 8f0163025..7af2c5ae4 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -3,9 +3,9 @@ import logging from abc import ABC from functools import cached_property -from typing import Any, NoReturn +from typing import NoReturn -from pydantic import Field, validator +from pydantic import Field, PrivateAttr, ValidationInfo, field_validator from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -46,7 +46,6 @@ class KafkaConnector(PipelineComponent, ABC): :param version: Helm chart version, defaults to "1.0.4" :param resetter_values: Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc., defaults to dict - :param _connector_type: Defines the type of the connector (Source or Sink) """ namespace: str = Field( @@ -71,24 +70,24 @@ class KafkaConnector(PipelineComponent, ABC): default_factory=dict, description=describe_attr("resetter_values", __doc__), ) + _connector_type: KafkaConnectorType = PrivateAttr() - _connector_type: KafkaConnectorType = Field(default=..., hidden_from_schema=True) - - @validator("app", pre=True) + @field_validator("app", mode="before") + @classmethod def connector_config_should_have_component_name( cls, app: KafkaConnectorConfig | dict[str, str], - values: dict[str, Any], - ) -> dict[str, str]: + info: ValidationInfo, + ) -> KafkaConnectorConfig: if isinstance(app, KafkaConnectorConfig): - app = app.dict() - component_name = values["prefix"] + values["name"] + app = app.model_dump() + component_name: str = info.data["prefix"] + info.data["name"] connector_name: str | None = app.get("name") if connector_name is not None and connector_name != component_name: msg = f"Connector name '{connector_name}' should be the same as component name '{component_name}'" raise ValueError(msg) app["name"] = component_name - return app + return KafkaConnectorConfig(**app) @cached_property def helm(self) -> Helm: @@ -121,7 +120,7 @@ def dry_run_handler(self) -> DryRunHandler: def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands.""" return HelmFlags( - **self.repo_config.repo_auth_flags.dict(), + **self.repo_config.repo_auth_flags.model_dump(), version=self.version, create_namespace=self.config.create_namespace, ) @@ -130,7 +129,7 @@ def helm_flags(self) -> HelmFlags: def template_flags(self) -> HelmTemplateFlags: """Return flags for Helm template command.""" return HelmTemplateFlags( - **self.helm_flags.dict(), + **self.helm_flags.model_dump(), api_version=self.config.helm_config.api_version, ) @@ -246,7 +245,7 @@ def _get_kafka_connect_resetter_values( ), connector_type=self._connector_type.value, name_override=self.full_name, - ).dict(), + ).model_dump(), **self.resetter_values, } @@ -276,7 +275,7 @@ class KafkaSourceConnector(KafkaConnector): description=describe_attr("offset_topic", __doc__), ) - _connector_type = KafkaConnectorType.SOURCE + _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SOURCE) @override def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: @@ -321,7 +320,7 @@ def __run_kafka_connect_resetter(self, dry_run: bool) -> None: class KafkaSinkConnector(KafkaConnector): """Kafka sink connector model.""" - _connector_type = KafkaConnectorType.SINK + _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SINK) @override def add_input_topics(self, topics: list[str]) -> None: diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 4b4e24c1a..cae474cee 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -4,12 +4,12 @@ import re from abc import ABC -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, Field from typing_extensions import override from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel log = logging.getLogger("KubernetesApp") @@ -18,11 +18,12 @@ ) -class 
KubernetesAppConfig(BaseModel): +class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): """Settings specific to Kubernetes apps.""" - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KubernetesApp(PipelineComponent, ABC): diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 153133639..5f1dae193 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -1,11 +1,11 @@ from enum import Enum from typing import Any, NewType -from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import ConfigDict, Field, model_validator from kpops.components.base_components.models import TopicName from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class InputTopicTypes(str, Enum): @@ -18,7 +18,7 @@ class InputTopicTypes(str, Enum): PATTERN = "pattern" -class FromTopic(BaseModel): +class FromTopic(DescConfigModel): """Input topic. :param type: Topic type, defaults to None @@ -31,23 +31,24 @@ class FromTopic(BaseModel): ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) - class Config(DescConfig): - extra = Extra.forbid - use_enum_values = True + model_config = ConfigDict( + extra="forbid", + use_enum_values=True, + ) - @root_validator - def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: + @model_validator(mode="after") + def extra_topic_role(self) -> Any: """Ensure that cls.role is used correctly, assign type if needed.""" - if values["type"] == InputTopicTypes.INPUT and values["role"]: + if self.type == InputTopicTypes.INPUT and self.role: msg = "Define role only if `type` is `pattern` or `None`" raise ValueError(msg) - return values + return self ComponentName = NewType("ComponentName", str) -class FromSection(BaseModel): +class FromSection(DescConfigModel): """Holds multiple input topics. :param topics: Input topics @@ -63,5 +64,6 @@ class FromSection(BaseModel): description=describe_attr("components", __doc__), ) - class Config(DescConfig): - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 03f1d7141..56da461c8 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -1,11 +1,11 @@ from enum import Enum from typing import Any -from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import ConfigDict, Field, model_validator from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class OutputTopicTypes(str, Enum): @@ -18,7 +18,7 @@ class OutputTopicTypes(str, Enum): ERROR = "error" -class TopicConfig(BaseModel): +class TopicConfig(DescConfigModel): """Configure an output topic. 
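# The `@root_validator` -> `@model_validator(mode="after")` rewrite in
# FromTopic above changes the validator from receiving a `values` dict to
# receiving the constructed instance; TopicConfig below follows the same
# pattern. A minimal sketch (model name is illustrative):
from pydantic import BaseModel, model_validator

class TopicDemo(BaseModel):
    type: str | None = None
    role: str | None = None

    @model_validator(mode="after")
    def check_role(self):
        if self.type and self.role:
            raise ValueError("Define `role` only if `type` is undefined")
        return self

TopicDemo(role="some-role")  # validates: role without type is allowed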
:param type: Topic type @@ -58,21 +58,22 @@ class TopicConfig(BaseModel): ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) - class Config(DescConfig): - extra = Extra.forbid - allow_population_by_field_name = True - use_enum_values = True + model_config = ConfigDict( + extra="forbid", + use_enum_values=True, + populate_by_name=True, + ) - @root_validator - def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: + @model_validator(mode="after") + def extra_topic_role(self) -> Any: """Ensure that cls.role is used correctly, assign type if needed.""" - if values["type"] and values["role"]: + if self.type and self.role: msg = "Define `role` only if `type` is undefined" raise ValueError(msg) - return values + return self -class ToSection(BaseModel): +class ToSection(DescConfigModel): """Holds multiple output topics. :param topics: Output topics @@ -86,5 +87,6 @@ class ToSection(BaseModel): default={}, description=describe_attr("models", __doc__) ) - class Config(DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="forbid", + ) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index d05d4d4c1..7be212300 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -2,7 +2,7 @@ from abc import ABC -from pydantic import Extra, Field +from pydantic import AliasChoices, ConfigDict, Field from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, @@ -18,7 +18,6 @@ ToSection, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig class PipelineComponent(BaseDefaultsComponent, ABC): @@ -41,7 +40,8 @@ class PipelineComponent(BaseDefaultsComponent, ABC): ) from_: FromSection | None = Field( default=None, - alias="from", + serialization_alias="from", + validation_alias=AliasChoices("from", "from_"), title="From", description=describe_attr("from_", __doc__), ) @@ -50,8 +50,9 @@ class PipelineComponent(BaseDefaultsComponent, ABC): description=describe_attr("to", __doc__), ) - class Config(DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) def __init__(self, **kwargs) -> None: super().__init__(**kwargs) diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 8af1a68c6..01bda1dbc 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -1,4 +1,4 @@ -from pydantic import BaseConfig, Extra, Field +from pydantic import ConfigDict, Field from kpops.components.base_components.kafka_app import ( KafkaAppConfig, @@ -32,5 +32,4 @@ class ProducerValues(KafkaAppConfig): default=..., description=describe_attr("streams", __doc__) ) - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict(extra="allow") diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index ca2db77ae..2c8b952ce 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,8 +1,7 @@ -from collections.abc import Mapping, Set +from collections.abc import Callable from typing import Any -from pydantic import BaseConfig, BaseModel, Extra, Field -from typing_extensions import override +from pydantic import ConfigDict, Field, SerializationInfo, 
model_serializer from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( @@ -10,7 +9,12 @@ KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import ( + CamelCaseConfigModel, + DescConfigModel, + exclude_by_value, + exclude_defaults, +) class StreamsConfig(KafkaStreamsConfig): @@ -47,7 +51,7 @@ class StreamsConfig(KafkaStreamsConfig): error_topic: str | None = Field( default=None, description=describe_attr("error_topic", __doc__) ) - config: dict[str, str] = Field( + config: dict[str, Any] = Field( default={}, description=describe_attr("config", __doc__) ) @@ -72,40 +76,15 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: self.extra_input_topics.get(role, []) + topics ) - @override - def dict( - self, - *, - include: None | Set[int | str] | Mapping[int | str, Any] = None, - exclude: None | Set[int | str] | Mapping[int | str, Any] = None, - by_alias: bool = False, - skip_defaults: bool | None = None, - exclude_unset: bool = False, - **kwargs, - ) -> dict: - """Generate a dictionary representation of the model. - - Optionally, specify which fields to include or exclude. - - :param include: Fields to include - :param include: Fields to exclude - :param by_alias: Use the fields' aliases in the dictionary - :param skip_defaults: Whether to skip defaults - :param exclude_unset: Whether to exclude unset fields - """ - return super().dict( - include=include, - exclude=exclude, - by_alias=by_alias, - skip_defaults=skip_defaults, - exclude_unset=exclude_unset, - # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values - exclude_defaults=True, - exclude_none=True, - ) + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution + @model_serializer(mode="wrap", when_used="always") + def serialize_model( + self, handler: Callable, info: SerializationInfo + ) -> dict[str, Any]: + return exclude_defaults(self, exclude_by_value(handler(self), None)) -class StreamsAppAutoScaling(BaseModel): +class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config. 
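# StreamsConfig above replaces its v1 `.dict()` override with a wrap-mode
# serializer: `handler` produces the default dump, which the model then
# filters, mirroring exclude_by_value/exclude_defaults. A standalone sketch
# of the same pattern (model and fields are invented):
from typing import Any
from pydantic import BaseModel, SerializationInfo, model_serializer

class StreamsDemo(BaseModel):
    output_topic: str | None = None
    config: dict[str, Any] = {}

    @model_serializer(mode="wrap", when_used="always")
    def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]:
        # drop None values and untouched defaults, falling back to Helm defaults
        return {k: v for k, v in handler(self).items() if v not in (None, {})}

assert StreamsDemo(output_topic="out").model_dump() == {"output_topic": "out"}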
:param enabled: Whether to enable auto-scaling using KEDA., defaults to False @@ -184,9 +163,7 @@ class StreamsAppAutoScaling(BaseModel): default=[], description=describe_attr("topics", __doc__), ) - - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config = ConfigDict(extra="allow") class StreamsAppConfig(KafkaAppConfig): @@ -206,6 +183,4 @@ class StreamsAppConfig(KafkaAppConfig): default=None, description=describe_attr("autoscaling", __doc__), ) - - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict(extra="allow") diff --git a/kpops/config.py b/kpops/config.py index 718568fb6..850418d21 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -1,15 +1,18 @@ from __future__ import annotations -from collections.abc import Callable from pathlib import Path -from typing import Any -from pydantic import AnyHttpUrl, BaseConfig, BaseSettings, Field, parse_obj_as -from pydantic.env_settings import SettingsSourceCallable +from pydantic import AnyHttpUrl, Field, TypeAdapter +from pydantic_settings import ( + BaseSettings, + PydanticBaseSettingsSource, + SettingsConfigDict, +) +from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object -from kpops.utils.yaml_loading import load_yaml_file +from kpops.utils.pydantic import YamlConfigSettingsSource ENV_PREFIX = "KPOPS_" @@ -35,10 +38,7 @@ class SchemaRegistryConfig(BaseSettings): description="Whether the Schema Registry handler should be initialized.", ) url: AnyHttpUrl = Field( - # For validating URLs use parse_obj_as - # https://github.com/pydantic/pydantic/issues/1106 - default=parse_obj_as(AnyHttpUrl, "http://localhost:8081"), - env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8081"), description="Address of the Schema Registry.", ) @@ -47,8 +47,7 @@ class KafkaRestConfig(BaseSettings): """Configuration for Kafka REST Proxy.""" url: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), - env=f"{ENV_PREFIX}KAFKA_REST_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8082"), description="Address of the Kafka REST Proxy.", ) @@ -57,8 +56,7 @@ class KafkaConnectConfig(BaseSettings): """Configuration for Kafka Connect.""" url: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), - env=f"{ENV_PREFIX}KAFKA_CONNECT_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8083"), description="Address of Kafka Connect.", ) @@ -68,22 +66,25 @@ class KpopsConfig(BaseSettings): defaults_path: Path = Field( default=Path(), - example="defaults", + examples=["defaults", "."], description="The path to the folder containing the defaults.yaml file and the environment defaults files. " "Paths can either be absolute or relative to `config.yaml`", ) environment: str = Field( default=..., - env=f"{ENV_PREFIX}ENVIRONMENT", - example="development", + examples=[ + "development", + "production", + ], description="The environment you want to generate and deploy the pipeline to. " "Suffix your environment files with this value (e.g. 
defaults_development.yaml for environment=development).",
     )
     kafka_brokers: str = Field(
         default=...,
-        env=f"{ENV_PREFIX}KAFKA_BROKERS",
+        examples=[
+            "broker1:9092,broker2:9092,broker3:9092",
+        ],
         description="The comma separated Kafka brokers address.",
-        example="broker1:9092,broker2:9092,broker3:9092",
     )
     defaults_filename_prefix: str = Field(
         default="defaults",
@@ -107,7 +108,6 @@ class KpopsConfig(BaseSettings):
     )
     timeout: int = Field(
         default=300,
-        env=f"{ENV_PREFIX}TIMEOUT",
         description="The timeout in seconds that specifies when actions like deletion or deploy time out.",
     )
     create_namespace: bool = Field(
@@ -124,38 +124,25 @@ class KpopsConfig(BaseSettings):
     )
     retain_clean_jobs: bool = Field(
         default=False,
-        env=f"{ENV_PREFIX}RETAIN_CLEAN_JOBS",
         description="Whether to retain clean up jobs in the cluster or uninstall them after completion.",
     )
 
-    class Config(BaseConfig):
-        config_path = Path("config.yaml")
-        env_file = ".env"
-        env_file_encoding = "utf-8"
-        env_prefix = ENV_PREFIX
-
-        @classmethod
-        def customise_sources(
-            cls,
-            init_settings: SettingsSourceCallable,
-            env_settings: SettingsSourceCallable,
-            file_secret_settings: SettingsSourceCallable,
-        ) -> tuple[
-            SettingsSourceCallable | Callable[[KpopsConfig], dict[str, Any]], ...
-        ]:
-            return (
-                env_settings,
-                init_settings,
-                yaml_config_settings_source,
-                file_secret_settings,
-            )
-
-
-def yaml_config_settings_source(settings: KpopsConfig) -> dict[str, Any]:
-    path_to_config = settings.Config.config_path
-    if path_to_config.exists():
-        if isinstance(source := load_yaml_file(path_to_config), dict):
-            return source
-        err_msg = f"{path_to_config} must be a mapping."
-        raise TypeError(err_msg)
-    return {}
+    model_config = SettingsConfigDict(env_prefix=ENV_PREFIX, env_nested_delimiter="__")
+
+    @override
+    @classmethod
+    def settings_customise_sources(
+        cls,
+        settings_cls: type[BaseSettings],
+        init_settings: PydanticBaseSettingsSource,
+        env_settings: PydanticBaseSettingsSource,
+        dotenv_settings: PydanticBaseSettingsSource,
+        file_secret_settings: PydanticBaseSettingsSource,
+    ):
+        return (
+            env_settings,
+            init_settings,
+            YamlConfigSettingsSource(settings_cls),
+            dotenv_settings,
+            file_secret_settings,
+        )
diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py
index cdfe14fa4..f4676105c 100644
--- a/kpops/pipeline_generator/pipeline.py
+++ b/kpops/pipeline_generator/pipeline.py
@@ -7,7 +7,7 @@
 from typing import TYPE_CHECKING
 
 import yaml
-from pydantic import BaseModel
+from pydantic import BaseModel, SerializeAsAny
 from rich.console import Console
 from rich.syntax import Syntax
 
@@ -38,7 +38,7 @@ class ValidationError(Exception):
 class PipelineComponents(BaseModel):
     """Stores the pipeline components."""
 
-    components: list[PipelineComponent] = []
+    components: list[SerializeAsAny[PipelineComponent]] = []
 
     @property
     def last(self) -> PipelineComponent:
@@ -192,7 +192,6 @@ def apply_component(
             **component_data,
         )
         component = self.enrich_component(component)
-
         # inflate & enrich components
         for inflated_component in component.inflate():  # TODO: recursively
             enriched_component = self.enrich_component(inflated_component)
@@ -230,8 +229,7 @@ def enrich_component(
         component.validate_ = True
         env_component_as_dict = update_nested_pair(
             self.env_components_index.get(component.name, {}),
-            # HACK: Pydantic .dict() doesn't create jsonable dict
-            json.loads(component.json(by_alias=True)),
+            component.model_dump(mode="json", by_alias=True),
         )
         # HACK: make sure component type is set for inflated components, because property is not serialized by Pydantic
         env_component_as_dict["type"] = component.type
@@ -266,9 +264,7 @@ def __iter__(self) -> Iterator[PipelineComponent]:
 
     def __str__(self) -> str:
         return yaml.dump(
-            json.loads(  # HACK: serialize types on Pydantic model export, which are not serialized by .dict(); e.g. pathlib.Path
-                self.components.json(exclude_none=True, by_alias=True)
-            )
+            self.components.model_dump(mode="json", by_alias=True, exclude_none=True)
         )
 
     def __len__(self) -> int:
@@ -283,7 +279,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict:
         config = self.config
         # Leftover variables that were previously introduced in the component by the substitution
         # functions, still hardcoded, because of their names.
-        # TODO: Get rid of them
+        # TODO(Ivan Yordanov): Get rid of them
         substitution_hardcoded = {
             "error_topic_name": config.topic_name_config.default_error_topic_name,
             "output_topic_name": config.topic_name_config.default_output_topic_name,
@@ -294,7 +290,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict:
             substitution_hardcoded,
         )
         substitution = generate_substitution(
-            json.loads(config.json()), existing_substitution=component_substitution
+            config.model_dump(mode="json"), existing_substitution=component_substitution
         )
 
         return json.loads(
diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py
index 14cc849e3..c53cc383d 100644
--- a/kpops/utils/dict_ops.py
+++ b/kpops/utils/dict_ops.py
@@ -1,5 +1,5 @@
 from collections.abc import Mapping
-from typing import Any
+from typing import Any, TypeVar
 
 
 def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict:
@@ -66,18 +66,22 @@ def flatten_mapping(
         if prefix:
             key = prefix + separator + key
         if isinstance(value, Mapping):
-            nested_mapping = flatten_mapping(value, key)
+            nested_mapping = flatten_mapping(value, key, separator)
             top = update_nested_pair(top, nested_mapping)
         else:
             top[key] = value
     return top
 
 
+_V = TypeVar("_V")
+
+
 def generate_substitution(
-    input: dict,
+    input: dict[str, _V],
     prefix: str | None = None,
     existing_substitution: dict | None = None,
-) -> dict:
+    separator: str | None = None,
+) -> dict[str, _V]:
     """Generate a complete substitution dict from a given dict.
 
     Finds all attributes that belong to a model and expands them to create
@@ -88,4 +92,10 @@
     :param existing_substitution: existing substitution to include
     :returns: Substitution dict of all variables related to the model.
""" - return update_nested(existing_substitution or {}, flatten_mapping(input, prefix)) + if separator is None: + return update_nested( + existing_substitution or {}, flatten_mapping(input, prefix) + ) + return update_nested( + existing_substitution or {}, flatten_mapping(input, prefix, separator) + ) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index ac64da49a..18ac3c5a4 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,18 +1,24 @@ import inspect +import json import logging from abc import ABC from collections.abc import Sequence from enum import Enum -from typing import Annotated, Any, Literal, Union - -from pydantic import BaseConfig, Field, schema, schema_json_of -from pydantic.fields import FieldInfo, ModelField -from pydantic.schema import SkipField +from typing import Annotated, Literal, Union + +from pydantic import Field, RootModel +from pydantic.fields import FieldInfo +from pydantic.json_schema import GenerateJsonSchema, SkipJsonSchema, model_json_schema +from pydantic_core.core_schema import ( + DefinitionsSchema, + LiteralSchema, + ModelField, + ModelFieldsSchema, +) from kpops.cli.registry import _find_classes -from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.components import PipelineComponent from kpops.config import KpopsConfig -from kpops.utils.docstring import describe_object class SchemaScope(str, Enum): @@ -20,20 +26,10 @@ class SchemaScope(str, Enum): CONFIG = "config" -original_field_schema = schema.field_schema - - -# adapted from https://github.com/tiangolo/fastapi/issues/1378#issuecomment-764966955 -def field_schema(field: ModelField, **kwargs: Any) -> Any: - if field.field_info.extra.get("hidden_from_schema"): - msg = f"{field.name} field is being hidden" - raise SkipField(msg) - else: - return original_field_schema(field, **kwargs) +class MultiComponentGenerateJsonSchema(GenerateJsonSchema): + ... -schema.field_schema = field_schema - log = logging.getLogger("") @@ -57,8 +53,9 @@ def _is_valid_component( def _add_components( - components_module: str, components: tuple[type[PipelineComponent]] | None = None -) -> tuple[type[PipelineComponent]]: + components_module: str, + components: tuple[type[PipelineComponent], ...] | None = None, +) -> tuple[type[PipelineComponent], ...]: """Add components to a components tuple. If an empty tuple is provided or it is not provided at all, the components @@ -96,7 +93,7 @@ def gen_pipeline_schema( log.warning("No components are provided, no schema is generated.") return # Add stock components if enabled - components: tuple[type[PipelineComponent]] = tuple() # noqa: C408 + components: tuple[type[PipelineComponent], ...] = () if include_stock_components: components = _add_components("kpops.components") # Add custom components if provided @@ -105,42 +102,45 @@ def gen_pipeline_schema( if not components: msg = "No valid components found." 
raise RuntimeError(msg) - # Create a type union that will hold the union of all component types - PipelineComponents = Union[components] # type: ignore[valid-type] # re-assign component type as Literal to work as discriminator for component in components: - component.__fields__["type"] = ModelField( - name="type", - type_=Literal[component.type], # type: ignore[reportGeneralTypeIssues] - required=False, + component.model_fields["type"] = FieldInfo( + annotation=Literal[component.type], # type:ignore[valid-type] default=component.type, - final=True, - field_info=FieldInfo( - title="Component type", - description=describe_object(component.__doc__), + exclude=True, + ) + core_schema: DefinitionsSchema = component.__pydantic_core_schema__ # pyright:ignore[reportGeneralTypeIssues] + model_schema: ModelFieldsSchema = core_schema["schema"]["schema"] # pyright:ignore[reportGeneralTypeIssues,reportTypedDictNotRequiredAccess] + model_schema["fields"]["type"] = ModelField( + type="model-field", + schema=LiteralSchema( + type="literal", + expected=[component.type], + metadata={ + "pydantic.internal.needs_apply_discriminated_union": False, + "pydantic_js_annotation_functions": [ + SkipJsonSchema().__get_pydantic_json_schema__ # pyright:ignore[reportGeneralTypeIssues] + ], + }, ), - model_config=BaseConfig, - class_validators=None, ) + PipelineComponents = Union[components] # type: ignore[valid-type] AnnotatedPipelineComponents = Annotated[ PipelineComponents, Field(discriminator="type") ] - schema = schema_json_of( - Sequence[AnnotatedPipelineComponents], - title="KPOps pipeline schema", - by_alias=True, - indent=4, - sort_keys=True, - ) - print(schema) + class PipelineSchema(RootModel): + root: Sequence[ + AnnotatedPipelineComponents # pyright:ignore[reportGeneralTypeIssues] + ] + + schema = PipelineSchema.model_json_schema(by_alias=True) + print(json.dumps(schema, indent=4, sort_keys=True)) def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config.""" - schema = schema_json_of( - KpopsConfig, title="KPOps config schema", indent=4, sort_keys=True - ) - print(schema) + schema = model_json_schema(KpopsConfig) + print(json.dumps(schema, indent=4, sort_keys=True)) diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 2eb0fa641..3ac64d82d 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -1,9 +1,15 @@ +from pathlib import Path from typing import Any import humps -from pydantic import BaseConfig, BaseModel +from pydantic import BaseModel, ConfigDict, Field +from pydantic.alias_generators import to_snake +from pydantic.fields import FieldInfo +from pydantic_settings import PydanticBaseSettingsSource +from typing_extensions import TypeVar, override from kpops.utils.docstring import describe_object +from kpops.utils.yaml_loading import load_yaml_file def to_camel(s: str) -> str: @@ -21,12 +27,127 @@ def to_dot(s: str) -> str: return s.replace("_", ".") -class CamelCaseConfig(BaseConfig): - alias_generator = to_camel - allow_population_by_field_name = True +def by_alias(model: BaseModel, field_name: str) -> str: + """Return field alias if exists else field name. 
+ :param field_name: Name of the field to get alias of + :param model: Model that owns the field + """ + return model.model_fields.get(field_name, Field()).alias or field_name -class DescConfig(BaseConfig): - @classmethod - def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None: + +_V = TypeVar("_V") + + +def exclude_by_value( + dumped_model: dict[str, _V], *excluded_values: Any +) -> dict[str, _V]: + """Strip all key-value pairs with certain values. + + :param dumped_model: Dumped model + :param excluded_values: Excluded field values + :return: Dumped model without excluded fields + """ + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_value not in excluded_values + } + + +def exclude_by_name( + dumped_model: dict[str, _V], *excluded_fields: str +) -> dict[str, _V]: + """Strip all key-value pairs with certain field names. + + :param dumped_model: Dumped model + :param excluded_fields: Excluded field names + :return: Dumped model without excluded fields + """ + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_name not in excluded_fields + } + + +def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> dict[str, _V]: + """Strip all key-value pairs with default values. + + :param model: Model + :param dumped_model: Dumped model + :return: Dumped model without defaults + """ + default_fields = { + field_name: field_info.default + for field_name, field_info in model.model_fields.items() + } + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_value + not in ( + default_fields.get(field_name), + default_fields.get(to_snake(field_name)), + ) + } + + +class CamelCaseConfigModel(BaseModel): + model_config = ConfigDict( + alias_generator=to_camel, + populate_by_name=True, + ) + + +class DescConfigModel(BaseModel): + @staticmethod + def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: schema["description"] = describe_object(model.__doc__) + + model_config = ConfigDict(json_schema_extra=json_schema_extra) + + +class YamlConfigSettingsSource(PydanticBaseSettingsSource): + """Loads variables from a YAML file at the project's root.""" + + path_to_config = Path("config.yaml") + + @override + def get_field_value( + self, + field: FieldInfo, + field_name: str, + ) -> tuple[Any, str, bool]: + if self.path_to_config.exists() and isinstance( + (file_content_yaml := load_yaml_file(self.path_to_config)), dict + ): + field_value = file_content_yaml.get(field_name) + return field_value, field_name, False + return None, field_name, False + + @override + def prepare_field_value( + self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool + ) -> Any: + return value + + @override + def __call__(self) -> dict[str, Any]: + d: dict[str, Any] = {} + + for field_name, field in self.settings_cls.model_fields.items(): + field_value, field_key, value_is_complex = self.get_field_value( + field, + field_name, + ) + field_value = self.prepare_field_value( + field_name, + field, + field_value, + value_is_complex, + ) + if field_value is not None: + d[field_key] = field_value + + return d diff --git a/poetry.lock b/poetry.lock index 05591986a..b9593a550 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. 
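# For reference, how a custom source such as the YamlConfigSettingsSource
# added above slots into pydantic-settings' resolution order. This mirrors
# the `settings_customise_sources` override on KpopsConfig earlier in the
# patch; the model below is illustrative. Earlier entries in the returned
# tuple win, so environment variables beat init kwargs, which beat dotenv
# files (and within `_env_file`, later files override earlier ones):
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource

class SettingsDemo(BaseSettings):
    timeout: int = 300

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ):
        return (env_settings, init_settings, dotenv_settings, file_secret_settings)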
[[package]] name = "aiofiles" @@ -11,6 +11,17 @@ files = [ {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, ] +[[package]] +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.7" +files = [ + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, +] + [[package]] name = "anyio" version = "3.6.2" @@ -816,56 +827,154 @@ virtualenv = ">=20.0.8" [[package]] name = "pydantic" -version = "1.10.8" -description = "Data validation and settings management using python type hints" +version = "2.5.2" +description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = 
"pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [package.dependencies] -python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.14.5" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = 
["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.5" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = 
"pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + 
{file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.0.3" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_settings-2.0.3-py3-none-any.whl", hash = "sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625"}, + {file = "pydantic_settings-2.0.3.tar.gz", hash = 
"sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945"}, +] + +[package.dependencies] +pydantic = ">=2.0.1" +python-dotenv = ">=0.21.0" [[package]] name = "pygments" @@ -1377,28 +1486,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.1.3" -description = "An extremely fast Python linter, written in Rust." +version = "0.1.7" +description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.3-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b46d43d51f7061652eeadb426a9e3caa1e0002470229ab2fc19de8a7b0766901"}, - {file = "ruff-0.1.3-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b8afeb9abd26b4029c72adc9921b8363374f4e7edb78385ffaa80278313a15f9"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca3cf365bf32e9ba7e6db3f48a4d3e2c446cd19ebee04f05338bc3910114528b"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4874c165f96c14a00590dcc727a04dca0cfd110334c24b039458c06cf78a672e"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eec2dd31eed114e48ea42dbffc443e9b7221976554a504767ceaee3dd38edeb8"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dc3ec4edb3b73f21b4aa51337e16674c752f1d76a4a543af56d7d04e97769613"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e3de9ed2e39160800281848ff4670e1698037ca039bda7b9274f849258d26ce"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c595193881922cc0556a90f3af99b1c5681f0c552e7a2a189956141d8666fe8"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f75e670d529aa2288cd00fc0e9b9287603d95e1536d7a7e0cafe00f75e0dd9d"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76dd49f6cd945d82d9d4a9a6622c54a994689d8d7b22fa1322983389b4892e20"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:918b454bc4f8874a616f0d725590277c42949431ceb303950e87fef7a7d94cb3"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d8859605e729cd5e53aa38275568dbbdb4fe882d2ea2714c5453b678dca83784"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0b6c55f5ef8d9dd05b230bb6ab80bc4381ecb60ae56db0330f660ea240cb0d4a"}, - {file = "ruff-0.1.3-py3-none-win32.whl", hash = "sha256:3e7afcbdcfbe3399c34e0f6370c30f6e529193c731b885316c5a09c9e4317eef"}, - {file = "ruff-0.1.3-py3-none-win_amd64.whl", hash = "sha256:7a18df6638cec4a5bd75350639b2bb2a2366e01222825562c7346674bdceb7ea"}, - {file = "ruff-0.1.3-py3-none-win_arm64.whl", hash = "sha256:12fd53696c83a194a2db7f9a46337ce06445fb9aa7d25ea6f293cf75b21aca9f"}, - {file = "ruff-0.1.3.tar.gz", hash = "sha256:3ba6145369a151401d5db79f0a47d50e470384d0d89d0d6f7fab0b589ad07c34"}, + {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7f80496854fdc65b6659c271d2c26e90d4d401e6a4a31908e7e334fab4645aac"}, + {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1ea109bdb23c2a4413f397ebd8ac32cb498bee234d4191ae1a310af760e5d287"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0c2de9dd9daf5e07624c24add25c3a490dbf74b0e9bca4145c632457b3b42a"}, + {file = 
"ruff-0.1.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:69a4bed13bc1d5dabf3902522b5a2aadfebe28226c6269694283c3b0cecb45fd"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de02ca331f2143195a712983a57137c5ec0f10acc4aa81f7c1f86519e52b92a1"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45b38c3f8788a65e6a2cab02e0f7adfa88872696839d9882c13b7e2f35d64c5f"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c64cb67b2025b1ac6d58e5ffca8f7b3f7fd921f35e78198411237e4f0db8e73"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dcc6bb2f4df59cb5b4b40ff14be7d57012179d69c6565c1da0d1f013d29951b"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2bb4bb6bbe921f6b4f5b6fdd8d8468c940731cb9406f274ae8c5ed7a78c478"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:276a89bcb149b3d8c1b11d91aa81898fe698900ed553a08129b38d9d6570e717"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:90c958fe950735041f1c80d21b42184f1072cc3975d05e736e8d66fc377119ea"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b05e3b123f93bb4146a761b7a7d57af8cb7384ccb2502d29d736eaade0db519"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:290ecab680dce94affebefe0bbca2322a6277e83d4f29234627e0f8f6b4fa9ce"}, + {file = "ruff-0.1.7-py3-none-win32.whl", hash = "sha256:416dfd0bd45d1a2baa3b1b07b1b9758e7d993c256d3e51dc6e03a5e7901c7d80"}, + {file = "ruff-0.1.7-py3-none-win_amd64.whl", hash = "sha256:4af95fd1d3b001fc41325064336db36e3d27d2004cdb6d21fd617d45a172dd96"}, + {file = "ruff-0.1.7-py3-none-win_arm64.whl", hash = "sha256:0683b7bfbb95e6df3c7c04fe9d78f631f8e8ba4868dfc932d43d690698057e2e"}, + {file = "ruff-0.1.7.tar.gz", hash = "sha256:dffd699d07abf54833e5f6cc50b85a6ff043715da8788c4a79bcd4ab4734d306"}, ] [[package]] @@ -1613,13 +1722,13 @@ typer = ">=0.4.0,<=0.7.0" [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] @@ -1761,4 +1870,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "cfa6447fe58b9cffa8247c4d2e09fa988bbb7484a2a728e432a63c05872e3bd8" +content-hash = "2dac8180567353aea454a8d6f9dc5f6fcddce9d6c6ec9026c23fe31627385635" diff --git a/pyproject.toml b/pyproject.toml index 8e749efe0..d4096b732 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,8 @@ kpops = "kpops.cli.main:app" [tool.poetry.dependencies] python = "^3.10" -pydantic = { extras = ["dotenv"], version = "^1.10.8" } +pydantic = { extras = ["dotenv"], version = "^2.5.2" } +pydantic-settings = "^2.0.3" rich = "^12.4.4" PyYAML = "^6.0" typer = { extras = ["all"], version = "^0.6.1" } @@ 
-45,7 +46,7 @@ pytest-mock = "^3.10.0" pytest-timeout = "^2.1.0" snapshottest = "^0.6.0" pre-commit = "^2.19.0" -ruff = "^0.1.3" +ruff = "^0.1.7" typer-cli = "^0.0.13" pyright = "^1.1.314" pytest-rerunfailures = "^11.1.2" diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index 2dd92b512..f8f75d870 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -8,16 +8,21 @@ snapshots = Snapshot() snapshots['TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation'] = '''{ - "definitions": { + "$defs": { "EmptyPipelineComponent": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -33,21 +38,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "empty-pipeline-component", - "enum": [ - "empty-pipeline-component" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -62,7 +62,7 @@ "properties": { "components": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Components to read from", @@ -71,7 +71,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Input topics", @@ -87,16 +87,28 @@ "description": "Input topic.", "properties": { "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/InputTopicTypes" + }, { - "$ref": "#/definitions/InputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type" } }, @@ -122,14 +134,19 @@ "type": "string" }, "SubPipelineComponent": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -145,21 +162,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component", - "enum": [ - "sub-pipeline-component" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -169,14 +181,19 @@ "type": "object" }, "SubPipelineComponentCorrect": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": 
"#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -192,21 +209,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-correct", - "enum": [ - "sub-pipeline-component-correct" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -216,6 +228,7 @@ "type": "object" }, "SubPipelineComponentCorrectDocstr": { + "additionalProperties": true, "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma", "properties": { "example_attr": { @@ -224,11 +237,15 @@ "type": "string" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -244,22 +261,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-correct-docstr", - "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. 
The description extraction terminates at the correct place, deletes 1 trailing coma", - "enum": [ - "sub-pipeline-component-correct-docstr" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -270,14 +281,19 @@ "type": "object" }, "SubPipelineComponentNoSchemaTypeNoType": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -293,21 +309,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-no-schema-type-no-type", - "enum": [ - "sub-pipeline-component-no-schema-type-no-type" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -317,6 +328,7 @@ "type": "object" }, "ToSection": { + "additionalProperties": false, "description": "Holds multiple output topics.", "properties": { "models": { @@ -330,7 +342,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/TopicConfig" + "$ref": "#/$defs/TopicConfig" }, "default": {}, "description": "Output topics", @@ -362,38 +374,82 @@ "type": "object" }, "key_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Key schema class name", - "title": "Key schema", - "type": "string" + "title": "Key schema" }, "partitions_count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Number of partitions into which the topic is divided", - "title": "Partitions count", - "type": "integer" + "title": "Partitions count" }, "replication_factor": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Replication factor of the topic", - "title": "Replication factor", - "type": "integer" + "title": "Replication factor" }, "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/OutputTopicTypes" + }, { - "$ref": "#/definitions/OutputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type", "title": "Topic type" }, "value_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Value schema class name", - "title": "Value schema", - "type": "string" + "title": "Value schema" } }, "title": "TopicConfig", @@ -403,33 +459,33 @@ "items": { "discriminator": { "mapping": { - "empty-pipeline-component": "#/definitions/EmptyPipelineComponent", - "sub-pipeline-component": "#/definitions/SubPipelineComponent", - "sub-pipeline-component-correct": "#/definitions/SubPipelineComponentCorrect", - "sub-pipeline-component-correct-docstr": "#/definitions/SubPipelineComponentCorrectDocstr", - 
"sub-pipeline-component-no-schema-type-no-type": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + "empty-pipeline-component": "#/$defs/EmptyPipelineComponent", + "sub-pipeline-component": "#/$defs/SubPipelineComponent", + "sub-pipeline-component-correct": "#/$defs/SubPipelineComponentCorrect", + "sub-pipeline-component-correct-docstr": "#/$defs/SubPipelineComponentCorrectDocstr", + "sub-pipeline-component-no-schema-type-no-type": "#/$defs/SubPipelineComponentNoSchemaTypeNoType" }, "propertyName": "type" }, "oneOf": [ { - "$ref": "#/definitions/EmptyPipelineComponent" + "$ref": "#/$defs/EmptyPipelineComponent" }, { - "$ref": "#/definitions/SubPipelineComponent" + "$ref": "#/$defs/SubPipelineComponent" }, { - "$ref": "#/definitions/SubPipelineComponentCorrect" + "$ref": "#/$defs/SubPipelineComponentCorrect" }, { - "$ref": "#/definitions/SubPipelineComponentCorrectDocstr" + "$ref": "#/$defs/SubPipelineComponentCorrectDocstr" }, { - "$ref": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + "$ref": "#/$defs/SubPipelineComponentNoSchemaTypeNoType" } ] }, - "title": "KPOps pipeline schema", + "title": "PipelineSchema", "type": "array" } ''' diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index 33db1560a..717a67e46 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -1,7 +1,7 @@ from pathlib import Path import pytest -from pydantic import AnyHttpUrl, ValidationError, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter, ValidationError from kpops.config import ( KafkaConnectConfig, @@ -27,9 +27,9 @@ def test_kpops_config_with_default_values(): == "${pipeline_name}-${component_name}-error" ) assert default_config.schema_registry.enabled is False - assert default_config.schema_registry.url == "http://localhost:8081" - assert default_config.kafka_rest.url == "http://localhost:8082" - assert default_config.kafka_connect.url == "http://localhost:8083" + assert default_config.schema_registry.url == AnyHttpUrl("http://localhost:8081") + assert default_config.kafka_rest.url == AnyHttpUrl("http://localhost:8082") + assert default_config.kafka_connect.url == AnyHttpUrl("http://localhost:8083") assert default_config.timeout == 300 assert default_config.create_namespace is False assert default_config.helm_config.context is None @@ -45,7 +45,7 @@ def test_kpops_config_with_different_invalid_urls(): environment="development", kafka_brokers="http://broker:9092", kafka_connect=KafkaConnectConfig( - url=parse_obj_as(AnyHttpUrl, "invalid-host") + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") ), ) @@ -53,7 +53,9 @@ def test_kpops_config_with_different_invalid_urls(): KpopsConfig( environment="development", kafka_brokers="http://broker:9092", - kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "invalid-host")), + kafka_rest=KafkaRestConfig( + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") + ), ) with pytest.raises(ValidationError): @@ -62,6 +64,6 @@ def test_kpops_config_with_different_invalid_urls(): kafka_brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, - url=parse_obj_as(AnyHttpUrl, "invalid-host"), + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host"), ), ) diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index cbb855d14..d860a0b9c 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -26,7 +26,7 @@ # type is inherited from PipelineComponent class 
EmptyPipelineComponent(PipelineComponent): class Config: - anystr_strip_whitespace = True + str_strip_whitespace = True # abstract component inheriting from ABC should be excluded diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 159b33359..1c38a8b1a 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -65,7 +65,7 @@ def test_should_create_post_requests_for_given_connector_configuration( headers=HEADERS, json={ "name": "test-connector", - "config": KafkaConnectorConfig(**configs).dict(), + "config": KafkaConnectorConfig(**configs).model_dump(), }, ) @@ -235,7 +235,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc mock_put.assert_called_with( url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=KafkaConnectorConfig(**configs).dict(), + json=KafkaConnectorConfig(**configs).model_dump(), ) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") @@ -455,7 +455,7 @@ def test_should_create_correct_validate_connector_config_request( mock_put.assert_called_with( url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=connector_config.dict(), + json=connector_config.model_dump(), ) @patch("httpx.put") @@ -477,7 +477,9 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( mock_put.assert_called_with( url=f"{DEFAULT_HOST}/connector-plugins/{connector_name}/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=KafkaConnectorConfig(**{"name": connector_name, **configs}).dict(), + json=KafkaConnectorConfig( + **{"name": connector_name, **configs} + ).model_dump(), ) def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 9f1fe143f..00718f3bd 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -3,7 +3,7 @@ from unittest.mock import MagicMock import pytest -from pydantic import AnyHttpUrl, BaseModel, parse_obj_as +from pydantic import AnyHttpUrl, BaseModel, TypeAdapter from pytest_mock import MockerFixture from schema_registry.client.schema import AvroSchema from schema_registry.client.utils import SchemaVersion @@ -74,7 +74,8 @@ def kpops_config_with_sr_enabled() -> KpopsConfig: environment="development", kafka_brokers="broker:9092", schema_registry=SchemaRegistryConfig( - enabled=True, url=parse_obj_as(AnyHttpUrl, "http://mock:8081") + enabled=True, + url=TypeAdapter(AnyHttpUrl).validate_python("http://mock:8081"), ), ) @@ -87,7 +88,7 @@ def test_load_schema_handler(kpops_config_with_sr_enabled: KpopsConfig): SchemaHandler, ) - config_disable = kpops_config_with_sr_enabled.copy() + config_disable = kpops_config_with_sr_enabled.model_copy() config_disable.schema_registry = SchemaRegistryConfig(enabled=False) assert ( diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index bbd87bc1e..3cee5f06b 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ 
b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock, patch import pytest +from pydantic import AnyHttpUrl from pytest_httpx import HTTPXMock from pytest_mock import MockerFixture @@ -45,7 +46,7 @@ def _setup(self, httpx_mock: HTTPXMock): json=cluster_response, status_code=200, ) - assert self.proxy_wrapper.url == DEFAULT_HOST + assert self.proxy_wrapper.url == AnyHttpUrl(DEFAULT_HOST) assert self.proxy_wrapper.cluster_id == "cluster-1" @patch("httpx.post") diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 748eb39a9..25fa67498 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -73,7 +73,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics": topic_name} + **{**connector_config.model_dump(), "topics": topic_name} ), namespace="test-namespace", ) @@ -85,7 +85,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics.regex": topic_pattern} + **{**connector_config.model_dump(), "topics.regex": topic_pattern} ), namespace="test-namespace", ) diff --git a/tests/defaults.yaml b/tests/defaults.yaml new file mode 100644 index 000000000..09fd863b3 --- /dev/null +++ b/tests/defaults.yaml @@ -0,0 +1,2 @@ +streams-app: + namespace: "namespace" diff --git a/tests/pipeline/resources/dotenv/.env b/tests/pipeline/resources/dotenv/.env new file mode 100644 index 000000000..9829b1218 --- /dev/null +++ b/tests/pipeline/resources/dotenv/.env @@ -0,0 +1,3 @@ +KPOPS_environment="default" +KPOPS_schema_registry__enabled="true" +KPOPS_schema_registry__url="http://localhost:8081" diff --git a/tests/pipeline/resources/dotenv/config.yaml b/tests/pipeline/resources/dotenv/config.yaml new file mode 100644 index 000000000..196b583f5 --- /dev/null +++ b/tests/pipeline/resources/dotenv/config.yaml @@ -0,0 +1,12 @@ +# environment: development +defaults_path: ../defaults.yaml +topic_name_config: + default_error_topic_name: "${component_name}-dead-letter-topic" + default_output_topic_name: "${component_name}-test-topic" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_connect: + url: "http://localhost:8083" +kafka_rest: + url: "http://localhost:8082" +helm_config: + api_version: "2.1.1" diff --git a/tests/pipeline/resources/dotenv/custom.env b/tests/pipeline/resources/dotenv/custom.env new file mode 100644 index 000000000..3e5371e98 --- /dev/null +++ b/tests/pipeline/resources/dotenv/custom.env @@ -0,0 +1,3 @@ +KPOPS_environment="custom" +KPOPS_schema_registry__enabled="false" +KPOPS_schema_registry__url="http://notlocalhost:8081" diff --git a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml new file mode 100644 index 000000000..55754eba1 --- /dev/null +++ b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml @@ -0,0 +1,23 @@ +kubernetes-app: + namespace: example-namespace + +kafka-app: + app: + streams: + brokers: "${kafka_brokers}" + schema_registry_url: "${schema_registry_url}" + version: "2.4.2" + +streams-app: # inherits from kafka-app + app: + streams: + config: + large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator + to: + topics: + ${error_topic_name}: + type: error + value_schema: com.bakdata.kafka.DeadLetter + partitions_count: 1 + 
configs: + cleanup.policy: compact,delete diff --git a/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml new file mode 100644 index 000000000..d61d6c9ba --- /dev/null +++ b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml @@ -0,0 +1,6 @@ +- type: streams-app + name: in-order-to-have-len-fifty-two-name-should-end--here + app: + streams: + config: + max.poll.records: 100 diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index 2ef44b969..406679c8b 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -28,7 +28,7 @@ }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, 'suspend': True }, @@ -79,7 +79,7 @@ }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, 'suspend': True }, @@ -136,7 +136,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'name': 'transaction-joiner', @@ -198,7 +198,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'name': 'fraud-detector', @@ -265,7 +265,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'from': { diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py index c2e339fbc..d1e6f1776 100644 --- a/tests/pipeline/snapshots/snap_test_pipeline.py +++ b/tests/pipeline/snapshots/snap_test_pipeline.py @@ -25,7 +25,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-custom-config-app1', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app1', @@ -67,7 +67,7 @@ 'resources-custom-config-app1' ], 'outputTopic': 'resources-custom-config-app2', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -120,7 +120,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'scheduled-producer', @@ -187,7 +187,7 @@ 'resources-pipeline-with-inflate-scheduled-producer' ], 'outputTopic': 'resources-pipeline-with-inflate-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -262,7 +262,7 
@@ 'resources-pipeline-with-inflate-converter' ], 'outputTopic': 'resources-pipeline-with-inflate-should-inflate', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'should-inflate', @@ -359,7 +359,7 @@ 'kafka-sink-connector' ], 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'should-inflate-inflated-streams-app', @@ -413,7 +413,7 @@ 'example-topic' ], 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -506,7 +506,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-first-pipeline-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'scheduled-producer', @@ -573,7 +573,7 @@ 'resources-first-pipeline-scheduled-producer' ], 'outputTopic': 'resources-first-pipeline-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -648,7 +648,7 @@ 'resources-first-pipeline-converter' ], 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', @@ -698,7 +698,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'out', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'account-producer', @@ -744,7 +744,7 @@ 'errorTopic': 'resources-no-input-topic-pipeline-app1-error', 'inputPattern': '.*', 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -804,7 +804,7 @@ 'inputTopics': [ 'example-output' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -863,7 +863,7 @@ 'example-topic' ], 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -926,7 +926,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-pipeline-with-envs-input-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'input-producer', @@ -993,7 +993,7 @@ 'resources-pipeline-with-envs-input-producer' ], 'outputTopic': 'resources-pipeline-with-envs-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -1068,7 +1068,7 @@ 'resources-pipeline-with-envs-converter' ], 'outputTopic': 'resources-pipeline-with-envs-filter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'filter', @@ -1127,7 +1127,7 @@ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { }, - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' }, 'suspend': True }, @@ -1157,7 +1157,7 @@ 'extraOutputTopics': { }, 'outputTopic': 
'resources-read-from-component-producer1', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'producer1', @@ -1192,7 +1192,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-read-from-component-producer2', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'producer2', @@ -1247,7 +1247,7 @@ 'resources-read-from-component-producer2' ], 'outputTopic': 'resources-read-from-component-inflate-step', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step', @@ -1344,7 +1344,7 @@ 'kafka-sink-connector' ], 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step-inflated-streams-app', @@ -1407,7 +1407,7 @@ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' ], 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step-without-prefix', @@ -1504,7 +1504,7 @@ 'kafka-sink-connector' ], 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step-without-prefix-inflated-streams-app', @@ -1552,7 +1552,7 @@ 'resources-read-from-component-producer1' ], 'outputTopic': 'resources-read-from-component-consumer1', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1609,7 +1609,7 @@ 'resources-read-from-component-producer1', 'resources-read-from-component-consumer1' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1664,7 +1664,7 @@ 'resources-read-from-component-producer1', 'resources-read-from-component-producer2' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1718,7 +1718,7 @@ 'inputTopics': [ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1769,7 +1769,7 @@ 'inputTopics': [ 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1832,7 +1832,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-component-type-substitution-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'scheduled-producer', @@ -1899,7 +1899,7 @@ 'resources-component-type-substitution-scheduled-producer' ], 'outputTopic': 'resources-component-type-substitution-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -1981,7 +1981,7 @@ 'resources-component-type-substitution-converter' ], 'outputTopic': 'resources-component-type-substitution-filter-app', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'filter-app', @@ -2039,7 +2039,7 @@ 'extraOutputTopics': { }, 'outputTopic': 
'app1-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app1', @@ -2081,7 +2081,7 @@ 'app1-test-topic' ], 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -2136,7 +2136,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'app1-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app1', @@ -2178,7 +2178,7 @@ 'app1-test-topic' ], 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -2231,7 +2231,7 @@ 'example-topic' ], 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index 86e2c8b8e..84698c0b4 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -71,7 +71,7 @@ def inflate(self) -> list[PipelineComponent]: f"{self.full_name}-" + "${component_name}" ): TopicConfig(type=OutputTopicTypes.OUTPUT) } - ).dict(), + ).model_dump(), ) inflate_steps.append(streams_app) diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index d09ae6795..894d88c8d 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -481,6 +481,30 @@ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch): == "env_broker" ) + def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setenv( + name="KPOPS_SCHEMA_REGISTRY__URL", value="http://somename:1234" + ) + + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "custom-config/pipeline.yaml"), + "--config", + str(RESOURCE_PATH / "custom-config/config.yaml"), + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert ( + enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"] + == "http://somename:1234/" + ) + def test_model_serialization(self, snapshot: SnapshotTest): """Test model serialization of component containing pathlib.Path attribute.""" result = runner.invoke( @@ -501,6 +525,33 @@ def test_model_serialization(self, snapshot: SnapshotTest): enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") + def test_dotenv_support(self): + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "custom-config/pipeline.yaml"), + "--defaults", + str(RESOURCE_PATH), + "--config", + str(RESOURCE_PATH / "dotenv/config.yaml"), + "--dotenv", + str(RESOURCE_PATH / "dotenv/.env"), + "--dotenv", + str(RESOURCE_PATH / "dotenv/custom.env"), + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert ( + enriched_pipeline["components"][1]["app"]["streams"]["schemaRegistryUrl"] + == "http://notlocalhost:8081/" + ) + def test_short_topic_definition(self): result = runner.invoke( app, @@ -583,3 +634,23 @@ def test_validate_unique_step_names(self): ], catch_exceptions=False, ) + + def test_temp_trim_release_name(self): + result = runner.invoke( 
+ app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "temp-trim-release-name/pipeline.yaml"), + "--defaults", + str(RESOURCE_PATH / "temp-trim-release-name"), + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert ( + enriched_pipeline["components"][0]["name"] + == "in-order-to-have-len-fifty-two-name-should-end--here" + ) diff --git a/tests/utils/resources/nested_base_settings.py b/tests/utils/resources/nested_base_settings.py index f7f92358a..97e755e71 100644 --- a/tests/utils/resources/nested_base_settings.py +++ b/tests/utils/resources/nested_base_settings.py @@ -1,4 +1,5 @@ -from pydantic import BaseSettings, Field +from pydantic import Field +from pydantic_settings import BaseSettings class NestedSettings(BaseSettings): @@ -10,5 +11,5 @@ class ParentSettings(BaseSettings): nested_field: NestedSettings = Field(...) field_with_env_defined: str = Field( default=..., - env="FIELD_WITH_ENV_DEFINED", + alias="FIELD_WITH_ENV_DEFINED", ) diff --git a/tests/utils/test_dict_ops.py b/tests/utils/test_dict_ops.py index 1ea410770..224934d87 100644 --- a/tests/utils/test_dict_ops.py +++ b/tests/utils/test_dict_ops.py @@ -70,7 +70,7 @@ class SimpleModel(BaseModel): }, }, problems=99, - ).json() + ).model_dump_json() ) existing_substitution = { "key1": "Everything", diff --git a/tests/utils/test_doc_gen.py b/tests/utils/test_doc_gen.py index d234bd79d..5ad065f2c 100644 --- a/tests/utils/test_doc_gen.py +++ b/tests/utils/test_doc_gen.py @@ -6,25 +6,13 @@ from hooks.gen_docs.gen_docs_env_vars import ( EnvVarAttrs, append_csv_to_dotenv_file, - collect_fields, csv_append_env_var, write_csv_to_md_file, write_title_to_dotenv_file, ) -from tests.utils.resources.nested_base_settings import ParentSettings class TestEnvDocGen: - def test_collect_fields(self): - expected: list[Any] = [ - "not_nested_field", - "attr", - Ellipsis, - Ellipsis, - ] - actual = [field.field_info.default for field in collect_fields(ParentSettings)] - assert actual == expected - @pytest.mark.parametrize( ("var_name", "default_value", "description", "extra_args", "expected_outcome"), [ From 03aa318a66bcc8a222d6980fc9fc6eda6b2a2829 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 12 Dec 2023 14:34:19 +0200 Subject: [PATCH 12/34] Allow overriding config files (#391) closes #153 --------- Co-authored-by: Salomon Popp --- config.yaml | 1 - .../docs/resources/variables/cli_env_vars.env | 8 +- docs/docs/resources/variables/cli_env_vars.md | 17 +-- .../resources/variables/config_env_vars.env | 5 - .../resources/variables/config_env_vars.md | 37 +++--- docs/docs/schema/config.json | 10 -- docs/docs/user/migration-guide/v2-v3.md | 18 +++ docs/docs/user/references/cli-commands.md | 15 ++- .../bakdata/atm-fraud-detection/config.yaml | 2 - kpops/cli/main.py | 66 ++++++---- .../base_defaults_component.py | 16 ++- kpops/config.py | 9 -- kpops/pipeline_generator/pipeline.py | 27 ++++- kpops/utils/pydantic.py | 43 +++++-- pyproject.toml | 1 - tests/cli/test_handlers.py | 2 - tests/cli/test_kpops_config.py | 7 +- tests/compiler/test_pipeline_name.py | 7 +- .../kafka_connect/test_connect_wrapper.py | 5 +- .../schema_handler/test_schema_handler.py | 1 - .../topic/test_proxy_wrapper.py | 2 +- .../test_base_defaults_component.py | 6 +- tests/components/test_helm_app.py | 1 - tests/components/test_kafka_app.py | 1 - tests/components/test_kafka_connector.py | 1 - tests/components/test_kubernetes_app.py | 2 +- 
tests/components/test_producer_app.py | 1 - tests/components/test_streams_app.py | 1 - tests/conftest.py | 8 ++ .../resources/custom-config/config.yaml | 1 - tests/pipeline/resources/dotenv/config.yaml | 1 - .../config_production.yaml | 14 +++ .../kafka-connect-sink-config/config.yaml | 1 - .../resources/multi-config/config.yaml | 14 +++ .../multi-config/config_development.yaml | 3 + .../multi-config/config_production.yaml | 3 + tests/pipeline/test_example.py | 2 +- tests/pipeline/test_pipeline.py | 113 ++++++++++++++++-- tests/pipeline/test_template.py | 6 +- 39 files changed, 333 insertions(+), 145 deletions(-) create mode 100644 tests/pipeline/resources/env-specific-config-only/config_production.yaml create mode 100644 tests/pipeline/resources/multi-config/config.yaml create mode 100644 tests/pipeline/resources/multi-config/config_development.yaml create mode 100644 tests/pipeline/resources/multi-config/config_production.yaml diff --git a/config.yaml b/config.yaml index 8fe8bb213..fdd145829 100644 --- a/config.yaml +++ b/config.yaml @@ -1,2 +1 @@ -environment: development kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env index dc44ac3a6..0f393ddf8 100644 --- a/docs/docs/resources/variables/cli_env_vars.env +++ b/docs/docs/resources/variables/cli_env_vars.env @@ -8,13 +8,17 @@ # Base directory to the pipelines (default is current working # directory) KPOPS_PIPELINE_BASE_DIR=. -# Path to the config.yaml file -KPOPS_CONFIG_PATH=config.yaml +# Path to the dir containing config.yaml files +KPOPS_CONFIG_PATH=. # Path to defaults folder KPOPS_DEFAULT_PATH # No default value, not required # Path to dotenv file. Multiple files can be provided. The files will # be loaded in order, with each file overriding the previous one. KPOPS_DOTENV_PATH # No default value, not required +# The environment you want to generate and deploy the pipeline to. +# Suffix your environment files with this value (e.g. +# defaults_development.yaml for environment=development). +KPOPS_ENVIRONMENT # No default value, not required # Path to YAML with pipeline definition KPOPS_PIPELINE_PATH # No default value, required # Comma separated list of steps to apply the command on diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md index ed0880bee..9a9b0012c 100644 --- a/docs/docs/resources/variables/cli_env_vars.md +++ b/docs/docs/resources/variables/cli_env_vars.md @@ -1,10 +1,11 @@ These variables are a lower priority alternative to the commands' flags. If a variable is set, the corresponding flag does not have to be specified in commands. Variables marked as required can instead be set as flags. -| Name |Default Value|Required| Description | -|-----------------------|-------------|--------|-----------------------------------------------------------------------------------------------------------------------------------| -|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory) | -|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | -|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | -|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. 
The files will be loaded in order, with each file overriding the previous one.| -|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | -|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | +| Name |Default Value|Required| Description | +|-----------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory) | +|KPOPS_CONFIG_PATH |. |False |Path to the dir containing config.yaml files | +|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | +|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. | +|KPOPS_ENVIRONMENT | |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).| +|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | +|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 2b99d2172..27ea591a7 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -9,11 +9,6 @@ # environment defaults files. Paths can either be absolute or relative # to `config.yaml` KPOPS_DEFAULTS_PATH=. -# environment -# The environment you want to generate and deploy the pipeline to. -# Suffix your environment files with this value (e.g. -# defaults_development.yaml for environment=development). -KPOPS_ENVIRONMENT # No default value, required # kafka_brokers # The comma separated Kafka brokers address. KPOPS_KAFKA_BROKERS # No default value, required diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index f81eb8f56..53f540387 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,21 +1,20 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name | Default Value |Required| Description | Setting name | -|--------------------------------------------------|----------------------------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------| -|KPOPS_DEFAULTS_PATH |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml` |defaults_path | -|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | -|KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. 
|defaults_filename_prefix | -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component_name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component_name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | -|KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. |schema_registry.enabled | -|KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url | -|KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url | -|KPOPS_KAFKA_CONNECT__URL |http://localhost:8083/ |False |Address of Kafka Connect. |kafka_connect.url | -|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | -|KPOPS_CREATE_NAMESPACE |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | -|KPOPS_HELM_CONFIG__CONTEXT | |False |Name of kubeconfig context (`--kube-context`) |helm_config.context | -|KPOPS_HELM_CONFIG__DEBUG |False |False |Run Helm in Debug mode |helm_config.debug | -|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for Capabilities.APIVersions |helm_config.api_version | -|KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore | -|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. |retain_clean_jobs | +| Name | Default Value |Required| Description | Setting name | +|--------------------------------------------------|----------------------------------------|--------|------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------| +|KPOPS_DEFAULTS_PATH |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`|defaults_path | +|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | +|KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix | +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component_name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component_name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | +|KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. |schema_registry.enabled | +|KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url | +|KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url | +|KPOPS_KAFKA_CONNECT__URL |http://localhost:8083/ |False |Address of Kafka Connect. |kafka_connect.url | +|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. 
|timeout | +|KPOPS_CREATE_NAMESPACE |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | +|KPOPS_HELM_CONFIG__CONTEXT | |False |Name of kubeconfig context (`--kube-context`) |helm_config.context | +|KPOPS_HELM_CONFIG__DEBUG |False |False |Run Helm in Debug mode |helm_config.debug | +|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for Capabilities.APIVersions |helm_config.api_version | +|KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore | +|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall them after completion. |retain_clean_jobs | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 09a848235..524049a09 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -161,15 +161,6 @@ "title": "Defaults Path", "type": "string" }, - "environment": { - "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", - "examples": [ - "development", - "production" - ], - "title": "Environment", - "type": "string" - }, "helm_config": { "allOf": [ { @@ -262,7 +253,6 @@ } }, "required": [ - "environment", "kafka_brokers" ], "title": "KpopsConfig", diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index def10c0f0..6f5c5a1bf 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -63,3 +63,21 @@ steps: command: deploy --execute # ... ``` + +## [Allow overriding config files](https://github.com/bakdata/kpops/pull/391) + +Specifying the environment is no longer mandatory. If it is not defined, only the global config files are used. + +`environment` is no longer specified in `config.yaml`. Instead, it can be set either via the CLI flag `--environment` or with the environment variable `KPOPS_ENVIRONMENT`. + +The `--config` flag in the CLI now points to the directory that contains `config*.yaml` files. The files to be used are resolved based on the `environment`, if one is provided. + +#### config.yaml + +```diff +- environment: development + kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" + schema_registry: + enabled: true + url: "http://my-custom-sr.url:8081" +``` diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index cb9b2ff5b..ed321367b 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -42,11 +42,12 @@ $ kpops clean [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] -* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] +* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] 
* `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. ## `kpops deploy` @@ -69,11 +70,12 @@ $ kpops deploy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] -* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] +* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. ## `kpops destroy` @@ -96,11 +98,12 @@ $ kpops destroy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] -* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] +* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. 
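To make the new `--config DIRECTORY` and `--environment` pair concrete, the following minimal sketch (illustrative only, not part of the patch; it merely mirrors the `config_<environment>.yaml` naming convention documented above) shows how the set of config files for a run could be resolved:

```python
from pathlib import Path


def resolve_config_files(config_dir: Path, environment: str | None) -> list[Path]:
    """Return the config files to load, lowest precedence first.

    config.yaml always comes first; config_<environment>.yaml, if an
    environment is given and the file exists, is layered on top of it.
    """
    candidates = [config_dir / "config.yaml"]
    if environment is not None:
        candidates.append(config_dir / f"config_{environment}.yaml")
    return [path for path in candidates if path.exists()]


# resolve_config_files(Path("."), "production")
# -> [PosixPath('config.yaml'), PosixPath('config_production.yaml')]
```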
## `kpops generate` @@ -123,11 +126,12 @@ $ kpops generate [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] -* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] +* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--template / --no-template`: Run Helm template [default: no-template] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. ## `kpops reset` @@ -150,11 +154,12 @@ $ kpops reset [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] -* `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] +* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. 
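The environment-specific file does not replace the base `config.yaml`; its values are merged over the base config key by key. Below is a rough sketch of such a nested merge (the exact semantics of the `update_nested_pair` helper used further down in `kpops/utils/pydantic.py` are an assumption here), matching the behaviour the `multi-config` test resources rely on:

```python
from typing import Any


def merge_configs(env_cfg: dict[str, Any], base_cfg: dict[str, Any]) -> dict[str, Any]:
    """Nested merge in which keys already set in env_cfg win over base_cfg."""
    for key, value in base_cfg.items():
        if isinstance(value, dict) and isinstance(env_cfg.get(key), dict):
            merge_configs(env_cfg[key], value)
        else:
            env_cfg.setdefault(key, value)
    return env_cfg


merged = merge_configs(
    {"schema_registry": {"url": "http://production:8081"}},  # config_production.yaml
    {"schema_registry": {"enabled": True, "url": "http://localhost:8081"}},  # config.yaml
)
assert merged == {"schema_registry": {"url": "http://production:8081", "enabled": True}}
```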
## `kpops schema` diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index d03a12c64..2f158fd62 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -1,5 +1,3 @@ -environment: development - topic_name_config: default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic" default_output_topic_name: "${pipeline_name}-${component_name}-topic" diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 0aa0d5f67..285c1154f 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -63,13 +63,13 @@ ) CONFIG_PATH_OPTION: Path = typer.Option( - default=Path("config.yaml"), - exists=False, - dir_okay=False, - file_okay=True, + default=Path(), + exists=True, + dir_okay=True, + file_okay=False, readable=True, envvar=f"{ENV_PREFIX}CONFIG_PATH", - help="Path to the config.yaml file", + help="Path to the dir containing config.yaml files", ) PIPELINE_PATH_ARG: Path = typer.Argument( @@ -113,6 +113,15 @@ class FilterType(str, Enum): help="Custom Python module containing your project-specific components", ) +ENVIRONMENT: str | None = typer.Option( + default=None, + envvar=f"{ENV_PREFIX}ENVIRONMENT", + help=( + "The environment you want to generate and deploy the pipeline to. " + "Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). " + ), +) + logger = logging.getLogger() logging.getLogger("httpx").setLevel(logging.WARNING) stream_handler = logging.StreamHandler() @@ -127,6 +136,7 @@ def setup_pipeline( pipeline_path: Path, components_module: str | None, kpops_config: KpopsConfig, + environment: str | None, ) -> Pipeline: registry = Registry() if components_module: @@ -135,7 +145,7 @@ def setup_pipeline( handlers = setup_handlers(components_module, kpops_config) return Pipeline.load_from_yaml( - pipeline_base_dir, pipeline_path, registry, kpops_config, handlers + pipeline_base_dir, pipeline_path, environment, registry, kpops_config, handlers ) @@ -207,17 +217,22 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_kpops_config( - config: Path, defaults: Optional[Path], verbose: bool, dotenv: Optional[list[Path]] + config: Path, + defaults: Optional[Path], + verbose: bool, + dotenv: Optional[list[Path]], + environment: Optional[str], ) -> KpopsConfig: setup_logging_level(verbose) - YamlConfigSettingsSource.path_to_config = config + YamlConfigSettingsSource.config_dir = config + YamlConfigSettingsSource.environment = environment kpops_config = KpopsConfig( _env_file=dotenv # pyright: ignore[reportGeneralTypeIssues] ) if defaults: kpops_config.defaults_path = defaults else: - kpops_config.defaults_path = config.parent / kpops_config.defaults_path + kpops_config.defaults_path = config / kpops_config.defaults_path return kpops_config @@ -265,10 +280,11 @@ def generate( steps: Optional[str] = PIPELINE_STEPS, filter_type: FilterType = FILTER_TYPE, verbose: bool = VERBOSE_OPTION, + environment: Optional[str] = ENVIRONMENT, ) -> Pipeline: - kpops_config = create_kpops_config(config, defaults, verbose, dotenv) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config + pipeline_base_dir, pipeline_path, components_module, kpops_config, environment ) if not template: @@ -299,10 +315,11 @@ def deploy( filter_type: FilterType = FILTER_TYPE, dry_run: bool = DRY_RUN, verbose: 
bool = VERBOSE_OPTION, -): - kpops_config = create_kpops_config(config, defaults, verbose, dotenv) + environment: Optional[str] = ENVIRONMENT, +) -> None: + kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config + pipeline_base_dir, pipeline_path, components_module, kpops_config, environment ) steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) @@ -323,10 +340,11 @@ def destroy( filter_type: FilterType = FILTER_TYPE, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, -): - kpops_config = create_kpops_config(config, defaults, verbose, dotenv) + environment: Optional[str] = ENVIRONMENT, +) -> None: + kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config + pipeline_base_dir, pipeline_path, components_module, kpops_config, environment ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -346,10 +364,11 @@ def reset( filter_type: FilterType = FILTER_TYPE, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, -): - kpops_config = create_kpops_config(config, defaults, verbose, dotenv) + environment: Optional[str] = ENVIRONMENT, +) -> None: + kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config + pipeline_base_dir, pipeline_path, components_module, kpops_config, environment ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -370,10 +389,11 @@ def clean( filter_type: FilterType = FILTER_TYPE, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, -): - kpops_config = create_kpops_config(config, defaults, verbose, dotenv) + environment: Optional[str] = ENVIRONMENT, +) -> None: + kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config + pipeline_base_dir, pipeline_path, components_module, kpops_config, environment ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 293d17dcc..7ef978705 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -99,7 +99,7 @@ def extend_with_defaults(self, **kwargs) -> dict: ) ) main_default_file_path, environment_default_file_path = get_defaults_file_paths( - config + config, ENV.get("environment") ) defaults = load_defaults( self.__class__, main_default_file_path, environment_default_file_path @@ -175,7 +175,9 @@ def defaults_from_yaml(path: Path, key: str) -> dict: return value -def get_defaults_file_paths(config: KpopsConfig) -> tuple[Path, Path]: +def get_defaults_file_paths( + config: KpopsConfig, environment: str | None +) -> tuple[Path, Path]: """Return the paths to the main and the environment defaults-files. The files need not exist, this function will only check if the dir set in @@ -183,6 +185,7 @@ def get_defaults_file_paths(config: KpopsConfig) -> tuple[Path, Path]: calculated from it. It is up to the caller to handle any false paths. 
:param config: Pipeline configuration + :param environment: Environment :returns: The defaults files paths """ defaults_dir = Path(config.defaults_path).resolve() @@ -190,9 +193,12 @@ def get_defaults_file_paths(config: KpopsConfig) -> tuple[Path, Path]: config.defaults_filename_prefix ).with_suffix(".yaml") - environment_default_file_path = defaults_dir / Path( - f"{config.defaults_filename_prefix}_{config.environment}" - ).with_suffix(".yaml") + environment_default_file_path = ( + defaults_dir + / Path(f"{config.defaults_filename_prefix}_{environment}").with_suffix(".yaml") + if environment is not None + else main_default_file_path + ) return main_default_file_path, environment_default_file_path diff --git a/kpops/config.py b/kpops/config.py index 850418d21..6e3188359 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -70,15 +70,6 @@ class KpopsConfig(BaseSettings): description="The path to the folder containing the defaults.yaml file and the environment defaults files. " "Paths can either be absolute or relative to `config.yaml`", ) - environment: str = Field( - default=..., - examples=[ - "development", - "production", - ], - description="The environment you want to generate and deploy the pipeline to. " - "Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", - ) kafka_brokers: str = Field( default=..., examples=[ diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index f4676105c..b0bcf2676 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -119,6 +119,7 @@ def load_from_yaml( cls, base_dir: Path, path: Path, + environment: str | None, registry: Registry, config: KpopsConfig, handlers: ComponentHandlers, @@ -137,13 +138,19 @@ def load_from_yaml( :returns: Initialized pipeline object """ Pipeline.set_pipeline_name_env_vars(base_dir, path) + Pipeline.set_environment_name(environment) main_content = load_yaml_file(path, substitution=ENV) if not isinstance(main_content, list): msg = f"The pipeline definition {path} should contain a list of components" raise TypeError(msg) env_content = [] - if (env_file := Pipeline.pipeline_filename_environment(path, config)).exists(): + if ( + environment + and ( + env_file := Pipeline.pipeline_filename_environment(path, environment) + ).exists() + ): env_content = load_yaml_file(env_file, substitution=ENV) if not isinstance(env_content, list): msg = f"The pipeline definition {env_file} should contain a list of components" @@ -304,14 +311,14 @@ def validate(self) -> None: self.components.validate_unique_names() @staticmethod - def pipeline_filename_environment(path: Path, config: KpopsConfig) -> Path: + def pipeline_filename_environment(pipeline_path: Path, environment: str) -> Path: """Add the environment name from the KpopsConfig to the pipeline.yaml path. 
- :param path: Path to pipeline.yaml file + :param pipeline_path: Path to pipeline.yaml file - :param config: The KpopsConfig + :param environment: Environment name :returns: An absolute path to the pipeline_<environment>.yaml """ - return path.with_stem(f"{path.stem}_{config.environment}") + return pipeline_path.with_stem(f"{pipeline_path.stem}_{environment}") @staticmethod def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: @@ -336,3 +343,15 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: ENV["pipeline_name"] = pipeline_name for level, parent in enumerate(path_without_file): ENV[f"pipeline_name_{level}"] = parent + + @staticmethod + def set_environment_name(environment: str | None) -> None: + """Set the environment name. + + It will be used to find environment-specific pipeline definitions, + defaults and configs. + + :param environment: Environment name + """ + if environment is not None: + ENV["environment"] = environment diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 3ac64d82d..8d8a55493 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -1,3 +1,4 @@ +import logging from pathlib import Path from typing import Any @@ -8,6 +9,7 @@ from pydantic_settings import PydanticBaseSettingsSource from typing_extensions import TypeVar, override +from kpops.utils.dict_ops import update_nested_pair from kpops.utils.docstring import describe_object from kpops.utils.yaml_loading import load_yaml_file @@ -111,7 +113,37 @@ def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: class YamlConfigSettingsSource(PydanticBaseSettingsSource): """Loads variables from a YAML file at the project's root.""" - path_to_config = Path("config.yaml") + log = logging.getLogger() + + config_dir = Path() + config_file_base_name = "config" + environment: str | None = None + + def __init__(self, settings_cls) -> None: + super().__init__(settings_cls) + default_config = self.load_config( + self.config_dir / f"{self.config_file_base_name}.yaml" + ) + env_config = ( + self.load_config( + self.config_dir + / f"{self.config_file_base_name}_{self.environment}.yaml" + ) + if self.environment + else {} + ) + self.config = update_nested_pair(env_config, default_config) + + @staticmethod + def load_config(file: Path) -> dict: + """Load yaml file if it exists. 
+ + :param file: Path to a ``config*.yaml`` + :return: Dict containing the config or empty dict if file doesn't exist + """ + if file.exists() and isinstance((loaded_file := load_yaml_file(file)), dict): + return loaded_file + return {} @override def get_field_value( @@ -119,12 +151,7 @@ def get_field_value( field: FieldInfo, field_name: str, ) -> tuple[Any, str, bool]: - if self.path_to_config.exists() and isinstance( - (file_content_yaml := load_yaml_file(self.path_to_config)), dict - ): - field_value = file_content_yaml.get(field_name) - return field_value, field_name, False - return None, field_name, False + return self.config.get(field_name), field_name, False @override def prepare_field_value( @@ -135,7 +162,6 @@ def prepare_field_value( @override def __call__(self) -> dict[str, Any]: d: dict[str, Any] = {} - for field_name, field in self.settings_cls.model_fields.items(): field_value, field_key, value_is_complex = self.get_field_value( field, @@ -149,5 +175,4 @@ def __call__(self) -> dict[str, Any]: ) if field_value is not None: d[field_key] = field_value - return d diff --git a/pyproject.toml b/pyproject.toml index d4096b732..2f929d944 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -143,7 +143,6 @@ select = [ "ICN", # flake8-import-conventions "INP", # flake8-no-pep420 "PIE", # flake8-pie - "PT", # flake8-pytest-style "Q", # flake8-quotes "RSE", # flake8-raise "RET", # flake8-return diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index 40c496497..c9534d73c 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -18,7 +18,6 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): config = KpopsConfig( defaults_path=Path("fake"), - environment="development", kafka_brokers="broker:9092", ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") @@ -52,7 +51,6 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): config = KpopsConfig( defaults_path=Path("fake"), - environment="development", schema_registry=SchemaRegistryConfig(enabled=True), kafka_brokers="broker:9092", ) diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index 717a67e46..14994acb0 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -12,9 +12,7 @@ def test_kpops_config_with_default_values(): - default_config = KpopsConfig( - environment="development", kafka_brokers="http://broker:9092" - ) + default_config = KpopsConfig(kafka_brokers="http://broker:9092") assert default_config.defaults_path == Path() assert default_config.defaults_filename_prefix == "defaults" @@ -42,7 +40,6 @@ def test_kpops_config_with_default_values(): def test_kpops_config_with_different_invalid_urls(): with pytest.raises(ValidationError): KpopsConfig( - environment="development", kafka_brokers="http://broker:9092", kafka_connect=KafkaConnectConfig( url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") @@ -51,7 +48,6 @@ def test_kpops_config_with_different_invalid_urls(): with pytest.raises(ValidationError): KpopsConfig( - environment="development", kafka_brokers="http://broker:9092", kafka_rest=KafkaRestConfig( url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") @@ -60,7 +56,6 @@ def test_kpops_config_with_different_invalid_urls(): with pytest.raises(ValidationError): KpopsConfig( - environment="development", kafka_brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, diff --git 
a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 6d0aa780c..5ca0da6ee 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -2,7 +2,6 @@ import pytest -from kpops.config import KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.environment import ENV @@ -57,9 +56,7 @@ def test_should_not_set_pipeline_name_with_the_same_base_dir(): def test_pipeline_file_name_environment(): - config = KpopsConfig( - defaults_path=DEFAULTS_PATH, - environment="some_environment", + environment = Pipeline.pipeline_filename_environment( + PIPELINE_PATH, "some_environment" ) - environment = Pipeline.pipeline_filename_environment(PIPELINE_PATH, config) assert environment.name == "pipeline_some_environment.yaml" diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 1c38a8b1a..86eb0690b 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -27,10 +27,7 @@ class TestConnectorApiWrapper: @pytest.fixture(autouse=True) def _setup(self): - config = KpopsConfig( - defaults_path=DEFAULTS_PATH, - environment="development", - ) + config = KpopsConfig(defaults_path=DEFAULTS_PATH) self.connect_wrapper = ConnectWrapper(config.kafka_connect) @pytest.fixture() diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 00718f3bd..6f1f0b623 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -71,7 +71,6 @@ def to_section(topic_config: TopicConfig) -> ToSection: @pytest.fixture() def kpops_config_with_sr_enabled() -> KpopsConfig: return KpopsConfig( - environment="development", kafka_brokers="broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index 3cee5f06b..f46c4b87f 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -32,7 +32,7 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def _setup(self, httpx_mock: HTTPXMock): - config = KpopsConfig(defaults_path=DEFAULTS_PATH, environment="development") + config = KpopsConfig(defaults_path=DEFAULTS_PATH) self.proxy_wrapper = ProxyWrapper(config.kafka_rest) with Path( diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 317bb9e8d..fe478e7b0 100644 --- a/tests/components/test_base_defaults_component.py +++ b/tests/components/test_base_defaults_component.py @@ -39,10 +39,7 @@ class EnvVarTest(BaseDefaultsComponent): @pytest.fixture() def config() -> KpopsConfig: - return KpopsConfig( - defaults_path=DEFAULTS_PATH, - environment="development", - ) + return KpopsConfig(defaults_path=DEFAULTS_PATH) @pytest.fixture() @@ -117,6 +114,7 @@ def test_load_defaults_with_environment( ) def test_inherit_defaults(self, config: KpopsConfig, handlers: ComponentHandlers): + ENV["environment"] = "development" component = Child(config=config, handlers=handlers) assert ( diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index 946739da0..0b933b1e9 100644 --- 
a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -29,7 +29,6 @@ class TestHelmApp: def config(self) -> KpopsConfig: return KpopsConfig( defaults_path=DEFAULTS_PATH, - environment="development", helm_diff_config=HelmDiffConfig(), ) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index 31fc10e25..06af5d4f5 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -21,7 +21,6 @@ class TestKafkaApp: def config(self) -> KpopsConfig: return KpopsConfig( defaults_path=DEFAULTS_PATH, - environment="development", helm_diff_config=HelmDiffConfig(), ) diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 20c2cb0b3..8ea178ef2 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -23,7 +23,6 @@ class TestKafkaConnector: def config(self) -> KpopsConfig: return KpopsConfig( defaults_path=DEFAULTS_PATH, - environment="development", topic_name_config=TopicNameConfig( default_error_topic_name="${component_type}-error-topic", default_output_topic_name="${component_type}-output-topic", diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 87f9527ee..95ab11f6c 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -21,7 +21,7 @@ class KubernetesTestValue(KubernetesAppConfig): class TestKubernetesApp: @pytest.fixture() def config(self) -> KpopsConfig: - return KpopsConfig(defaults_path=DEFAULTS_PATH, environment="development") + return KpopsConfig(defaults_path=DEFAULTS_PATH) @pytest.fixture() def handlers(self) -> ComponentHandlers: diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 30fb8fb39..89ca25bdd 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -33,7 +33,6 @@ def handlers(self) -> ComponentHandlers: def config(self) -> KpopsConfig: return KpopsConfig( defaults_path=DEFAULTS_PATH, - environment="development", topic_name_config=TopicNameConfig( default_error_topic_name="${component_type}-error-topic", default_output_topic_name="${component_type}-output-topic", diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 9916ebc30..93f6022f2 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -37,7 +37,6 @@ def handlers(self) -> ComponentHandlers: def config(self) -> KpopsConfig: return KpopsConfig( defaults_path=DEFAULTS_PATH, - environment="development", topic_name_config=TopicNameConfig( default_error_topic_name="${component_type}-error-topic", default_output_topic_name="${component_type}-output-topic", diff --git a/tests/conftest.py b/tests/conftest.py index cb88c2294..9da841b07 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,6 +4,8 @@ import pytest +from kpops.utils.yaml_loading import load_yaml_file + @pytest.fixture() def mock_env() -> Iterator[os._Environ[str]]: @@ -14,3 +16,9 @@ def mock_env() -> Iterator[os._Environ[str]]: """ with mock.patch.dict(os.environ, clear=True): yield os.environ + + +@pytest.fixture() +def load_yaml_file_clear_cache() -> Iterator[None]: + yield + load_yaml_file.cache.clear() diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index 8a9ca81c3..74910f62d 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ 
b/tests/pipeline/resources/custom-config/config.yaml @@ -1,4 +1,3 @@ -environment: development defaults_path: ../no-topics-defaults topic_name_config: default_error_topic_name: "${component_name}-dead-letter-topic" diff --git a/tests/pipeline/resources/dotenv/config.yaml b/tests/pipeline/resources/dotenv/config.yaml index 196b583f5..66fb3e410 100644 --- a/tests/pipeline/resources/dotenv/config.yaml +++ b/tests/pipeline/resources/dotenv/config.yaml @@ -1,4 +1,3 @@ -# environment: development defaults_path: ../defaults.yaml topic_name_config: default_error_topic_name: "${component_name}-dead-letter-topic" diff --git a/tests/pipeline/resources/env-specific-config-only/config_production.yaml b/tests/pipeline/resources/env-specific-config-only/config_production.yaml new file mode 100644 index 000000000..74910f62d --- /dev/null +++ b/tests/pipeline/resources/env-specific-config-only/config_production.yaml @@ -0,0 +1,14 @@ +defaults_path: ../no-topics-defaults +topic_name_config: + default_error_topic_name: "${component_name}-dead-letter-topic" + default_output_topic_name: "${component_name}-test-topic" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_connect: + url: "http://localhost:8083" +kafka_rest: + url: "http://localhost:8082" +schema_registry: + enabled: true + url: "http://localhost:8081" +helm_config: + api_version: "2.1.1" diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml index 14c488c5f..151484205 100644 --- a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml +++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml @@ -1,4 +1,3 @@ -environment: development defaults_path: .. kafka_brokers: "broker:9092" topic_name_config: diff --git a/tests/pipeline/resources/multi-config/config.yaml b/tests/pipeline/resources/multi-config/config.yaml new file mode 100644 index 000000000..74910f62d --- /dev/null +++ b/tests/pipeline/resources/multi-config/config.yaml @@ -0,0 +1,14 @@ +defaults_path: ../no-topics-defaults +topic_name_config: + default_error_topic_name: "${component_name}-dead-letter-topic" + default_output_topic_name: "${component_name}-test-topic" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_connect: + url: "http://localhost:8083" +kafka_rest: + url: "http://localhost:8082" +schema_registry: + enabled: true + url: "http://localhost:8081" +helm_config: + api_version: "2.1.1" diff --git a/tests/pipeline/resources/multi-config/config_development.yaml b/tests/pipeline/resources/multi-config/config_development.yaml new file mode 100644 index 000000000..faa196075 --- /dev/null +++ b/tests/pipeline/resources/multi-config/config_development.yaml @@ -0,0 +1,3 @@ +schema_registry: + enabled: true + url: "http://development:8081" diff --git a/tests/pipeline/resources/multi-config/config_production.yaml b/tests/pipeline/resources/multi-config/config_production.yaml new file mode 100644 index 000000000..3d2b6cc98 --- /dev/null +++ b/tests/pipeline/resources/multi-config/config_production.yaml @@ -0,0 +1,3 @@ +schema_registry: + enabled: true + url: "http://production:8081" diff --git a/tests/pipeline/test_example.py b/tests/pipeline/test_example.py index e3b3e5286..0a4c42321 100644 --- a/tests/pipeline/test_example.py +++ b/tests/pipeline/test_example.py @@ -19,7 +19,7 @@ def test_atm_fraud(self, snapshot: SnapshotTest): "--pipeline-base-dir", "examples", "--config", - 
"./examples/bakdata/atm-fraud-detection/config.yaml", + "./examples/bakdata/atm-fraud-detection", ], catch_exceptions=False, ) diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 894d88c8d..4a43ce9d5 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -16,7 +16,7 @@ PIPELINE_BASE_DIR_PATH = RESOURCE_PATH.parent -@pytest.mark.usefixtures("mock_env") +@pytest.mark.usefixtures("mock_env", "load_yaml_file_clear_cache") class TestPipeline: def test_python_api(self): pipeline = kpops.generate( @@ -111,6 +111,8 @@ def test_pipelines_with_env_values(self, snapshot: SnapshotTest): "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), + "--environment", + "development", ], catch_exceptions=False, ) @@ -222,7 +224,7 @@ def test_kafka_connector_config_parsing(self): "--defaults", str(RESOURCE_PATH), "--config", - str(RESOURCE_PATH / "kafka-connect-sink-config/config.yaml"), + str(RESOURCE_PATH / "kafka-connect-sink-config"), ], catch_exceptions=False, ) @@ -322,6 +324,8 @@ def test_with_env_defaults(self, snapshot: SnapshotTest): str(RESOURCE_PATH / "kafka-connect-sink/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "pipeline-with-env-defaults"), + "--environment", + "development", ], catch_exceptions=False, ) @@ -365,7 +369,9 @@ def test_with_custom_config_with_relative_defaults_path( str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", - str(RESOURCE_PATH / "custom-config/config.yaml"), + str(RESOURCE_PATH / "custom-config"), + "--environment", + "development", ], catch_exceptions=False, ) @@ -396,7 +402,7 @@ def test_with_custom_config_with_absolute_defaults_path( config_dict["defaults_path"] = str( (RESOURCE_PATH / "no-topics-defaults").absolute(), ) - temp_config_path = RESOURCE_PATH / "custom-config/temp_config.yaml" + temp_config_path = RESOURCE_PATH / "custom-config/config_custom.yaml" try: with temp_config_path.open("w") as abs_config_yaml: yaml.dump(config_dict, abs_config_yaml) @@ -408,7 +414,9 @@ def test_with_custom_config_with_absolute_defaults_path( str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", - str(temp_config_path), + str(temp_config_path.parent), + "--environment", + "development", ], catch_exceptions=False, ) @@ -440,6 +448,8 @@ def test_default_config(self, snapshot: SnapshotTest): str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "no-topics-defaults"), + "--environment", + "development", ], catch_exceptions=False, ) @@ -470,7 +480,9 @@ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch): str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", - str(RESOURCE_PATH / "custom-config/config.yaml"), + str(RESOURCE_PATH / "custom-config"), + "--environment", + "development", ], catch_exceptions=False, ) @@ -494,7 +506,9 @@ def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", - str(RESOURCE_PATH / "custom-config/config.yaml"), + str(RESOURCE_PATH / "custom-config"), + "--environment", + "development", ], catch_exceptions=False, ) @@ -505,6 +519,89 @@ def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): == "http://somename:1234/" ) + def test_env_specific_config_env_def_in_env_var( + self, monkeypatch: pytest.MonkeyPatch + ): + monkeypatch.setenv(name="KPOPS_ENVIRONMENT", value="production") + 
config_path = str(RESOURCE_PATH / "multi-config") + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "custom-config/pipeline.yaml"), + "--config", + config_path, + "--defaults", + str(RESOURCE_PATH), + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert ( + enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"] + == "http://production:8081/" + ) + + @pytest.mark.parametrize( + ("config_dir", "expected_url"), + [ + pytest.param("multi-config", "http://production:8081/", id="multi-config"), + pytest.param( + "env-specific-config-only", + "http://localhost:8081/", + id="env-specific-config-only", + ), + ], + ) + def test_env_specific_config_env_def_in_cli( + self, config_dir: str, expected_url: str + ): + config_path = str(RESOURCE_PATH / config_dir) + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "custom-config/pipeline.yaml"), + "--config", + config_path, + "--defaults", + str(RESOURCE_PATH), + "--environment", + "production", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert ( + enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"] + == expected_url + ) + + def test_config_dir_doesnt_exist(self): + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "custom-config/pipeline.yaml"), + "--config", + "./non-existent-dir", + "--defaults", + str(RESOURCE_PATH), + "--environment", + "production", + ], + catch_exceptions=False, + ) + assert result.exit_code != 0 + def test_model_serialization(self, snapshot: SnapshotTest): """Test model serialization of component containing pathlib.Path attribute.""" result = runner.invoke( @@ -536,7 +633,7 @@ def test_dotenv_support(self): "--defaults", str(RESOURCE_PATH), "--config", - str(RESOURCE_PATH / "dotenv/config.yaml"), + str(RESOURCE_PATH / "dotenv"), "--dotenv", str(RESOURCE_PATH / "dotenv/.env"), "--dotenv", diff --git a/tests/pipeline/test_template.py b/tests/pipeline/test_template.py index a43fbec5b..40b9dadbf 100644 --- a/tests/pipeline/test_template.py +++ b/tests/pipeline/test_template.py @@ -32,6 +32,8 @@ def test_default_template_config(self, run_command: MagicMock): "--defaults", str(RESOURCE_PATH / "no-topics-defaults"), "--template", + "--environment", + "development", ], catch_exceptions=False, ) @@ -69,8 +71,10 @@ def test_template_config_with_flags(self, run_command: MagicMock): "--defaults", str(RESOURCE_PATH / "no-topics-defaults"), "--config", - str(RESOURCE_PATH / "custom-config/config.yaml"), + str(RESOURCE_PATH / "custom-config"), "--template", + "--environment", + "development", ], catch_exceptions=False, ) From dac1cc9640d8204f725b220cc9a1a0a5b62891d0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 12 Dec 2023 16:41:28 +0200 Subject: [PATCH 13/34] Change substitution variables separator to `.` (#388) Co-authored-by: Salomon Popp --- .../resources/variables/config_env_vars.env | 4 +- .../resources/variables/config_env_vars.md | 4 +- .../variables/variable_substitution.yaml | 16 ++++---- docs/docs/schema/config.json | 8 ++-- .../core-concepts/variables/substitution.md | 6 +-- docs/docs/user/migration-guide/v2-v3.md | 29 ++++++++++++++ .../bakdata/atm-fraud-detection/config.yaml | 4 +- 
.../bakdata/atm-fraud-detection/defaults.yaml | 2 +- kpops/config.py | 4 +- kpops/pipeline_generator/pipeline.py | 5 ++- kpops/utils/dict_ops.py | 40 +++++++++++++++++++ kpops/utils/yaml_loading.py | 10 ++++- tests/cli/test_kpops_config.py | 4 +- .../infinite_pipeline.yaml | 6 +-- .../component-type-substitution/pipeline.yaml | 16 ++++---- .../resources/custom-config/config.yaml | 4 +- tests/pipeline/resources/defaults.yaml | 4 +- .../kafka-connect-sink-config/config.yaml | 4 +- .../no-topics-defaults/defaults.yaml | 2 +- .../defaults_development.yaml | 2 +- .../defaults.yaml | 2 +- .../pipeline-with-env-defaults/defaults.yaml | 4 +- .../defaults_development.yaml | 2 +- tests/pipeline/test_components/components.py | 6 +-- 24 files changed, 133 insertions(+), 55 deletions(-) diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 27ea591a7..b7e1a2ced 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -18,10 +18,10 @@ KPOPS_KAFKA_BROKERS # No default value, required KPOPS_DEFAULTS_FILENAME_PREFIX=defaults # topic_name_config.default_output_topic_name # Configures the value for the variable ${output_topic_name} -KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline_name}-${component_name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline_name}-${component.name} # topic_name_config.default_error_topic_name # Configures the value for the variable ${error_topic_name} -KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline_name}-${component_name}-error +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline_name}-${component.name}-error # schema_registry.enabled # Whether the Schema Registry handler should be initialized. KPOPS_SCHEMA_REGISTRY__ENABLED=False diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 53f540387..f9b9854ac 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -5,8 +5,8 @@ These variables are a lower priority alternative to the settings in `config.yaml |KPOPS_DEFAULTS_PATH |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`|defaults_path | |KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | |KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. 
|defaults_filename_prefix | -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component_name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component_name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component.name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component.name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | |KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. |schema_registry.enabled | |KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url | |KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url | diff --git a/docs/docs/resources/variables/variable_substitution.yaml b/docs/docs/resources/variables/variable_substitution.yaml index 16e042586..8a4cf60ea 100644 --- a/docs/docs/resources/variables/variable_substitution.yaml +++ b/docs/docs/resources/variables/variable_substitution.yaml @@ -1,9 +1,9 @@ - type: scheduled-producer app: labels: - app_type: "${component_type}" - app_name: "${component_name}" - app_schedule: "${component_app_schedule}" + app_type: "${component.type}" + app_name: "${component.name}" + app_schedule: "${component.app.schedule}" commandLine: FAKE_ARG: "fake-arg-value" schedule: "30 3/8 * * *" @@ -20,11 +20,11 @@ name: "filter-app" app: labels: - app_type: "${component_type}" - app_name: "${component_name}" - app_resources_requests_memory: "${component_app_resources_requests_memory}" - ${component_type}: "${component_app_labels_app_name}-${component_app_labels_app_type}" - test_placeholder_in_placeholder: "${component_app_labels_${component_type}}" + app_type: "${component.type}" + app_name: "${component.name}" + app_resources_requests_memory: "${component.app.resources.requests.memory}" + ${component.type}: "${component.app.labels.app_name}-${component.app.labels.app_type}" + test_placeholder_in_placeholder: "${component.app.labels.${component.type}}" commandLine: TYPE: "nothing" resources: diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 524049a09..7708ed04c 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -119,13 +119,13 @@ "description": "Configure the topic name variables you can use in the pipeline definition.", "properties": { "default_error_topic_name": { - "default": "${pipeline_name}-${component_name}-error", + "default": "${pipeline_name}-${component.name}-error", "description": "Configures the value for the variable ${error_topic_name}", "title": "Default Error Topic Name", "type": "string" }, "default_output_topic_name": { - "default": "${pipeline_name}-${component_name}", + "default": "${pipeline_name}-${component.name}", "description": "Configures the value for the variable ${output_topic_name}", "title": "Default Output Topic Name", "type": "string" @@ -246,8 +246,8 @@ } ], "default": { - "default_error_topic_name": "${pipeline_name}-${component_name}-error", - "default_output_topic_name": 
"${pipeline_name}-${component_name}" + "default_error_topic_name": "${pipeline_name}-${component.name}-error", + "default_output_topic_name": "${pipeline_name}-${component.name}" }, "description": "Configure the topic name variables you can use in the pipeline definition." } diff --git a/docs/docs/user/core-concepts/variables/substitution.md b/docs/docs/user/core-concepts/variables/substitution.md index 4ef9b1b25..71782180d 100644 --- a/docs/docs/user/core-concepts/variables/substitution.md +++ b/docs/docs/user/core-concepts/variables/substitution.md @@ -6,7 +6,7 @@ KPOps supports the usage of placeholders and environment variables in [pipeline These variables can be used in a component's definition to refer to any of its attributes, including ones that the user has defined in the defaults. -All of them are prefixed with `component_` and follow the following form: `component_{attribute_name}`. If the attribute itself contains attributes, they can be referred to like this: `component_{attribute_name}_{subattribute_name}`. +All of them are prefixed with `component.` and follow the following form: `component.{attribute_name}`. If the attribute itself contains attributes, they can be referred to like this: `component.{attribute_name}.{subattribute_name}`. @@ -26,8 +26,8 @@ These variables include all fields in the [config](../config.md) and refer to th !!! info Aliases - `error_topic_name` is an alias for `topic_name_config_default_error_topic_name` - `output_topic_name` is an alias for `topic_name_config_default_output_topic_name` + `error_topic_name` is an alias for `topic_name_config.default_error_topic_name` + `output_topic_name` is an alias for `topic_name_config.default_output_topic_name` diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index 6f5c5a1bf..d44c49503 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -81,3 +81,32 @@ The `--config` flag in the CLI now points to the directory that contains `config enabled: true url: "http://my-custom-sr.url:8081" ``` + +## [Change substitution variables separator to `.`](https://github.com/bakdata/kpops/pull/388) + +The delimiter in the substitution variables is changed to `.`. 
+ +#### pipeline.yaml and default.yaml + +```diff +steps: + - type: scheduled-producer + app: + labels: +- app_type: "${component_type}" +- app_name: "${component_name}" +- app_schedule: "${component_app_schedule}" ++ app_type: "${component.type}" ++ app_name: "${component.name}" ++ app_schedule: "${component.app.schedule}" +``` + +#### config.yaml + +```diff +topic_name_config: +- default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic" +- default_output_topic_name: "${pipeline_name}-${component_name}-topic" ++ default_error_topic_name: "${pipeline_name}-${component.name}-dead-letter-topic" ++ default_output_topic_name: "${pipeline_name}-${component.name}-topic" +``` diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index 2f158fd62..41740ae77 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -1,6 +1,6 @@ topic_name_config: - default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic" - default_output_topic_name: "${pipeline_name}-${component_name}-topic" + default_error_topic_name: "${pipeline_name}-${component.name}-dead-letter-topic" + default_output_topic_name: "${pipeline_name}-${component.name}-topic" kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" diff --git a/examples/bakdata/atm-fraud-detection/defaults.yaml b/examples/bakdata/atm-fraud-detection/defaults.yaml index e3ba49c67..2e9079f4f 100644 --- a/examples/bakdata/atm-fraud-detection/defaults.yaml +++ b/examples/bakdata/atm-fraud-detection/defaults.yaml @@ -11,7 +11,7 @@ kafka-app: app: streams: brokers: ${kafka_brokers} - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} optimizeLeaveGroupBehavior: false producer-app: diff --git a/kpops/config.py b/kpops/config.py index 6e3188359..172ff4305 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -21,11 +21,11 @@ class TopicNameConfig(BaseSettings): """Configure the topic name variables you can use in the pipeline definition.""" default_output_topic_name: str = Field( - default="${pipeline_name}-${component_name}", + default="${pipeline_name}-${component.name}", description="Configures the value for the variable ${output_topic_name}", ) default_error_topic_name: str = Field( - default="${pipeline_name}-${component_name}-error", + default="${pipeline_name}-${component.name}-error", description="Configures the value for the variable ${error_topic_name}", ) diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index b0bcf2676..07ea94e1e 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -295,9 +295,12 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: component_as_dict, "component", substitution_hardcoded, + separator=".", ) substitution = generate_substitution( - config.model_dump(mode="json"), existing_substitution=component_substitution + config.model_dump(mode="json"), + existing_substitution=component_substitution, + separator=".", ) return json.loads( diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index c53cc383d..fa8aecd7b 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -1,6 +1,11 @@ +import re +from collections import ChainMap as _ChainMap from collections.abc import Mapping +from string import Template from typing import Any, TypeVar +from typing_extensions import override + def 
update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict: """Nested update for 2 dictionaries. @@ -99,3 +104,38 @@ def generate_substitution( return update_nested( existing_substitution or {}, flatten_mapping(input, prefix, separator) ) + + +_sentinel_dict = {} + + +class ImprovedTemplate(Template): + """Introduces the dot as an allowed character in placeholders.""" + + idpattern = r"(?a:[_a-z][_.a-z0-9]*)" + + @override + def safe_substitute(self, mapping=_sentinel_dict, /, **kws) -> str: + if mapping is _sentinel_dict: + mapping = kws + elif kws: + mapping = _ChainMap(kws, mapping) + + # Helper function for .sub() + def convert(mo: re.Match): + named = mo.group("named") or mo.group("braced") + if named is not None: + try: + if "." not in named: + return str(mapping[named]) + return str(mapping[named.replace(".", "__")]) + except KeyError: + return mo.group() + if mo.group("escaped") is not None: + return self.delimiter + if mo.group("invalid") is not None: + return mo.group() + msg = "Unrecognized named group in pattern" + raise ValueError(msg, self.pattern) + + return self.pattern.sub(convert, self.template) diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml_loading.py index fb810c193..668a609cc 100644 --- a/kpops/utils/yaml_loading.py +++ b/kpops/utils/yaml_loading.py @@ -1,12 +1,13 @@ from collections.abc import Mapping from pathlib import Path -from string import Template from typing import Any import yaml from cachetools import cached from cachetools.keys import hashkey +from kpops.utils.dict_ops import ImprovedTemplate + def generate_hashkey( file_path: Path, substitution: Mapping[str, Any] | None = None @@ -33,7 +34,12 @@ def substitute(input: str, substitution: Mapping[str, Any] | None = None) -> str """ if not substitution: return input - return Template(input).safe_substitute(**substitution) + + def prepare_substitution(substitution: Mapping[str, Any]) -> dict[str, Any]: + """Replace dots with underscores in the substitution keys.""" + return {k.replace(".", "__"): v for k, v in substitution.items()} + + return ImprovedTemplate(input).safe_substitute(**prepare_substitution(substitution)) def substitute_nested(input: str, **kwargs) -> str: diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index 14994acb0..e52b2345a 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -18,11 +18,11 @@ def test_kpops_config_with_default_values(): assert default_config.defaults_filename_prefix == "defaults" assert ( default_config.topic_name_config.default_output_topic_name - == "${pipeline_name}-${component_name}" + == "${pipeline_name}-${component.name}" ) assert ( default_config.topic_name_config.default_error_topic_name - == "${pipeline_name}-${component_name}-error" + == "${pipeline_name}-${component.name}-error" ) assert default_config.schema_registry.enabled is False assert default_config.schema_registry.url == AnyHttpUrl("http://localhost:8081") diff --git a/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml b/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml index e01434ceb..17eba50a2 100644 --- a/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml +++ b/tests/pipeline/resources/component-type-substitution/infinite_pipeline.yaml @@ -1,6 +1,6 @@ - type: converter app: labels: - l_1: ${component_app_labels_l_2} - l_2: ${component_app_labels_l_1} - infinite_nesting: ${component_app_labels} + l_1: ${component.app.labels.l_2} + l_2: 
${component.app.labels.l_1} + infinite_nesting: ${component.app.labels} diff --git a/tests/pipeline/resources/component-type-substitution/pipeline.yaml b/tests/pipeline/resources/component-type-substitution/pipeline.yaml index 16e042586..8a4cf60ea 100644 --- a/tests/pipeline/resources/component-type-substitution/pipeline.yaml +++ b/tests/pipeline/resources/component-type-substitution/pipeline.yaml @@ -1,9 +1,9 @@ - type: scheduled-producer app: labels: - app_type: "${component_type}" - app_name: "${component_name}" - app_schedule: "${component_app_schedule}" + app_type: "${component.type}" + app_name: "${component.name}" + app_schedule: "${component.app.schedule}" commandLine: FAKE_ARG: "fake-arg-value" schedule: "30 3/8 * * *" @@ -20,11 +20,11 @@ name: "filter-app" app: labels: - app_type: "${component_type}" - app_name: "${component_name}" - app_resources_requests_memory: "${component_app_resources_requests_memory}" - ${component_type}: "${component_app_labels_app_name}-${component_app_labels_app_type}" - test_placeholder_in_placeholder: "${component_app_labels_${component_type}}" + app_type: "${component.type}" + app_name: "${component.name}" + app_resources_requests_memory: "${component.app.resources.requests.memory}" + ${component.type}: "${component.app.labels.app_name}-${component.app.labels.app_type}" + test_placeholder_in_placeholder: "${component.app.labels.${component.type}}" commandLine: TYPE: "nothing" resources: diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index 74910f62d..60410489d 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ b/tests/pipeline/resources/custom-config/config.yaml @@ -1,7 +1,7 @@ defaults_path: ../no-topics-defaults topic_name_config: - default_error_topic_name: "${component_name}-dead-letter-topic" - default_output_topic_name: "${component_name}-test-topic" + default_error_topic_name: "${component.name}-dead-letter-topic" + default_output_topic_name: "${component.name}-test-topic" kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" kafka_connect: url: "http://localhost:8083" diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml index e1223203b..b78293627 100644 --- a/tests/pipeline/resources/defaults.yaml +++ b/tests/pipeline/resources/defaults.yaml @@ -1,12 +1,12 @@ kubernetes-app: - name: "${component_type}" + name: "${component.type}" namespace: example-namespace kafka-app: app: streams: brokers: "${kafka_brokers}" - schema_registry_url: "${schema_registry_url}" + schema_registry_url: "${schema_registry.url}" version: "2.4.2" producer-app: {} # inherits from kafka-app diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml index 151484205..572b695c7 100644 --- a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml +++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml @@ -1,8 +1,8 @@ defaults_path: .. 
kafka_brokers: "broker:9092" topic_name_config: - default_error_topic_name: ${component_type}-error-topic - default_output_topic_name: ${component_type}-output-topic + default_error_topic_name: ${component.type}-error-topic + default_output_topic_name: ${component.type}-output-topic helm_diff_config: enable: false kafka_connect: diff --git a/tests/pipeline/resources/no-topics-defaults/defaults.yaml b/tests/pipeline/resources/no-topics-defaults/defaults.yaml index 87d21d47d..ea3dd7d9e 100644 --- a/tests/pipeline/resources/no-topics-defaults/defaults.yaml +++ b/tests/pipeline/resources/no-topics-defaults/defaults.yaml @@ -2,7 +2,7 @@ kafka-app: app: streams: brokers: "${kafka_brokers}" - schemaRegistryUrl: "${schema_registry_url}" + schemaRegistryUrl: "${schema_registry.url}" producer-app: to: diff --git a/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml b/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml index 035691c2e..b6a05220f 100644 --- a/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml +++ b/tests/pipeline/resources/no-topics-defaults/defaults_development.yaml @@ -1,3 +1,3 @@ kubernetes-app: - name: "${component_type}-development" + name: "${component.type}-development" namespace: development-namespace diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml index c67f869d9..b5954da19 100644 --- a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml +++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml @@ -8,4 +8,4 @@ kafka-app: app: streams: brokers: ${kafka_brokers} - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml index 44229f00e..f9505c0ab 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml @@ -1,11 +1,11 @@ kubernetes-app: - name: ${component_type} + name: ${component.type} namespace: example-namespace kafka-app: app: streams: brokers: "${kafka_brokers}" - schemaRegistryUrl: "${schema_registry_url}" + schemaRegistryUrl: "${schema_registry.url}" producer-app: {} # inherits from kafka-app diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml index c7b863a92..80987e36e 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults_development.yaml @@ -1,3 +1,3 @@ kubernetes-app: - name: ${component_type}-development + name: ${component.type}-development namespace: development-namespace diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index 84698c0b4..d45882ea1 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -51,10 +51,10 @@ def inflate(self) -> list[PipelineComponent]: }, to=ToSection( topics={ - TopicName("${component_type}"): TopicConfig( + TopicName("${component.type}"): TopicConfig( type=OutputTopicTypes.OUTPUT ), - TopicName("${component_name}"): TopicConfig( + TopicName("${component.name}"): TopicConfig( type=None, role="test" ), } @@ -68,7 +68,7 @@ 
def inflate(self) -> list[PipelineComponent]: to=ToSection( # type: ignore[reportGeneralTypeIssues] topics={ TopicName( - f"{self.full_name}-" + "${component_name}" + f"{self.full_name}-" + "${component.name}" ): TopicConfig(type=OutputTopicTypes.OUTPUT) } ).model_dump(), From f95afe23e09704f9bbfa5d8c3c59d472b2eb1533 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 20 Dec 2023 15:55:33 +0100 Subject: [PATCH 14/34] Refactor pipeline generator & representation (#392) Fixes #381 --- hooks/gen_docs/gen_docs_components.py | 2 +- kpops/cli/main.py | 10 +- .../base_defaults_component.py | 2 +- kpops/{pipeline_generator => }/pipeline.py | 140 +- kpops/pipeline_generator/__init__.py | 0 kpops/utils/pydantic.py | 2 +- kpops/utils/{yaml_loading.py => yaml.py} | 19 + tests/cli/test_pipeline_steps.py | 19 +- tests/compiler/test_pipeline_name.py | 18 +- tests/compiler/test_yaml_loading.py | 2 +- .../test_base_defaults_component.py | 4 +- tests/conftest.py | 2 +- tests/pipeline/snapshots/snap_test_example.py | 612 ++- .../pipeline/snapshots/snap_test_pipeline.py | 4338 ++++++++--------- tests/pipeline/test_pipeline.py | 61 +- 15 files changed, 2593 insertions(+), 2638 deletions(-) rename kpops/{pipeline_generator => }/pipeline.py (78%) delete mode 100644 kpops/pipeline_generator/__init__.py rename kpops/utils/{yaml_loading.py => yaml.py} (83%) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 203294c05..f1acf9973 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -11,7 +11,7 @@ from kpops.cli.registry import _find_classes from kpops.components import KafkaConnector, PipelineComponent from kpops.utils.colorify import redify, yellowify -from kpops.utils.yaml_loading import load_yaml_file +from kpops.utils.yaml import load_yaml_file PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py" PATH_CLI_COMMANDS_DOC = ROOT / "docs/docs/user/references/cli-commands.md" diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 285c1154f..5a7c758e4 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -19,9 +19,10 @@ from kpops.component_handlers.topic.handler import TopicHandler from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper from kpops.config import ENV_PREFIX, KpopsConfig -from kpops.pipeline_generator.pipeline import Pipeline +from kpops.pipeline import Pipeline, PipelineGenerator from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema from kpops.utils.pydantic import YamlConfigSettingsSource +from kpops.utils.yaml import print_yaml if TYPE_CHECKING: from collections.abc import Iterator @@ -144,9 +145,8 @@ def setup_pipeline( registry.find_components("kpops.components") handlers = setup_handlers(components_module, kpops_config) - return Pipeline.load_from_yaml( - pipeline_base_dir, pipeline_path, environment, registry, kpops_config, handlers - ) + parser = PipelineGenerator(kpops_config, registry, handlers) + return parser.load_yaml(pipeline_base_dir, pipeline_path, environment) def setup_handlers( @@ -288,7 +288,7 @@ def generate( ) if not template: - pipeline.print_yaml() + print_yaml(pipeline.to_yaml()) if template: steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 7ef978705..fff9135da 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ 
b/kpops/components/base_components/base_defaults_component.py @@ -18,7 +18,7 @@ from kpops.utils.docstring import describe_attr from kpops.utils.environment import ENV from kpops.utils.pydantic import DescConfigModel, to_dash -from kpops.utils.yaml_loading import load_yaml_file +from kpops.utils.yaml import load_yaml_file try: from typing import Self diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline.py similarity index 78% rename from kpops/pipeline_generator/pipeline.py rename to kpops/pipeline.py index 07ea94e1e..de37576d0 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline.py @@ -4,17 +4,16 @@ import logging from collections import Counter from contextlib import suppress +from dataclasses import dataclass, field from typing import TYPE_CHECKING import yaml -from pydantic import BaseModel, SerializeAsAny -from rich.console import Console -from rich.syntax import Syntax +from pydantic import Field, RootModel, SerializeAsAny from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.dict_ops import generate_substitution, update_nested_pair from kpops.utils.environment import ENV -from kpops.utils.yaml_loading import load_yaml_file, substitute, substitute_nested +from kpops.utils.yaml import load_yaml_file, substitute_nested if TYPE_CHECKING: from collections.abc import Iterator @@ -35,17 +34,19 @@ class ValidationError(Exception): pass -class PipelineComponents(BaseModel): - """Stores the pipeline components.""" +class Pipeline(RootModel): + """Pipeline representation.""" - components: list[SerializeAsAny[PipelineComponent]] = [] + root: list[SerializeAsAny[PipelineComponent]] = Field( + default=[], title="Components" + ) @property def last(self) -> PipelineComponent: - return self.components[-1] + return self.root[-1] def find(self, component_name: str) -> PipelineComponent: - for component in self.components: + for component in self.root: if component_name == component.name: return component msg = f"Component {component_name} not found" @@ -53,19 +54,25 @@ def find(self, component_name: str) -> PipelineComponent: def add(self, component: PipelineComponent) -> None: self._populate_component_name(component) - self.components.append(component) + self.root.append(component) def __bool__(self) -> bool: - return bool(self.components) + return bool(self.root) def __iter__(self) -> Iterator[PipelineComponent]: - return iter(self.components) + return iter(self.root) def __len__(self) -> int: - return len(self.components) + return len(self.root) + + def to_yaml(self) -> str: + return yaml.dump(self.model_dump(mode="json", by_alias=True, exclude_none=True)) + + def validate(self) -> None: + self.validate_unique_names() def validate_unique_names(self) -> None: - step_names = [component.full_name for component in self.components] + step_names = [component.full_name for component in self.root] duplicates = [name for name, count in Counter(step_names).items() if count > 1] if duplicates: msg = f"step names should be unique. 
duplicate step names: {', '.join(duplicates)}" @@ -97,32 +104,31 @@ def create_env_components_index( return index -class Pipeline: - def __init__( +@dataclass +class PipelineGenerator: + config: KpopsConfig + registry: Registry + handlers: ComponentHandlers + pipeline: Pipeline = field(init=False, default_factory=Pipeline) + + def parse( self, - component_list: list[dict], + components: list[dict], environment_components: list[dict], - registry: Registry, - config: KpopsConfig, - handlers: ComponentHandlers, - ) -> None: - self.components: PipelineComponents = PipelineComponents() - self.handlers = handlers - self.config = config - self.registry = registry + ) -> Pipeline: + """Parse pipeline from sequence of component dictionaries. + + :param components: List of components + :param environment_components: List of environment-specific components + :returns: Initialized pipeline object + """ self.env_components_index = create_env_components_index(environment_components) - self.parse_components(component_list) - self.validate() - - @classmethod - def load_from_yaml( - cls, - base_dir: Path, - path: Path, - environment: str | None, - registry: Registry, - config: KpopsConfig, - handlers: ComponentHandlers, + self.parse_components(components) + self.pipeline.validate() + return self.pipeline + + def load_yaml( + self, base_dir: Path, path: Path, environment: str | None ) -> Pipeline: """Load pipeline definition from yaml. @@ -130,15 +136,12 @@ def load_from_yaml( :param base_dir: Base directory to the pipelines (default is current working directory) :param path: Path to pipeline definition yaml file - :param registry: Pipeline components registry - :param config: Pipeline config - :param handlers: Component handlers :raises TypeError: The pipeline definition should contain a list of components :raises TypeError: The env-specific pipeline definition should contain a list of components :returns: Initialized pipeline object """ - Pipeline.set_pipeline_name_env_vars(base_dir, path) - Pipeline.set_environment_name(environment) + PipelineGenerator.set_pipeline_name_env_vars(base_dir, path) + PipelineGenerator.set_environment_name(environment) main_content = load_yaml_file(path, substitution=ENV) if not isinstance(main_content, list): @@ -148,7 +151,9 @@ def load_from_yaml( if ( environment and ( - env_file := Pipeline.pipeline_filename_environment(path, environment) + env_file := PipelineGenerator.pipeline_filename_environment( + path, environment + ) ).exists() ): env_content = load_yaml_file(env_file, substitution=ENV) @@ -156,17 +161,17 @@ def load_from_yaml( msg = f"The pipeline definition {env_file} should contain a list of components" raise TypeError(msg) - return cls(main_content, env_content, registry, config, handlers) + return self.parse(main_content, env_content) - def parse_components(self, component_list: list[dict]) -> None: + def parse_components(self, components: list[dict]) -> None: """Instantiate, enrich and inflate a list of components. 
- :param component_list: List of components + :param components: List of components :raises ValueError: Every component must have a type defined :raises ParsingException: Error enriching component :raises ParsingException: All undefined exceptions """ - for component_data in component_list: + for component_data in components: try: try: component_type: str = component_data["type"] @@ -208,21 +213,21 @@ def apply_component( original_from_component_name, from_topic, ) in enriched_component.from_.components.items(): - original_from_component = self.components.find( + original_from_component = self.pipeline.find( original_from_component_name ) inflated_from_component = original_from_component.inflate()[-1] - resolved_from_component = self.components.find( + resolved_from_component = self.pipeline.find( inflated_from_component.name ) enriched_component.weave_from_topics( resolved_from_component.to, from_topic ) - elif self.components: + elif self.pipeline: # read from previous component - prev_component = self.components.last + prev_component = self.pipeline.last enriched_component.weave_from_topics(prev_component.to) - self.components.add(enriched_component) + self.pipeline.add(enriched_component) def enrich_component( self, @@ -251,32 +256,6 @@ def enrich_component( **component_data, ) - def print_yaml(self, substitution: dict | None = None) -> None: - """Print the generated pipeline definition. - - :param substitution: Substitution dictionary, defaults to None - """ - syntax = Syntax( - substitute(str(self), substitution), - "yaml", - background_color="default", - theme="ansi_dark", - ) - Console( - width=1000 # HACK: overwrite console width to avoid truncating output - ).print(syntax) - - def __iter__(self) -> Iterator[PipelineComponent]: - return iter(self.components) - - def __str__(self) -> str: - return yaml.dump( - self.components.model_dump(mode="json", by_alias=True, exclude_none=True) - ) - - def __len__(self) -> int: - return len(self.components) - def substitute_in_component(self, component_as_dict: dict) -> dict: """Substitute all $-placeholders in a component in dict representation. @@ -310,15 +289,12 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: ) ) - def validate(self) -> None: - self.components.validate_unique_names() - @staticmethod def pipeline_filename_environment(pipeline_path: Path, environment: str) -> Path: """Add the environment name from the KpopsConfig to the pipeline.yaml path. 
:param pipeline_path: Path to pipeline.yaml file
-        :param config: The KpopsConfig
+        :param environment: Environment name
         :returns: An absolute path to the pipeline_<environment>.yaml
         """
         return pipeline_path.with_stem(f"{pipeline_path.stem}_{environment}")
diff --git a/kpops/pipeline_generator/__init__.py b/kpops/pipeline_generator/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py
index 8d8a55493..3b643af51 100644
--- a/kpops/utils/pydantic.py
+++ b/kpops/utils/pydantic.py
@@ -11,7 +11,7 @@
 
 from kpops.utils.dict_ops import update_nested_pair
 from kpops.utils.docstring import describe_object
-from kpops.utils.yaml_loading import load_yaml_file
+from kpops.utils.yaml import load_yaml_file
 
 
 def to_camel(s: str) -> str:
diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml.py
similarity index 83%
rename from kpops/utils/yaml_loading.py
rename to kpops/utils/yaml.py
index 668a609cc..2f0e16608 100644
--- a/kpops/utils/yaml_loading.py
+++ b/kpops/utils/yaml.py
@@ -5,6 +5,8 @@
 import yaml
 from cachetools import cached
 from cachetools.keys import hashkey
+from rich.console import Console
+from rich.syntax import Syntax
 
 from kpops.utils.dict_ops import ImprovedTemplate
 
@@ -79,3 +81,20 @@ def substitute_nested(input: str, **kwargs) -> str:
         msg = "An infinite loop condition detected. Check substitution variables."
         raise ValueError(msg)
     return old_str
+
+
+def print_yaml(input: str, *, substitution: dict | None = None) -> None:
+    """Print YAML to console with syntax highlighting.
+
+    :param input: YAML content
+    :param substitution: Substitution dictionary, defaults to None
+    """
+    syntax = Syntax(
+        substitute(input, substitution),
+        "yaml",
+        background_color="default",
+        theme="ansi_dark",
+    )
+    Console(
+        width=1000  # HACK: overwrite console width to avoid truncating output
+    ).print(syntax)
diff --git a/tests/cli/test_pipeline_steps.py b/tests/cli/test_pipeline_steps.py
index a09d7b064..f9a345ae7 100644
--- a/tests/cli/test_pipeline_steps.py
+++ b/tests/cli/test_pipeline_steps.py
@@ -6,7 +6,8 @@
 from pytest_mock import MockerFixture
 
 from kpops.cli.main import FilterType, get_steps_to_apply
-from kpops.pipeline_generator.pipeline import Pipeline
+from kpops.components import PipelineComponent
+from kpops.pipeline import Pipeline
 
 PREFIX = "example-prefix-"
 
@@ -25,17 +26,11 @@
 
 @pytest.fixture(autouse=True)
 def pipeline() -> Pipeline:
-    class TestPipeline:
-        components = [
-            test_component_1,
-            test_component_2,
-            test_component_3,
-        ]
-
-        def __iter__(self):
-            return iter(self.components)
-
-    return cast(Pipeline, TestPipeline())
+    pipeline = Pipeline()
+    pipeline.add(cast(PipelineComponent, test_component_1))
+    pipeline.add(cast(PipelineComponent, test_component_2))
+    pipeline.add(cast(PipelineComponent, test_component_3))
+    return pipeline
 
 
 @pytest.fixture(autouse=True)
diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py
index 5ca0da6ee..cca9fe88c 100644
--- a/tests/compiler/test_pipeline_name.py
+++ b/tests/compiler/test_pipeline_name.py
@@ -2,7 +2,7 @@
 
 import pytest
 
-from kpops.pipeline_generator.pipeline import Pipeline
+from kpops.pipeline import PipelineGenerator
 from kpops.utils.environment import ENV
 
 DEFAULTS_PATH = Path(__file__).parent / "resources"
@@ -11,7 +11,7 @@
 
 
 def test_should_set_pipeline_name_with_default_base_dir():
-    Pipeline.set_pipeline_name_env_vars(DEFAULT_BASE_DIR, PIPELINE_PATH)
+    PipelineGenerator.set_pipeline_name_env_vars(DEFAULT_BASE_DIR, PIPELINE_PATH)
 
     assert ENV["pipeline_name"] == "some-random-path-for-testing"
     assert ENV["pipeline_name_0"] == "some"
@@ -22,7 +22,9 @@ def test_should_set_pipeline_name_with_default_base_dir():
 
 
 def test_should_set_pipeline_name_with_specific_relative_base_dir():
-    Pipeline.set_pipeline_name_env_vars(Path("./some/random/path"), PIPELINE_PATH)
+    PipelineGenerator.set_pipeline_name_env_vars(
+        Path("./some/random/path"), PIPELINE_PATH
+    )
 
     assert ENV["pipeline_name"] == "for-testing"
     assert ENV["pipeline_name_0"] == "for"
@@ -30,7 +32,9 @@ def test_should_set_pipeline_name_with_specific_relative_base_dir():
 
 
 def test_should_set_pipeline_name_with_specific_absolute_base_dir():
-    Pipeline.set_pipeline_name_env_vars(Path("some/random/path"), PIPELINE_PATH)
+    PipelineGenerator.set_pipeline_name_env_vars(
+        Path("some/random/path"), PIPELINE_PATH
+    )
 
     assert ENV["pipeline_name"] == "for-testing"
     assert ENV["pipeline_name_0"] == "for"
@@ -38,7 +42,7 @@ def test_should_set_pipeline_name_with_specific_absolute_base_dir():
 
 
 def test_should_set_pipeline_name_with_absolute_base_dir():
-    Pipeline.set_pipeline_name_env_vars(Path.cwd(), PIPELINE_PATH)
+    PipelineGenerator.set_pipeline_name_env_vars(Path.cwd(), PIPELINE_PATH)
 
     assert ENV["pipeline_name"] == "some-random-path-for-testing"
     assert ENV["pipeline_name_0"] == "some"
@@ -52,11 +56,11 @@ def test_should_not_set_pipeline_name_with_the_same_base_dir():
     with pytest.raises(
         ValueError, match="The pipeline-base-dir should not equal the pipeline-path"
     ):
-        Pipeline.set_pipeline_name_env_vars(PIPELINE_PATH, PIPELINE_PATH)
+        PipelineGenerator.set_pipeline_name_env_vars(PIPELINE_PATH, PIPELINE_PATH)
 
 
 def test_pipeline_file_name_environment():
-    environment = Pipeline.pipeline_filename_environment(
+    environment = PipelineGenerator.pipeline_filename_environment(
         PIPELINE_PATH, "some_environment"
     )
     assert environment.name == "pipeline_some_environment.yaml"
diff --git a/tests/compiler/test_yaml_loading.py b/tests/compiler/test_yaml_loading.py
index c4bdcc3cf..47db51bfd 100644
--- a/tests/compiler/test_yaml_loading.py
+++ b/tests/compiler/test_yaml_loading.py
@@ -4,7 +4,7 @@
 
 import pytest
 import yaml
 
-from kpops.utils.yaml_loading import load_yaml_file
+from kpops.utils.yaml import load_yaml_file
 
 RESOURCE_PATH = Path(__file__).parent / "resources"
 
diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py
index fe478e7b0..e449da9a5 100644
--- a/tests/components/test_base_defaults_component.py
+++ b/tests/components/test_base_defaults_component.py
@@ -137,11 +137,11 @@ def test_inherit(self, config: KpopsConfig, handlers: ComponentHandlers):
         component = Child(
             config=config,
             handlers=handlers,
-            name="name-defined-in-pipeline_generator",
+            name="name-defined-in-pipeline_parser",
         )
 
         assert (
-            component.name == "name-defined-in-pipeline_generator"
+            component.name == "name-defined-in-pipeline_parser"
         ), "Kwargs should overwrite all other values"
         assert component.nice == {
             "fake-value": "fake"
diff --git a/tests/conftest.py b/tests/conftest.py
index 9da841b07..479672e86 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -4,7 +4,7 @@
 
 import pytest
 
-from kpops.utils.yaml_loading import load_yaml_file
+from kpops.utils.yaml import load_yaml_file
 
 
 @pytest.fixture()
diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py
index 406679c8b..14d3d650c 100644
--- a/tests/pipeline/snapshots/snap_test_example.py
+++ b/tests/pipeline/snapshots/snap_test_example.py
@@ 
-7,348 +7,346 @@ snapshots = Snapshot() -snapshots['TestExample.test_atm_fraud atm-fraud-pipeline'] = { - 'components': [ - { - 'app': { - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', - 'imageTag': '1.0.0', - 'nameOverride': 'account-producer', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'schedule': '0 12 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' - }, - 'suspend': True +snapshots['TestExample.test_atm_fraud atm-fraud-pipeline'] = [ + { + 'app': { + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', + 'imageTag': '1.0.0', + 'nameOverride': 'account-producer', + 'prometheus': { + 'jmx': { + 'enabled': False + } }, - 'name': 'account-producer', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False + 'replicaCount': 1, + 'schedule': '0 12 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + 'optimizeLeaveGroupBehavior': False, + 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, - 'to': { - 'models': { - }, - 'topics': { - 'bakdata-atm-fraud-detection-account-producer-topic': { - 'configs': { - }, - 'partitions_count': 3 - } - } + 'suspend': True + }, + 'name': 'account-producer', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False }, - 'type': 'producer-app', - 'version': '2.9.0' + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' }, - { - 'app': { - 'commandLine': { - 'ITERATION': 20, - 'REAL_TX': 19 - }, - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer', - 'imageTag': '1.0.0', - 'nameOverride': 'transaction-avro-producer', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'schedule': '0 12 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { + 'to': { + 'models': { + }, + 'topics': { + 'bakdata-atm-fraud-detection-account-producer-topic': { + 'configs': { }, - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' - }, - 'suspend': True + 'partitions_count': 3 + } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'commandLine': { + 'ITERATION': 20, + 'REAL_TX': 19 }, - 'name': 'transaction-avro-producer', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer', + 'imageTag': '1.0.0', + 'nameOverride': 'transaction-avro-producer', + 'prometheus': { + 'jmx': { + 'enabled': False 
+ } }, - 'to': { - 'models': { + 'replicaCount': 1, + 'schedule': '0 12 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { }, - 'topics': { - 'bakdata-atm-fraud-detection-transaction-avro-producer-topic': { - 'configs': { - }, - 'partitions_count': 3 - } - } + 'optimizeLeaveGroupBehavior': False, + 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, - 'type': 'producer-app', - 'version': '2.9.0' + 'suspend': True }, - { - 'app': { - 'annotations': { - 'consumerGroup': 'atm-transactionjoiner-atm-fraud-joinedtransactions-topic' - }, - 'commandLine': { - 'PRODUCTIVE': False - }, - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-transactionjoiner', - 'imageTag': '1.0.0', - 'labels': { - 'pipeline': 'bakdata-atm-fraud-detection' - }, - 'nameOverride': 'transaction-joiner', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic', - 'inputTopics': [ - 'bakdata-atm-fraud-detection-transaction-avro-producer-topic' - ], - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' - } + 'name': 'transaction-avro-producer', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False }, - 'name': 'transaction-joiner', - 'namespace': '${NAMESPACE}', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'to': { - 'models': { - }, - 'topics': { - 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic': { - 'configs': { - }, - 'partitions_count': 1, - 'type': 'error' + 'topics': { + 'bakdata-atm-fraud-detection-transaction-avro-producer-topic': { + 'configs': { }, - 'bakdata-atm-fraud-detection-transaction-joiner-topic': { - 'configs': { - }, - 'partitions_count': 3 - } + 'partitions_count': 3 } - }, - 'type': 'streams-app', - 'version': '2.9.0' + } }, - { - 'app': { - 'annotations': { - 'consumerGroup': 'atm-frauddetector-atm-fraud-possiblefraudtransactions-topic' - }, - 'commandLine': { - 'PRODUCTIVE': False - }, - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-frauddetector', - 'imageTag': '1.0.0', - 'labels': { - 'pipeline': 'bakdata-atm-fraud-detection' - }, - 'nameOverride': 'fraud-detector', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic', - 'inputTopics': [ - 'bakdata-atm-fraud-detection-transaction-joiner-topic' - ], - 'optimizeLeaveGroupBehavior': False, - 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'annotations': { + 
'consumerGroup': 'atm-transactionjoiner-atm-fraud-joinedtransactions-topic'
+            },
+            'commandLine': {
+                'PRODUCTIVE': False
+            },
+            'debug': True,
+            'image': '${DOCKER_REGISTRY}/atm-demo-transactionjoiner',
+            'imageTag': '1.0.0',
+            'labels': {
+                'pipeline': 'bakdata-atm-fraud-detection'
+            },
+            'nameOverride': 'transaction-joiner',
+            'prometheus': {
+                'jmx': {
+                    'enabled': False
                 }
             },
-            'name': 'fraud-detector',
-            'namespace': '${NAMESPACE}',
-            'prefix': '',
-            'repo_config': {
-                'repo_auth_flags': {
-                    'insecure_skip_tls_verify': False
-                },
-                'repository_name': 'bakdata-streams-bootstrap',
-                'url': 'https://bakdata.github.io/streams-bootstrap/'
+            'replicaCount': 1,
+            'streams': {
+                'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+                'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic',
+                'inputTopics': [
+                    'bakdata-atm-fraud-detection-transaction-avro-producer-topic'
+                ],
+                'optimizeLeaveGroupBehavior': False,
+                'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic',
+                'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
+            }
+        },
+        'name': 'transaction-joiner',
+        'namespace': '${NAMESPACE}',
+        'prefix': '',
+        'repo_config': {
+            'repo_auth_flags': {
+                'insecure_skip_tls_verify': False
             },
-            'to': {
-                'models': {
+            'repository_name': 'bakdata-streams-bootstrap',
+            'url': 'https://bakdata.github.io/streams-bootstrap/'
+        },
+        'to': {
+            'models': {
+            },
+            'topics': {
+                'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic': {
+                    'configs': {
+                    },
+                    'partitions_count': 1,
+                    'type': 'error'
                 },
-                'topics': {
-                    'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic': {
-                        'configs': {
-                        },
-                        'partitions_count': 1,
-                        'type': 'error'
+                'bakdata-atm-fraud-detection-transaction-joiner-topic': {
+                    'configs': {
                     },
-                    'bakdata-atm-fraud-detection-fraud-detector-topic': {
-                        'configs': {
-                        },
-                        'partitions_count': 3
-                    }
+                    'partitions_count': 3
                 }
+            }
+        },
+        'type': 'streams-app',
+        'version': '2.9.0'
+    },
+    {
+        'app': {
+            'annotations': {
+                'consumerGroup': 'atm-frauddetector-atm-fraud-possiblefraudtransactions-topic'
+            },
+            'commandLine': {
+                'PRODUCTIVE': False
+            },
+            'debug': True,
+            'image': '${DOCKER_REGISTRY}/atm-demo-frauddetector',
+            'imageTag': '1.0.0',
+            'labels': {
+                'pipeline': 'bakdata-atm-fraud-detection'
             },
-            'type': 'streams-app',
-            'version': '2.9.0'
+            'nameOverride': 'fraud-detector',
+            'prometheus': {
+                'jmx': {
+                    'enabled': False
+                }
+            },
+            'replicaCount': 1,
+            'streams': {
+                'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+                'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic',
+                'inputTopics': [
+                    'bakdata-atm-fraud-detection-transaction-joiner-topic'
+                ],
+                'optimizeLeaveGroupBehavior': False,
+                'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic',
+                'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
+            }
         },
-        {
-            'app': {
-                'annotations': {
-                    'consumerGroup': 'atm-accountlinker-atm-fraud-output-topic'
-                },
-                'commandLine': {
-                    'PRODUCTIVE': False
-                },
-                'debug': True,
-                'image': '${DOCKER_REGISTRY}/atm-demo-accountlinker',
-                'imageTag': '1.0.0',
-                'labels': {
-                    'pipeline': 'bakdata-atm-fraud-detection'
-                },
-                'nameOverride': 'account-linker',
-                'prometheus': {
-                    'jmx': {
-                        'enabled': False
-                    }
+        'name': 'fraud-detector',
+        'namespace': '${NAMESPACE}',
+        'prefix': '',
+        'repo_config': {
+            'repo_auth_flags': {
+                'insecure_skip_tls_verify': False
+            },
+            'repository_name': 'bakdata-streams-bootstrap',
+            'url': 'https://bakdata.github.io/streams-bootstrap/'
+        },
+        'to': {
+            'models': {
+            },
+            'topics': {
+                'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic': {
+                    'configs': {
+                    },
+                    'partitions_count': 1,
+                    'type': 'error'
                 },
-                'replicaCount': 1,
-                'streams': {
-                    'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
-                    'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic',
-                    'extraInputTopics': {
-                        'accounts': [
-                            'bakdata-atm-fraud-detection-account-producer-topic'
-                        ]
+                'bakdata-atm-fraud-detection-fraud-detector-topic': {
+                    'configs': {
                     },
-                    'inputTopics': [
-                        'bakdata-atm-fraud-detection-fraud-detector-topic'
-                    ],
-                    'optimizeLeaveGroupBehavior': False,
-                    'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic',
-                    'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
+                    'partitions_count': 3
                 }
+            }
+        },
+        'type': 'streams-app',
+        'version': '2.9.0'
+    },
+    {
+        'app': {
+            'annotations': {
+                'consumerGroup': 'atm-accountlinker-atm-fraud-output-topic'
             },
-            'from': {
-                'components': {
-                    'account-producer': {
-                        'role': 'accounts'
-                    },
-                    'fraud-detector': {
-                        'type': 'input'
-                    }
-                },
-                'topics': {
+            'commandLine': {
+                'PRODUCTIVE': False
+            },
+            'debug': True,
+            'image': '${DOCKER_REGISTRY}/atm-demo-accountlinker',
+            'imageTag': '1.0.0',
+            'labels': {
+                'pipeline': 'bakdata-atm-fraud-detection'
+            },
+            'nameOverride': 'account-linker',
+            'prometheus': {
+                'jmx': {
+                    'enabled': False
                 }
             },
-            'name': 'account-linker',
-            'namespace': '${NAMESPACE}',
-            'prefix': '',
-            'repo_config': {
-                'repo_auth_flags': {
-                    'insecure_skip_tls_verify': False
+            'replicaCount': 1,
+            'streams': {
+                'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
+                'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic',
+                'extraInputTopics': {
+                    'accounts': [
+                        'bakdata-atm-fraud-detection-account-producer-topic'
+                    ]
                 },
-                'repository_name': 'bakdata-streams-bootstrap',
-                'url': 'https://bakdata.github.io/streams-bootstrap/'
-            },
-            'to': {
-                'models': {
+                'inputTopics': [
+                    'bakdata-atm-fraud-detection-fraud-detector-topic'
+                ],
+                'optimizeLeaveGroupBehavior': False,
+                'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic',
+                'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/'
+            }
+        },
+        'from': {
+            'components': {
+                'account-producer': {
+                    'role': 'accounts'
                 },
-                'topics': {
-                    'bakdata-atm-fraud-detection-account-linker-dead-letter-topic': {
-                        'configs': {
-                        },
-                        'partitions_count': 1,
-                        'type': 'error'
-                    },
-                    'bakdata-atm-fraud-detection-account-linker-topic': {
-                        'configs': {
-                        },
-                        'partitions_count': 3
-                    }
+                'fraud-detector': {
+                    'type': 'input'
                 }
             },
-            'type': 'streams-app',
-            'version': '2.9.0'
+            'topics': {
+            }
         },
-        {
-            'app': {
-                'auto.create': True,
-                'connection.ds.pool.size': 5,
-                'connection.password': 'AppPassword',
-                'connection.url': 'jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db',
-                'connection.user': 'app1',
-                'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector',
-                'errors.deadletterqueue.context.headers.enable': True,
-                'errors.deadletterqueue.topic.name': 'postgres-request-sink-dead-letters',
-                'errors.deadletterqueue.topic.replication.factor': 1,
-                'errors.tolerance': 'all',
-                'insert.mode': 'insert',
-                'insert.mode.databaselevel': True,
-                'key.converter': 'org.apache.kafka.connect.storage.StringConverter',
-                'name': 'postgresql-connector',
-                'pk.mode': 'record_value',
-                'table.name.format': 'fraud_transactions',
-                'tasks.max': 1,
-                'topics': 'bakdata-atm-fraud-detection-account-linker-topic',
-                'transforms': 'flatten',
-                'transforms.flatten.type': 'org.apache.kafka.connect.transforms.Flatten$Value',
-                'value.converter': 'io.confluent.connect.avro.AvroConverter',
-                'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
+        'name': 'account-linker',
+        'namespace': '${NAMESPACE}',
+        'prefix': '',
+        'repo_config': {
+            'repo_auth_flags': {
+                'insecure_skip_tls_verify': False
             },
-            'name': 'postgresql-connector',
-            'namespace': '${NAMESPACE}',
-            'prefix': '',
-            'repo_config': {
-                'repo_auth_flags': {
-                    'insecure_skip_tls_verify': False
-                },
-                'repository_name': 'bakdata-kafka-connect-resetter',
-                'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+            'repository_name': 'bakdata-streams-bootstrap',
+            'url': 'https://bakdata.github.io/streams-bootstrap/'
+        },
+        'to': {
+            'models': {
             },
-            'resetter_values': {
+            'topics': {
+                'bakdata-atm-fraud-detection-account-linker-dead-letter-topic': {
+                    'configs': {
+                    },
+                    'partitions_count': 1,
+                    'type': 'error'
+                },
+                'bakdata-atm-fraud-detection-account-linker-topic': {
+                    'configs': {
+                    },
+                    'partitions_count': 3
+                }
+            }
+        },
+        'type': 'streams-app',
+        'version': '2.9.0'
+    },
+    {
+        'app': {
+            'auto.create': True,
+            'connection.ds.pool.size': 5,
+            'connection.password': 'AppPassword',
+            'connection.url': 'jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db',
+            'connection.user': 'app1',
+            'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector',
+            'errors.deadletterqueue.context.headers.enable': True,
+            'errors.deadletterqueue.topic.name': 'postgres-request-sink-dead-letters',
+            'errors.deadletterqueue.topic.replication.factor': 1,
+            'errors.tolerance': 'all',
+            'insert.mode': 'insert',
+            'insert.mode.databaselevel': True,
+            'key.converter': 'org.apache.kafka.connect.storage.StringConverter',
+            'name': 'postgresql-connector',
+            'pk.mode': 'record_value',
+            'table.name.format': 'fraud_transactions',
+            'tasks.max': 1,
+            'topics': 'bakdata-atm-fraud-detection-account-linker-topic',
+            'transforms': 'flatten',
+            'transforms.flatten.type': 'org.apache.kafka.connect.transforms.Flatten$Value',
+            'value.converter': 'io.confluent.connect.avro.AvroConverter',
+            'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081'
+        },
+        'name': 'postgresql-connector',
+        'namespace': '${NAMESPACE}',
+        'prefix': '',
+        'repo_config': {
+            'repo_auth_flags': {
+                'insecure_skip_tls_verify': False
             },
-            'type': 'kafka-sink-connector',
-            'version': '1.0.4'
-        }
-    ]
-}
+            'repository_name': 'bakdata-kafka-connect-resetter',
+            'url': 'https://bakdata.github.io/kafka-connect-resetter/'
+        },
+        'resetter_values': {
+        },
+        'type': 'kafka-sink-connector',
+        'version': '1.0.4'
+    }
+]
diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py
index d1e6f1776..c9fee4d4b 100644
--- a/tests/pipeline/snapshots/snap_test_pipeline.py
+++ b/tests/pipeline/snapshots/snap_test_pipeline.py
@@ -7,2303 +7,2275 @@ snapshots = Snapshot()
-snapshots['TestPipeline.test_default_config test-pipeline'] = {
-    'components': [
-        {
-            'app': {
-                'nameOverride': 'resources-custom-config-app1',
-                'resources': {
-                    'limits': {
-                        'memory': '2G'
-                    },
-                    'requests': {
-                        'memory': '2G'
-                    }
-                },
-                'streams': {
-                    'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
-                    
'extraOutputTopics': { - }, - 'outputTopic': 'resources-custom-config-app1', - 'schemaRegistryUrl': 'http://localhost:8081/' +snapshots['TestPipeline.test_default_config test-pipeline'] = [ + { + 'app': { + 'nameOverride': 'resources-custom-config-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' } }, - 'name': 'app1', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-custom-config-app1': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-custom-config-app1', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app1', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'producer-app', - 'version': '2.9.0' - }, - { - 'app': { - 'image': 'some-image', - 'labels': { - 'pipeline': 'resources-custom-config' - }, - 'nameOverride': 'resources-custom-config-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'resources-custom-config-app2-error', - 'inputTopics': [ - 'resources-custom-config-app1' - ], - 'outputTopic': 'resources-custom-config-app2', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-custom-config-app1': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'nameOverride': 'resources-custom-config-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'resources-custom-config-app2-error', + 'inputTopics': [ + 'resources-custom-config-app1' + ], + 'outputTopic': 'resources-custom-config-app2', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'app2', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-custom-config-app2': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - }, - 'resources-custom-config-app2-error': { - 'configs': { - }, - 'partitions_count': 1, - 'type': 'error' - } + 'topics': { + 'resources-custom-config-app2': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + }, + 'resources-custom-config-app2-error': { + 'configs': { + }, + 'partitions_count': 1, + 
'type': 'error' } - }, - 'type': 'streams-app', - 'version': '2.9.0' - } - ] -} + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } +] -snapshots['TestPipeline.test_inflate_pipeline test-pipeline'] = { - 'components': [ - { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'fake-arg-value' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-inflate-scheduled-producer', - 'schedule': '30 3/8 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'scheduled-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-pipeline-with-inflate-scheduled-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } - } - }, - 'type': 'scheduled-producer', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-pipeline-with-inflate-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-inflate-converter-error', - 'inputTopics': [ - 'resources-pipeline-with-inflate-scheduled-producer' - ], - 'outputTopic': 'resources-pipeline-with-inflate-converter', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'converter', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-inflate-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-inflate-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'converter', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-pipeline-with-inflate-should-inflate' - ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 
'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error', - 'inputTopics': [ - 'resources-pipeline-with-inflate-converter' - ], - 'outputTopic': 'resources-pipeline-with-inflate-should-inflate', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'should-inflate', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-inflate-should-inflate': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-inflate-should-inflate-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'should-inflate', - 'version': '2.4.2' - }, - { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'resources-pipeline-with-inflate-should-inflate', - 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1' - }, - 'name': 'should-inflate-inflated-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'to': { - 'models': { - }, - 'topics': { - 'kafka-sink-connector': { - 'configs': { - }, - 'type': 'output' - }, - 'should-inflate-inflated-sink-connector': { - 'configs': { - }, - 'role': 'test' - } - } +snapshots['TestPipeline.test_inflate_pipeline test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'nameOverride': 'resources-pipeline-with-inflate-scheduled-producer', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' }, - 
'type': 'kafka-sink-connector', - 'version': '1.0.4' - }, - { - 'app': { - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error', - 'inputTopics': [ - 'kafka-sink-connector' - ], - 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-pipeline-with-inflate-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-pipeline-with-inflate-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-converter-error', + 'inputTopics': [ + 'resources-pipeline-with-inflate-scheduled-producer' + ], + 'outputTopic': 'resources-pipeline-with-inflate-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'should-inflate-inflated-streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - }, - 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': { - 'configs': { - }, - 'type': 'output' - } + 'topics': { + 'resources-pipeline-with-inflate-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-inflate-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate', + 'cooldownPeriod': 300, + 
'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-pipeline-with-inflate-should-inflate' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-pipeline-with-inflate-should-inflate', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error', + 'inputTopics': [ + 'resources-pipeline-with-inflate-converter' + ], + 'outputTopic': 'resources-pipeline-with-inflate-should-inflate', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'should-inflate', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] -} - -snapshots['TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline'] = { - 'components': [ - { - 'app': { - 'image': 'fake-image', - 'nameOverride': 'resources-kafka-connect-sink-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-kafka-connect-sink-streams-app-error', - 'inputTopics': [ - 'example-topic' - ], - 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-pipeline-with-inflate-should-inflate': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-inflate-should-inflate-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-pipeline-with-inflate-should-inflate', + 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1' + }, + 'name': 'should-inflate-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { }, - 'from': { - 'components': { + 'topics': { + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' }, - 'topics': { - 'example-topic': { - 'type': 'input' - } + 
'should-inflate-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + }, + { + 'app': { + 'nameOverride': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' + ], + 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'should-inflate-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-kafka-connect-sink-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + }, + 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': { + 'configs': { + }, + 'type': 'output' } - }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-kafka-connect-sink-es-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'example-output' - }, - 'name': 'es-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' - } - ] -} + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] -snapshots['TestPipeline.test_load_pipeline test-pipeline'] = { - 'components': [ - { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'fake-arg-value' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'nameOverride': 'resources-first-pipeline-scheduled-producer', - 'schedule': '30 3/8 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 
'resources-first-pipeline-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'scheduled-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-first-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-first-pipeline-scheduled-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } - } - }, - 'type': 'scheduled-producer', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-first-pipeline-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-first-pipeline-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-first-pipeline-converter-error', - 'inputTopics': [ - 'resources-first-pipeline-scheduled-producer' - ], - 'outputTopic': 'resources-first-pipeline-converter', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'converter', - 'namespace': 'example-namespace', - 'prefix': 'resources-first-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-first-pipeline-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-first-pipeline-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } +snapshots['TestPipeline.test_kafka_connect_sink_weave_from_topics test-pipeline'] = [ + { + 'app': { + 'image': 'fake-image', + 'nameOverride': 'resources-kafka-connect-sink-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-kafka-connect-sink-streams-app-error', + 'inputTopics': [ + 'example-topic' + ], + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { }, - 'type': 'converter', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 
'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name' - ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error', - 'inputTopics': [ - 'resources-first-pipeline-converter' - ], - 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'example-topic': { + 'type': 'input' } + } + }, + 'name': 'streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'namespace': 'example-namespace', - 'prefix': 'resources-first-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-kafka-connect-sink-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'type': 'filter', - 'version': '2.4.2' - } - ] -} + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-kafka-connect-sink-es-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 
'example-output' + }, + 'name': 'es-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + } +] -snapshots['TestPipeline.test_model_serialization test-pipeline'] = { - 'components': [ - { - 'app': { - 'nameOverride': 'resources-pipeline-with-paths-account-producer', - 'streams': { - 'brokers': 'test', - 'extraOutputTopics': { - }, - 'outputTopic': 'out', - 'schemaRegistryUrl': 'http://localhost:8081/' - } +snapshots['TestPipeline.test_load_pipeline test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'nameOverride': 'resources-first-pipeline-scheduled-producer', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-first-pipeline-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' }, - 'name': 'account-producer', - 'namespace': 'test', - 'prefix': 'resources-pipeline-with-paths-', - 'repo_config': { - 'repo_auth_flags': { - 'ca_file': 'my-cert.cert', - 'insecure_skip_tls_verify': False, - 'password': '$CI_JOB_TOKEN', - 'username': 'masked' - }, - 'repository_name': 'masked', - 'url': 'masked' - }, - 'type': 'producer-app', - 'version': '2.4.2' - } - ] -} - -snapshots['TestPipeline.test_no_input_topic test-pipeline'] = { - 'components': [ - { - 'app': { - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-no-input-topic-pipeline-app1', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-no-input-topic-pipeline-app1-error', - 'inputPattern': '.*', - 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-first-pipeline-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-first-pipeline-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-first-pipeline-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 
'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-first-pipeline-converter-error', + 'inputTopics': [ + 'resources-first-pipeline-scheduled-producer' + ], + 'outputTopic': 'resources-first-pipeline-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'from': { - 'components': { + 'topics': { + 'resources-first-pipeline-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' }, - 'topics': { - '.*': { - 'type': 'pattern' - } - } - }, - 'name': 'app1', - 'namespace': 'example-namespace', - 'prefix': 'resources-no-input-topic-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-no-input-topic-pipeline-app1-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'nameOverride': 'resources-no-input-topic-pipeline-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-no-input-topic-pipeline-app2-error', - 'extraOutputTopics': { - 'extra': 'example-output-extra', - 'test-output': 'test-output-extra' - }, - 'inputTopics': [ - 'example-output' - ], - 'schemaRegistryUrl': 'http://localhost:8081/' + 'resources-first-pipeline-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 
'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error', + 'inputTopics': [ + 'resources-first-pipeline-converter' + ], + 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'app2', - 'namespace': 'example-namespace', - 'prefix': 'resources-no-input-topic-pipeline-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output-extra': { - 'configs': { - }, - 'role': 'extra' - }, - 'resources-no-input-topic-pipeline-app2-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - }, - 'test-output-extra': { - 'configs': { - }, - 'role': 'test-output' - } + 'topics': { + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] -} + } + }, + 'type': 'filter', + 'version': '2.4.2' + } +] + +snapshots['TestPipeline.test_model_serialization test-pipeline'] = [ + { + 'app': { + 'nameOverride': 'resources-pipeline-with-paths-account-producer', + 'streams': { + 'brokers': 'test', + 'extraOutputTopics': { + }, + 'outputTopic': 'out', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'account-producer', + 'namespace': 'test', + 'prefix': 'resources-pipeline-with-paths-', + 'repo_config': { + 'repo_auth_flags': { + 'ca_file': 'my-cert.cert', + 'insecure_skip_tls_verify': False, + 'password': '$CI_JOB_TOKEN', + 'username': 'masked' + }, + 'repository_name': 'masked', + 'url': 'masked' + }, + 'type': 'producer-app', + 'version': '2.4.2' + } +] -snapshots['TestPipeline.test_no_user_defined_components test-pipeline'] = { - 'components': [ - { - 'app': { - 'image': 'fake-image', - 'nameOverride': 'resources-no-user-defined-components-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-no-user-defined-components-streams-app-error', - 'inputTopics': [ - 'example-topic' - ], - 
'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081/' +snapshots['TestPipeline.test_no_input_topic test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-no-input-topic-pipeline-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-input-topic-pipeline-app1-error', + 'inputPattern': '.*', + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + }, + 'topics': { + '.*': { + 'type': 'pattern' } + } + }, + 'name': 'app1', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-input-topic-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'from': { - 'components': { + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' }, - 'topics': { - 'example-topic': { - 'type': 'input' - } + 'resources-no-input-topic-pipeline-app1-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'nameOverride': 'resources-no-input-topic-pipeline-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-input-topic-pipeline-app2-error', + 'extraOutputTopics': { + 'extra': 'example-output-extra', + 'test-output': 'test-output-extra' + }, + 'inputTopics': [ + 'example-output' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-input-topic-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-no-user-defined-components-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-no-user-defined-components-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'example-output-extra': { + 'configs': { + }, + 'role': 'extra' + }, + 'resources-no-input-topic-pipeline-app2-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + }, + 'test-output-extra': { + 'configs': { + }, + 'role': 'test-output' } - }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] -} + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] 
-snapshots['TestPipeline.test_pipelines_with_env_values test-pipeline'] = { - 'components': [ - { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'override-arg' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-envs-input-producer', - 'schedule': '20 3/8 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-pipeline-with-envs-input-producer', - 'schemaRegistryUrl': 'http://localhost:8081/' - } +snapshots['TestPipeline.test_no_user_defined_components test-pipeline'] = [ + { + 'app': { + 'image': 'fake-image', + 'nameOverride': 'resources-no-user-defined-components-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-user-defined-components-streams-app-error', + 'inputTopics': [ + 'example-topic' + ], + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { }, - 'name': 'input-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-envs-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-pipeline-with-envs-input-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } + 'topics': { + 'example-topic': { + 'type': 'input' } + } + }, + 'name': 'streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-user-defined-components-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'scheduled-producer', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-pipeline-with-envs-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-pipeline-with-envs-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-envs-converter-error', - 'inputTopics': [ - 'resources-pipeline-with-envs-input-producer' - ], - 'outputTopic': 'resources-pipeline-with-envs-converter', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-no-user-defined-components-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] + 
+snapshots['TestPipeline.test_pipelines_with_env_values test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'override-arg' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'nameOverride': 'resources-pipeline-with-envs-input-producer', + 'schedule': '20 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-pipeline-with-envs-input-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'input-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' }, - 'name': 'converter', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-envs-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-envs-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-envs-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-pipeline-with-envs-input-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-pipeline-with-envs-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-pipeline-with-envs-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-envs-converter-error', + 'inputTopics': [ + 'resources-pipeline-with-envs-input-producer' + ], + 'outputTopic': 'resources-pipeline-with-envs-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'converter', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-pipeline-with-envs-filter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-pipeline-with-envs-filter' 
- ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-envs-filter', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-pipeline-with-envs-filter-error', - 'inputTopics': [ - 'resources-pipeline-with-envs-converter' - ], - 'outputTopic': 'resources-pipeline-with-envs-filter', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-pipeline-with-envs-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-envs-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-pipeline-with-envs-filter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-pipeline-with-envs-filter' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-pipeline-with-envs-filter', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-envs-filter-error', + 'inputTopics': [ + 'resources-pipeline-with-envs-converter' + ], + 'outputTopic': 'resources-pipeline-with-envs-filter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'filter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'filter', - 'namespace': 'example-namespace', - 'prefix': 'resources-pipeline-with-envs-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-pipeline-with-envs-filter': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-pipeline-with-envs-filter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-pipeline-with-envs-filter': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-envs-filter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'type': 'filter', - 'version': '2.4.2' - } - ] -} + } + }, + 'type': 'filter', + 
'version': '2.4.2' + } +] -snapshots['TestPipeline.test_prefix_pipeline_component test-pipeline'] = { - 'components': [ - { - 'app': { - 'debug': True, - 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', - 'imageTag': '1.0.0', - 'nameOverride': 'from-pipeline-component-account-producer', - 'prometheus': { - 'jmx': { - 'enabled': False - } - }, - 'replicaCount': 1, - 'schedule': '0 12 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'schemaRegistryUrl': 'http://localhost:8081/' - }, - 'suspend': True - }, - 'name': 'account-producer', - 'namespace': '${NAMESPACE}', - 'prefix': 'from-pipeline-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'type': 'producer-app', - 'version': '2.9.0' - } - ] -} +snapshots['TestPipeline.test_prefix_pipeline_component test-pipeline'] = [ + { + 'app': { + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', + 'imageTag': '1.0.0', + 'nameOverride': 'from-pipeline-component-account-producer', + 'prometheus': { + 'jmx': { + 'enabled': False + } + }, + 'replicaCount': 1, + 'schedule': '0 12 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'schemaRegistryUrl': 'http://localhost:8081/' + }, + 'suspend': True + }, + 'name': 'account-producer', + 'namespace': '${NAMESPACE}', + 'prefix': 'from-pipeline-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'type': 'producer-app', + 'version': '2.9.0' + } +] -snapshots['TestPipeline.test_read_from_component test-pipeline'] = { - 'components': [ - { - 'app': { - 'nameOverride': 'resources-read-from-component-producer1', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-read-from-component-producer1', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'producer1', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-producer1': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'producer-app', - 'version': '2.4.2' +snapshots['TestPipeline.test_read_from_component test-pipeline'] = [ + { + 'app': { + 'nameOverride': 'resources-read-from-component-producer1', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-read-from-component-producer1', + 'schemaRegistryUrl': 'http://localhost:8081/' + } }, - { - 'app': { - 'nameOverride': 'producer2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-read-from-component-producer2', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'producer2', - 'namespace': 'example-namespace', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 
'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-producer2': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'producer-app', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-read-from-component-inflate-step', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-read-from-component-inflate-step' - ] - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'resources-read-from-component-inflate-step', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-inflate-step-error', - 'inputTopics': [ - 'resources-read-from-component-producer2' - ], - 'outputTopic': 'resources-read-from-component-inflate-step', - 'schemaRegistryUrl': 'http://localhost:8081/' - } - }, - 'name': 'inflate-step', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-inflate-step': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-read-from-component-inflate-step-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'should-inflate', - 'version': '2.4.2' - }, - { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-read-from-component-inflate-step-inflated-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'resources-read-from-component-inflate-step', - 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1' - }, - 'name': 'inflate-step-inflated-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'to': { - 'models': { - }, - 'topics': { - 'inflate-step-inflated-sink-connector': { - 'configs': { - }, - 'role': 'test' - }, - 'kafka-sink-connector': { - 'configs': { - }, - 'type': 'output' - } - } - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' - }, - { - 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-inflated-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 
'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error', - 'inputTopics': [ - 'kafka-sink-connector' - ], - 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081/' - } + 'name': 'producer1', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'inflate-step-inflated-streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-read-from-component-inflate-step-inflated-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-read-from-component-producer1': { + 'configs': { + }, + 'type': 'output' } + } + }, + 'type': 'producer-app', + 'version': '2.4.2' + }, + { + 'app': { + 'nameOverride': 'producer2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-read-from-component-producer2', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'producer2', + 'namespace': 'example-namespace', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-read-from-component-inflate-step-without-prefix' - ] - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'nameOverride': 'inflate-step-without-prefix', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error', - 'inputTopics': [ - 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' - ], - 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-read-from-component-producer2': { + 'configs': { + }, + 'type': 'output' } + } + }, + 'type': 'producer-app', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-read-from-component-inflate-step', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 
'topics': [ + 'resources-read-from-component-inflate-step' + ] + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'resources-read-from-component-inflate-step', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-error', + 'inputTopics': [ + 'resources-read-from-component-producer2' + ], + 'outputTopic': 'resources-read-from-component-inflate-step', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'inflate-step-without-prefix', - 'namespace': 'example-namespace', - 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-inflate-step-without-prefix': { - 'configs': { - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-read-from-component-inflate-step-without-prefix-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-read-from-component-inflate-step': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-read-from-component-inflate-step-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-read-from-component-inflate-step', + 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1' + }, + 'name': 'inflate-step-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { }, - 'type': 'should-inflate', - 'version': '2.4.2' - }, - { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 
'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'resources-read-from-component-inflate-step-without-prefix', - 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1' - }, - 'name': 'inflate-step-without-prefix-inflated-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 'resetter_values': { - }, - 'to': { - 'models': { - }, - 'topics': { - 'inflate-step-without-prefix-inflated-sink-connector': { - 'configs': { - }, - 'role': 'test' - }, - 'kafka-sink-connector': { - 'configs': { - }, - 'type': 'output' - } + 'topics': { + 'inflate-step-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + }, + { + 'app': { + 'nameOverride': 'resources-read-from-component-inflate-step-inflated-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' + ], + 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' - }, - { - 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error', - 'inputTopics': [ - 'kafka-sink-connector' - ], - 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-read-from-component-inflate-step-without-prefix' + ] + }, + 
'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'nameOverride': 'inflate-step-without-prefix', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error', + 'inputTopics': [ + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' + ], + 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step-without-prefix', + 'namespace': 'example-namespace', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'inflate-step-without-prefix-inflated-streams-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-read-from-component-inflate-step-without-prefix': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-without-prefix-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-read-from-component-inflate-step-without-prefix', + 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1' + }, + 'name': 'inflate-step-without-prefix-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer1', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 
'resources-read-from-component-consumer1-error', - 'inputTopics': [ - 'resources-read-from-component-producer1' - ], - 'outputTopic': 'resources-read-from-component-consumer1', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'inflate-step-without-prefix-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + }, + { + 'app': { + 'nameOverride': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' + ], + 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step-without-prefix-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'from': { - 'components': { - 'producer1': { - 'type': 'input' - } + 'topics': { + 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': { + 'configs': { + }, + 'type': 'output' }, - 'topics': { + 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'name': 'consumer1', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer1': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-read-from-component-consumer1-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'nameOverride': 'resources-read-from-component-consumer1', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer1-error', + 'inputTopics': [ + 'resources-read-from-component-producer1' + ], + 'outputTopic': 'resources-read-from-component-consumer1', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'producer1': { + 'type': 'input' } }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 
'resources-read-from-component-consumer2-error', - 'inputTopics': [ - 'resources-read-from-component-producer1', - 'resources-read-from-component-consumer1' - ], - 'schemaRegistryUrl': 'http://localhost:8081/' - } + 'topics': { + } + }, + 'name': 'consumer1', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'from': { - 'components': { - 'consumer1': { - 'type': 'input' + 'topics': { + 'resources-read-from-component-consumer1': { + 'configs': { }, - 'producer1': { - 'type': 'input' - } + 'type': 'output' }, - 'topics': { - } - }, - 'name': 'consumer2', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer2-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } - }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer3', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer3-error', - 'inputTopics': [ - 'resources-read-from-component-producer1', - 'resources-read-from-component-producer2' - ], - 'schemaRegistryUrl': 'http://localhost:8081/' + 'resources-read-from-component-consumer1-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'from': { - 'components': { - 'producer2': { - 'type': 'input' - } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'nameOverride': 'resources-read-from-component-consumer2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer2-error', + 'inputTopics': [ + 'resources-read-from-component-producer1', + 'resources-read-from-component-consumer1' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'consumer1': { + 'type': 'input' }, - 'topics': { - 'resources-read-from-component-producer1': { - 'type': 'input' - } + 'producer1': { + 'type': 'input' } }, - 'name': 'consumer3', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer3-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } + 'topics': { + } + }, + 'name': 'consumer2', + 'namespace': 'example-namespace', + 
'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer4', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer4-error', - 'inputTopics': [ - 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' - ], - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-read-from-component-consumer2-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'from': { - 'components': { - 'inflate-step': { - 'type': 'input' - } - }, - 'topics': { + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'nameOverride': 'resources-read-from-component-consumer3', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer3-error', + 'inputTopics': [ + 'resources-read-from-component-producer1', + 'resources-read-from-component-producer2' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'producer2': { + 'type': 'input' } }, - 'name': 'consumer4', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer4-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-read-from-component-producer1': { + 'type': 'input' } + } + }, + 'name': 'consumer3', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'streams-app', - 'version': '2.4.2' - }, - { - 'app': { - 'nameOverride': 'resources-read-from-component-consumer5', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-read-from-component-consumer5-error', - 'inputTopics': [ - 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app' - ], - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-read-from-component-consumer3-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'from': { - 'components': { - 'inflate-step-without-prefix': { - 'type': 'input' - } - }, - 
'topics': { + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'nameOverride': 'resources-read-from-component-consumer4', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer4-error', + 'inputTopics': [ + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'inflate-step': { + 'type': 'input' } }, - 'name': 'consumer5', - 'namespace': 'example-namespace', - 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-read-from-component-consumer5-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } - } + 'topics': { + } + }, + 'name': 'consumer4', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'streams-app', - 'version': '2.4.2' - } - ] -} - -snapshots['TestPipeline.test_substitute_in_component test-pipeline'] = { - 'components': [ - { - 'app': { - 'commandLine': { - 'FAKE_ARG': 'fake-arg-value' - }, - 'image': 'example-registry/fake-image', - 'imageTag': '0.0.1', - 'labels': { - 'app_name': 'scheduled-producer', - 'app_schedule': '30 3/8 * * *', - 'app_type': 'scheduled-producer' - }, - 'nameOverride': 'resources-component-type-substitution-scheduled-producer', - 'schedule': '30 3/8 * * *', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'resources-component-type-substitution-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-read-from-component-consumer4-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'name': 'scheduled-producer', - 'namespace': 'example-namespace', - 'prefix': 'resources-component-type-substitution-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - 'com/bakdata/kafka/fake': '1.0.0' - }, - 'topics': { - 'resources-component-type-substitution-scheduled-producer': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 12, - 'type': 'output', - 'value_schema': 'com.bakdata.fake.Produced' - } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'nameOverride': 'resources-read-from-component-consumer5', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer5-error', + 'inputTopics': [ + 
'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'inflate-step-without-prefix': { + 'type': 'input' } }, - 'type': 'scheduled-producer', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'converter-resources-component-type-substitution-converter', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 1, - 'minReplicas': 0, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - ] - }, - 'commandLine': { - 'CONVERT_XML': True - }, - 'nameOverride': 'resources-component-type-substitution-converter', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-component-type-substitution-converter-error', - 'inputTopics': [ - 'resources-component-type-substitution-scheduled-producer' - ], - 'outputTopic': 'resources-component-type-substitution-converter', - 'schemaRegistryUrl': 'http://localhost:8081/' - } + 'topics': { + } + }, + 'name': 'consumer5', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'converter', - 'namespace': 'example-namespace', - 'prefix': 'resources-component-type-substitution-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-component-type-substitution-converter': { - 'configs': { - 'cleanup.policy': 'compact,delete', - 'retention.ms': '-1' - }, - 'partitions_count': 50, - 'type': 'output' - }, - 'resources-component-type-substitution-converter-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 10, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-read-from-component-consumer5-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] + +snapshots['TestPipeline.test_substitute_in_component test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'labels': { + 'app_name': 'scheduled-producer', + 'app_schedule': '30 3/8 * * *', + 'app_type': 'scheduled-producer' + }, + 'nameOverride': 'resources-component-type-substitution-scheduled-producer', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-component-type-substitution-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, 
+ 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' }, - 'type': 'converter', - 'version': '2.4.2' - }, - { - 'app': { - 'autoscaling': { - 'consumerGroup': 'filter-resources-component-type-substitution-filter-app', - 'cooldownPeriod': 300, - 'enabled': True, - 'lagThreshold': 10000, - 'maxReplicas': 4, - 'minReplicas': 4, - 'offsetResetPolicy': 'earliest', - 'pollingInterval': 30, - 'topics': [ - 'resources-component-type-substitution-filter-app' - ] - }, - 'commandLine': { - 'TYPE': 'nothing' - }, - 'image': 'fake-registry/filter', - 'imageTag': '2.4.1', - 'labels': { - 'app_name': 'filter-app', - 'app_resources_requests_memory': '3G', - 'app_type': 'filter', - 'filter': 'filter-app-filter', - 'test_placeholder_in_placeholder': 'filter-app-filter' - }, - 'nameOverride': 'resources-component-type-substitution-filter-app', - 'replicaCount': 4, - 'resources': { - 'requests': { - 'memory': '3G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-component-type-substitution-filter-app-error', - 'inputTopics': [ - 'resources-component-type-substitution-converter' - ], - 'outputTopic': 'resources-component-type-substitution-filter-app', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'resources-component-type-substitution-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-component-type-substitution-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'nameOverride': 'resources-component-type-substitution-converter', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-component-type-substitution-converter-error', + 'inputTopics': [ + 'resources-component-type-substitution-scheduled-producer' + ], + 'outputTopic': 'resources-component-type-substitution-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'filter-app', - 'namespace': 'example-namespace', - 'prefix': 'resources-component-type-substitution-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'resources-component-type-substitution-filter-app': { - 'configs': { - 'retention.ms': '-1' - }, - 
'partitions_count': 50, - 'type': 'output' - }, - 'resources-component-type-substitution-filter-app-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'resources-component-type-substitution-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-component-type-substitution-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-component-type-substitution-filter-app', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-component-type-substitution-filter-app' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'labels': { + 'app_name': 'filter-app', + 'app_resources_requests_memory': '3G', + 'app_type': 'filter', + 'filter': 'filter-app-filter', + 'test_placeholder_in_placeholder': 'filter-app-filter' + }, + 'nameOverride': 'resources-component-type-substitution-filter-app', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-component-type-substitution-filter-app-error', + 'inputTopics': [ + 'resources-component-type-substitution-converter' + ], + 'outputTopic': 'resources-component-type-substitution-filter-app', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'filter-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'filter', - 'version': '2.4.2' - } - ] -} + 'topics': { + 'resources-component-type-substitution-filter-app': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-component-type-substitution-filter-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'filter', + 'version': '2.4.2' + } +] -snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = { - 'components': [ - { - 'app': { - 'nameOverride': 'resources-custom-config-app1', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'app1-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081/' +snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = [ + { + 'app': { + 'nameOverride': 'resources-custom-config-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 
'memory': '2G' } }, - 'name': 'app1', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app1-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'app1-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app1', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'producer-app', - 'version': '2.9.0' - }, - { - 'app': { - 'image': 'some-image', - 'labels': { - 'pipeline': 'resources-custom-config' - }, - 'nameOverride': 'resources-custom-config-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'app2-dead-letter-topic', - 'inputTopics': [ - 'app1-test-topic' - ], - 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'app1-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'nameOverride': 'resources-custom-config-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'app2-dead-letter-topic', + 'inputTopics': [ + 'app1-test-topic' + ], + 'outputTopic': 'app2-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'app2', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app2-dead-letter-topic': { - 'configs': { - }, - 'partitions_count': 1, - 'type': 'error' - }, - 'app2-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } + 'topics': { + 'app2-dead-letter-topic': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + }, + 'app2-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' } - }, - 'type': 'streams-app', - 'version': '2.9.0' - } - ] -} + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } +] -snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = { - 'components': [ - { - 'app': { - 'nameOverride': 'resources-custom-config-app1', - 'resources': { - 'limits': { - 'memory': '2G' - }, - 'requests': { - 'memory': '2G' - } - }, - 'streams': { - 'brokers': 
'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'extraOutputTopics': { - }, - 'outputTopic': 'app1-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081/' +snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = [ + { + 'app': { + 'nameOverride': 'resources-custom-config-app1', + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' } }, - 'name': 'app1', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app1-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } - } + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'app1-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app1', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'type': 'producer-app', - 'version': '2.9.0' - }, - { - 'app': { - 'image': 'some-image', - 'labels': { - 'pipeline': 'resources-custom-config' - }, - 'nameOverride': 'resources-custom-config-app2', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'errorTopic': 'app2-dead-letter-topic', - 'inputTopics': [ - 'app1-test-topic' - ], - 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081/' + 'topics': { + 'app1-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'nameOverride': 'resources-custom-config-app2', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'app2-dead-letter-topic', + 'inputTopics': [ + 'app1-test-topic' + ], + 'outputTopic': 'app2-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'app2', - 'namespace': 'development-namespace', - 'prefix': 'resources-custom-config-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'app2-dead-letter-topic': { - 'configs': { - }, - 'partitions_count': 1, - 'type': 'error' - }, - 'app2-test-topic': { - 'configs': { - }, - 'partitions_count': 3, - 'type': 'output' - } + 'topics': { + 'app2-dead-letter-topic': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + }, + 'app2-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' } - }, - 'type': 'streams-app', - 'version': '2.9.0' - } - ] -} + 
} + }, + 'type': 'streams-app', + 'version': '2.9.0' + } +] -snapshots['TestPipeline.test_with_env_defaults test-pipeline'] = { - 'components': [ - { - 'app': { - 'image': 'fake-image', - 'nameOverride': 'resources-kafka-connect-sink-streams-app-development', - 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', - 'config': { - 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' - }, - 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error', - 'inputTopics': [ - 'example-topic' - ], - 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081/' - } +snapshots['TestPipeline.test_with_env_defaults test-pipeline'] = [ + { + 'app': { + 'image': 'fake-image', + 'nameOverride': 'resources-kafka-connect-sink-streams-app-development', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error', + 'inputTopics': [ + 'example-topic' + ], + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { }, - 'from': { - 'components': { - }, - 'topics': { - 'example-topic': { - 'type': 'input' - } + 'topics': { + 'example-topic': { + 'type': 'input' } + } + }, + 'name': 'streams-app-development', + 'namespace': 'development-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { }, - 'name': 'streams-app-development', - 'namespace': 'development-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-streams-bootstrap', - 'url': 'https://bakdata.github.io/streams-bootstrap/' - }, - 'to': { - 'models': { - }, - 'topics': { - 'example-output': { - 'configs': { - }, - 'type': 'output' - }, - 'resources-kafka-connect-sink-streams-app-development-error': { - 'configs': { - 'cleanup.policy': 'compact,delete' - }, - 'partitions_count': 1, - 'type': 'error', - 'value_schema': 'com.bakdata.kafka.DeadLetter' - } + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-kafka-connect-sink-streams-app-development-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' } - }, - 'type': 'streams-app', - 'version': '2.9.0' - }, - { - 'app': { - 'batch.size': '2000', - 'behavior.on.malformed.documents': 'warn', - 'behavior.on.null.values': 'delete', - 'connection.compression': 'true', - 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', - 'key.ignore': 'false', - 'linger.ms': '5000', - 'max.buffered.records': '20000', - 'name': 'resources-kafka-connect-sink-es-sink-connector', - 'read.timeout.ms': '120000', - 'tasks.max': '1', - 'topics': 'example-output' - }, - 'name': 'es-sink-connector', - 'namespace': 'example-namespace', - 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, - 
'resetter_values': { - }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' - } - ] -} + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-kafka-connect-sink-es-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'example-output' + }, + 'name': 'es-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + } +] diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 4a43ce9d5..9be8edf7c 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -8,7 +8,7 @@ import kpops from kpops.cli.main import app -from kpops.pipeline_generator.pipeline import ParsingException, ValidationError +from kpops.pipeline import ParsingException, ValidationError runner = CliRunner() @@ -97,8 +97,8 @@ def test_name_equal_prefix_name_concatenation(self): enriched_pipeline: dict = yaml.safe_load(result.stdout) - assert enriched_pipeline["components"][0]["prefix"] == "my-fake-prefix-" - assert enriched_pipeline["components"][0]["name"] == "my-streams-app" + assert enriched_pipeline[0]["prefix"] == "my-fake-prefix-" + assert enriched_pipeline[0]["name"] == "my-streams-app" def test_pipelines_with_env_values(self, snapshot: SnapshotTest): result = runner.invoke( @@ -161,33 +161,28 @@ def test_substitute_in_component(self, snapshot: SnapshotTest): enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( - enriched_pipeline["components"][0]["prefix"] - == "resources-component-type-substitution-" + enriched_pipeline[0]["prefix"] == "resources-component-type-substitution-" ) - assert enriched_pipeline["components"][0]["name"] == "scheduled-producer" + assert enriched_pipeline[0]["name"] == "scheduled-producer" - labels = enriched_pipeline["components"][0]["app"]["labels"] + labels = enriched_pipeline[0]["app"]["labels"] assert labels["app_name"] == "scheduled-producer" assert labels["app_type"] == "scheduled-producer" assert labels["app_schedule"] == "30 3/8 * * *" assert ( - enriched_pipeline["components"][2]["app"]["labels"][ - "app_resources_requests_memory" - ] + enriched_pipeline[2]["app"]["labels"]["app_resources_requests_memory"] == "3G" ) assert ( "resources-component-type-substitution-scheduled-producer" - in enriched_pipeline["components"][0]["to"]["topics"] + in enriched_pipeline[0]["to"]["topics"] ) assert ( "resources-component-type-substitution-converter-error" - in enriched_pipeline["components"][1]["to"]["topics"] + in enriched_pipeline[1]["to"]["topics"] ) assert ( - enriched_pipeline["components"][2]["app"]["labels"][ - "test_placeholder_in_placeholder" - ] + enriched_pipeline[2]["app"]["labels"]["test_placeholder_in_placeholder"] == "filter-app-filter" ) @@ -229,7 +224,7 @@ def test_kafka_connector_config_parsing(self): catch_exceptions=False, ) enriched_pipeline: dict = yaml.safe_load(result.stdout) - sink_connector = 
enriched_pipeline["components"][0] + sink_connector = enriched_pipeline[0] assert ( sink_connector["app"]["errors.deadletterqueue.topic.name"] == "kafka-sink-connector-error-topic" @@ -379,11 +374,11 @@ def test_with_custom_config_with_relative_defaults_path( assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) - producer_details = enriched_pipeline["components"][0] + producer_details = enriched_pipeline[0] output_topic = producer_details["app"]["streams"]["outputTopic"] assert output_topic == "app1-test-topic" - streams_app_details = enriched_pipeline["components"][1] + streams_app_details = enriched_pipeline[1] output_topic = streams_app_details["app"]["streams"]["outputTopic"] assert output_topic == "app2-test-topic" error_topic = streams_app_details["app"]["streams"]["errorTopic"] @@ -424,11 +419,11 @@ def test_with_custom_config_with_absolute_defaults_path( assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) - producer_details = enriched_pipeline["components"][0] + producer_details = enriched_pipeline[0] output_topic = producer_details["app"]["streams"]["outputTopic"] assert output_topic == "app1-test-topic" - streams_app_details = enriched_pipeline["components"][1] + streams_app_details = enriched_pipeline[1] output_topic = streams_app_details["app"]["streams"]["outputTopic"] assert output_topic == "app2-test-topic" error_topic = streams_app_details["app"]["streams"]["errorTopic"] @@ -457,11 +452,11 @@ def test_default_config(self, snapshot: SnapshotTest): assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) - producer_details = enriched_pipeline["components"][0] + producer_details = enriched_pipeline[0] output_topic = producer_details["app"]["streams"]["outputTopic"] assert output_topic == "resources-custom-config-app1" - streams_app_details = enriched_pipeline["components"][1] + streams_app_details = enriched_pipeline[1] output_topic = streams_app_details["app"]["streams"]["outputTopic"] assert output_topic == "resources-custom-config-app2" error_topic = streams_app_details["app"]["streams"]["errorTopic"] @@ -488,10 +483,7 @@ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch): ) assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) - assert ( - enriched_pipeline["components"][0]["app"]["streams"]["brokers"] - == "env_broker" - ) + assert enriched_pipeline[0]["app"]["streams"]["brokers"] == "env_broker" def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): monkeypatch.setenv( @@ -515,7 +507,7 @@ def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( - enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"] + enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"] == "http://somename:1234/" ) @@ -541,7 +533,7 @@ def test_env_specific_config_env_def_in_env_var( assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( - enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"] + enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"] == "http://production:8081/" ) @@ -579,8 +571,7 @@ def test_env_specific_config_env_def_in_cli( assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( - enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"] - == expected_url + 
enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"] == expected_url ) def test_config_dir_doesnt_exist(self): @@ -645,7 +636,7 @@ def test_dotenv_support(self): enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( - enriched_pipeline["components"][1]["app"]["streams"]["schemaRegistryUrl"] + enriched_pipeline[1]["app"]["streams"]["schemaRegistryUrl"] == "http://notlocalhost:8081/" ) @@ -667,9 +658,9 @@ def test_short_topic_definition(self): enriched_pipeline: dict = yaml.safe_load(result.stdout) - output_topics = enriched_pipeline["components"][4]["to"]["topics"] - input_topics = enriched_pipeline["components"][4]["from"]["topics"] - input_components = enriched_pipeline["components"][4]["from"]["components"] + output_topics = enriched_pipeline[4]["to"]["topics"] + input_topics = enriched_pipeline[4]["from"]["topics"] + input_components = enriched_pipeline[4]["from"]["components"] assert "type" not in output_topics["output-topic"] assert output_topics["error-topic"]["type"] == "error" assert "type" not in output_topics["extra-topic"] @@ -748,6 +739,6 @@ def test_temp_trim_release_name(self): assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( - enriched_pipeline["components"][0]["name"] + enriched_pipeline[0]["name"] == "in-order-to-have-len-fifty-two-name-should-end--here" ) From e770163df2c7eef30cb4ffc04614a3a1f031b92a Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Thu, 21 Dec 2023 14:00:37 +0100 Subject: [PATCH 15/34] Define custom components module & pipeline base dir globally (#387) for added convenience, ability to set pipeline base dir and components module globally for the project. therefore it won't be necessary to specify these with each invocation will be used for automatic schema generation (editor integration) --- config.yaml | 2 + .../docs/resources/variables/cli_env_vars.env | 3 - docs/docs/resources/variables/cli_env_vars.md | 17 ++- .../resources/variables/config_env_vars.env | 11 +- .../resources/variables/config_env_vars.md | 4 +- docs/docs/schema/config.json | 22 ++- docs/docs/user/migration-guide/v2-v3.md | 20 +++ docs/docs/user/references/cli-commands.md | 36 ++--- .../bakdata/atm-fraud-detection/config.yaml | 2 + hooks/gen_docs/gen_docs_env_vars.py | 4 +- kpops/cli/main.py | 126 +++++++++--------- .../kafka_connect/connect_wrapper.py | 5 +- .../schema_handler/schema_handler.py | 14 +- .../base_defaults_component.py | 4 +- kpops/config.py | 10 +- kpops/pipeline.py | 10 +- kpops/utils/gen_schema.py | 2 +- tests/cli/resources/config.yaml | 2 + .../{module.py => custom_module/__init__.py} | 0 .../__init__.py} | 0 tests/cli/resources/empty_module/config.yaml | 2 + tests/cli/resources/no_module/config.yaml | 1 + tests/cli/test_handlers.py | 11 +- tests/cli/test_registry.py | 2 +- tests/cli/test_schema_generation.py | 35 ++--- .../schema_handler/test_schema_handler.py | 87 ++++-------- .../resources/custom-config/config.yaml | 1 + tests/pipeline/test_example.py | 4 +- tests/pipeline/test_pipeline.py | 115 ++++------------ tests/pipeline/test_template.py | 9 +- 30 files changed, 251 insertions(+), 310 deletions(-) create mode 100644 tests/cli/resources/config.yaml rename tests/cli/resources/{module.py => custom_module/__init__.py} (100%) rename tests/cli/resources/{empty_module.py => empty_module/__init__.py} (100%) create mode 100644 tests/cli/resources/empty_module/config.yaml create mode 100644 tests/cli/resources/no_module/config.yaml diff --git a/config.yaml b/config.yaml index fdd145829..7d0e97a54 100644 
--- a/config.yaml +++ b/config.yaml @@ -1 +1,3 @@ kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +components_module: tests.pipeline.test_components +pipeline_base_dir: tests/pipeline diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env index 0f393ddf8..078f56f07 100644 --- a/docs/docs/resources/variables/cli_env_vars.env +++ b/docs/docs/resources/variables/cli_env_vars.env @@ -5,9 +5,6 @@ # corresponding flag does not have to be specified in commands. # Variables marked as required can instead be set as flags. # -# Base directory to the pipelines (default is current working -# directory) -KPOPS_PIPELINE_BASE_DIR=. # Path to the dir containing config.yaml files KPOPS_CONFIG_PATH=. # Path to defaults folder diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md index 9a9b0012c..cb459f113 100644 --- a/docs/docs/resources/variables/cli_env_vars.md +++ b/docs/docs/resources/variables/cli_env_vars.md @@ -1,11 +1,10 @@ These variables are a lower priority alternative to the commands' flags. If a variable is set, the corresponding flag does not have to be specified in commands. Variables marked as required can instead be set as flags. -| Name |Default Value|Required| Description | -|-----------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory) | -|KPOPS_CONFIG_PATH |. |False |Path to the dir containing config.yaml files | -|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | -|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. | -|KPOPS_ENVIRONMENT | |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).| -|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | -|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | +| Name |Default Value|Required| Description | +|--------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +|KPOPS_CONFIG_PATH |. |False |Path to the dir containing config.yaml files | +|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | +|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. | +|KPOPS_ENVIRONMENT | |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. 
defaults_development.yaml for environment=development).| +|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | +|KPOPS_PIPELINE_STEPS| |False |Comma separated list of steps to apply the command on | diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index b7e1a2ced..cc1f68943 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -1,14 +1,21 @@ -# Pipeline config environment variables +# Global config environment variables # # The default setup is shown. These variables are a lower priority # alternative to the settings in `config.yaml`. Variables marked as -# required can instead be set in the pipeline config. +# required can instead be set in the global config. # # defaults_path # The path to the folder containing the defaults.yaml file and the # environment defaults files. Paths can either be absolute or relative # to `config.yaml` KPOPS_DEFAULTS_PATH=. +# components_module +# Custom Python module defining project-specific KPOps components +KPOPS_COMPONENTS_MODULE # No default value, not required +# pipeline_base_dir +# Base directory to the pipelines (default is current working +# directory) +KPOPS_PIPELINE_BASE_DIR=. # kafka_brokers # The comma separated Kafka brokers address. KPOPS_KAFKA_BROKERS # No default value, required diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index f9b9854ac..fd635278a 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,8 +1,10 @@ -These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. +These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the global config. | Name | Default Value |Required| Description | Setting name | |--------------------------------------------------|----------------------------------------|--------|------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------| |KPOPS_DEFAULTS_PATH |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`|defaults_path | +|KPOPS_COMPONENTS_MODULE | |False |Custom Python module defining project-specific KPOps components |components_module | +|KPOPS_PIPELINE_BASE_DIR |. |False |Base directory to the pipelines (default is current working directory) |pipeline_base_dir | |KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | |KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. 
|defaults_filename_prefix | |KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component.name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 7708ed04c..c4ed0b1d4 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -136,8 +136,21 @@ } }, "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", + "description": "Global configuration for KPOps project.", "properties": { + "components_module": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Custom Python module defining project-specific KPOps components", + "title": "Components Module" + }, "create_namespace": { "default": false, "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", @@ -215,6 +228,13 @@ }, "description": "Configuration for Kafka REST Proxy." }, + "pipeline_base_dir": { + "default": ".", + "description": "Base directory to the pipelines (default is current working directory)", + "format": "path", + "title": "Pipeline Base Dir", + "type": "string" + }, "retain_clean_jobs": { "default": false, "description": "Whether to retain clean up jobs in the cluster or uninstall them after completion.", diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index d44c49503..6a6e0f4f4 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -48,6 +48,26 @@ The variable is now called `kafka_brokers`. ... ``` +## [Define custom components module & pipeline base dir globally](https://github.com/bakdata/kpops/pull/387) + + + +!!! warning inline end + **The previous CLI parameters have been removed.** + + + +The options for a custom `components_module` and `pipeline_base_dir` are now global settings, defined in `config.yaml`. + +#### config.yaml + +```diff + kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" + environment: development ++ components_module: components ++ pipeline_base_dir: pipelines +``` + ## [Move GitHub action to repository root](https://github.com/bakdata/kpops/pull/356) The location of the GitHub action has changed, and it's now available directly as `bakdata/kpops`. diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index ed321367b..fae1884b0 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -29,25 +29,23 @@ Clean pipeline steps **Usage**: ```console -$ kpops clean [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] +$ kpops clean [OPTIONS] PIPELINE_PATH ``` **Arguments**: * `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required] -* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components **Options**: -* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. 
[env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] -* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. ## `kpops deploy` @@ -57,25 +55,23 @@ Deploy pipeline steps **Usage**: ```console -$ kpops deploy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] +$ kpops deploy [OPTIONS] PIPELINE_PATH ``` **Arguments**: * `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required] -* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components **Options**: -* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] -* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. ## `kpops destroy` @@ -85,25 +81,23 @@ Destroy pipeline steps **Usage**: ```console -$ kpops destroy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] +$ kpops destroy [OPTIONS] PIPELINE_PATH ``` **Arguments**: * `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required] -* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components **Options**: -* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. 
The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] -* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. ## `kpops generate` @@ -113,25 +107,23 @@ Enriches pipelines steps with defaults. The output is used as input for the depl **Usage**: ```console -$ kpops generate [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] +$ kpops generate [OPTIONS] PIPELINE_PATH ``` **Arguments**: * `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required] -* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components **Options**: -* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--template / --no-template`: Run Helm template [default: no-template] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] -* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] * `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] +* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] * `--help`: Show this message and exit. ## `kpops reset` @@ -141,25 +133,23 @@ Reset pipeline steps **Usage**: ```console -$ kpops reset [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] +$ kpops reset [OPTIONS] PIPELINE_PATH ``` **Arguments**: * `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required] -* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components **Options**: -* `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. 
The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] -* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--help`: Show this message and exit. ## `kpops schema` @@ -171,7 +161,7 @@ The schemas can be used to enable support for kpops files in a text editor. **Usage**: ```console -$ kpops schema [OPTIONS] SCOPE:{pipeline|config} [COMPONENTS_MODULE] +$ kpops schema [OPTIONS] SCOPE:{pipeline|config} ``` **Arguments**: @@ -182,15 +172,15 @@ $ kpops schema [OPTIONS] SCOPE:{pipeline|config} [COMPONENTS_MODULE] - pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide [COMPONENTS_MODULES]. + pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide components module in config. config: Schema of KpopsConfig. [required] -* `[COMPONENTS_MODULE]`: Custom Python module containing your project-specific components **Options**: +* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] * `--include-stock-components / --no-include-stock-components`: Include the built-in KPOps components. [default: include-stock-components] * `--help`: Show this message and exit. diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index 41740ae77..c3195147b 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -15,3 +15,5 @@ kafka_connect: url: "http://localhost:8083" defaults_path: . + +pipeline_base_dir: examples diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 30a7e15bf..8f5fe5646 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -32,10 +32,10 @@ PATH_CONFIG_ENV_VARS_DOTENV_FILE = PATH_DOCS_VARIABLES / "config_env_vars.env" PATH_CONFIG_ENV_VARS_MD_FILE = PATH_DOCS_VARIABLES / "config_env_vars.md" PATH_CONFIG_ENV_VARS_CSV_FILE = PATH_DOCS_VARIABLES / "temp_config_env_vars.csv" -TITLE_CONFIG_ENV_VARS = "Pipeline config environment variables" +TITLE_CONFIG_ENV_VARS = "Global config environment variables" DESCRIPTION_CONFIG_ENV_VARS = ( "These variables are a lower priority alternative to the settings in `config.yaml`. " - "Variables marked as required can instead be set in the pipeline config." + "Variables marked as required can instead be set in the global config." 
) PATH_CLI_ENV_VARS_DOTFILES_FILE = PATH_DOCS_VARIABLES / "cli_env_vars.env" diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 5a7c758e4..c488f2bbf 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -29,6 +29,7 @@ from kpops.components.base_components import PipelineComponent + LOG_DIVIDER = "#" * 100 app = dtyper.Typer(pretty_exceptions_enable=False) @@ -45,15 +46,6 @@ ), ) -BASE_DIR_PATH_OPTION: Path = typer.Option( - default=Path(), - exists=True, - dir_okay=True, - file_okay=False, - envvar=f"{ENV_PREFIX}PIPELINE_BASE_DIR", - help="Base directory to the pipelines (default is current working directory)", -) - DEFAULT_PATH_OPTION: Optional[Path] = typer.Option( default=None, exists=True, @@ -109,11 +101,6 @@ class FilterType(str, Enum): VERBOSE_OPTION = typer.Option(False, help="Enable verbose printing") -COMPONENTS_MODULES: str | None = typer.Argument( - default=None, - help="Custom Python module containing your project-specific components", -) - ENVIRONMENT: str | None = typer.Option( default=None, envvar=f"{ENV_PREFIX}ENVIRONMENT", @@ -133,26 +120,22 @@ class FilterType(str, Enum): def setup_pipeline( - pipeline_base_dir: Path, pipeline_path: Path, - components_module: str | None, kpops_config: KpopsConfig, environment: str | None, ) -> Pipeline: registry = Registry() - if components_module: - registry.find_components(components_module) + if kpops_config.components_module: + registry.find_components(kpops_config.components_module) registry.find_components("kpops.components") - handlers = setup_handlers(components_module, kpops_config) + handlers = setup_handlers(kpops_config) parser = PipelineGenerator(kpops_config, registry, handlers) - return parser.load_yaml(pipeline_base_dir, pipeline_path, environment) + return parser.load_yaml(pipeline_path, environment) -def setup_handlers( - components_module: str | None, config: KpopsConfig -) -> ComponentHandlers: - schema_handler = SchemaHandler.load_schema_handler(components_module, config) +def setup_handlers(config: KpopsConfig) -> ComponentHandlers: + schema_handler = SchemaHandler.load_schema_handler(config) connector_handler = KafkaConnectHandler.from_kpops_config(config) proxy_wrapper = ProxyWrapper(config.kafka_rest) topic_handler = TopicHandler(proxy_wrapper) @@ -218,10 +201,10 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_kpops_config( config: Path, - defaults: Optional[Path], - verbose: bool, - dotenv: Optional[list[Path]], - environment: Optional[str], + defaults: Path | None = None, + dotenv: list[Path] | None = None, + environment: str | None = None, + verbose: bool = False, ) -> KpopsConfig: setup_logging_level(verbose) YamlConfigSettingsSource.config_dir = config @@ -250,18 +233,21 @@ def schema( help=""" Scope of the generated schema \n\n\n - pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide [COMPONENTS_MODULES]. + pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide components module in config. \n\n\n config: Schema of KpopsConfig.""", ), - components_module: Optional[str] = COMPONENTS_MODULES, + config: Path = CONFIG_PATH_OPTION, include_stock_components: bool = typer.Option( default=True, help="Include the built-in KPOps components." 
), ) -> None: match scope: case SchemaScope.PIPELINE: - gen_pipeline_schema(components_module, include_stock_components) + kpops_config = create_kpops_config(config) + gen_pipeline_schema( + kpops_config.components_module, include_stock_components + ) case SchemaScope.CONFIG: gen_config_schema() @@ -271,21 +257,23 @@ def schema( ) def generate( pipeline_path: Path = PIPELINE_PATH_ARG, - components_module: Optional[str] = COMPONENTS_MODULES, - pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, template: bool = typer.Option(False, help="Run Helm template"), steps: Optional[str] = PIPELINE_STEPS, filter_type: FilterType = FILTER_TYPE, - verbose: bool = VERBOSE_OPTION, environment: Optional[str] = ENVIRONMENT, + verbose: bool = VERBOSE_OPTION, ) -> Pipeline: - kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) - pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config, environment + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) if not template: print_yaml(pipeline.to_yaml()) @@ -306,21 +294,23 @@ def generate( @app.command(help="Deploy pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def deploy( pipeline_path: Path = PIPELINE_PATH_ARG, - components_module: Optional[str] = COMPONENTS_MODULES, - pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, filter_type: FilterType = FILTER_TYPE, + environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, - environment: Optional[str] = ENVIRONMENT, -) -> None: - kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) - pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config, environment +): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) for component in steps_to_apply: @@ -331,21 +321,23 @@ def deploy( @app.command(help="Destroy pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def destroy( pipeline_path: Path = PIPELINE_PATH_ARG, - components_module: Optional[str] = COMPONENTS_MODULES, - pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, filter_type: FilterType = FILTER_TYPE, + environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, - environment: Optional[str] = ENVIRONMENT, -) -> None: - kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) - pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config, environment +): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) pipeline_steps = 
reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: log_action("Destroy", component) @@ -355,21 +347,23 @@ def destroy( @app.command(help="Reset pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def reset( pipeline_path: Path = PIPELINE_PATH_ARG, - components_module: Optional[str] = COMPONENTS_MODULES, - pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, filter_type: FilterType = FILTER_TYPE, + environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, - environment: Optional[str] = ENVIRONMENT, -) -> None: - kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) - pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config, environment +): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: log_action("Reset", component) @@ -380,21 +374,23 @@ def reset( @app.command(help="Clean pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 def clean( pipeline_path: Path = PIPELINE_PATH_ARG, - components_module: Optional[str] = COMPONENTS_MODULES, - pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, filter_type: FilterType = FILTER_TYPE, + environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, - environment: Optional[str] = ENVIRONMENT, -) -> None: - kpops_config = create_kpops_config(config, defaults, verbose, dotenv, environment) - pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, kpops_config, environment +): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: log_action("Clean", component) diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 06f21eff2..4d92bad03 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -63,13 +63,16 @@ def create_connector( self.create_connector(connector_config) raise KafkaConnectError(response) - def get_connector(self, connector_name: str) -> KafkaConnectResponse: + def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: """Get information about the connector. API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) :param connector_name: Name of the created connector :return: Information about the connector. 
""" + if connector_name is None: + msg = "Connector name not set" + raise Exception(msg) response = httpx.get( url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index fae2da0e7..1afb1626b 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -24,15 +24,11 @@ class SchemaHandler: - def __init__( - self, - kpops_config: KpopsConfig, - components_module: str | None, - ) -> None: + def __init__(self, kpops_config: KpopsConfig) -> None: self.schema_registry_client = SchemaRegistryClient( str(kpops_config.schema_registry.url) ) - self.components_module = components_module + self.components_module = kpops_config.components_module @cached_property def schema_provider(self) -> SchemaProvider: @@ -47,11 +43,9 @@ def schema_provider(self) -> SchemaProvider: raise ValueError(msg) from e @classmethod - def load_schema_handler( - cls, components_module: str | None, config: KpopsConfig - ) -> SchemaHandler | None: + def load_schema_handler(cls, config: KpopsConfig) -> SchemaHandler | None: if config.schema_registry.enabled: - return cls(config, components_module) + return cls(config) return None def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index fff9135da..883a8934d 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -36,7 +36,7 @@ class BaseDefaultsComponent(DescConfigModel, ABC): correctly to the component. :param enrich: Whether to enrich component with defaults, defaults to False - :param config: Pipeline configuration to be accessed by this component + :param config: KPOps configuration to be accessed by this component :param handlers: Component handlers to be accessed by this component :param validate: Whether to run custom validation on the component, defaults to True """ @@ -184,7 +184,7 @@ def get_defaults_file_paths( `config.defaults_path` exists and return paths to the defaults files calculated from it. It is up to the caller to handle any false paths. - :param config: Pipeline configuration + :param config: KPOps configuration :param environment: Environment :returns: The defaults files paths """ diff --git a/kpops/config.py b/kpops/config.py index 172ff4305..f71444a43 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -62,7 +62,7 @@ class KafkaConnectConfig(BaseSettings): class KpopsConfig(BaseSettings): - """Pipeline configuration unrelated to the components.""" + """Global configuration for KPOps project.""" defaults_path: Path = Field( default=Path(), @@ -70,6 +70,14 @@ class KpopsConfig(BaseSettings): description="The path to the folder containing the defaults.yaml file and the environment defaults files. 
" "Paths can either be absolute or relative to `config.yaml`", ) + components_module: str | None = Field( + default=None, + description="Custom Python module defining project-specific KPOps components", + ) + pipeline_base_dir: Path = Field( + default=Path(), + description="Base directory to the pipelines (default is current working directory)", + ) kafka_brokers: str = Field( default=..., examples=[ diff --git a/kpops/pipeline.py b/kpops/pipeline.py index de37576d0..ad69521e1 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -127,20 +127,20 @@ def parse( self.pipeline.validate() return self.pipeline - def load_yaml( - self, base_dir: Path, path: Path, environment: str | None - ) -> Pipeline: + def load_yaml(self, path: Path, environment: str | None) -> Pipeline: """Load pipeline definition from yaml. The file is often named ``pipeline.yaml`` - :param base_dir: Base directory to the pipelines (default is current working directory) :param path: Path to pipeline definition yaml file + :param environment: Environment name :raises TypeError: The pipeline definition should contain a list of components :raises TypeError: The env-specific pipeline definition should contain a list of components :returns: Initialized pipeline object """ - PipelineGenerator.set_pipeline_name_env_vars(base_dir, path) + PipelineGenerator.set_pipeline_name_env_vars( + self.config.pipeline_base_dir, path + ) PipelineGenerator.set_environment_name(environment) main_content = load_yaml_file(path, substitution=ENV) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 18ac3c5a4..93c4b233f 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -141,6 +141,6 @@ class PipelineSchema(RootModel): def gen_config_schema() -> None: - """Generate a json schema from the model of pipeline config.""" + """Generate JSON schema from the model.""" schema = model_json_schema(KpopsConfig) print(json.dumps(schema, indent=4, sort_keys=True)) diff --git a/tests/cli/resources/config.yaml b/tests/cli/resources/config.yaml new file mode 100644 index 000000000..046c98d2a --- /dev/null +++ b/tests/cli/resources/config.yaml @@ -0,0 +1,2 @@ +kafka_brokers: http://127.0.0.1:9092 +components_module: tests.cli.test_schema_generation diff --git a/tests/cli/resources/module.py b/tests/cli/resources/custom_module/__init__.py similarity index 100% rename from tests/cli/resources/module.py rename to tests/cli/resources/custom_module/__init__.py diff --git a/tests/cli/resources/empty_module.py b/tests/cli/resources/empty_module/__init__.py similarity index 100% rename from tests/cli/resources/empty_module.py rename to tests/cli/resources/empty_module/__init__.py diff --git a/tests/cli/resources/empty_module/config.yaml b/tests/cli/resources/empty_module/config.yaml new file mode 100644 index 000000000..735b3904a --- /dev/null +++ b/tests/cli/resources/empty_module/config.yaml @@ -0,0 +1,2 @@ +kafka_brokers: http://127.0.0.1:9092 +components_module: tests.cli.resources.empty_module diff --git a/tests/cli/resources/no_module/config.yaml b/tests/cli/resources/no_module/config.yaml new file mode 100644 index 000000000..79261856b --- /dev/null +++ b/tests/cli/resources/no_module/config.yaml @@ -0,0 +1 @@ +kafka_brokers: http://127.0.0.1:9092 diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index c9534d73c..7732ed61f 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -10,7 +10,7 @@ from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from 
kpops.component_handlers.topic.handler import TopicHandler from kpops.config import KpopsConfig, SchemaRegistryConfig -from tests.cli.resources.module import CustomSchemaProvider +from tests.cli.resources.custom_module import CustomSchemaProvider MODULE = CustomSchemaProvider.__module__ @@ -19,6 +19,7 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): config = KpopsConfig( defaults_path=Path("fake"), kafka_brokers="broker:9092", + components_module=MODULE, ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") connector_handler = KafkaConnectHandler.from_kpops_config(config) @@ -35,7 +36,7 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): topic_handler=topic_handler, ) - actual_handlers = setup_handlers(MODULE, config) + actual_handlers = setup_handlers(config) connector_handler_mock.from_kpops_config.assert_called_once_with(config) @@ -55,7 +56,7 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): kafka_brokers="broker:9092", ) schema_handler_mock = mocker.patch("kpops.cli.main.SchemaHandler") - schema_handler = SchemaHandler.load_schema_handler(MODULE, config) + schema_handler = SchemaHandler.load_schema_handler(config) schema_handler_mock.load_schema_handler.return_value = schema_handler connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") @@ -73,9 +74,9 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): topic_handler=topic_handler, ) - actual_handlers = setup_handlers(MODULE, config) + actual_handlers = setup_handlers(config) - schema_handler_mock.load_schema_handler.assert_called_once_with(MODULE, config) + schema_handler_mock.load_schema_handler.assert_called_once_with(config) connector_handler_mock.from_kpops_config.assert_called_once_with(config) diff --git a/tests/cli/test_registry.py b/tests/cli/test_registry.py index 13a9c854b..bc6a7a2f9 100644 --- a/tests/cli/test_registry.py +++ b/tests/cli/test_registry.py @@ -5,7 +5,7 @@ from kpops.cli.registry import ClassNotFoundError, Registry, _find_classes, find_class from kpops.component_handlers.schema_handler.schema_provider import SchemaProvider from kpops.components.base_components.pipeline_component import PipelineComponent -from tests.cli.resources.module import CustomSchemaProvider +from tests.cli.resources.custom_module import CustomSchemaProvider class SubComponent(PipelineComponent): diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index d860a0b9c..bdd987ac6 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -6,13 +6,12 @@ from typing import TYPE_CHECKING import pytest -from pydantic import Field +from pydantic import ConfigDict, Field from typer.testing import CliRunner from kpops.cli.main import app from kpops.components.base_components import PipelineComponent from kpops.utils.docstring import describe_attr -from tests.cli.resources import empty_module if TYPE_CHECKING: from snapshottest.module import SnapshotTest @@ -25,8 +24,7 @@ # type is inherited from PipelineComponent class EmptyPipelineComponent(PipelineComponent): - class Config: - str_strip_whitespace = True + model_config = ConfigDict(str_strip_whitespace=True) # abstract component inheriting from ABC should be excluded @@ -82,9 +80,6 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): ) -MODULE = EmptyPipelineComponent.__module__ - - @pytest.mark.filterwarnings( "ignore:handlers", "ignore:config", "ignore:enrich", "ignore:validate" ) 
@@ -96,6 +91,8 @@ def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): "schema", "pipeline", "--no-include-stock-components", + "--config", + str(RESOURCE_PATH / "no_module"), ], catch_exceptions=False, ) @@ -106,7 +103,7 @@ def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): "No components are provided, no schema is generated.", ) ] - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout def test_gen_pipeline_schema_no_components(self): with pytest.raises(RuntimeError, match="^No valid components found.$"): @@ -116,7 +113,8 @@ def test_gen_pipeline_schema_no_components(self): "schema", "pipeline", "--no-include-stock-components", - empty_module.__name__, + "--config", + str(RESOURCE_PATH / "empty_module"), ], catch_exceptions=False, ) @@ -131,7 +129,7 @@ def test_gen_pipeline_schema_only_stock_module(self): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout assert result.stdout result = runner.invoke( @@ -144,7 +142,7 @@ def test_gen_pipeline_schema_only_stock_module(self): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout assert result.stdout def test_gen_pipeline_schema_only_custom_module(self, snapshot: SnapshotTest): @@ -153,13 +151,14 @@ def test_gen_pipeline_schema_only_custom_module(self, snapshot: SnapshotTest): [ "schema", "pipeline", - MODULE, "--no-include-stock-components", + "--config", + str(RESOURCE_PATH), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout snapshot.assert_match(result.stdout, "test-schema-generation") @@ -169,20 +168,22 @@ def test_gen_pipeline_schema_stock_and_custom_module(self): [ "schema", "pipeline", - MODULE, ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout assert result.stdout def test_gen_config_schema(self): result = runner.invoke( app, - ["schema", "config"], + [ + "schema", + "config", + ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout assert result.stdout diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 6f1f0b623..81e31d35d 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -69,39 +69,33 @@ def to_section(topic_config: TopicConfig) -> ToSection: @pytest.fixture() -def kpops_config_with_sr_enabled() -> KpopsConfig: +def kpops_config() -> KpopsConfig: return KpopsConfig( kafka_brokers="broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, url=TypeAdapter(AnyHttpUrl).validate_python("http://mock:8081"), ), + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) -def test_load_schema_handler(kpops_config_with_sr_enabled: KpopsConfig): +def test_load_schema_handler(kpops_config: KpopsConfig): assert isinstance( - SchemaHandler.load_schema_handler( - TEST_SCHEMA_PROVIDER_MODULE, kpops_config_with_sr_enabled - ), + SchemaHandler.load_schema_handler(kpops_config), SchemaHandler, ) - config_disable = kpops_config_with_sr_enabled.model_copy() + config_disable = kpops_config.model_copy() config_disable.schema_registry = SchemaRegistryConfig(enabled=False) - assert ( - SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_disable) - is None - ) + assert 
SchemaHandler.load_schema_handler(config_disable) is None def test_should_lazy_load_schema_provider( - find_class_mock: MagicMock, kpops_config_with_sr_enabled: KpopsConfig + find_class_mock: MagicMock, kpops_config: KpopsConfig ): - schema_handler = SchemaHandler.load_schema_handler( - TEST_SCHEMA_PROVIDER_MODULE, kpops_config_with_sr_enabled - ) + schema_handler = SchemaHandler.load_schema_handler(kpops_config) assert schema_handler is not None @@ -116,12 +110,10 @@ def test_should_lazy_load_schema_provider( def test_should_raise_value_error_if_schema_provider_class_not_found( - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): - schema_handler = SchemaHandler( - kpops_config=kpops_config_with_sr_enabled, - components_module=NON_EXISTING_PROVIDER_MODULE, - ) + kpops_config.components_module = NON_EXISTING_PROVIDER_MODULE + schema_handler = SchemaHandler(kpops_config) with pytest.raises( ValueError, @@ -148,12 +140,10 @@ def test_should_raise_value_error_if_schema_provider_class_not_found( ], ) def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty( - kpops_config_with_sr_enabled: KpopsConfig, - components_module: str, + kpops_config: KpopsConfig, components_module: str | None ): - schema_handler = SchemaHandler.load_schema_handler( - components_module, kpops_config_with_sr_enabled - ) + kpops_config.components_module = components_module + schema_handler = SchemaHandler.load_schema_handler(kpops_config) assert schema_handler is not None with pytest.raises( ValueError, @@ -168,12 +158,9 @@ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): - schema_handler = SchemaHandler( - kpops_config=kpops_config_with_sr_enabled, - components_module=TEST_SCHEMA_PROVIDER_MODULE, - ) + schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [] @@ -190,12 +177,9 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): - schema_handler = SchemaHandler( - kpops_config=kpops_config_with_sr_enabled, - components_module=TEST_SCHEMA_PROVIDER_MODULE, - ) + schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [1, 2, 3] schema_registry_mock.check_version.return_value = None @@ -213,13 +197,10 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl topic_config: TopicConfig, to_section: ToSection, schema_registry_mock: MagicMock, - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): schema_provider = TestSchemaProvider() - schema_handler = SchemaHandler( - kpops_config=kpops_config_with_sr_enabled, - components_module=TEST_SCHEMA_PROVIDER_MODULE, - ) + schema_handler = SchemaHandler(kpops_config) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema_registry_mock.get_versions.return_value = [1, 2, 3] @@ -254,13 +235,10 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve log_info_mock: MagicMock, log_debug_mock: MagicMock, schema_registry_mock: MagicMock, - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): schema_provider = TestSchemaProvider() - schema_handler = SchemaHandler( - 
kpops_config=kpops_config_with_sr_enabled, - components_module=TEST_SCHEMA_PROVIDER_MODULE, - ) + schema_handler = SchemaHandler(kpops_config) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) registered_version = SchemaVersion(topic_config.value_schema, 1, schema, 1) @@ -290,15 +268,12 @@ def test_should_submit_non_existing_schema_when_not_dry( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) - schema_handler = SchemaHandler( - kpops_config=kpops_config_with_sr_enabled, - components_module=TEST_SCHEMA_PROVIDER_MODULE, - ) + schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [] @@ -319,12 +294,9 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): - schema_handler = SchemaHandler( - kpops_config=kpops_config_with_sr_enabled, - components_module=TEST_SCHEMA_PROVIDER_MODULE, - ) + schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [] @@ -340,12 +312,9 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( def test_should_delete_schemas_when_not_in_dry_run( to_section: ToSection, schema_registry_mock: MagicMock, - kpops_config_with_sr_enabled: KpopsConfig, + kpops_config: KpopsConfig, ): - schema_handler = SchemaHandler( - kpops_config=kpops_config_with_sr_enabled, - components_module=TEST_SCHEMA_PROVIDER_MODULE, - ) + schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [] diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index 60410489d..aa9f84287 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ b/tests/pipeline/resources/custom-config/config.yaml @@ -12,3 +12,4 @@ schema_registry: url: "http://localhost:8081" helm_config: api_version: "2.1.1" +pipeline_base_dir: tests/pipeline diff --git a/tests/pipeline/test_example.py b/tests/pipeline/test_example.py index 0a4c42321..5d6d587d0 100644 --- a/tests/pipeline/test_example.py +++ b/tests/pipeline/test_example.py @@ -16,15 +16,13 @@ def test_atm_fraud(self, snapshot: SnapshotTest): [ "generate", "./examples/bakdata/atm-fraud-detection/pipeline.yaml", - "--pipeline-base-dir", - "examples", "--config", "./examples/bakdata/atm-fraud-detection", ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "atm-fraud-pipeline") diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 9be8edf7c..afb137ff2 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -13,7 +13,6 @@ runner = CliRunner() RESOURCE_PATH = Path(__file__).parent / "resources" -PIPELINE_BASE_DIR_PATH = RESOURCE_PATH.parent @pytest.mark.usefixtures("mock_env", "load_yaml_file_clear_cache") @@ -21,8 +20,6 @@ class TestPipeline: def test_python_api(self): pipeline = kpops.generate( RESOURCE_PATH / "first-pipeline" / "pipeline.yaml", - 
"tests.pipeline.test_components", - pipeline_base_dir=PIPELINE_BASE_DIR_PATH, defaults=RESOURCE_PATH, ) assert len(pipeline) == 3 @@ -32,17 +29,14 @@ def test_load_pipeline(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "first-pipeline/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) @@ -55,10 +49,7 @@ def test_generate_with_steps_flag_should_write_log_warning( app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "first-pipeline/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), "--steps", @@ -76,24 +67,21 @@ def test_generate_with_steps_flag_should_write_log_warning( ) ] - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout def test_name_equal_prefix_name_concatenation(self): result = runner.invoke( app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "name_prefix_concatenation/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) @@ -105,10 +93,7 @@ def test_pipelines_with_env_values(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "pipeline-with-envs/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), "--environment", @@ -117,7 +102,7 @@ def test_pipelines_with_env_values(self, snapshot: SnapshotTest): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -127,17 +112,14 @@ def test_inflate_pipeline(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "pipeline-with-inflate/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -147,17 +129,14 @@ def test_substitute_in_component(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "component-type-substitution/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( @@ -195,13 +174,10 @@ def test_substitute_in_component_infinite_loop(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str( RESOURCE_PATH / "component-type-substitution/infinite_pipeline.yaml", ), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], @@ -213,8 +189,6 @@ def test_kafka_connector_config_parsing(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "kafka-connect-sink-config/pipeline.yaml"), 
"--defaults", str(RESOURCE_PATH), @@ -235,17 +209,14 @@ def test_no_input_topic(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "no-input-topic-pipeline/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -255,8 +226,6 @@ def test_no_user_defined_components(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "no-user-defined-components/pipeline.yaml"), "--defaults", str(RESOURCE_PATH), @@ -264,7 +233,7 @@ def test_no_user_defined_components(self, snapshot: SnapshotTest): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -275,8 +244,6 @@ def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "kafka-connect-sink/pipeline.yaml"), "--defaults", str(RESOURCE_PATH), @@ -284,7 +251,7 @@ def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -294,17 +261,14 @@ def test_read_from_component(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "read-from-component/pipeline.yaml"), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -314,8 +278,6 @@ def test_with_env_defaults(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "kafka-connect-sink/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "pipeline-with-env-defaults"), @@ -325,7 +287,7 @@ def test_with_env_defaults(self, snapshot: SnapshotTest): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -335,8 +297,6 @@ def test_prefix_pipeline_component(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str( RESOURCE_PATH / "pipeline-component-should-have-prefix/pipeline.yaml", @@ -347,7 +307,7 @@ def test_prefix_pipeline_component(self, snapshot: SnapshotTest): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -360,8 +320,6 @@ def test_with_custom_config_with_relative_defaults_path( app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", str(RESOURCE_PATH / "custom-config"), @@ 
-371,7 +329,7 @@ def test_with_custom_config_with_relative_defaults_path( catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) producer_details = enriched_pipeline[0] @@ -405,8 +363,6 @@ def test_with_custom_config_with_absolute_defaults_path( app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", str(temp_config_path.parent), @@ -416,7 +372,7 @@ def test_with_custom_config_with_absolute_defaults_path( catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) producer_details = enriched_pipeline[0] @@ -438,8 +394,6 @@ def test_default_config(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "no-topics-defaults"), @@ -449,7 +403,7 @@ def test_default_config(self, snapshot: SnapshotTest): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) producer_details = enriched_pipeline[0] @@ -471,8 +425,6 @@ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", str(RESOURCE_PATH / "custom-config"), @@ -481,7 +433,7 @@ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch): ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) assert enriched_pipeline[0]["app"]["streams"]["brokers"] == "env_broker" @@ -494,8 +446,6 @@ def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", str(RESOURCE_PATH / "custom-config"), @@ -504,7 +454,7 @@ def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"] @@ -520,8 +470,6 @@ def test_env_specific_config_env_def_in_env_var( app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", config_path, @@ -530,7 +478,7 @@ def test_env_specific_config_env_def_in_env_var( ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"] @@ -556,8 +504,6 @@ def test_env_specific_config_env_def_in_cli( app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", config_path, @@ -568,7 +514,7 @@ def test_env_specific_config_env_def_in_cli( ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( 
enriched_pipeline[0]["app"]["streams"]["schemaRegistryUrl"] == expected_url @@ -579,8 +525,6 @@ def test_config_dir_doesnt_exist(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", "./non-existent-dir", @@ -599,8 +543,6 @@ def test_model_serialization(self, snapshot: SnapshotTest): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "pipeline-with-paths/pipeline.yaml"), "--defaults", str(RESOURCE_PATH), @@ -608,7 +550,7 @@ def test_model_serialization(self, snapshot: SnapshotTest): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") @@ -618,8 +560,6 @@ def test_dotenv_support(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--defaults", str(RESOURCE_PATH), @@ -632,7 +572,7 @@ def test_dotenv_support(self): ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( @@ -645,8 +585,6 @@ def test_short_topic_definition(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "pipeline-with-short-topics/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "pipeline-with-short-topics"), @@ -654,7 +592,7 @@ def test_short_topic_definition(self): catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) @@ -692,13 +630,10 @@ def test_kubernetes_app_name_validation(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str( RESOURCE_PATH / "pipeline-with-illegal-kubernetes-name/pipeline.yaml", ), - "tests.pipeline.test_components", "--defaults", str(RESOURCE_PATH), ], @@ -714,8 +649,6 @@ def test_validate_unique_step_names(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "pipeline-duplicate-step-names/pipeline.yaml"), "--defaults", str(RESOURCE_PATH), @@ -728,15 +661,13 @@ def test_temp_trim_release_name(self): app, [ "generate", - "--pipeline-base-dir", - str(PIPELINE_BASE_DIR_PATH), str(RESOURCE_PATH / "temp-trim-release-name/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "temp-trim-release-name"), ], catch_exceptions=False, ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout enriched_pipeline: dict = yaml.safe_load(result.stdout) assert ( enriched_pipeline[0]["name"] diff --git a/tests/pipeline/test_template.py b/tests/pipeline/test_template.py index 40b9dadbf..efd332bb6 100644 --- a/tests/pipeline/test_template.py +++ b/tests/pipeline/test_template.py @@ -11,7 +11,6 @@ runner = CliRunner() RESOURCE_PATH = Path(__file__).parent / "resources" -PIPELINE_BASE_DIR = str(RESOURCE_PATH.parent) class TestTemplate: @@ -26,8 +25,6 @@ def test_default_template_config(self, run_command: MagicMock): app, [ "generate", - "--pipeline-base-dir", - PIPELINE_BASE_DIR, str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "no-topics-defaults"), @@ -56,7 +53,7 @@ def test_default_template_config(self, run_command: MagicMock): ], ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout def 
test_template_config_with_flags(self, run_command: MagicMock): run_command.return_value = "v3.12.0+gc9f554d" @@ -65,8 +62,6 @@ def test_template_config_with_flags(self, run_command: MagicMock): app, [ "generate", - "--pipeline-base-dir", - PIPELINE_BASE_DIR, str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--defaults", str(RESOURCE_PATH / "no-topics-defaults"), @@ -99,4 +94,4 @@ def test_template_config_with_flags(self, run_command: MagicMock): ], ) - assert result.exit_code == 0 + assert result.exit_code == 0, result.stdout From 96c9d2de3b29ee30ca11a19a4598df5661ec264b Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Thu, 21 Dec 2023 16:23:44 +0100 Subject: [PATCH 16/34] Update KPOps runner with the new options (#395) --- action.yaml | 11 ++++------- .../user/references/ci-integration/github-actions.md | 4 ++-- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/action.yaml b/action.yaml index ff2533251..7d2cb85b0 100644 --- a/action.yaml +++ b/action.yaml @@ -11,17 +11,14 @@ inputs: working-directory: description: "The root directory containing the config.yaml, pipelines folder and defaults" default: "." - pipeline-base-dir: - description: "Directory where relative pipeline variables are initialized from" - required: false defaults: description: "Defaults folder path" required: false config: - description: "config.yaml file path" + description: "Directory containing the config*.yaml file(s)" required: false - components: - description: "Components package path" + environment: + description: "Environment to run KPOps in" required: false filter-type: description: "Whether to include/exclude the steps defined in KPOPS_PIPELINE_STEPS (default is include)" @@ -79,4 +76,4 @@ runs: - name: ${{ inputs.command }} ${{ inputs.pipeline }} pipeline shell: bash working-directory: ${{inputs.working-directory}} - run: kpops ${{ inputs.command }} ${{ inputs.pipeline }} ${{ inputs.components }} ${{ (inputs.defaults != '' && format('--defaults {0}', inputs.defaults)) || '' }} ${{ (inputs.config != '' && format('--config {0}', inputs.config)) || '' }} ${{ (inputs.pipeline-base-dir != '' && format('--pipeline-base-dir {0}', inputs.pipeline-base-dir)) || '' }} ${{ (inputs.filter-type != '' && format('--filter-type {0}', inputs.filter-type)) || '' }} + run: kpops ${{ inputs.command }} ${{ inputs.pipeline }} ${{ (inputs.defaults != '' && format('--defaults {0}', inputs.defaults)) || '' }} ${{ (inputs.config != '' && format('--config {0}', inputs.config)) || '' }} ${{ (inputs.environment != '' && format('--environment {0}', inputs.environment)) || '' }} ${{ (inputs.filter-type != '' && format('--filter-type {0}', inputs.filter-type)) || '' }} diff --git a/docs/docs/user/references/ci-integration/github-actions.md b/docs/docs/user/references/ci-integration/github-actions.md index 87f9098f1..7a5c233e9 100644 --- a/docs/docs/user/references/ci-integration/github-actions.md +++ b/docs/docs/user/references/ci-integration/github-actions.md @@ -9,9 +9,9 @@ We provided a GitHub composite action `bakdata/kpops` that installs and executes | command | ✅ | - | string | KPOps command to run. generate, deploy, destroy, reset, clean are possible values. Flags such as --dry-run and --execute need to be specified | | pipeline | ✅ | - | string | Pipeline to run by KPOps | | working-directory | ❌ | . 
| string | root directory used by KPOps to run pipelines |
-| pipeline-base-dir | ❌ | - | string | directory where relative pipeline variables are initialized from |
 | defaults | ❌ | - | string | defaults folder path |
-| config | ❌ | - | string | config.yaml file path |
+| config | ❌ | - | string | Directory containing the config*.yaml file(s) |
+| environment | ❌ | - | string | Environment to run KPOps in |
 | components | ❌ | - | string | components package path |
 | filter-type | ❌ | - | string | Whether to include/exclude the steps defined in KPOPS_PIPELINE_STEPS |
 | python-version | ❌ | "3.11.x" | string | Python version to install (Defaults to the latest stable version of Python 3.11) |

From 96c196f9f3fcc200bae0543a48286a33b74b95ed Mon Sep 17 00:00:00 2001
From: Salomon Popp
Date: Thu, 21 Dec 2023 18:21:44 +0100
Subject: [PATCH 17/34] Add steps for KubernetesApp->HelmApp to migration guide

---
 docs/docs/user/migration-guide/v2-v3.md | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
index 6a6e0f4f4..ede1a29f6 100644
--- a/docs/docs/user/migration-guide/v2-v3.md
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -1,5 +1,29 @@
 # Migrate from V2 to V3
 
+## [Create HelmApp component](https://github.com/bakdata/kpops/pull/370)
+
+All Helm-specific parts of the built-in [`KubernetesApp`](../core-concepts/components/kubernetes-app.md) have been extracted to a new child component that is more appropriately named [`HelmApp`](../core-concepts/components/helm-app.md). It has to be renamed in your existing pipeline definitions and custom components module.
+
+#### pipeline.yaml
+
+```diff
+-- type: kubernetes-app
++- type: helm-app
+  name: foo
+```
+
+#### custom_module.py
+
+```diff
+- from kpops.components import KubernetesApp
++ from kpops.components import HelmApp
+
+
+- class CustomHelmApp(KubernetesApp):
++ class CustomHelmApp(HelmApp):
+   ...
+``` + ## [Make Kafka REST Proxy & Kafka Connect hosts default and improve Schema Registry config](https://github.com/bakdata/kpops/pull/354) The breaking changes target the `config.yaml` file: From 3cd1095997dfbf42cf16cceb04d2da31d3cf8ae6 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Fri, 22 Dec 2023 10:10:57 +0100 Subject: [PATCH 18/34] Fix KPOps action to get package from testPyPI (#396) --- action.yaml | 7 ++++++- .../references/ci-integration/github-actions.md | 16 ++-------------- 2 files changed, 8 insertions(+), 15 deletions(-) diff --git a/action.yaml b/action.yaml index 7d2cb85b0..eeb4346cd 100644 --- a/action.yaml +++ b/action.yaml @@ -70,7 +70,12 @@ runs: shell: bash run: | echo "::group::install kpops package" - pip install -r "${{ steps.requirements.outputs.path }}" + # Check if kpops-version contains ".dev" + if [[ "${{ inputs.kpops-version }}" == *".dev"* ]]; then + pip install -r "${{ steps.requirements.outputs.path }}" -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ + else + pip install -r "${{ steps.requirements.outputs.path }}" + fi echo "::endgroup::" - name: ${{ inputs.command }} ${{ inputs.pipeline }} pipeline diff --git a/docs/docs/user/references/ci-integration/github-actions.md b/docs/docs/user/references/ci-integration/github-actions.md index 7a5c233e9..c1c726e18 100644 --- a/docs/docs/user/references/ci-integration/github-actions.md +++ b/docs/docs/user/references/ci-integration/github-actions.md @@ -33,25 +33,13 @@ steps: pipeline: pipelines/my-pipeline-file.yaml kpops-version: 1.2.3 + # It is possible to use a pre-release KPOps version from TestPyPI https://test.pypi.org/project/kpops/#history - name: Deploy Kafka pipeline uses: bakdata/kpops@main with: command: deploy --execute working-directory: home/my-kpops-root-dir pipeline: pipelines/my-pipeline-file.yaml - kpops-version: 1.2.3 + kpops-version: 1.2.5.dev20230707132709 # ... ``` - -It is possible to use a pre-release KPOps version from [TestPyPI](https://test.pypi.org/project/kpops/#history). 
-
-```yaml
-steps:
-  - name: Deploy Kafka pipeline
-    uses: bakdata/kpops@main
-    with:
-      command: deploy --execute
-      working-directory: home/my-kpops-root-dir
-      pipeline: pipelines/my-pipeline-file.yaml
-      kpops-version: 1.2.5.dev20230707132709 -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/
-```

From 4a415944706c495f17eb2896ee152e11bdec522b Mon Sep 17 00:00:00 2001
From: Ramin Gharib
Date: Tue, 2 Jan 2024 14:01:35 +0100
Subject: [PATCH 19/34] Use hash and trim long Helm release names instead of only trimming (#390)

fixes https://github.com/bakdata/kpops/issues/46
---
 docs/docs/user/migration-guide/v2-v3.md | 16 +++++++
 .../component_handlers/helm_wrapper/utils.py | 24 ++++++----
 kpops/components/base_components/helm_app.py | 9 +++-
 kpops/components/base_components/kafka_app.py | 38 +++++++--------
 .../base_components/kafka_connector.py | 7 ++-
 .../helm_wrapper/test_utils.py | 48 ++++++++++---------
 tests/components/test_kafka_app.py | 3 +-
 tests/components/test_kafka_connector.py | 19 +++++++-
 tests/components/test_kafka_sink_connector.py | 25 +++++-----
 .../components/test_kafka_source_connector.py | 27 ++++++-----
 tests/components/test_kubernetes_app.py | 14 +++++-
 tests/components/test_producer_app.py | 33 +++++++------
 tests/components/test_streams_app.py | 43 +++++++++--------
 tests/pipeline/test_template.py | 7 ++-
 14 files changed, 193 insertions(+), 120 deletions(-)

diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
index ede1a29f6..38b86bea0 100644
--- a/docs/docs/user/migration-guide/v2-v3.md
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -1,5 +1,21 @@
 # Migrate from V2 to V3
 
+## [Use hash and trim long Helm release names instead of only trimming](https://github.com/bakdata/kpops/pull/390)
+
+KPOps handles long Helm release names (longer than 52 characters) differently. Helm will not find your (long) old release names anymore. Therefore, it is recommended to destroy your pipeline once with KPOps v2 to remove the old Helm release names. After a clean destroy, re-deploy your pipeline with KPOps v3.
+
+For example, if you have a component with the Helm release name `example-component-name-too-long-fake-fakefakefakefakefake`, the new scheme shortens the original name to 52 characters and replaces the last 6 characters of the trimmed name with a dash followed by the first 5 characters of SHA-1(helm_release_name).
+
+
+
+```console
+example-component-name-too-long-fake-fakefakef-0a7fc ----> 52 chars
+---------------------------------------------- -----
+^Shortened helm_release_name                   ^first 5 characters of SHA1(helm_release_name)
+```
+
+
+
 ## [Create HelmApp component](https://github.com/bakdata/kpops/pull/370)
 
 All Helm-specific parts of the built-in [`KubernetesApp`](../core-concepts/components/kubernetes-app.md) have been extracted to a new child component that is more appropriately named [`HelmApp`](../core-concepts/components/helm-app.md). It has to be renamed in your existing pipeline definitions and custom components module.
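To illustrate the scheme above, here is a minimal, self-contained sketch that mirrors the `create_helm_release_name` implementation introduced by this patch (condensed from the `kpops/component_handlers/helm_wrapper/utils.py` change below; it is an illustration, not a public KPOps API):

```python
import hashlib

RELEASE_NAME_MAX_LEN = 52


def create_helm_release_name(name: str, suffix: str = "") -> str:
    # Names within the Helm length limit are returned unchanged.
    if len(name) <= RELEASE_NAME_MAX_LEN:
        return name
    # Trim to the first RELEASE_NAME_MAX_LEN - len(suffix) characters...
    exact_name = name[: RELEASE_NAME_MAX_LEN - len(suffix)]
    # ...then swap the last 6 characters for "-" plus the first 5 hex digits
    # of SHA-1(name), making collisions between truncated names unlikely.
    hash_name = hashlib.sha1(name.encode("utf-8")).hexdigest()
    return exact_name[:-6] + "-" + hash_name[:5] + suffix


# Reproduces the example from the migration guide (58 chars -> 52 chars):
assert (
    create_helm_release_name(
        "example-component-name-too-long-fake-fakefakefakefakefake"
    )
    == "example-component-name-too-long-fake-fakefakef-0a7fc"
)
```

A passed suffix such as `-clean` is kept intact at the end of the shortened name, so cleanup releases stay distinguishable from the install release of the same component.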
diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py
index 7ad76b93a..5f5577842 100644
--- a/kpops/component_handlers/helm_wrapper/utils.py
+++ b/kpops/component_handlers/helm_wrapper/utils.py
@@ -1,22 +1,30 @@
+import hashlib
 import logging
 
 log = logging.getLogger("HelmUtils")
-
+ENCODING = "utf-8"
 RELEASE_NAME_MAX_LEN = 52
 
 
-def trim_release_name(name: str, suffix: str = "") -> str:
-    """Trim Helm release name while preserving suffix.
+def create_helm_release_name(name: str, suffix: str = "") -> str:
+    """Shorten a long Helm release name.
+
+    Creates a 52-character release name if the given name exceeds the Helm release name length limit.
+    It first trims the string, keeping the first RELEASE_NAME_MAX_LEN - len(suffix) characters.
+    Then it replaces the last 6 characters with "-" plus the first 5 characters of the name's SHA-1 hash to avoid collisions,
+    and appends the suffix if given.
 
-    :param name: The release name including optional suffix
+    :param name: The Helm release name to be shortened.
     :param suffix: The release suffix to preserve
-    :return: Truncated release name.
+    :return: Trimmed and hashed version of the release name if it exceeds the Helm release name length limit, otherwise the original name
     """
     if len(name) > RELEASE_NAME_MAX_LEN:
-        new_name = name[: (RELEASE_NAME_MAX_LEN - len(suffix))] + suffix
+        exact_name = name[: RELEASE_NAME_MAX_LEN - len(suffix)]
+        hash_name = hashlib.sha1(name.encode(ENCODING)).hexdigest()
+        new_name = exact_name[:-6] + "-" + hash_name[:5] + suffix
         log.critical(
-            f"Invalid Helm release name '{name}'. Truncating to {RELEASE_NAME_MAX_LEN} characters: \n {name} --> {new_name}"
+            f"Invalid Helm release name '{name}'. Truncating and hashing the release name: \n {name} --> {new_name}"
         )
-        name = new_name
+        return new_name
     return name
diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py
index 5d70bacfd..6fe6293c0 100644
--- a/kpops/components/base_components/helm_app.py
+++ b/kpops/components/base_components/helm_app.py
@@ -16,6 +16,7 @@
     HelmTemplateFlags,
     HelmUpgradeInstallFlags,
 )
+from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
 from kpops.components.base_components.kubernetes_app import KubernetesApp
 from kpops.utils.colorify import magentaify
 from kpops.utils.docstring import describe_attr
@@ -67,7 +68,13 @@ def dry_run_handler(self) -> DryRunHandler:
     @property
     def helm_release_name(self) -> str:
         """The name for the Helm release. Can be overridden."""
-        return self.full_name
+        return create_helm_release_name(self.full_name)
+
+    @property
+    def clean_release_name(self) -> str:
+        """The name for the Helm release for cleanup jobs.
Can be overridden."""
+        suffix = "-clean"
+        return create_helm_release_name(self.helm_release_name, suffix)
 
     @property
     def helm_chart(self) -> str:
diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py
index b62e54bab..0346f70b0 100644
--- a/kpops/components/base_components/kafka_app.py
+++ b/kpops/components/base_components/kafka_app.py
@@ -10,9 +10,10 @@
     HelmRepoConfig,
     HelmUpgradeInstallFlags,
 )
-from kpops.component_handlers.helm_wrapper.utils import trim_release_name
 from kpops.components.base_components.helm_app import HelmApp
-from kpops.components.base_components.kubernetes_app import KubernetesAppConfig
+from kpops.components.base_components.kubernetes_app import (
+    KubernetesAppConfig,
+)
 from kpops.utils.docstring import describe_attr
 from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel
@@ -40,14 +41,16 @@ class KafkaAppConfig(KubernetesAppConfig):
     """Settings specific to Kafka Apps.
 
     :param streams: Kafka streams config
-    :param name_override: Override name with this value, defaults to None
+    :param name_override: Override name with this value
     """
 
     streams: KafkaStreamsConfig = Field(
         default=..., description=describe_attr("streams", __doc__)
     )
     name_override: str | None = Field(
-        default=None, description=describe_attr("name_override", __doc__)
+        default=None,
+        title="Nameoverride",
+        description=describe_attr("name_override", __doc__),
     )
@@ -108,28 +111,21 @@ def _run_clean_up_job(
         :param values: The value YAML for the chart
         :param dry_run: Dry run command
         :param retain_clean_jobs: Whether to retain the cleanup job, defaults to False
-        :return:
         """
-        suffix = "-clean"
-        clean_up_release_name = trim_release_name(
-            self.helm_release_name + suffix, suffix
-        )
-        log.info(f"Uninstall old cleanup job for {clean_up_release_name}")
+        log.info(f"Uninstall old cleanup job for {self.clean_release_name}")
 
-        self.__uninstall_clean_up_job(clean_up_release_name, dry_run)
+        self.__uninstall_clean_up_job(self.clean_release_name, dry_run)
 
-        log.info(f"Init cleanup job for {clean_up_release_name}")
+        log.info(f"Init cleanup job for {self.clean_release_name}")
 
-        stdout = self.__install_clean_up_job(
-            clean_up_release_name, suffix, values, dry_run
-        )
+        stdout = self.__install_clean_up_job(self.clean_release_name, values, dry_run)
 
         if dry_run:
-            self.dry_run_handler.print_helm_diff(stdout, clean_up_release_name, log)
+            self.dry_run_handler.print_helm_diff(stdout, self.clean_release_name, log)
 
         if not retain_clean_jobs:
-            log.info(f"Uninstall cleanup job for {clean_up_release_name}")
-            self.__uninstall_clean_up_job(clean_up_release_name, dry_run)
+            log.info(f"Uninstall cleanup job for {self.clean_release_name}")
+            self.__uninstall_clean_up_job(self.clean_release_name, dry_run)
 
     def __uninstall_clean_up_job(self, release_name: str, dry_run: bool) -> None:
         """Uninstall clean up job.
@@ -142,7 +138,6 @@ def __uninstall_clean_up_job(self, release_name: str, dry_run: bool) -> None:
     def __install_clean_up_job(
         self,
         release_name: str,
-        suffix: str,
         values: dict,
         dry_run: bool,
     ) -> str:
@@ -152,11 +147,10 @@ def __install_clean_up_job(
-        :param suffix: Suffix to add to the release name, e.g.
"-clean" :param values: The Helm values for the chart :param dry_run: Whether to do a dry run of the command - :return: Install clean up job with helm, return the output of the installation + :return: Return the output of the installation """ - clean_up_release_name = trim_release_name(release_name, suffix) return self.helm.upgrade_install( - clean_up_release_name, + release_name, self.clean_up_helm_chart, dry_run, self.namespace, diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 7af2c5ae4..c6f21612d 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -17,7 +17,7 @@ HelmTemplateFlags, HelmUpgradeInstallFlags, ) -from kpops.component_handlers.helm_wrapper.utils import trim_release_name +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.component_handlers.kafka_connect.model import ( KafkaConnectorConfig, KafkaConnectorType, @@ -104,8 +104,7 @@ def helm(self) -> Helm: @property def _resetter_release_name(self) -> str: suffix = "-clean" - clean_up_release_name = self.full_name + suffix - return trim_release_name(clean_up_release_name, suffix) + return create_helm_release_name(self.full_name + suffix, suffix) @property def _resetter_helm_chart(self) -> str: @@ -244,7 +243,7 @@ def _get_kafka_connect_resetter_values( **kwargs, ), connector_type=self._connector_type.value, - name_override=self.full_name, + name_override=self.full_name + "-clean", ).model_dump(), **self.resetter_values, } diff --git a/tests/component_handlers/helm_wrapper/test_utils.py b/tests/component_handlers/helm_wrapper/test_utils.py index eef6ca14f..4a7111d88 100644 --- a/tests/component_handlers/helm_wrapper/test_utils.py +++ b/tests/component_handlers/helm_wrapper/test_utils.py @@ -1,29 +1,33 @@ -from kpops.component_handlers.helm_wrapper.utils import trim_release_name +from kpops.component_handlers.helm_wrapper.utils import ( + create_helm_release_name, +) -def test_trim_release_name_with_suffix(): - name = trim_release_name( - "example-component-name-too-long-fake-fakefakefakefakefake-clean", - suffix="-clean", - ) - assert name == "example-component-name-too-long-fake-fakefakef-clean" - assert len(name) == 52 +def test_helm_release_name_for_long_names(): + long_release_name = "example-component-name-too-long-fake-fakefakefakefakefake" + actual_release_name = create_helm_release_name(long_release_name) -def test_trim_release_name_without_suffix(): - name = trim_release_name( - "example-component-name-too-long-fake-fakefakefakefakefake" - ) - assert name == "example-component-name-too-long-fake-fakefakefakefak" - assert len(name) == 52 + expected_helm_release_name = "example-component-name-too-long-fake-fakefakef-0a7fc" + assert expected_helm_release_name == actual_release_name + assert len(expected_helm_release_name) == 52 -def test_no_trim_release_name(): - assert ( - trim_release_name("normal-name-with-no-need-of-trim-clean", suffix="-clean") - == "normal-name-with-no-need-of-trim-clean" - ) - assert ( - trim_release_name("normal-name-with-no-need-of-trim") - == "normal-name-with-no-need-of-trim" +def test_helm_release_name_for_install_and_clean_must_be_different(): + long_release_name = "example-component-name-too-long-fake-fakefakefakefakefake" + + helm_clean_release_name = create_helm_release_name(long_release_name, "-clean") + expected_helm_release_name = ( + "example-component-name-too-long-fake-fakefakef-0a7fc-clean" ) + + 
assert expected_helm_release_name != helm_clean_release_name + + +def test_helm_release_name_for_short_names(): + short_release_name = "example-component-name" + + actual_helm_release_name = create_helm_release_name(short_release_name) + + assert actual_helm_release_name == short_release_name + assert len(actual_helm_release_name) < 53 diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index 06af5d4f5..6a5ed7d18 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -10,6 +10,7 @@ HelmRepoConfig, HelmUpgradeInstallFlags, ) +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.components.base_components import KafkaApp from kpops.config import KpopsConfig @@ -92,7 +93,7 @@ def test_should_deploy_kafka_app( print_helm_diff.assert_called_once() helm_upgrade_install.assert_called_once_with( - "${pipeline_name}-example-name", + create_helm_release_name("${pipeline_name}-example-name"), "test/test-chart", True, "test-namespace", diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 8ea178ef2..6c0e0dcc3 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -14,7 +14,8 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" CONNECTOR_NAME = "test-connector-with-long-name-0123456789abcdefghijklmnop" CONNECTOR_FULL_NAME = "${pipeline_name}-" + CONNECTOR_NAME -CONNECTOR_CLEAN_FULL_NAME = "${pipeline_name}-test-connector-with-long-name-clean" +CONNECTOR_CLEAN_FULL_NAME = CONNECTOR_FULL_NAME + "-clean" +CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline_name}-test-connector-with-lon-449ec-clean" CONNECTOR_CLASS = "com.bakdata.connect.TestConnector" @@ -111,3 +112,19 @@ def test_connector_config_name_override( app={"connector.class": CONNECTOR_CLASS, "name": ""}, # type: ignore[reportGeneralTypeIssues] namespace="test-namespace", ) + + def test_resetter_release_name( + self, + config: KpopsConfig, + handlers: ComponentHandlers, + connector_config: KafkaConnectorConfig, + ): + connector = KafkaConnector( + name=CONNECTOR_NAME, + config=config, + handlers=handlers, + app=connector_config, + namespace="test-namespace", + ) + assert connector.app.name == CONNECTOR_FULL_NAME + assert connector._resetter_release_name == CONNECTOR_CLEAN_RELEASE_NAME diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 25fa67498..81b5049d9 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -28,6 +28,7 @@ from kpops.utils.colorify import magentaify from tests.components.test_kafka_connector import ( CONNECTOR_CLEAN_FULL_NAME, + CONNECTOR_CLEAN_RELEASE_NAME, CONNECTOR_FULL_NAME, CONNECTOR_NAME, TestKafkaConnector, @@ -211,11 +212,11 @@ def test_reset_when_dry_run_is_false( ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=dry_run, ), mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, namespace="test-namespace", chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", dry_run=dry_run, @@ -231,12 +232,12 @@ def test_reset_when_dry_run_is_false( "connector": CONNECTOR_FULL_NAME, "deleteConsumerGroup": False, }, - "nameOverride": CONNECTOR_FULL_NAME, + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, ), mocker.call.helm.uninstall( 
namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=dry_run, ), ] @@ -301,11 +302,11 @@ def test_clean_when_dry_run_is_false( ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=dry_run, ), mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, namespace="test-namespace", chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", dry_run=dry_run, @@ -321,12 +322,12 @@ def test_clean_when_dry_run_is_false( "connector": CONNECTOR_FULL_NAME, "deleteConsumerGroup": True, }, - "nameOverride": CONNECTOR_FULL_NAME, + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=dry_run, ), ] @@ -395,11 +396,11 @@ def test_clean_without_to_when_dry_run_is_false( ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=dry_run, ), mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, namespace="test-namespace", chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", dry_run=dry_run, @@ -415,12 +416,12 @@ def test_clean_without_to_when_dry_run_is_false( "connector": CONNECTOR_FULL_NAME, "deleteConsumerGroup": True, }, - "nameOverride": CONNECTOR_FULL_NAME, + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=dry_run, ), ] diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index 5d129e987..a34efc364 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -25,11 +25,14 @@ from kpops.utils.environment import ENV from tests.components.test_kafka_connector import ( CONNECTOR_CLEAN_FULL_NAME, + CONNECTOR_CLEAN_RELEASE_NAME, CONNECTOR_FULL_NAME, CONNECTOR_NAME, TestKafkaConnector, ) +CLEAN_SUFFIX = "-clean" + class TestKafkaSourceConnector(TestKafkaConnector): @pytest.fixture() @@ -157,11 +160,11 @@ def test_reset_when_dry_run_is_false( ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=False, ), mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, namespace="test-namespace", chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", dry_run=False, @@ -177,12 +180,12 @@ def test_reset_when_dry_run_is_false( "connector": CONNECTOR_FULL_NAME, "offsetTopic": "kafka-connect-offsets", }, - "nameOverride": CONNECTOR_FULL_NAME, + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=False, ), ] @@ -232,11 +235,11 @@ def test_clean_when_dry_run_is_false( ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=False, ), mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_FULL_NAME, + 
release_name=CONNECTOR_CLEAN_RELEASE_NAME, namespace="test-namespace", chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", dry_run=False, @@ -252,12 +255,12 @@ def test_clean_when_dry_run_is_false( "connector": CONNECTOR_FULL_NAME, "offsetTopic": "kafka-connect-offsets", }, - "nameOverride": CONNECTOR_FULL_NAME, + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=False, ), ] @@ -307,11 +310,11 @@ def test_clean_without_to_when_dry_run_is_false( ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=False, ), mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, namespace="test-namespace", chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", dry_run=False, @@ -327,12 +330,12 @@ def test_clean_without_to_when_dry_run_is_false( "connector": CONNECTOR_FULL_NAME, "offsetTopic": "kafka-connect-offsets", }, - "nameOverride": CONNECTOR_FULL_NAME, + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, ), mocker.call.helm.uninstall( namespace="test-namespace", - release_name=CONNECTOR_CLEAN_FULL_NAME, + release_name=CONNECTOR_CLEAN_RELEASE_NAME, dry_run=False, ), ] diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 95ab11f6c..45f575fb8 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -5,17 +5,23 @@ from pytest_mock import MockerFixture from kpops.component_handlers import ComponentHandlers +from kpops.component_handlers.helm_wrapper.model import ( + HelmRepoConfig, +) +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.components.base_components.kubernetes_app import ( KubernetesApp, KubernetesAppConfig, ) from kpops.config import KpopsConfig +HELM_RELEASE_NAME = create_helm_release_name("${pipeline_name}-test-kubernetes-app") + DEFAULTS_PATH = Path(__file__).parent / "resources" class KubernetesTestValue(KubernetesAppConfig): - foo: str + name_override: str class TestKubernetesApp: @@ -37,7 +43,11 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture() def app_value(self) -> KubernetesTestValue: - return KubernetesTestValue(foo="foo") + return KubernetesTestValue(**{"name_override": "test-value"}) + + @pytest.fixture() + def repo_config(self) -> HelmRepoConfig: + return HelmRepoConfig(repository_name="test", url="https://bakdata.com") @pytest.fixture() def kubernetes_app( diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 89ca25bdd..ca8b67fac 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -7,6 +7,7 @@ from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmUpgradeInstallFlags +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.components import ProducerApp from kpops.components.base_components.models.to_section import ( OutputTopicTypes, @@ -16,11 +17,15 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" +PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" +PRODUCER_APP_FULL_NAME = "${pipeline_name}-" + PRODUCER_APP_NAME +PRODUCER_APP_RELEASE_NAME = 
create_helm_release_name(PRODUCER_APP_FULL_NAME) +PRODUCER_APP_CLEAN_RELEASE_NAME = create_helm_release_name( + PRODUCER_APP_RELEASE_NAME, "-clean" +) -class TestProducerApp: - PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" - PRODUCER_APP_CLEAN_NAME = "test-producer-app-with-long-n-clean" +class TestProducerApp: @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( @@ -44,7 +49,7 @@ def producer_app( self, config: KpopsConfig, handlers: ComponentHandlers ) -> ProducerApp: return ProducerApp( - name=self.PRODUCER_APP_NAME, + name=PRODUCER_APP_NAME, config=config, handlers=handlers, **{ @@ -66,7 +71,7 @@ def producer_app( def test_output_topics(self, config: KpopsConfig, handlers: ComponentHandlers): producer_app = ProducerApp( - name=self.PRODUCER_APP_NAME, + name=PRODUCER_APP_NAME, config=config, handlers=handlers, **{ @@ -115,7 +120,7 @@ def test_deploy_order_when_dry_run_is_false( assert mock.mock_calls == [ mocker.call.mock_create_topics(to_section=producer_app.to, dry_run=False), mocker.call.mock_helm_upgrade_install( - "${pipeline_name}-" + self.PRODUCER_APP_NAME, + PRODUCER_APP_RELEASE_NAME, "bakdata-streams-bootstrap/producer-app", False, "test-namespace", @@ -149,7 +154,7 @@ def test_destroy( producer_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-" + self.PRODUCER_APP_NAME, True + "test-namespace", PRODUCER_APP_RELEASE_NAME, True ) def test_should_not_reset_producer_app( @@ -175,11 +180,11 @@ def test_should_not_reset_producer_app( assert mock.mock_calls == [ mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME, + PRODUCER_APP_CLEAN_RELEASE_NAME, True, ), mocker.call.helm_upgrade_install( - "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME, + PRODUCER_APP_CLEAN_RELEASE_NAME, "bakdata-streams-bootstrap/producer-app-cleanup-job", True, "test-namespace", @@ -193,12 +198,12 @@ def test_should_not_reset_producer_app( ), mocker.call.print_helm_diff( ANY, - "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME, + PRODUCER_APP_CLEAN_RELEASE_NAME, logging.getLogger("KafkaApp"), ), mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME, + PRODUCER_APP_CLEAN_RELEASE_NAME, True, ), ] @@ -220,11 +225,11 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w assert mock.mock_calls == [ mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME, + PRODUCER_APP_CLEAN_RELEASE_NAME, False, ), mocker.call.helm_upgrade_install( - "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME, + PRODUCER_APP_CLEAN_RELEASE_NAME, "bakdata-streams-bootstrap/producer-app-cleanup-job", False, "test-namespace", @@ -238,7 +243,7 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w ), mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.PRODUCER_APP_CLEAN_NAME, + PRODUCER_APP_CLEAN_RELEASE_NAME, False, ), ] diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 93f6022f2..8943b64a3 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -9,6 +9,7 @@ HelmDiffConfig, HelmUpgradeInstallFlags, ) +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.components import StreamsApp from kpops.components.base_components.models import TopicName from 
kpops.components.base_components.models.to_section import ( @@ -20,11 +21,15 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" +STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" +STREAMS_APP_FULL_NAME = "${pipeline_name}-" + STREAMS_APP_NAME +STREAMS_APP_RELEASE_NAME = create_helm_release_name(STREAMS_APP_FULL_NAME) +STREAMS_APP_CLEAN_RELEASE_NAME = create_helm_release_name( + STREAMS_APP_RELEASE_NAME, "-clean" +) -class TestStreamsApp: - STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" - STREAMS_APP_CLEAN_NAME = "test-streams-app-with-long-na-clean" +class TestStreamsApp: @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( @@ -49,7 +54,7 @@ def streams_app( self, config: KpopsConfig, handlers: ComponentHandlers ) -> StreamsApp: return StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -69,7 +74,7 @@ def streams_app( def test_set_topics(self, config: KpopsConfig, handlers: ComponentHandlers): streams_app = StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -115,7 +120,7 @@ def test_no_empty_input_topic( self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -148,7 +153,7 @@ def test_should_validate(self, config: KpopsConfig, handlers: ComponentHandlers) ValueError, match="Define role only if `type` is `pattern` or `None`" ): StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -172,7 +177,7 @@ def test_should_validate(self, config: KpopsConfig, handlers: ComponentHandlers) ValueError, match="Define `role` only if `type` is undefined" ): StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -195,7 +200,7 @@ def test_set_streams_output_from_to( self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -234,7 +239,7 @@ def test_weave_inputs_from_prev_component( self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -273,7 +278,7 @@ def test_deploy_order_when_dry_run_is_false( mocker: MockerFixture, ): streams_app = StreamsApp( - name=self.STREAMS_APP_NAME, + name=STREAMS_APP_NAME, config=config, handlers=handlers, **{ @@ -318,7 +323,7 @@ def test_deploy_order_when_dry_run_is_false( assert mock.mock_calls == [ mocker.call.mock_create_topics(to_section=streams_app.to, dry_run=dry_run), mocker.call.mock_helm_upgrade_install( - "${pipeline_name}-" + self.STREAMS_APP_NAME, + STREAMS_APP_RELEASE_NAME, "bakdata-streams-bootstrap/streams-app", dry_run, "test-namespace", @@ -354,7 +359,7 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): streams_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-" + self.STREAMS_APP_NAME, True + "test-namespace", STREAMS_APP_RELEASE_NAME, True ) def test_reset_when_dry_run_is_false( @@ -375,11 +380,11 @@ def test_reset_when_dry_run_is_false( assert mock.mock_calls == [ mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME, + STREAMS_APP_CLEAN_RELEASE_NAME, 
dry_run, ), mocker.call.helm_upgrade_install( - "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME, + STREAMS_APP_CLEAN_RELEASE_NAME, "bakdata-streams-bootstrap/streams-app-cleanup-job", dry_run, "test-namespace", @@ -394,7 +399,7 @@ def test_reset_when_dry_run_is_false( ), mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME, + STREAMS_APP_CLEAN_RELEASE_NAME, dry_run, ), ] @@ -419,11 +424,11 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( assert mock.mock_calls == [ mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME, + STREAMS_APP_CLEAN_RELEASE_NAME, dry_run, ), mocker.call.helm_upgrade_install( - "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME, + STREAMS_APP_CLEAN_RELEASE_NAME, "bakdata-streams-bootstrap/streams-app-cleanup-job", dry_run, "test-namespace", @@ -438,7 +443,7 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( ), mocker.call.helm_uninstall( "test-namespace", - "${pipeline_name}-" + self.STREAMS_APP_CLEAN_NAME, + STREAMS_APP_CLEAN_RELEASE_NAME, dry_run, ), ] diff --git a/tests/pipeline/test_template.py b/tests/pipeline/test_template.py index efd332bb6..c4b0757bb 100644 --- a/tests/pipeline/test_template.py +++ b/tests/pipeline/test_template.py @@ -7,6 +7,9 @@ from kpops.cli.main import app from kpops.component_handlers.helm_wrapper.helm import Helm +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name + +HELM_RELEASE_NAME = create_helm_release_name("resources-custom-config-app2") runner = CliRunner() @@ -39,7 +42,7 @@ def test_default_template_config(self, run_command: MagicMock): [ "helm", "template", - "resources-custom-config-app2", + HELM_RELEASE_NAME, "bakdata-streams-bootstrap/streams-app", "--namespace", "development-namespace", @@ -78,7 +81,7 @@ def test_template_config_with_flags(self, run_command: MagicMock): [ "helm", "template", - "resources-custom-config-app2", + HELM_RELEASE_NAME, "bakdata-streams-bootstrap/streams-app", "--namespace", "development-namespace", From c256a03679ba3bcf29de62926125608436e9f089 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 18:24:09 +0100 Subject: [PATCH 20/34] Refactor Helm `nameOverride` (#397) Fixes #327 Remove previous workaround for setting Helm app `nameOverride` in favor of a more robust implementation in `HelmApp` --- docs/docs/schema/pipeline.json | 104 ++++++++++-------- .../component_handlers/kafka_connect/model.py | 15 +-- kpops/components/base_components/helm_app.py | 40 ++++++- kpops/components/base_components/kafka_app.py | 15 +-- .../base_components/kafka_connector.py | 10 +- .../base_components/kubernetes_app.py | 4 +- .../streams_bootstrap/producer/model.py | 4 +- .../producer/producer_app.py | 4 +- .../streams_bootstrap/streams/model.py | 4 +- .../streams_bootstrap/streams/streams_app.py | 4 +- kpops/pipeline.py | 10 -- tests/components/test_helm_app.py | 38 ++++--- tests/components/test_kafka_app.py | 1 + tests/components/test_kubernetes_app.py | 22 ++-- tests/components/test_producer_app.py | 9 +- tests/components/test_streams_app.py | 11 +- tests/pipeline/snapshots/snap_test_example.py | 5 - .../pipeline/snapshots/snap_test_pipeline.py | 37 ------- 18 files changed, 166 insertions(+), 171 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 0882ccfa5..77198a215 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -66,10 +66,10 @@ "app": { 
"allOf": [ { - "$ref": "#/$defs/KubernetesAppConfig" + "$ref": "#/$defs/HelmAppValues" } ], - "description": "Application-specific settings" + "description": "Helm app values" }, "from": { "anyOf": [ @@ -146,6 +146,27 @@ "title": "HelmApp", "type": "object" }, + "HelmAppValues": { + "additionalProperties": true, + "description": "Helm app values.", + "properties": { + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Override name with this value", + "title": "Nameoverride" + } + }, + "title": "HelmAppValues", + "type": "object" + }, "HelmRepoConfig": { "description": "Helm repository configuration.", "properties": { @@ -420,13 +441,6 @@ "title": "KafkaSourceConnector", "type": "object" }, - "KubernetesAppConfig": { - "additionalProperties": true, - "description": "Settings specific to Kubernetes apps.", - "properties": {}, - "title": "KubernetesAppConfig", - "type": "object" - }, "OutputTopicTypes": { "description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)", "enum": [ @@ -443,7 +457,7 @@ "app": { "allOf": [ { - "$ref": "#/$defs/ProducerValues" + "$ref": "#/$defs/ProducerAppValues" } ], "description": "Application-specific settings" @@ -523,6 +537,38 @@ "title": "ProducerApp", "type": "object" }, + "ProducerAppValues": { + "additionalProperties": true, + "description": "Settings specific to producers.", + "properties": { + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Override name with this value", + "title": "Nameoverride" + }, + "streams": { + "allOf": [ + { + "$ref": "#/$defs/ProducerStreamsConfig" + } + ], + "description": "Kafka Streams settings" + } + }, + "required": [ + "streams" + ], + "title": "ProducerAppValues", + "type": "object" + }, "ProducerStreamsConfig": { "additionalProperties": true, "description": "Kafka Streams settings specific to Producer.", @@ -574,38 +620,6 @@ "title": "ProducerStreamsConfig", "type": "object" }, - "ProducerValues": { - "additionalProperties": true, - "description": "Settings specific to producers.", - "properties": { - "nameOverride": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Override name with this value", - "title": "Nameoverride" - }, - "streams": { - "allOf": [ - { - "$ref": "#/$defs/ProducerStreamsConfig" - } - ], - "description": "Kafka Streams settings" - } - }, - "required": [ - "streams" - ], - "title": "ProducerValues", - "type": "object" - }, "RepoAuthFlags": { "description": "Authorisation-related flags for `helm repo`.", "properties": { @@ -680,7 +694,7 @@ "app": { "allOf": [ { - "$ref": "#/$defs/StreamsAppConfig" + "$ref": "#/$defs/StreamsAppValues" } ], "description": "Application-specific settings" @@ -847,7 +861,7 @@ "title": "StreamsAppAutoScaling", "type": "object" }, - "StreamsAppConfig": { + "StreamsAppValues": { "additionalProperties": true, "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { @@ -888,7 +902,7 @@ "required": [ "streams" ], - "title": "StreamsAppConfig", + "title": "StreamsAppValues", "type": "object" }, "StreamsConfig": { diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index a7ec45af9..59cdba7b9 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ 
b/kpops/component_handlers/kafka_connect/model.py @@ -11,6 +11,7 @@ from pydantic.json_schema import SkipJsonSchema from typing_extensions import override +from kpops.components.base_components.helm_app import HelmAppValues from kpops.utils.pydantic import ( CamelCaseConfigModel, DescConfigModel, @@ -92,21 +93,13 @@ class KafkaConnectConfigErrorResponse(BaseModel): configs: list[KafkaConnectConfigDescription] -class KafkaConnectResetterConfig(CamelCaseConfigModel): +class KafkaConnectorResetterConfig(CamelCaseConfigModel): brokers: str connector: str delete_consumer_group: bool | None = None offset_topic: str | None = None -class KafkaConnectResetterValues(CamelCaseConfigModel): +class KafkaConnectorResetterValues(HelmAppValues): connector_type: Literal["source", "sink"] - config: KafkaConnectResetterConfig - name_override: str - - # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer` - # BEWARE! All default values are enforced, hard to replicate without - # access to ``model_dump`` - @override - def model_dump(self, **_) -> dict[str, Any]: - return super().model_dump(by_alias=True, exclude_none=True) + config: KafkaConnectorResetterConfig diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index 6fe6293c0..36e3bd21b 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -17,7 +17,10 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.base_components.kubernetes_app import KubernetesApp +from kpops.components.base_components.kubernetes_app import ( + KubernetesApp, + KubernetesAppValues, +) from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import exclude_by_name @@ -25,6 +28,28 @@ log = logging.getLogger("HelmApp") +class HelmAppValues(KubernetesAppValues): + """Helm app values. + + :param name_override: Override name with this value + """ + + name_override: str | None = Field( + default=None, + title="Nameoverride", + description=describe_attr("name_override", __doc__), + ) + + # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer` + # BEWARE! All default values are enforced, hard to replicate without + # access to ``model_dump`` + @override + def model_dump(self, **_) -> dict[str, Any]: + return super().model_dump( + by_alias=True, exclude_none=True, exclude_defaults=True + ) + + class HelmApp(KubernetesApp): """Kubernetes app managed through Helm with an associated Helm chart. @@ -32,6 +57,7 @@ class HelmApp(KubernetesApp): deploying the component, defaults to None this means that the command "helm repo add" is not called and Helm expects a path to local Helm chart. :param version: Helm chart version, defaults to None + :param app: Helm app values """ repo_config: HelmRepoConfig | None = Field( @@ -42,6 +68,10 @@ class HelmApp(KubernetesApp): default=None, description=describe_attr("version", __doc__), ) + app: HelmAppValues = Field( + default=..., + description=describe_attr("app", __doc__), + ) @cached_property def helm(self) -> Helm: @@ -74,7 +104,7 @@ def helm_release_name(self) -> str: def clean_release_name(self) -> str: """The name for the Helm release for cleanup jobs. 
Can be overridden.""" suffix = "-clean" - return create_helm_release_name(self.helm_release_name, suffix) + return create_helm_release_name(self.full_name + suffix, suffix) @property def helm_chart(self) -> str: @@ -149,9 +179,9 @@ def to_helm_values(self) -> dict: :returns: Thte values to be used by Helm """ - return self.app.model_dump( - by_alias=True, exclude_none=True, exclude_defaults=True - ) + if self.app.name_override is None: + self.app.name_override = self.full_name + return self.app.model_dump() def print_helm_diff(self, stdout: str) -> None: """Print the diff of the last and current release of this component. diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 0346f70b0..c7c983e0d 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -10,10 +10,7 @@ HelmRepoConfig, HelmUpgradeInstallFlags, ) -from kpops.components.base_components.helm_app import HelmApp -from kpops.components.base_components.kubernetes_app import ( - KubernetesAppConfig, -) +from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel @@ -37,21 +34,15 @@ class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): ) -class KafkaAppConfig(KubernetesAppConfig): +class KafkaAppValues(HelmAppValues): """Settings specific to Kafka Apps. :param streams: Kafka streams config - :param name_override: Override name with this value """ streams: KafkaStreamsConfig = Field( default=..., description=describe_attr("streams", __doc__) ) - name_override: str | None = Field( - default=None, - title="Nameoverride", - description=describe_attr("name_override", __doc__), - ) class KafkaApp(HelmApp, ABC): @@ -66,7 +57,7 @@ class KafkaApp(HelmApp, ABC): :param version: Helm chart version, defaults to "2.9.0" """ - app: KafkaAppConfig = Field( + app: KafkaAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index c6f21612d..22a9a4d8f 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -20,9 +20,9 @@ from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.component_handlers.kafka_connect.model import ( KafkaConnectorConfig, + KafkaConnectorResetterConfig, + KafkaConnectorResetterValues, KafkaConnectorType, - KafkaConnectResetterConfig, - KafkaConnectResetterValues, ) from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.models.from_section import FromTopic @@ -175,7 +175,7 @@ def _run_connect_resetter( :param dry_run: If the cleanup should be run in dry run mode or not :param retain_clean_jobs: If the cleanup job should be kept - :param kwargs: Other values for the KafkaConnectResetter + :param kwargs: Other values for the KafkaConnectorResetter """ log.info( magentaify( @@ -236,8 +236,8 @@ def _get_kafka_connect_resetter_values( :return: The Helm chart values of the connector resetter """ return { - **KafkaConnectResetterValues( - config=KafkaConnectResetterConfig( + **KafkaConnectorResetterValues( + config=KafkaConnectorResetterConfig( connector=self.full_name, brokers=self.config.kafka_brokers, **kwargs, diff --git 
a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index cae474cee..2b4065191 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -18,7 +18,7 @@ ) -class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): +class KubernetesAppValues(CamelCaseConfigModel, DescConfigModel): """Settings specific to Kubernetes apps.""" model_config = ConfigDict( @@ -39,7 +39,7 @@ class KubernetesApp(PipelineComponent, ABC): default=..., description=describe_attr("namespace", __doc__), ) - app: KubernetesAppConfig = Field( + app: KubernetesAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 01bda1dbc..53db5af67 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -1,7 +1,7 @@ from pydantic import ConfigDict, Field from kpops.components.base_components.kafka_app import ( - KafkaAppConfig, + KafkaAppValues, KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr @@ -22,7 +22,7 @@ class ProducerStreamsConfig(KafkaStreamsConfig): ) -class ProducerValues(KafkaAppConfig): +class ProducerAppValues(KafkaAppValues): """Settings specific to producers. :param streams: Kafka Streams settings diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 6091cdd77..e37529bae 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -9,7 +9,7 @@ TopicConfig, ) from kpops.components.streams_bootstrap.app_type import AppType -from kpops.components.streams_bootstrap.producer.model import ProducerValues +from kpops.components.streams_bootstrap.producer.model import ProducerAppValues from kpops.utils.docstring import describe_attr @@ -25,7 +25,7 @@ class ProducerApp(KafkaApp): :param from_: Producer doesn't support FromSection, defaults to None """ - app: ProducerValues = Field( + app: ProducerAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 2c8b952ce..b52bc162c 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -5,7 +5,7 @@ from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( - KafkaAppConfig, + KafkaAppValues, KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr @@ -166,7 +166,7 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): model_config = ConfigDict(extra="allow") -class StreamsAppConfig(KafkaAppConfig): +class StreamsAppValues(KafkaAppValues): """StreamsBoostrap app configurations. The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart. 
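A minimal standalone sketch of the serialization behavior these diffs introduce, assuming pydantic>=2: `HelmAppValues` allows arbitrary extra Helm values, serializes by camelCase alias, and drops `None` values and untouched defaults, while `to_helm_values` fills in `name_override` only when unset. The class name `HelmValuesSketch` is hypothetical, and an explicit `alias` stands in for KPOps' `CamelCaseConfigModel`:

```python
from pydantic import BaseModel, ConfigDict, Field


class HelmValuesSketch(BaseModel):
    """Stand-in for HelmAppValues: extra Helm values allowed."""

    model_config = ConfigDict(extra="allow", populate_by_name=True)

    name_override: str | None = Field(default=None, alias="nameOverride")

    def model_dump(self, **_) -> dict:
        # Mirrors the override in the patch: serialize by alias,
        # drop None values and untouched defaults.
        return super().model_dump(
            by_alias=True, exclude_none=True, exclude_defaults=True
        )


values = HelmValuesSketch(replicaCount=2)  # arbitrary extra Helm value
values.name_override = "my-pipeline-my-app"  # what to_helm_values() injects
print(values.model_dump())
# {'nameOverride': 'my-pipeline-my-app', 'replicaCount': 2}
```

Because `to_helm_values` only assigns `name_override` when it is `None`, a `nameOverride` set explicitly in the pipeline definition still takes precedence over the generated full name.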
diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index a466b4eba..e8a434b70 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -3,7 +3,7 @@ from kpops.components.base_components.kafka_app import KafkaApp from kpops.components.streams_bootstrap.app_type import AppType -from kpops.components.streams_bootstrap.streams.model import StreamsAppConfig +from kpops.components.streams_bootstrap.streams.model import StreamsAppValues from kpops.utils.docstring import describe_attr @@ -13,7 +13,7 @@ class StreamsApp(KafkaApp): :param app: Application-specific settings """ - app: StreamsAppConfig = Field( + app: StreamsAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/pipeline.py b/kpops/pipeline.py index ad69521e1..a409f8e35 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -3,7 +3,6 @@ import json import logging from collections import Counter -from contextlib import suppress from dataclasses import dataclass, field from typing import TYPE_CHECKING @@ -53,7 +52,6 @@ def find(self, component_name: str) -> PipelineComponent: raise ValueError(msg) def add(self, component: PipelineComponent) -> None: - self._populate_component_name(component) self.root.append(component) def __bool__(self) -> bool: @@ -78,14 +76,6 @@ def validate_unique_names(self) -> None: msg = f"step names should be unique. duplicate step names: {', '.join(duplicates)}" raise ValidationError(msg) - @staticmethod - def _populate_component_name(component: PipelineComponent) -> None: # TODO: remove - with suppress( - AttributeError # Some components like Kafka Connect do not have a name_override attribute - ): - if (app := getattr(component, "app")) and app.name_override is None: - app.name_override = component.full_name - def create_env_components_index( environment_components: list[dict], diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index 0b933b1e9..e43c9de41 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -12,18 +12,13 @@ HelmUpgradeInstallFlags, RepoAuthFlags, ) -from kpops.components.base_components.helm_app import HelmApp -from kpops.components.base_components.kubernetes_app import KubernetesAppConfig +from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.config import KpopsConfig from kpops.utils.colorify import magentaify DEFAULTS_PATH = Path(__file__).parent / "resources" -class HelmTestValue(KubernetesAppConfig): - name_override: str - - class TestHelmApp: @pytest.fixture() def config(self) -> KpopsConfig: @@ -51,8 +46,8 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.helm_app.log.info") @pytest.fixture() - def app_value(self) -> HelmTestValue: - return HelmTestValue(name_override="test-value") + def app_values(self) -> HelmAppValues: + return HelmAppValues(**{"foo": "test-value"}) @pytest.fixture() def repo_config(self) -> HelmRepoConfig: @@ -63,14 +58,14 @@ def helm_app( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: HelmTestValue, + app_values: HelmAppValues, repo_config: HelmRepoConfig, ) -> HelmApp: return HelmApp( name="test-helm-app", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", repo_config=repo_config, ) @@ -97,7 +92,10 @@ def 
test_should_lazy_load_helm_wrapper_and_not_repo_add( "test/test-chart", False, "test-namespace", - {"nameOverride": "test-value"}, + { + "nameOverride": "${pipeline_name}-test-helm-app", + "foo": "test-value", + }, HelmUpgradeInstallFlags(), ) @@ -107,7 +105,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( handlers: ComponentHandlers, helm_mock: MagicMock, mocker: MockerFixture, - app_value: HelmTestValue, + app_values: HelmAppValues, ): repo_config = HelmRepoConfig( repository_name="test-repo", url="https://test.com/charts/" @@ -116,7 +114,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( name="test-helm-app", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", repo_config=repo_config, version="3.4.5", @@ -142,7 +140,10 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( "test/test-chart", False, "test-namespace", - {"nameOverride": "test-value"}, + { + "nameOverride": "${pipeline_name}-test-helm-app", + "foo": "test-value", + }, HelmUpgradeInstallFlags(version="3.4.5"), ), ] @@ -152,7 +153,7 @@ def test_should_deploy_app_with_local_helm_chart( config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, - app_value: HelmTestValue, + app_values: HelmAppValues, ): class AppWithLocalChart(HelmApp): repo_config: None = None @@ -166,7 +167,7 @@ def helm_chart(self) -> str: name="test-app-with-local-chart", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) @@ -179,7 +180,10 @@ def helm_chart(self) -> str: "path/to/helm/charts/", False, "test-namespace", - {"nameOverride": "test-value"}, + { + "nameOverride": "${pipeline_name}-test-app-with-local-chart", + "foo": "test-value", + }, HelmUpgradeInstallFlags(), ) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index 6a5ed7d18..d7e8fd5d4 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -98,6 +98,7 @@ def test_should_deploy_kafka_app( True, "test-namespace", { + "nameOverride": "${pipeline_name}-example-name", "streams": {"brokers": "fake-broker:9092", "outputTopic": "test"}, }, HelmUpgradeInstallFlags(version="1.2.3"), diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 45f575fb8..ebc2701fa 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -11,7 +11,7 @@ from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.components.base_components.kubernetes_app import ( KubernetesApp, - KubernetesAppConfig, + KubernetesAppValues, ) from kpops.config import KpopsConfig @@ -20,8 +20,8 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" -class KubernetesTestValue(KubernetesAppConfig): - name_override: str +class KubernetesTestValues(KubernetesAppValues): + foo: str class TestKubernetesApp: @@ -42,8 +42,8 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.kubernetes_app.log.info") @pytest.fixture() - def app_value(self) -> KubernetesTestValue: - return KubernetesTestValue(**{"name_override": "test-value"}) + def app_values(self) -> KubernetesTestValues: + return KubernetesTestValues(foo="foo") @pytest.fixture() def repo_config(self) -> HelmRepoConfig: @@ -54,13 +54,13 @@ def kubernetes_app( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: KubernetesTestValue, + app_values: 
KubernetesTestValues, ) -> KubernetesApp: return KubernetesApp( name="test-kubernetes-app", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) @@ -68,7 +68,7 @@ def test_should_raise_value_error_when_name_is_not_valid( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: KubernetesTestValue, + app_values: KubernetesTestValues, ): with pytest.raises( ValueError, match=r"The component name .* is invalid for Kubernetes." @@ -77,7 +77,7 @@ def test_should_raise_value_error_when_name_is_not_valid( name="Not-Compatible*", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) @@ -88,7 +88,7 @@ def test_should_raise_value_error_when_name_is_not_valid( name="snake_case*", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) @@ -96,6 +96,6 @@ def test_should_raise_value_error_when_name_is_not_valid( name="valid-name", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index ca8b67fac..07e78bb6a 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -20,12 +20,16 @@ PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" PRODUCER_APP_FULL_NAME = "${pipeline_name}-" + PRODUCER_APP_NAME PRODUCER_APP_RELEASE_NAME = create_helm_release_name(PRODUCER_APP_FULL_NAME) +PRODUCER_APP_CLEAN_FULL_NAME = PRODUCER_APP_FULL_NAME + "-clean" PRODUCER_APP_CLEAN_RELEASE_NAME = create_helm_release_name( - PRODUCER_APP_RELEASE_NAME, "-clean" + PRODUCER_APP_CLEAN_FULL_NAME, "-clean" ) class TestProducerApp: + def test_release_name(self): + assert PRODUCER_APP_CLEAN_RELEASE_NAME.endswith("-clean") + @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( @@ -125,6 +129,7 @@ def test_deploy_order_when_dry_run_is_false( False, "test-namespace", { + "nameOverride": PRODUCER_APP_FULL_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", @@ -189,6 +194,7 @@ def test_should_not_reset_producer_app( True, "test-namespace", { + "nameOverride": PRODUCER_APP_FULL_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", @@ -234,6 +240,7 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w False, "test-namespace", { + "nameOverride": PRODUCER_APP_FULL_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 8943b64a3..ebd5cf7d9 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -24,12 +24,16 @@ STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" STREAMS_APP_FULL_NAME = "${pipeline_name}-" + STREAMS_APP_NAME STREAMS_APP_RELEASE_NAME = create_helm_release_name(STREAMS_APP_FULL_NAME) +STREAMS_APP_CLEAN_FULL_NAME = STREAMS_APP_FULL_NAME + "-clean" STREAMS_APP_CLEAN_RELEASE_NAME = create_helm_release_name( - STREAMS_APP_RELEASE_NAME, "-clean" + STREAMS_APP_CLEAN_FULL_NAME, "-clean" ) class TestStreamsApp: + def test_release_name(self): + assert STREAMS_APP_CLEAN_RELEASE_NAME.endswith("-clean") + @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( @@ -328,6 +332,7 @@ def test_deploy_order_when_dry_run_is_false( dry_run, 
"test-namespace", { + "nameOverride": STREAMS_APP_FULL_NAME, "streams": { "brokers": "fake-broker:9092", "extraOutputTopics": { @@ -336,7 +341,7 @@ def test_deploy_order_when_dry_run_is_false( }, "outputTopic": "${output_topic_name}", "errorTopic": "${error_topic_name}", - } + }, }, HelmUpgradeInstallFlags( create_namespace=False, @@ -389,6 +394,7 @@ def test_reset_when_dry_run_is_false( dry_run, "test-namespace", { + "nameOverride": STREAMS_APP_FULL_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", @@ -433,6 +439,7 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( dry_run, "test-namespace", { + "nameOverride": STREAMS_APP_FULL_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index 14d3d650c..a88a7ee4a 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -13,7 +13,6 @@ 'debug': True, 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', 'imageTag': '1.0.0', - 'nameOverride': 'account-producer', 'prometheus': { 'jmx': { 'enabled': False @@ -64,7 +63,6 @@ 'debug': True, 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer', 'imageTag': '1.0.0', - 'nameOverride': 'transaction-avro-producer', 'prometheus': { 'jmx': { 'enabled': False @@ -120,7 +118,6 @@ 'labels': { 'pipeline': 'bakdata-atm-fraud-detection' }, - 'nameOverride': 'transaction-joiner', 'prometheus': { 'jmx': { 'enabled': False @@ -182,7 +179,6 @@ 'labels': { 'pipeline': 'bakdata-atm-fraud-detection' }, - 'nameOverride': 'fraud-detector', 'prometheus': { 'jmx': { 'enabled': False @@ -244,7 +240,6 @@ 'labels': { 'pipeline': 'bakdata-atm-fraud-detection' }, - 'nameOverride': 'account-linker', 'prometheus': { 'jmx': { 'enabled': False diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py index c9fee4d4b..0da4f9260 100644 --- a/tests/pipeline/snapshots/snap_test_pipeline.py +++ b/tests/pipeline/snapshots/snap_test_pipeline.py @@ -10,7 +10,6 @@ snapshots['TestPipeline.test_default_config test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-custom-config-app1', 'resources': { 'limits': { 'memory': '2G' @@ -58,7 +57,6 @@ 'labels': { 'pipeline': 'resources-custom-config' }, - 'nameOverride': 'resources-custom-config-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'resources-custom-config-app2-error', @@ -110,7 +108,6 @@ }, 'image': 'example-registry/fake-image', 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-inflate-scheduled-producer', 'schedule': '30 3/8 * * *', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -165,7 +162,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-pipeline-with-inflate-converter', 'resources': { 'limits': { 'memory': '2G' @@ -242,7 +238,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate', 'replicaCount': 4, 'resources': { 'requests': { @@ -345,7 +340,6 @@ }, { 'app': { - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -397,7 +391,6 @@ { 'app': { 'image': 'fake-image', - 'nameOverride': 
'resources-kafka-connect-sink-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -492,7 +485,6 @@ }, 'image': 'example-registry/fake-image', 'imageTag': '0.0.1', - 'nameOverride': 'resources-first-pipeline-scheduled-producer', 'schedule': '30 3/8 * * *', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -547,7 +539,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-first-pipeline-converter', 'resources': { 'limits': { 'memory': '2G' @@ -624,7 +615,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', 'replicaCount': 4, 'resources': { 'requests': { @@ -683,7 +673,6 @@ snapshots['TestPipeline.test_model_serialization test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-pipeline-with-paths-account-producer', 'streams': { 'brokers': 'test', 'extraOutputTopics': { @@ -716,7 +705,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-no-input-topic-pipeline-app1', 'resources': { 'limits': { 'memory': '2G' @@ -779,7 +767,6 @@ }, { 'app': { - 'nameOverride': 'resources-no-input-topic-pipeline-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -839,7 +826,6 @@ { 'app': { 'image': 'fake-image', - 'nameOverride': 'resources-no-user-defined-components-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -904,7 +890,6 @@ }, 'image': 'example-registry/fake-image', 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-envs-input-producer', 'schedule': '20 3/8 * * *', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -959,7 +944,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-pipeline-with-envs-converter', 'resources': { 'limits': { 'memory': '2G' @@ -1036,7 +1020,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-envs-filter', 'replicaCount': 4, 'resources': { 'requests': { @@ -1098,7 +1081,6 @@ 'debug': True, 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', 'imageTag': '1.0.0', - 'nameOverride': 'from-pipeline-component-account-producer', 'prometheus': { 'jmx': { 'enabled': False @@ -1132,7 +1114,6 @@ snapshots['TestPipeline.test_read_from_component test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-read-from-component-producer1', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { @@ -1167,7 +1148,6 @@ }, { 'app': { - 'nameOverride': 'producer2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { @@ -1217,7 +1197,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-read-from-component-inflate-step', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1314,7 +1293,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-inflated-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1377,7 +1355,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 
'inflate-step-without-prefix', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1474,7 +1451,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1522,7 +1498,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer1', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1579,7 +1554,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1634,7 +1608,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer3', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1689,7 +1662,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer4', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1740,7 +1712,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer5', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1804,7 +1775,6 @@ 'app_schedule': '30 3/8 * * *', 'app_type': 'scheduled-producer' }, - 'nameOverride': 'resources-component-type-substitution-scheduled-producer', 'schedule': '30 3/8 * * *', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -1859,7 +1829,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-component-type-substitution-converter', 'resources': { 'limits': { 'memory': '2G' @@ -1943,7 +1912,6 @@ 'filter': 'filter-app-filter', 'test_placeholder_in_placeholder': 'filter-app-filter' }, - 'nameOverride': 'resources-component-type-substitution-filter-app', 'replicaCount': 4, 'resources': { 'requests': { @@ -2002,7 +1970,6 @@ snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-custom-config-app1', 'resources': { 'limits': { 'memory': '2G' @@ -2050,7 +2017,6 @@ 'labels': { 'pipeline': 'resources-custom-config' }, - 'nameOverride': 'resources-custom-config-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'app2-dead-letter-topic', @@ -2097,7 +2063,6 @@ snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-custom-config-app1', 'resources': { 'limits': { 'memory': '2G' @@ -2145,7 +2110,6 @@ 'labels': { 'pipeline': 'resources-custom-config' }, - 'nameOverride': 'resources-custom-config-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'app2-dead-letter-topic', @@ -2193,7 +2157,6 @@ { 'app': { 'image': 'fake-image', - 'nameOverride': 'resources-kafka-connect-sink-streams-app-development', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { From a6385dba54cf151506d4ff655ba306ce1188ea87 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 8 Jan 2024 09:30:04 +0100 Subject: [PATCH 21/34] Mark component type as computed Pydantic field (#399) - Mark component type property as computed Pydantic field - Remove hack --- 
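The removed hack existed because plain Python properties are skipped by Pydantic during serialization, so inflated components lost their `type` field. Marking the property with `@computed_field` makes Pydantic include it in `model_dump` output. A minimal sketch of that behavior, assuming pydantic>=2 (the actual patch applies the decorator to a `cached_classproperty` rather than a plain property):

```python
from pydantic import BaseModel, computed_field


class Component(BaseModel):
    name: str

    @computed_field  # include the property when serializing the model
    @property
    def type(self) -> str:
        return "streams-app"


print(Component(name="my-app").model_dump(mode="json"))
# {'name': 'my-app', 'type': 'streams-app'}
```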
kpops/components/base_components/base_defaults_component.py | 3 ++- kpops/pipeline.py | 2 -- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 883a8934d..e286b3f1f 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -8,7 +8,7 @@ from typing import TypeVar import typer -from pydantic import AliasChoices, ConfigDict, Field +from pydantic import AliasChoices, ConfigDict, Field, computed_field from pydantic.json_schema import SkipJsonSchema from kpops.component_handlers import ComponentHandlers @@ -75,6 +75,7 @@ def __init__(self, **kwargs) -> None: if kwargs.get("validate", True): self._validate_custom(**kwargs) + @computed_field @cached_classproperty def type(cls: type[Self]) -> str: # pyright: ignore[reportGeneralTypeIssues] """Return calling component's type. diff --git a/kpops/pipeline.py b/kpops/pipeline.py index a409f8e35..45a39c232 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -233,8 +233,6 @@ def enrich_component( self.env_components_index.get(component.name, {}), component.model_dump(mode="json", by_alias=True), ) - # HACK: make sure component type is set for inflated components, because property is not serialized by Pydantic - env_component_as_dict["type"] = component.type component_data = self.substitute_in_component(env_component_as_dict) From 2301aafc3733e1fd9cc6cb7352de7705ec3b4367 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 8 Jan 2024 09:30:27 +0100 Subject: [PATCH 22/34] Fix missing component type in pipeline schema (#401) --- docs/docs/schema/pipeline.json | 35 ++++++++++++++++--- kpops/utils/gen_schema.py | 9 +---- .../snapshots/snap_test_schema_generation.py | 35 ++++++++++++++++--- 3 files changed, 61 insertions(+), 18 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 77198a215..da2f31fcb 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -124,6 +124,10 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "helm-app", + "title": "Type" + }, "version": { "anyOf": [ { @@ -141,7 +145,8 @@ "required": [ "name", "namespace", - "app" + "app", + "type" ], "title": "HelmApp", "type": "object" @@ -307,6 +312,10 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "kafka-sink-connector", + "title": "Type" + }, "version": { "anyOf": [ { @@ -324,7 +333,8 @@ "required": [ "name", "namespace", - "app" + "app", + "type" ], "title": "KafkaSinkConnector", "type": "object" @@ -419,6 +429,10 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "kafka-source-connector", + "title": "Type" + }, "version": { "anyOf": [ { @@ -436,7 +450,8 @@ "required": [ "name", "namespace", - "app" + "app", + "type" ], "title": "KafkaSourceConnector", "type": "object" @@ -515,6 +530,10 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "producer-app", + "title": "Type" + }, "version": { "anyOf": [ { @@ -532,7 +551,8 @@ "required": [ "name", "namespace", - "app" + "app", + "type" ], "title": "ProducerApp", "type": "object" @@ -759,6 +779,10 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { 
+ "const": "streams-app", + "title": "Type" + }, "version": { "anyOf": [ { @@ -776,7 +800,8 @@ "required": [ "name", "namespace", - "app" + "app", + "type" ], "title": "StreamsApp", "type": "object" diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 93c4b233f..9c3448d90 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -8,7 +8,7 @@ from pydantic import Field, RootModel from pydantic.fields import FieldInfo -from pydantic.json_schema import GenerateJsonSchema, SkipJsonSchema, model_json_schema +from pydantic.json_schema import GenerateJsonSchema, model_json_schema from pydantic_core.core_schema import ( DefinitionsSchema, LiteralSchema, @@ -108,7 +108,6 @@ def gen_pipeline_schema( component.model_fields["type"] = FieldInfo( annotation=Literal[component.type], # type:ignore[valid-type] default=component.type, - exclude=True, ) core_schema: DefinitionsSchema = component.__pydantic_core_schema__ # pyright:ignore[reportGeneralTypeIssues] model_schema: ModelFieldsSchema = core_schema["schema"]["schema"] # pyright:ignore[reportGeneralTypeIssues,reportTypedDictNotRequiredAccess] @@ -117,12 +116,6 @@ def gen_pipeline_schema( schema=LiteralSchema( type="literal", expected=[component.type], - metadata={ - "pydantic.internal.needs_apply_discriminated_union": False, - "pydantic_js_annotation_functions": [ - SkipJsonSchema().__get_pydantic_json_schema__ # pyright:ignore[reportGeneralTypeIssues] - ], - }, ), ) diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index f8f75d870..4875c610b 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -48,10 +48,15 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "empty-pipeline-component", + "title": "Type" } }, "required": [ - "name" + "name", + "type" ], "title": "EmptyPipelineComponent", "type": "object" @@ -172,10 +177,15 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "sub-pipeline-component", + "title": "Type" } }, "required": [ - "name" + "name", + "type" ], "title": "SubPipelineComponent", "type": "object" @@ -219,10 +229,15 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "sub-pipeline-component-correct", + "title": "Type" } }, "required": [ - "name" + "name", + "type" ], "title": "SubPipelineComponentCorrect", "type": "object" @@ -271,11 +286,16 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "sub-pipeline-component-correct-docstr", + "title": "Type" } }, "required": [ "name", - "example_attr" + "example_attr", + "type" ], "title": "SubPipelineComponentCorrectDocstr", "type": "object" @@ -319,10 +339,15 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "sub-pipeline-component-no-schema-type-no-type", + "title": "Type" } }, "required": [ - "name" + "name", + "type" ], "title": "SubPipelineComponentNoSchemaTypeNoType", "type": "object" From d0f0906c43db6aba3d440b455b2a28090b4420e6 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 8 Jan 2024 13:24:13 +0100 Subject: [PATCH 23/34] Refactor generate template for Python API usage (#380) `template` renamed to ~~`render`~~ `manifest` as it's no longer strictly tied to Helm 
template, can be used to render final resources of any pipeline component, e.g. Kubernetes manifest Extract `kpops generate --template` into separate ~~`kpops render`~~ `kpops manifest` command Return Kubernetes manifest as mapping for use with Python API Closes #103 --------- Co-authored-by: Ivan Yordanov --- docs/docs/user/migration-guide/v2-v3.md | 18 + docs/docs/user/references/cli-commands.md | 39 +- kpops/__init__.py | 3 +- kpops/cli/main.py | 65 +- kpops/component_handlers/helm_wrapper/helm.py | 21 +- .../helm_wrapper/helm_diff.py | 9 +- .../component_handlers/helm_wrapper/model.py | 11 +- .../component_handlers/kubernetes/__init__.py | 0 kpops/component_handlers/kubernetes/model.py | 30 + kpops/components/base_components/helm_app.py | 6 +- .../base_components/kafka_connector.py | 11 +- .../base_components/models/resource.py | 5 + .../base_components/pipeline_component.py | 22 +- kpops/utils/dict_differ.py | 7 +- kpops/utils/yaml.py | 10 +- .../helm_wrapper/test_dry_run_handler.py | 12 +- .../helm_wrapper/test_helm_diff.py | 17 +- .../helm_wrapper/test_helm_wrapper.py | 171 +- .../component_handlers/kubernetes/__init__.py | 0 tests/component_handlers/kubernetes/model.py | 38 + .../pipeline/snapshots/snap_test_generate.py | 2244 +++++++++++++++++ .../pipeline/snapshots/snap_test_manifest.py | 17 + .../{test_pipeline.py => test_generate.py} | 31 +- .../{test_template.py => test_manifest.py} | 60 +- 24 files changed, 2617 insertions(+), 230 deletions(-) create mode 100644 kpops/component_handlers/kubernetes/__init__.py create mode 100644 kpops/component_handlers/kubernetes/model.py create mode 100644 kpops/components/base_components/models/resource.py create mode 100644 tests/component_handlers/kubernetes/__init__.py create mode 100644 tests/component_handlers/kubernetes/model.py create mode 100644 tests/pipeline/snapshots/snap_test_generate.py create mode 100644 tests/pipeline/snapshots/snap_test_manifest.py rename tests/pipeline/{test_pipeline.py => test_generate.py} (96%) rename tests/pipeline/{test_template.py => test_manifest.py} (56%) diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index 38b86bea0..9d94af628 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -170,3 +170,21 @@ topic_name_config: + default_error_topic_name: "${pipeline_name}-${component.name}-dead-letter-topic" + default_output_topic_name: "${pipeline_name}-${component.name}-topic" ``` + +## [Refactor generate template for Python API usage](https://github.com/bakdata/kpops/pull/380) + +The `template` method of every pipeline component has been renamed to `manifest` as it is no longer strictly tied to Helm template. Instead, it can be used to render the final resources of a component, such as Kubernetes manifests. + +There is also a new `kpops manifest` command replacing the existing `kpops generate --template` flag. + +If you're using this functionality in your custom components, it needs to be updated. + +```diff + from kpops.components.base_components.models.resource import Resource + + @override +- def template(self) -> None: ++ def manifest(self) -> Resource: + """Render final component resources, e.g. 
Kubernetes manifests.""" + return [] # list of manifests +``` diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index fae1884b0..09fe4f40e 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -18,9 +18,10 @@ $ kpops [OPTIONS] COMMAND [ARGS]... * `clean`: Clean pipeline steps * `deploy`: Deploy pipeline steps * `destroy`: Destroy pipeline steps -* `generate`: Enriches pipelines steps with defaults. +* `generate`: Generate enriched pipeline representation +* `manifest`: Render final resource representation * `reset`: Reset pipeline steps -* `schema`: Generate json schema. +* `schema`: Generate JSON schema. ## `kpops clean` @@ -102,7 +103,7 @@ $ kpops destroy [OPTIONS] PIPELINE_PATH ## `kpops generate` -Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... commands. +Enrich pipeline steps with defaults. The enriched pipeline is used for all KPOps operations (deploy, destroy, ...). **Usage**: @@ -119,7 +120,31 @@ $ kpops generate [OPTIONS] PIPELINE_PATH * `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] -* `--template / --no-template`: Run Helm template [default: no-template] +* `--output / --no-output`: Enable output printing [default: output] +* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] +* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--help`: Show this message and exit. + +## `kpops manifest` + +In addition to generate, render final resource representation for each pipeline step, e.g. Kubernetes manifests. + +**Usage**: + +```console +$ kpops manifest [OPTIONS] PIPELINE_PATH +``` + +**Arguments**: + +* `PIPELINE_PATH`: Path to YAML with pipeline definition [env var: KPOPS_PIPELINE_PATH;required] + +**Options**: + +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] +* `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] +* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] +* `--output / --no-output`: Enable output printing [default: output] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] * `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] * `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] @@ -154,9 +179,9 @@ $ kpops reset [OPTIONS] PIPELINE_PATH ## `kpops schema` -Generate json schema. +Generate JSON schema. -The schemas can be used to enable support for kpops files in a text editor. +The schemas can be used to enable support for KPOps files in a text editor. 
**Usage**: @@ -172,7 +197,7 @@ $ kpops schema [OPTIONS] SCOPE:{pipeline|config} - pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide components module in config. + pipeline: Schema of PipelineComponents. Includes the built-in KPOps components by default. To include custom components, provide components module in config. diff --git a/kpops/__init__.py b/kpops/__init__.py index 8fea6bcdf..4d23bd364 100644 --- a/kpops/__init__.py +++ b/kpops/__init__.py @@ -1,10 +1,11 @@ __version__ = "2.0.11" # export public API functions -from kpops.cli.main import clean, deploy, destroy, generate, reset +from kpops.cli.main import clean, deploy, destroy, generate, manifest, reset __all__ = ( "generate", + "manifest", "deploy", "destroy", "reset", diff --git a/kpops/cli/main.py b/kpops/cli/main.py index c488f2bbf..8603e6ac9 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -1,6 +1,7 @@ from __future__ import annotations import logging +from collections.abc import Iterator from enum import Enum from pathlib import Path from typing import TYPE_CHECKING, Optional @@ -18,6 +19,7 @@ from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper +from kpops.components.base_components.models.resource import Resource from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline import Pipeline, PipelineGenerator from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema @@ -25,8 +27,6 @@ from kpops.utils.yaml import print_yaml if TYPE_CHECKING: - from collections.abc import Iterator - from kpops.components.base_components import PipelineComponent @@ -99,6 +99,7 @@ class FilterType(str, Enum): help="Whether the --steps option should include/exclude the steps", ) +OUTPUT_OPTION = typer.Option(True, help="Enable output printing") VERBOSE_OPTION = typer.Option(False, help="Enable verbose printing") ENVIRONMENT: str | None = typer.Option( @@ -221,9 +222,9 @@ def create_kpops_config( @app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 help=""" - Generate json schema. + Generate JSON schema. - The schemas can be used to enable support for kpops files in a text editor. + The schemas can be used to enable support for KPOps files in a text editor. """ ) def schema( @@ -233,7 +234,7 @@ def schema( help=""" Scope of the generated schema \n\n\n - pipeline: Schema of PipelineComponents. Includes the built-in kpops components by default. To include custom components, provide components module in config. + pipeline: Schema of PipelineComponents. Includes the built-in KPOps components by default. To include custom components, provide components module in config. \n\n\n config: Schema of KpopsConfig.""", ), @@ -253,16 +254,15 @@ def schema( @app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 - help="Enriches pipelines steps with defaults. The output is used as input for the deploy/destroy/... commands." + short_help="Generate enriched pipeline representation", + help="Enrich pipeline steps with defaults. 
The enriched pipeline is used for all KPOps operations (deploy, destroy, ...).", ) def generate( pipeline_path: Path = PIPELINE_PATH_ARG, dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, - template: bool = typer.Option(False, help="Run Helm template"), - steps: Optional[str] = PIPELINE_STEPS, - filter_type: FilterType = FILTER_TYPE, + output: bool = OUTPUT_OPTION, environment: Optional[str] = ENVIRONMENT, verbose: bool = VERBOSE_OPTION, ) -> Pipeline: @@ -274,21 +274,44 @@ def generate( verbose, ) pipeline = setup_pipeline(pipeline_path, kpops_config, environment) - - if not template: + if output: print_yaml(pipeline.to_yaml()) + return pipeline - if template: - steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) - for component in steps_to_apply: - component.template() - elif steps: - log.warning( - "The following flags are considered only when `--template` is set: \n \ - '--steps'" - ) - return pipeline +@app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 + short_help="Render final resource representation", + help="In addition to generate, render final resource representation for each pipeline step, e.g. Kubernetes manifests.", +) +def manifest( + pipeline_path: Path = PIPELINE_PATH_ARG, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, + defaults: Optional[Path] = DEFAULT_PATH_OPTION, + config: Path = CONFIG_PATH_OPTION, + output: bool = OUTPUT_OPTION, + steps: Optional[str] = PIPELINE_STEPS, + filter_type: FilterType = FILTER_TYPE, + environment: Optional[str] = ENVIRONMENT, + verbose: bool = VERBOSE_OPTION, +) -> list[Resource]: + pipeline = generate( + pipeline_path=pipeline_path, + dotenv=dotenv, + defaults=defaults, + config=config, + output=False, + environment=environment, + verbose=verbose, + ) + steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) + resources: list[Resource] = [] + for component in steps_to_apply: + resource = component.manifest() + resources.append(resource) + if output: + for manifest in resource: + print_yaml(manifest) + return resources @app.command(help="Deploy pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 5e4d758db..8499504ba 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -4,6 +4,7 @@ import re import subprocess import tempfile +from pathlib import Path from typing import TYPE_CHECKING import yaml @@ -18,6 +19,8 @@ RepoAuthFlags, Version, ) +from kpops.component_handlers.kubernetes.model import KubernetesManifest +from kpops.components.base_components.models.resource import Resource if TYPE_CHECKING: from collections.abc import Iterable, Iterator @@ -132,8 +135,8 @@ def template( namespace: str, values: dict, flags: HelmTemplateFlags | None = None, - ) -> str: - """From HELM: Render chart templates locally and display the output. + ) -> Resource: + """From Helm: Render chart templates locally and display the output. Any values that would normally be looked up or retrieved in-cluster will be faked locally. 
Additionally, none of the server-side testing of chart @@ -144,11 +147,11 @@ def template( :param namespace: The Kubernetes namespace the command should execute in :param values: `values.yaml` to be used :param flags: the flags to be set for `helm template`, defaults to HelmTemplateFlags() - :return: the output of `helm template` + :return: the rendered resource (list of Kubernetes manifests) """ if flags is None: flags = HelmTemplateFlags() - with tempfile.NamedTemporaryFile("w") as values_file: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as values_file: yaml.safe_dump(values, values_file) command = [ "helm", @@ -161,7 +164,9 @@ def template( values_file.name, ] command.extend(flags.to_command()) - return self.__execute(command) + output = self.__execute(command) + manifests = KubernetesManifest.from_yaml(output) + return list(manifests) def get_manifest(self, release_name: str, namespace: str) -> Iterable[HelmTemplate]: command = [ @@ -198,7 +203,11 @@ def load_manifest(yaml_contents: str) -> Iterator[HelmTemplate]: if line.startswith("---"): is_beginning = True if template_name and current_yaml_doc: - yield HelmTemplate.load(template_name, "\n".join(current_yaml_doc)) + manifests = KubernetesManifest.from_yaml( + "\n".join(current_yaml_doc) + ) + manifest = next(manifests) # only 1 manifest + yield HelmTemplate(Path(template_name), manifest) template_name = None current_yaml_doc.clear() elif is_beginning: diff --git a/kpops/component_handlers/helm_wrapper/helm_diff.py b/kpops/component_handlers/helm_wrapper/helm_diff.py index 26de5613a..7827fe453 100644 --- a/kpops/component_handlers/helm_wrapper/helm_diff.py +++ b/kpops/component_handlers/helm_wrapper/helm_diff.py @@ -2,6 +2,7 @@ from collections.abc import Iterable, Iterator from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate +from kpops.component_handlers.kubernetes.model import KubernetesManifest from kpops.utils.dict_differ import Change, render_diff log = logging.getLogger("HelmDiff") @@ -15,7 +16,7 @@ def __init__(self, config: HelmDiffConfig) -> None: def calculate_changes( current_release: Iterable[HelmTemplate], new_release: Iterable[HelmTemplate], - ) -> Iterator[Change[dict]]: + ) -> Iterator[Change[KubernetesManifest]]: """Compare 2 releases and generate a Change object for each difference. 
         :param current_release: Iterable containing HelmTemplate objects for the current release
@@ -31,13 +32,13 @@ def calculate_changes(
             # get corresponding dry-run release
             new_resource = new_release_index.pop(current_resource.filepath, None)
             yield Change(
-                current_resource.template,
-                new_resource.template if new_resource else {},
+                current_resource.manifest,
+                new_resource.manifest if new_resource else KubernetesManifest(),
             )
 
         # collect added files
         for new_resource in new_release_index.values():
-            yield Change({}, new_resource.template)
+            yield Change(KubernetesManifest(), new_resource.manifest)
 
     def log_helm_diff(
         self,
diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py
index 0a155bb0d..8a635983f 100644
--- a/kpops/component_handlers/helm_wrapper/model.py
+++ b/kpops/component_handlers/helm_wrapper/model.py
@@ -2,11 +2,11 @@
 from dataclasses import dataclass
 from pathlib import Path
 
-import yaml
 from pydantic import BaseModel, ConfigDict, Field
 from typing_extensions import override
 
 from kpops.component_handlers.helm_wrapper.exception import ParseError
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
 from kpops.utils.docstring import describe_attr
 from kpops.utils.pydantic import DescConfigModel
 
@@ -160,8 +160,8 @@ def to_command(self) -> list[str]:
 
 @dataclass
 class HelmTemplate:
-    filepath: str
-    template: dict
+    filepath: Path
+    manifest: KubernetesManifest
 
     @staticmethod
     def parse_source(source: str) -> str:
@@ -176,11 +176,6 @@ def parse_source(source: str) -> str:
             raise ParseError(msg)
         return source.removeprefix(HELM_SOURCE_PREFIX).strip()
 
-    @classmethod
-    def load(cls, filepath: str, content: str):
-        template = yaml.load(content, yaml.Loader)
-        return cls(filepath, template)
-
 
 # Indicates the beginning of `NOTES:` section in the output of `helm install` or
 # `helm upgrade`
diff --git a/kpops/component_handlers/kubernetes/__init__.py b/kpops/component_handlers/kubernetes/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/kpops/component_handlers/kubernetes/model.py b/kpops/component_handlers/kubernetes/model.py
new file mode 100644
index 000000000..a88063e0a
--- /dev/null
+++ b/kpops/component_handlers/kubernetes/model.py
@@ -0,0 +1,30 @@
+import json
+from collections import UserDict
+from collections.abc import Iterator
+from typing import TypeAlias
+
+import yaml
+
+try:
+    from typing import Self
+except ImportError:
+    from typing_extensions import Self
+
+
+# JSON values
+Json: TypeAlias = dict[str, "Json"] | list["Json"] | str | int | float | bool | None
+
+
+class KubernetesManifest(UserDict[str, Json]):
+    """Representation of a Kubernetes API object as YAML/JSON mapping."""
+
+    @classmethod
+    def from_yaml(cls, /, content: str) -> Iterator[Self]:
+        manifests: Iterator[dict[str, Json]] = yaml.load_all(content, yaml.Loader)
+        for manifest in manifests:
+            yield cls(manifest)
+
+    @classmethod
+    def from_json(cls, /, content: str) -> Self:
+        manifest: dict[str, Json] = json.loads(content)
+        return cls(manifest)
diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py
index 36e3bd21b..07c3c6831 100644
--- a/kpops/components/base_components/helm_app.py
+++ b/kpops/components/base_components/helm_app.py
@@ -21,6 +21,7 @@
     KubernetesApp,
     KubernetesAppValues,
 )
+from kpops.components.base_components.models.resource import Resource
 from kpops.utils.colorify import magentaify
 from kpops.utils.docstring import describe_attr
 from kpops.utils.pydantic import exclude_by_name
@@ -135,15 +136,14 @@ def template_flags(self) -> HelmTemplateFlags:
         )
 
     @override
-    def template(self) -> None:
-        stdout = self.helm.template(
+    def manifest(self) -> Resource:
+        return self.helm.template(
             self.helm_release_name,
             self.helm_chart,
             self.namespace,
             self.to_helm_values(),
             self.template_flags,
         )
-        print(stdout)
 
     @property
     def deploy_flags(self) -> HelmUpgradeInstallFlags:
diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py
index 22a9a4d8f..246e30b11 100644
--- a/kpops/components/base_components/kafka_connector.py
+++ b/kpops/components/base_components/kafka_connector.py
@@ -26,6 +26,7 @@
 )
 from kpops.components.base_components.base_defaults_component import deduplicate
 from kpops.components.base_components.models.from_section import FromTopic
+from kpops.components.base_components.models.resource import Resource
 from kpops.components.base_components.pipeline_component import PipelineComponent
 from kpops.utils.colorify import magentaify
 from kpops.utils.docstring import describe_attr
@@ -282,18 +283,17 @@ def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn:
         raise NotImplementedError(msg)
 
     @override
-    def template(self) -> None:
+    def manifest(self) -> Resource:
         values = self._get_kafka_connect_resetter_values(
             offset_topic=self.offset_topic,
         )
-        stdout = self.helm.template(
+        return self.helm.template(
             self._resetter_release_name,
             self._resetter_helm_chart,
             self.namespace,
             values,
             self.template_flags,
         )
-        print(stdout)
 
     @override
     def reset(self, dry_run: bool) -> None:
@@ -329,16 +329,15 @@ def add_input_topics(self, topics: list[str]) -> None:
         setattr(self.app, "topics", ",".join(topics))
 
     @override
-    def template(self) -> None:
+    def manifest(self) -> Resource:
         values = self._get_kafka_connect_resetter_values()
-        stdout = self.helm.template(
+        return self.helm.template(
             self._resetter_release_name,
             self._resetter_helm_chart,
             self.namespace,
             values,
             self.template_flags,
         )
-        print(stdout)
 
     @override
     def set_input_pattern(self, name: str) -> None:
diff --git a/kpops/components/base_components/models/resource.py b/kpops/components/base_components/models/resource.py
new file mode 100644
index 000000000..08c01f344
--- /dev/null
+++ b/kpops/components/base_components/models/resource.py
@@ -0,0 +1,5 @@
+from collections.abc import Mapping, Sequence
+from typing import Any, TypeAlias
+
+# representation of final resource for component, e.g. a list of Kubernetes manifests
+Resource: TypeAlias = Sequence[Mapping[str, Any]]
diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py
index 7be212300..b53e03d43 100644
--- a/kpops/components/base_components/pipeline_component.py
+++ b/kpops/components/base_components/pipeline_component.py
@@ -12,6 +12,7 @@
     FromTopic,
     InputTopicTypes,
 )
+from kpops.components.base_components.models.resource import Resource
 from kpops.components.base_components.models.to_section import (
     OutputTopicTypes,
     TopicConfig,
@@ -178,7 +179,7 @@ def weave_from_topics(
             self.apply_from_inputs(input_topic, from_topic)
 
     def inflate(self) -> list[PipelineComponent]:
-        """Inflate a component.
+        """Inflate component.
 
         This is helpful if one component should result in multiple components.
         To support this, override this method and return a list of components
@@ -187,35 +188,30 @@ def inflate(self) -> list[PipelineComponent]:
         """
         return [self]
 
-    def template(self) -> None:
-        """Run `helm template`.
-
-        From HELM: Render chart templates locally and display the output.
-        Any values that would normally be looked up or retrieved in-cluster will
-        be faked locally. Additionally, none of the server-side testing of chart
-        validity (e.g. whether an API is supported) is done.
-        """
+    def manifest(self) -> Resource:
+        """Render final component resources, e.g. Kubernetes manifests."""
+        return []
 
     def deploy(self, dry_run: bool) -> None:
-        """Deploy the component (self) to the k8s cluster.
+        """Deploy component, e.g. to Kubernetes cluster.
 
         :param dry_run: Whether to do a dry run of the command
         """
 
     def destroy(self, dry_run: bool) -> None:
-        """Uninstall the component (self) from the k8s cluster.
+        """Uninstall component, e.g. from Kubernetes cluster.
 
         :param dry_run: Whether to do a dry run of the command
         """
 
     def reset(self, dry_run: bool) -> None:
-        """Reset component (self) state.
+        """Reset component state.
 
         :param dry_run: Whether to do a dry run of the command
         """
 
     def clean(self, dry_run: bool) -> None:
-        """Remove component (self) and any trace of it.
+        """Destroy component including related states.
 
         :param dry_run: Whether to do a dry run of the command
         """
diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py
index 934924e21..1c3dbdeb7 100644
--- a/kpops/utils/dict_differ.py
+++ b/kpops/utils/dict_differ.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from collections.abc import Mapping
 from dataclasses import dataclass
 from difflib import Differ
 from enum import Enum
@@ -78,12 +79,12 @@ def __find_changed_key(key_1: list[str] | str, key_2: str = "") -> str:
     return f"{key_1}.{key_2}"
 
 
-def render_diff(d1: dict, d2: dict, ignore: set[str] | None = None) -> str | None:
+def render_diff(d1: Mapping, d2: Mapping, ignore: set[str] | None = None) -> str | None:
     differences = list(diff(d1, d2, ignore=ignore))
     if not differences:
         return None
 
-    d2_filtered: dict = patch(differences, d1)
+    d2_filtered: Mapping = patch(differences, d1)
     return "".join(
         colorize_diff(
             differ.compare(
@@ -109,5 +110,5 @@ def colorize_line(line: str) -> str:
     return line
 
 
-def to_yaml(data: dict) -> Sequence[str]:
+def to_yaml(data: Mapping) -> Sequence[str]:
     return yaml.dump(data, sort_keys=True).splitlines(keepends=True)
diff --git a/kpops/utils/yaml.py b/kpops/utils/yaml.py
index 2f0e16608..be554cf6c 100644
--- a/kpops/utils/yaml.py
+++ b/kpops/utils/yaml.py
@@ -83,14 +83,16 @@ def substitute_nested(input: str, **kwargs) -> str:
     return old_str
 
 
-def print_yaml(input: str, *, substitution: dict | None = None) -> None:
-    """Print YAML to console with syntax highlighting.
+def print_yaml(data: Mapping | str, *, substitution: dict | None = None) -> None:
+    """Print YAML object with syntax highlighting.
 
-    :param s: YAML content
+    :param data: YAML document
     :param substitution: Substitution dictionary, defaults to None
     """
+    if not isinstance(data, str):
+        data = yaml.safe_dump(dict(data))
     syntax = Syntax(
-        substitute(input, substitution),
+        substitute(data, substitution),
         "yaml",
         background_color="default",
         theme="ansi_dark",
diff --git a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py
index bad4f2aa8..584becd32 100644
--- a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py
+++ b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py
@@ -1,4 +1,5 @@
 from logging import Logger
+from pathlib import Path
 from unittest.mock import MagicMock
 
 import pytest
@@ -7,6 +8,7 @@
 
 from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler
 from kpops.component_handlers.helm_wrapper.model import HelmTemplate
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
 
 log = Logger("TestLogger")
 
@@ -34,7 +36,9 @@ def test_should_print_helm_diff_when_release_is_new(
         helm_mock.get_manifest.return_value = iter(())
         mock_load_manifest = mocker.patch(
             "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest",
-            return_value=iter([HelmTemplate("path.yaml", {"a": 1})]),
+            return_value=iter(
+                [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))]
+            ),
         )
         log.addHandler(caplog.handler)
 
@@ -55,11 +59,13 @@ def test_should_print_helm_diff_when_release_exists(
         caplog: LogCaptureFixture,
     ):
         helm_mock.get_manifest.return_value = iter(
-            [HelmTemplate("path.yaml", {"a": 1})]
+            [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))]
         )
         mock_load_manifest = mocker.patch(
             "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest",
-            return_value=iter([HelmTemplate("path.yaml", {"a": 1})]),
+            return_value=iter(
+                [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))]
+            ),
         )
         log.addHandler(caplog.handler)
diff --git a/tests/component_handlers/helm_wrapper/test_helm_diff.py b/tests/component_handlers/helm_wrapper/test_helm_diff.py
index 15a58a023..edb64363e 100644
--- a/tests/component_handlers/helm_wrapper/test_helm_diff.py
+++ b/tests/component_handlers/helm_wrapper/test_helm_diff.py
@@ -1,11 +1,14 @@
+from pathlib import Path
+
 from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff
 from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate
+from kpops.component_handlers.kubernetes.model import KubernetesManifest
 from kpops.utils.dict_differ import Change
 
 
 def test_diff():
     helm_diff = HelmDiff(HelmDiffConfig())
-    templates = [HelmTemplate("a.yaml", {})]
+    templates = [HelmTemplate(Path("a.yaml"), KubernetesManifest())]
     assert list(helm_diff.calculate_changes(templates, templates)) == [
         Change(
             old_value={},
@@ -17,12 +20,12 @@ def test_diff():
     assert list(
         helm_diff.calculate_changes(
             [
-                HelmTemplate("a.yaml", {"a": 1}),
-                HelmTemplate("b.yaml", {"b": 1}),
+                HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 1})),
+                HelmTemplate(Path("b.yaml"), KubernetesManifest({"b": 1})),
             ],
             [
-                HelmTemplate("a.yaml", {"a": 2}),
-                HelmTemplate("c.yaml", {"c": 1}),
+                HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 2})),
+                HelmTemplate(Path("c.yaml"), KubernetesManifest({"c": 1})),
             ],
         )
     ) == [
@@ -42,7 +45,9 @@ def test_diff():
 
     # test no current release
     assert list(
-        helm_diff.calculate_changes((), [HelmTemplate("a.yaml", {"a": 1})])
+        helm_diff.calculate_changes(
+            (), [HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 1}))]
+        )
     ) == [
         Change(
             old_value={},
diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py
index ce6fae709..cdc7e9d9d 100644
--- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py
+++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py
@@ -13,6 +13,7 @@
     HelmConfig,
     HelmTemplateFlags,
     HelmUpgradeInstallFlags,
+    KubernetesManifest,
     ParseError,
     RepoAuthFlags,
     Version,
@@ -30,8 +31,10 @@ def temp_file_mock(self, mocker: MockerFixture) -> MagicMock:
         return temp_file_mock
 
     @pytest.fixture()
-    def run_command(self, mocker: MockerFixture) -> MagicMock:
-        return mocker.patch.object(Helm, "_Helm__execute")
+    def mock_execute(self, mocker: MockerFixture) -> MagicMock:
+        mock_execute = mocker.patch.object(Helm, "_Helm__execute")
+        mock_execute.return_value = ""
+        return mock_execute
 
     @pytest.fixture()
     def log_warning_mock(self, mocker: MockerFixture) -> MagicMock:
@@ -43,12 +46,14 @@ def mock_get_version(self, mocker: MockerFixture) -> MagicMock:
         mock_get_version.return_value = Version(major=3, minor=12, patch=0)
         return mock_get_version
 
+    @pytest.fixture()
+    def helm(self, mock_get_version: MagicMock) -> Helm:
+        return Helm(helm_config=HelmConfig())
+
     def test_should_call_run_command_method_when_helm_install_with_defaults(
-        self, run_command: MagicMock, mock_get_version: MagicMock
+        self, helm: Helm, mock_execute: MagicMock
     ):
-        helm_wrapper = Helm(helm_config=HelmConfig())
-
-        helm_wrapper.upgrade_install(
+        helm.upgrade_install(
             release_name="test-release",
             chart=f"bakdata-streams-bootstrap/{AppType.STREAMS_APP.value}",
             dry_run=False,
@@ -56,7 +61,7 @@ def test_should_call_run_command_method_when_helm_install_with_defaults(
             values={"commandLine": "test"},
             flags=HelmUpgradeInstallFlags(),
         )
-        run_command.assert_called_once_with(
+        mock_execute.assert_called_once_with(
             [
                 "helm",
                 "upgrade",
@@ -74,7 +79,7 @@ def test_should_call_run_command_method_when_helm_install_with_defaults(
         )
 
     def test_should_include_configured_tls_parameters_on_add_when_version_is_old(
-        self, run_command: MagicMock, mocker: MockerFixture
+        self, mock_execute: MagicMock, mocker: MockerFixture
     ):
         mock_get_version = mocker.patch.object(Helm, "get_version")
         mock_get_version.return_value = Version(major=3, minor=6, patch=0)
@@ -85,7 +90,7 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_old(
             "fake",
             RepoAuthFlags(ca_file=Path("a_file.ca"), insecure_skip_tls_verify=True),
         )
-        assert run_command.mock_calls == [
+        assert mock_execute.mock_calls == [
             mock.call(
                 [
                     "helm",
@@ -104,16 +109,14 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_old(
         ]
 
     def test_should_include_configured_tls_parameters_on_add_when_version_is_new(
-        self, run_command: MagicMock, mock_get_version: MagicMock
+        self, helm: Helm, mock_execute: MagicMock
     ):
-        helm = Helm(HelmConfig())
-
         helm.add_repo(
             "test-repository",
             "fake",
             RepoAuthFlags(ca_file=Path("a_file.ca"), insecure_skip_tls_verify=True),
         )
-        assert run_command.mock_calls == [
+        assert mock_execute.mock_calls == [
             mock.call(
                 [
                     "helm",
@@ -132,10 +135,9 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_new(
         ]
 
     def test_should_include_configured_tls_parameters_on_update(
-        self, run_command: MagicMock, mock_get_version: MagicMock
+        self, helm: Helm, mock_execute: MagicMock
     ):
-        helm_wrapper = Helm(helm_config=HelmConfig())
-        helm_wrapper.upgrade_install(
+        helm.upgrade_install(
             release_name="test-release",
chart="test-repository/test-chart", dry_run=False, @@ -147,7 +149,7 @@ def test_should_include_configured_tls_parameters_on_update( ), ) - run_command.assert_called_once_with( + mock_execute.assert_called_once_with( [ "helm", "upgrade", @@ -168,10 +170,9 @@ def test_should_include_configured_tls_parameters_on_update( ) def test_should_call_run_command_method_when_helm_install_with_non_defaults( - self, run_command: MagicMock, mock_get_version: MagicMock + self, helm: Helm, mock_execute: MagicMock ): - helm_wrapper = Helm(helm_config=HelmConfig()) - helm_wrapper.upgrade_install( + helm.upgrade_install( release_name="test-release", chart="test-repository/streams-app", namespace="test-namespace", @@ -187,7 +188,7 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults( version="2.4.2", ), ) - run_command.assert_called_once_with( + mock_execute.assert_called_once_with( [ "helm", "upgrade", @@ -213,27 +214,25 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults( ) def test_should_call_run_command_method_when_uninstalling_streams_app( - self, run_command: MagicMock, mock_get_version: MagicMock + self, helm: Helm, mock_execute: MagicMock ): - helm_wrapper = Helm(helm_config=HelmConfig()) - helm_wrapper.uninstall( + helm.uninstall( namespace="test-namespace", release_name="test-release", dry_run=False, ) - run_command.assert_called_once_with( + mock_execute.assert_called_once_with( ["helm", "uninstall", "test-release", "--namespace", "test-namespace"], ) def test_should_log_warning_when_release_not_found( self, - run_command: MagicMock, + helm: Helm, + mock_execute: MagicMock, log_warning_mock: MagicMock, - mock_get_version: MagicMock, ): - helm_wrapper = Helm(helm_config=HelmConfig()) - run_command.side_effect = ReleaseNotFoundException() - helm_wrapper.uninstall( + mock_execute.side_effect = ReleaseNotFoundException() + helm.uninstall( namespace="test-namespace", release_name="test-release", dry_run=False, @@ -244,16 +243,14 @@ def test_should_log_warning_when_release_not_found( ) def test_should_call_run_command_method_when_installing_streams_app__with_dry_run( - self, run_command: MagicMock, mock_get_version: MagicMock + self, helm: Helm, mock_execute: MagicMock ): - helm_wrapper = Helm(helm_config=HelmConfig()) - - helm_wrapper.uninstall( + helm.uninstall( namespace="test-namespace", release_name="test-release", dry_run=True, ) - run_command.assert_called_once_with( + mock_execute.assert_called_once_with( [ "helm", "uninstall", @@ -284,26 +281,18 @@ def test_validate_console_output(self): f"validate_console_output() raised ReleaseNotFoundException unexpectedly!\nError message: {ReleaseNotFoundException}" ) - def test_helm_template_load(self): - stdout = dedent( - """ - --- - # Source: chart/templates/test2.yaml - apiVersion: v1 - kind: ServiceAccount - metadata: - labels: - foo: bar - """ + def test_helm_template(self): + path = Path("test2.yaml") + manifest = KubernetesManifest( + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": {"labels": {"foo": "bar"}}, + } ) - - helm_template = HelmTemplate.load("test2.yaml", stdout) - assert helm_template.filepath == "test2.yaml" - assert helm_template.template == { - "apiVersion": "v1", - "kind": "ServiceAccount", - "metadata": {"labels": {"foo": "bar"}}, - } + helm_template = HelmTemplate(path, manifest) + assert helm_template.filepath == path + assert helm_template.manifest == manifest def test_load_manifest_with_no_notes(self): stdout = dedent( @@ -324,10 +313,12 @@ def 
test_load_manifest_with_no_notes(self): assert all( isinstance(helm_template, HelmTemplate) for helm_template in helm_templates ) - assert helm_templates[0].filepath == "chart/templates/test3a.yaml" - assert helm_templates[0].template == {"data": [{"a": 1}, {"b": 2}]} - assert helm_templates[1].filepath == "chart/templates/test3b.yaml" - assert helm_templates[1].template == {"foo": "bar"} + assert helm_templates[0].filepath == Path("chart/templates/test3a.yaml") + assert helm_templates[0].manifest == KubernetesManifest( + {"data": [{"a": 1}, {"b": 2}]} + ) + assert helm_templates[1].filepath == Path("chart/templates/test3b.yaml") + assert helm_templates[1].manifest == KubernetesManifest({"foo": "bar"}) def test_raise_parse_error_when_helm_content_is_invalid(self): stdout = dedent( @@ -392,16 +383,15 @@ def test_load_manifest(self): assert all( isinstance(helm_template, HelmTemplate) for helm_template in helm_templates ) - assert helm_templates[0].filepath == "chart/templates/test3a.yaml" - assert helm_templates[0].template == {"data": [{"a": 1}, {"b": 2}]} - assert helm_templates[1].filepath == "chart/templates/test3b.yaml" - assert helm_templates[1].template == {"foo": "bar"} + assert helm_templates[0].filepath == Path("chart/templates/test3a.yaml") + assert helm_templates[0].manifest == KubernetesManifest( + {"data": [{"a": 1}, {"b": 2}]} + ) + assert helm_templates[1].filepath == Path("chart/templates/test3b.yaml") + assert helm_templates[1].manifest == KubernetesManifest({"foo": "bar"}) - def test_helm_get_manifest( - self, run_command: MagicMock, mock_get_version: MagicMock - ): - helm_wrapper = Helm(helm_config=HelmConfig()) - run_command.return_value = dedent( + def test_helm_get_manifest(self, helm: Helm, mock_execute: MagicMock): + mock_execute.return_value = dedent( """ --- # Source: chart/templates/test.yaml @@ -410,10 +400,8 @@ def test_helm_get_manifest( - b: 2 """ ) - helm_templates = list( - helm_wrapper.get_manifest("test-release", "test-namespace") - ) - run_command.assert_called_once_with( + helm_templates = list(helm.get_manifest("test-release", "test-namespace")) + mock_execute.assert_called_once_with( command=[ "helm", "get", @@ -424,18 +412,18 @@ def test_helm_get_manifest( ], ) assert len(helm_templates) == 1 - assert helm_templates[0].filepath == "chart/templates/test.yaml" - assert helm_templates[0].template == {"data": [{"a": 1}, {"b": 2}]} + assert helm_templates[0].filepath == Path("chart/templates/test.yaml") + assert helm_templates[0].manifest == KubernetesManifest( + {"data": [{"a": 1}, {"b": 2}]} + ) - run_command.side_effect = ReleaseNotFoundException() - assert helm_wrapper.get_manifest("test-release", "test-namespace") == () + mock_execute.side_effect = ReleaseNotFoundException() + assert helm.get_manifest("test-release", "test-namespace") == () def test_should_call_run_command_method_when_helm_template_with_optional_args( - self, run_command: MagicMock, mock_get_version: MagicMock + self, helm: Helm, mock_execute: MagicMock ): - helm_wrapper = Helm(helm_config=HelmConfig()) - - helm_wrapper.template( + helm.template( release_name="test-release", chart="bakdata-streams-bootstrap/streams-app", namespace="test-ns", @@ -446,7 +434,7 @@ def test_should_call_run_command_method_when_helm_template_with_optional_args( cert_file=Path("a_file.pem"), ), ) - run_command.assert_called_once_with( + mock_execute.assert_called_once_with( [ "helm", "template", @@ -469,18 +457,15 @@ def test_should_call_run_command_method_when_helm_template_with_optional_args( ) def 
test_should_call_run_command_method_when_helm_template_without_optional_args( - self, run_command: MagicMock, mock_get_version: MagicMock + self, helm: Helm, mock_execute: MagicMock ): - helm_wrapper = Helm(helm_config=HelmConfig()) - - helm_wrapper.template( + helm.template( release_name="test-release", chart="bakdata-streams-bootstrap/streams-app", namespace="test-ns", values={"commandLine": "test"}, - flags=HelmTemplateFlags(), ) - run_command.assert_called_once_with( + mock_execute.assert_called_once_with( [ "helm", "template", @@ -507,14 +492,14 @@ def test_should_call_run_command_method_when_helm_template_without_optional_args ) def test_should_call_helm_version( self, - run_command: MagicMock, + mock_execute: MagicMock, raw_version: str, expected_version: Version, ): - run_command.return_value = raw_version + mock_execute.return_value = raw_version helm = Helm(helm_config=HelmConfig()) - run_command.assert_called_once_with( + mock_execute.assert_called_once_with( [ "helm", "version", @@ -525,9 +510,9 @@ def test_should_call_helm_version( assert helm._version == expected_version def test_should_raise_exception_if_helm_version_is_old( - self, run_command: MagicMock + self, mock_execute: MagicMock ): - run_command.return_value = "v2.9.0+gc9f554d" + mock_execute.return_value = "v2.9.0+gc9f554d" with pytest.raises( RuntimeError, match="The supported Helm version is 3.x.x. The current Helm version is 2.9.0", @@ -535,9 +520,9 @@ def test_should_raise_exception_if_helm_version_is_old( Helm(helm_config=HelmConfig()) def test_should_raise_exception_if_helm_version_cannot_be_parsed( - self, run_command: MagicMock + self, mock_execute: MagicMock ): - run_command.return_value = "123" + mock_execute.return_value = "123" with pytest.raises( RuntimeError, match="Could not parse the Helm version.\n\nHelm output:\n123" ): diff --git a/tests/component_handlers/kubernetes/__init__.py b/tests/component_handlers/kubernetes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/component_handlers/kubernetes/model.py b/tests/component_handlers/kubernetes/model.py new file mode 100644 index 000000000..334c1f937 --- /dev/null +++ b/tests/component_handlers/kubernetes/model.py @@ -0,0 +1,38 @@ +from textwrap import dedent + +import pytest + +from kpops.component_handlers.kubernetes.model import KubernetesManifest + + +class TestKubernetesManifest: + @pytest.mark.parametrize( + ("helm_template", "expected_manifest"), + [ + pytest.param( + dedent( + """ + --- + # Source: chart/templates/test2.yaml + apiVersion: v1 + kind: ServiceAccount + metadata: + labels: + foo: bar + """ + ), + [ + KubernetesManifest( + { + "apiVersion": "v1", + "kind": "ServiceAccount", + "metadata": {"labels": {"foo": "bar"}}, + } + ) + ], + ) + ], + ) + def test_from_yaml(self, helm_template: str, expected_manifest: KubernetesManifest): + manifests = KubernetesManifest.from_yaml(helm_template) + assert list(manifests) == expected_manifest diff --git a/tests/pipeline/snapshots/snap_test_generate.py b/tests/pipeline/snapshots/snap_test_generate.py new file mode 100644 index 000000000..f6d75f3e0 --- /dev/null +++ b/tests/pipeline/snapshots/snap_test_generate.py @@ -0,0 +1,2244 @@ +# -*- coding: utf-8 -*- +# snapshottest: v1 - https://goo.gl/zC4yUc +from __future__ import unicode_literals + +from snapshottest import Snapshot + + +snapshots = Snapshot() + +snapshots['TestGenerate.test_default_config test-pipeline'] = [ + { + 'app': { + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' 
+ } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-custom-config-app1', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app1', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-custom-config-app1': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'resources-custom-config-app2-error', + 'inputTopics': [ + 'resources-custom-config-app1' + ], + 'outputTopic': 'resources-custom-config-app2', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-custom-config-app2': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + }, + 'resources-custom-config-app2-error': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } +] + +snapshots['TestGenerate.test_inflate_pipeline test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-pipeline-with-inflate-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-pipeline-with-inflate-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 
'resources-pipeline-with-inflate-converter-error', + 'inputTopics': [ + 'resources-pipeline-with-inflate-scheduled-producer' + ], + 'outputTopic': 'resources-pipeline-with-inflate-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-inflate-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-inflate-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-pipeline-with-inflate-should-inflate', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-pipeline-with-inflate-should-inflate' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-error', + 'inputTopics': [ + 'resources-pipeline-with-inflate-converter' + ], + 'outputTopic': 'resources-pipeline-with-inflate-should-inflate', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'should-inflate', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-inflate-should-inflate': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-inflate-should-inflate-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-pipeline-with-inflate-should-inflate', + 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1' + }, + 'name': 'should-inflate-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 
'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { + }, + 'topics': { + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + }, + 'should-inflate-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' + ], + 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'should-inflate-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + }, + 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_kafka_connect_sink_weave_from_topics test-pipeline'] = [ + { + 'app': { + 'image': 'fake-image', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-kafka-connect-sink-streams-app-error', + 'inputTopics': [ + 'example-topic' + ], + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + }, + 'topics': { + 'example-topic': { + 'type': 'input' + } + } + }, + 'name': 'streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-kafka-connect-sink-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-kafka-connect-sink-es-sink-connector', + 'read.timeout.ms': '120000', 
+ 'tasks.max': '1', + 'topics': 'example-output' + }, + 'name': 'es-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + } +] + +snapshots['TestGenerate.test_load_pipeline test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-first-pipeline-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-first-pipeline-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-first-pipeline-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-first-pipeline-converter-error', + 'inputTopics': [ + 'resources-first-pipeline-scheduled-producer' + ], + 'outputTopic': 'resources-first-pipeline-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-first-pipeline-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-first-pipeline-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 
'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error', + 'inputTopics': [ + 'resources-first-pipeline-converter' + ], + 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', + 'namespace': 'example-namespace', + 'prefix': 'resources-first-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'filter', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_model_serialization test-pipeline'] = [ + { + 'app': { + 'streams': { + 'brokers': 'test', + 'extraOutputTopics': { + }, + 'outputTopic': 'out', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'account-producer', + 'namespace': 'test', + 'prefix': 'resources-pipeline-with-paths-', + 'repo_config': { + 'repo_auth_flags': { + 'ca_file': 'my-cert.cert', + 'insecure_skip_tls_verify': False, + 'password': '$CI_JOB_TOKEN', + 'username': 'masked' + }, + 'repository_name': 'masked', + 'url': 'masked' + }, + 'type': 'producer-app', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_no_input_topic test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'CONVERT_XML': True + }, + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-input-topic-pipeline-app1-error', + 'inputPattern': '.*', + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + }, + 'topics': { + '.*': { + 'type': 'pattern' 
+ } + } + }, + 'name': 'app1', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-input-topic-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-no-input-topic-pipeline-app1-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-input-topic-pipeline-app2-error', + 'extraOutputTopics': { + 'extra': 'example-output-extra', + 'test-output': 'test-output-extra' + }, + 'inputTopics': [ + 'example-output' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-input-topic-pipeline-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output-extra': { + 'configs': { + }, + 'role': 'extra' + }, + 'resources-no-input-topic-pipeline-app2-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + }, + 'test-output-extra': { + 'configs': { + }, + 'role': 'test-output' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_no_user_defined_components test-pipeline'] = [ + { + 'app': { + 'image': 'fake-image', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-no-user-defined-components-streams-app-error', + 'inputTopics': [ + 'example-topic' + ], + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + }, + 'topics': { + 'example-topic': { + 'type': 'input' + } + } + }, + 'name': 'streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-no-user-defined-components-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-no-user-defined-components-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_pipelines_with_env_values test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'override-arg' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'schedule': '20 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 
'extraOutputTopics': { + }, + 'outputTopic': 'resources-pipeline-with-envs-input-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'input-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-pipeline-with-envs-input-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-pipeline-with-envs-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-envs-converter-error', + 'inputTopics': [ + 'resources-pipeline-with-envs-input-producer' + ], + 'outputTopic': 'resources-pipeline-with-envs-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-envs-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-envs-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-pipeline-with-envs-filter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-pipeline-with-envs-filter' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-pipeline-with-envs-filter-error', + 'inputTopics': [ + 'resources-pipeline-with-envs-converter' + ], + 'outputTopic': 'resources-pipeline-with-envs-filter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'filter', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-envs-', + 'repo_config': { + 'repo_auth_flags': { + 
'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-pipeline-with-envs-filter': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-pipeline-with-envs-filter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'filter', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_prefix_pipeline_component test-pipeline'] = [ + { + 'app': { + 'debug': True, + 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', + 'imageTag': '1.0.0', + 'prometheus': { + 'jmx': { + 'enabled': False + } + }, + 'replicaCount': 1, + 'schedule': '0 12 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'schemaRegistryUrl': 'http://localhost:8081/' + }, + 'suspend': True + }, + 'name': 'account-producer', + 'namespace': '${NAMESPACE}', + 'prefix': 'from-pipeline-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'type': 'producer-app', + 'version': '2.9.0' + } +] + +snapshots['TestGenerate.test_read_from_component test-pipeline'] = [ + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-read-from-component-producer1', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'producer1', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-producer1': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.4.2' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-read-from-component-producer2', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'producer2', + 'namespace': 'example-namespace', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-producer2': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-read-from-component-inflate-step', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-read-from-component-inflate-step' + ] + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 
'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-error', + 'inputTopics': [ + 'resources-read-from-component-producer2' + ], + 'outputTopic': 'resources-read-from-component-inflate-step', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-inflate-step': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-read-from-component-inflate-step-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-read-from-component-inflate-step', + 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1' + }, + 'name': 'inflate-step-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { + }, + 'topics': { + 'inflate-step-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' + ], + 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 
'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-read-from-component-inflate-step-without-prefix', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-read-from-component-inflate-step-without-prefix' + ] + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-error', + 'inputTopics': [ + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' + ], + 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step-without-prefix', + 'namespace': 'example-namespace', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-inflate-step-without-prefix': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-without-prefix-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'should-inflate', + 'version': '2.4.2' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'resources-read-from-component-inflate-step-without-prefix', + 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1' + }, + 'name': 'inflate-step-without-prefix-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'to': { + 'models': { + }, + 'topics': { + 'inflate-step-without-prefix-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'kafka-sink-connector', + 'version': '1.0.4' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error', + 'inputTopics': [ + 'kafka-sink-connector' + ], + 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app', + 
'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'inflate-step-without-prefix-inflated-streams-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer1-error', + 'inputTopics': [ + 'resources-read-from-component-producer1' + ], + 'outputTopic': 'resources-read-from-component-consumer1', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'producer1': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer1', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer1': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-read-from-component-consumer1-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer2-error', + 'inputTopics': [ + 'resources-read-from-component-producer1', + 'resources-read-from-component-consumer1' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'consumer1': { + 'type': 'input' + }, + 'producer1': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer2', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer2-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 
'resources-read-from-component-consumer3-error', + 'inputTopics': [ + 'resources-read-from-component-producer1', + 'resources-read-from-component-producer2' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'producer2': { + 'type': 'input' + } + }, + 'topics': { + 'resources-read-from-component-producer1': { + 'type': 'input' + } + } + }, + 'name': 'consumer3', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer3-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer4-error', + 'inputTopics': [ + 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'inflate-step': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer4', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer4-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + }, + { + 'app': { + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-read-from-component-consumer5-error', + 'inputTopics': [ + 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app' + ], + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + 'inflate-step-without-prefix': { + 'type': 'input' + } + }, + 'topics': { + } + }, + 'name': 'consumer5', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-read-from-component-consumer5-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_substitute_in_component test-pipeline'] = [ + { + 'app': { + 'commandLine': { + 'FAKE_ARG': 'fake-arg-value' + }, + 'image': 'example-registry/fake-image', + 'imageTag': '0.0.1', + 'labels': { + 'app_name': 'scheduled-producer', + 
'app_schedule': '30 3/8 * * *', + 'app_type': 'scheduled-producer' + }, + 'schedule': '30 3/8 * * *', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'resources-component-type-substitution-scheduled-producer', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'scheduled-producer', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + 'com/bakdata/kafka/fake': '1.0.0' + }, + 'topics': { + 'resources-component-type-substitution-scheduled-producer': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 12, + 'type': 'output', + 'value_schema': 'com.bakdata.fake.Produced' + } + } + }, + 'type': 'scheduled-producer', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'converter-resources-component-type-substitution-converter', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 1, + 'minReplicas': 0, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + ] + }, + 'commandLine': { + 'CONVERT_XML': True + }, + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-component-type-substitution-converter-error', + 'inputTopics': [ + 'resources-component-type-substitution-scheduled-producer' + ], + 'outputTopic': 'resources-component-type-substitution-converter', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'converter', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-component-type-substitution-converter': { + 'configs': { + 'cleanup.policy': 'compact,delete', + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-component-type-substitution-converter-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 10, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'converter', + 'version': '2.4.2' + }, + { + 'app': { + 'autoscaling': { + 'consumerGroup': 'filter-resources-component-type-substitution-filter-app', + 'cooldownPeriod': 300, + 'enabled': True, + 'lagThreshold': 10000, + 'maxReplicas': 4, + 'minReplicas': 4, + 'offsetResetPolicy': 'earliest', + 'pollingInterval': 30, + 'topics': [ + 'resources-component-type-substitution-filter-app' + ] + }, + 'commandLine': { + 'TYPE': 'nothing' + }, + 'image': 'fake-registry/filter', + 'imageTag': '2.4.1', + 'labels': { + 'app_name': 'filter-app', + 'app_resources_requests_memory': '3G', + 'app_type': 'filter', + 'filter': 'filter-app-filter', + 'test_placeholder_in_placeholder': 'filter-app-filter' + }, + 'replicaCount': 4, + 'resources': { + 'requests': { + 'memory': '3G' + } + }, + 'streams': { + 'brokers': 
'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-component-type-substitution-filter-app-error', + 'inputTopics': [ + 'resources-component-type-substitution-converter' + ], + 'outputTopic': 'resources-component-type-substitution-filter-app', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'filter-app', + 'namespace': 'example-namespace', + 'prefix': 'resources-component-type-substitution-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'resources-component-type-substitution-filter-app': { + 'configs': { + 'retention.ms': '-1' + }, + 'partitions_count': 50, + 'type': 'output' + }, + 'resources-component-type-substitution-filter-app-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'filter', + 'version': '2.4.2' + } +] + +snapshots['TestGenerate.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = [ + { + 'app': { + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'app1-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app1', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app1-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'app2-dead-letter-topic', + 'inputTopics': [ + 'app1-test-topic' + ], + 'outputTopic': 'app2-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app2-dead-letter-topic': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + }, + 'app2-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } +] + +snapshots['TestGenerate.test_with_custom_config_with_relative_defaults_path test-pipeline'] = [ + { + 'app': { + 'resources': { + 'limits': { + 'memory': '2G' + }, + 'requests': { + 'memory': '2G' + } + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'extraOutputTopics': { + }, + 'outputTopic': 'app1-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app1', + 'namespace': 
'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app1-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'producer-app', + 'version': '2.9.0' + }, + { + 'app': { + 'image': 'some-image', + 'labels': { + 'pipeline': 'resources-custom-config' + }, + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'errorTopic': 'app2-dead-letter-topic', + 'inputTopics': [ + 'app1-test-topic' + ], + 'outputTopic': 'app2-test-topic', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'name': 'app2', + 'namespace': 'development-namespace', + 'prefix': 'resources-custom-config-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'app2-dead-letter-topic': { + 'configs': { + }, + 'partitions_count': 1, + 'type': 'error' + }, + 'app2-test-topic': { + 'configs': { + }, + 'partitions_count': 3, + 'type': 'output' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + } +] + +snapshots['TestGenerate.test_with_env_defaults test-pipeline'] = [ + { + 'app': { + 'image': 'fake-image', + 'streams': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'config': { + 'large.message.id.generator': 'com.bakdata.kafka.MurmurHashIdGenerator' + }, + 'errorTopic': 'resources-kafka-connect-sink-streams-app-development-error', + 'inputTopics': [ + 'example-topic' + ], + 'outputTopic': 'example-output', + 'schemaRegistryUrl': 'http://localhost:8081/' + } + }, + 'from': { + 'components': { + }, + 'topics': { + 'example-topic': { + 'type': 'input' + } + } + }, + 'name': 'streams-app-development', + 'namespace': 'development-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-streams-bootstrap', + 'url': 'https://bakdata.github.io/streams-bootstrap/' + }, + 'to': { + 'models': { + }, + 'topics': { + 'example-output': { + 'configs': { + }, + 'type': 'output' + }, + 'resources-kafka-connect-sink-streams-app-development-error': { + 'configs': { + 'cleanup.policy': 'compact,delete' + }, + 'partitions_count': 1, + 'type': 'error', + 'value_schema': 'com.bakdata.kafka.DeadLetter' + } + } + }, + 'type': 'streams-app', + 'version': '2.9.0' + }, + { + 'app': { + 'batch.size': '2000', + 'behavior.on.malformed.documents': 'warn', + 'behavior.on.null.values': 'delete', + 'connection.compression': 'true', + 'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector', + 'key.ignore': 'false', + 'linger.ms': '5000', + 'max.buffered.records': '20000', + 'name': 'resources-kafka-connect-sink-es-sink-connector', + 'read.timeout.ms': '120000', + 'tasks.max': '1', + 'topics': 'example-output' + }, + 'name': 'es-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'resetter_values': { + }, + 'type': 
'kafka-sink-connector',
+        'version': '1.0.4'
+    }
+]
diff --git a/tests/pipeline/snapshots/snap_test_manifest.py b/tests/pipeline/snapshots/snap_test_manifest.py
new file mode 100644
index 000000000..044b51c92
--- /dev/null
+++ b/tests/pipeline/snapshots/snap_test_manifest.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# snapshottest: v1 - https://goo.gl/zC4yUc
+from __future__ import unicode_literals
+
+from snapshottest import GenericRepr, Snapshot
+
+
+snapshots = Snapshot()
+
+snapshots['TestManifest.test_python_api resource 0'] = [
+    GenericRepr("{'apiVersion': 'batch/v1', 'kind': 'Job', 'metadata': {'name': 'resources-custom-config-app1', 'labels': {'app': 'resources-custom-config-app1', 'chart': 'producer-app-2.9.0', 'release': 'resources-custom-config-app1'}}, 'spec': {'template': {'metadata': {'labels': {'app': 'resources-custom-config-app1', 'release': 'resources-custom-config-app1'}}, 'spec': {'restartPolicy': 'OnFailure', 'affinity': None, 'containers': [{'name': 'resources-custom-config-app1', 'image': 'producerApp:latest', 'imagePullPolicy': 'Always', 'resources': {'limits': {'cpu': '500m', 'memory': '2G'}, 'requests': {'cpu': '200m', 'memory': '2G'}}, 'env': [{'name': 'ENV_PREFIX', 'value': 'APP_'}, {'name': 'APP_BROKERS', 'value': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092'}, {'name': 'APP_SCHEMA_REGISTRY_URL', 'value': 'http://localhost:8081/'}, {'name': 'APP_DEBUG', 'value': 'false'}, {'name': 'APP_OUTPUT_TOPIC', 'value': 'resources-custom-config-app1'}, {'name': 'JAVA_TOOL_OPTIONS', 'value': '-XX:MaxRAMPercentage=75.0 '}]}]}}, 'backoffLimit': 6}}")
+]
+
+snapshots['TestManifest.test_python_api resource 1'] = [
+    GenericRepr('{\'apiVersion\': \'v1\', \'kind\': \'ConfigMap\', \'metadata\': {\'name\': \'resources-custom-config-app2-jmx-configmap\', \'labels\': {\'app\': \'resources-custom-config-app2\', \'chart\': \'streams-app-2.9.0\', \'release\': \'resources-custom-config-app2\', \'heritage\': \'Helm\'}}, \'data\': {\'jmx-kafka-streams-app-prometheus.yml\': \'jmxUrl: service:jmx:rmi:///jndi/rmi://localhost:5555/jmxrmi\\nlowercaseOutputName: true\\nlowercaseOutputLabelNames: true\\nssl: false\\nrules:\\n - pattern: ".*"\\n\'}}'),
+    GenericRepr("{'apiVersion': 'apps/v1', 'kind': 'Deployment', 'metadata': {'name': 'resources-custom-config-app2', 'labels': {'app': 'resources-custom-config-app2', 'chart': 'streams-app-2.9.0', 'release': 'resources-custom-config-app2', 'pipeline': 'resources-custom-config'}}, 'spec': {'replicas': 1, 'selector': {'matchLabels': {'app': 'resources-custom-config-app2', 'release': 'resources-custom-config-app2'}}, 'template': {'metadata': {'annotations': {'prometheus.io/scrape': 'true', 'prometheus.io/port': '5556'}, 'labels': {'app': 'resources-custom-config-app2', 'release': 'resources-custom-config-app2', 'pipeline': 'resources-custom-config'}}, 'spec': {'affinity': {'podAntiAffinity': {'preferredDuringSchedulingIgnoredDuringExecution': [{'weight': 1, 'podAffinityTerm': {'topologyKey': 'kubernetes.io/hostname', 'labelSelector': {'matchExpressions': [{'key': 'app', 'operator': 'In', 'values': ['resources-custom-config-app2']}]}}}]}}, 'containers': [{'name': 'resources-custom-config-app2', 'image': 'some-image:latest', 'imagePullPolicy': 'Always', 'resources': {'limits': {'cpu': '500m', 'memory': '2G'}, 'requests': {'cpu': '200m', 'memory': '300Mi'}}, 'env': [{'name': 'ENV_PREFIX', 'value': 'APP_'}, {'name': 'KAFKA_JMX_PORT', 'value': '5555'}, {'name': 'APP_VOLATILE_GROUP_INSTANCE_ID', 'value': 'true'}, {'name': 'APP_BROKERS', 'value': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092'}, {'name': 'APP_SCHEMA_REGISTRY_URL', 'value': 'http://localhost:8081/'}, {'name': 'APP_DEBUG', 'value': 'false'}, {'name': 'APP_INPUT_TOPICS', 'value': 'resources-custom-config-app1'}, {'name': 'APP_OUTPUT_TOPIC', 'value': 'resources-custom-config-app2'}, {'name': 'APP_ERROR_TOPIC', 'value': 'resources-custom-config-app2-error'}, {'name': 'JAVA_TOOL_OPTIONS', 'value': '-Dcom.sun.management.jmxremote.port=5555 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -XX:MaxRAMPercentage=75.0 '}], 'ports': [{'containerPort': 5555, 'name': 'jmx'}]}, {'name': 'prometheus-jmx-exporter', 'image': 'solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143', 'command': ['java', '-XX:+UnlockExperimentalVMOptions', '-XX:+UseCGroupMemoryLimitForHeap', '-XX:MaxRAMFraction=1', '-XshowSettings:vm', '-jar', 'jmx_prometheus_httpserver.jar', '5556', '/etc/jmx-streams-app/jmx-kafka-streams-app-prometheus.yml'], 'ports': [{'containerPort': 5556}], 'resources': {'limits': {'cpu': '300m', 'memory': '2G'}, 'requests': {'cpu': '100m', 'memory': '500Mi'}}, 'volumeMounts': [{'name': 'jmx-config', 'mountPath': '/etc/jmx-streams-app'}]}], 'volumes': [{'name': 'jmx-config', 'configMap': {'name': 'resources-custom-config-app2-jmx-configmap'}}]}}}}")
+]
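Snapshot modules like the one above are generated by the snapshottest plugin rather than written by hand. As a hedged sketch of that workflow (assuming the standard snapshottest pytest integration; the value is illustrative):

```python
# `snapshot` is the fixture provided by the snapshottest pytest plugin.
def test_python_api(snapshot):
    resource = {"apiVersion": "batch/v1", "kind": "Job"}  # illustrative value
    snapshot.assert_match(resource, "resource 0")
```

Running `pytest --snapshot-update` regenerates the snapshot module after intentional changes.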
diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_generate.py
similarity index 96%
rename from tests/pipeline/test_pipeline.py
rename to tests/pipeline/test_generate.py
index afb137ff2..68e8efd92 100644
--- a/tests/pipeline/test_pipeline.py
+++ b/tests/pipeline/test_generate.py
@@ -1,4 +1,3 @@
-import logging
 from pathlib import Path
 
 import pytest
@@ -16,11 +15,12 @@
 
 @pytest.mark.usefixtures("mock_env", "load_yaml_file_clear_cache")
-class TestPipeline:
+class TestGenerate:
     def test_python_api(self):
         pipeline = kpops.generate(
             RESOURCE_PATH / "first-pipeline" / "pipeline.yaml",
             defaults=RESOURCE_PATH,
+            output=False,
         )
         assert len(pipeline) == 3
@@ -42,33 +42,6 @@ def test_load_pipeline(self, snapshot: SnapshotTest):
 
         snapshot.assert_match(enriched_pipeline, "test-pipeline")
 
-    def test_generate_with_steps_flag_should_write_log_warning(
-        self, caplog: pytest.LogCaptureFixture
-    ):
-        result = runner.invoke(
-            app,
-            [
-                "generate",
-                str(RESOURCE_PATH / "first-pipeline/pipeline.yaml"),
-                "--defaults",
-                str(RESOURCE_PATH),
-                "--steps",
-                "a",
-            ],
-            catch_exceptions=False,
-        )
-
-        assert caplog.record_tuples == [
-            (
-                "root",
-                logging.WARNING,
-                "The following flags are considered only when `--template` is set: \n \
-                '--steps'",
-            )
-        ]
-
-        assert result.exit_code == 0, result.stdout
-
     def test_name_equal_prefix_name_concatenation(self):
         result = runner.invoke(
             app,
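The renamed test above exercises the public Python API directly. Pieced together from the calls visible in this diff (the paths are placeholders; the `generate` call with `defaults` and `output=False` is taken verbatim from the test), usage looks roughly like this:

```python
from pathlib import Path

import kpops

# Build the enriched pipeline without printing it to stdout (output=False),
# exactly as test_python_api does above.
pipeline = kpops.generate(
    Path("pipeline.yaml"),
    defaults=Path("."),  # placeholder directory containing defaults.yaml
    output=False,
)
print(len(pipeline))  # number of enriched components
```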
diff --git a/tests/pipeline/test_template.py b/tests/pipeline/test_manifest.py
similarity index 56%
rename from tests/pipeline/test_template.py
rename to tests/pipeline/test_manifest.py
index c4b0757bb..0910c2dac 100644
--- a/tests/pipeline/test_template.py
+++ b/tests/pipeline/test_manifest.py
@@ -3,46 +3,54 @@
 
 import pytest
 from pytest_mock import MockerFixture
+from snapshottest.module import SnapshotTest
 from typer.testing import CliRunner
 
+import kpops
 from kpops.cli.main import app
 from kpops.component_handlers.helm_wrapper.helm import Helm
-from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name
-
-HELM_RELEASE_NAME = create_helm_release_name("resources-custom-config-app2")
+from kpops.component_handlers.helm_wrapper.model import HelmConfig, Version
 
 runner = CliRunner()
 
 RESOURCE_PATH = Path(__file__).parent / "resources"
 
 
-class TestTemplate:
+class TestManifest:
+    @pytest.fixture()
+    def mock_execute(self, mocker: MockerFixture) -> MagicMock:
+        mock_execute = mocker.patch.object(Helm, "_Helm__execute")
+        mock_execute.return_value = ""  # Helm Template
+        return mock_execute
+
     @pytest.fixture()
-    def run_command(self, mocker: MockerFixture) -> MagicMock:
-        return mocker.patch.object(Helm, "_Helm__execute")
+    def mock_get_version(self, mocker: MockerFixture) -> MagicMock:
+        mock_get_version = mocker.patch.object(Helm, "get_version")
+        mock_get_version.return_value = Version(major=3, minor=12, patch=0)
+        return mock_get_version
 
-    def test_default_template_config(self, run_command: MagicMock):
-        run_command.return_value = "v3.12.0+gc9f554d"
+    @pytest.fixture(autouse=True)
+    def helm(self, mock_get_version: MagicMock) -> Helm:
+        return Helm(helm_config=HelmConfig())
 
+    def test_default_config(self, mock_execute: MagicMock):
         result = runner.invoke(
             app,
             [
-                "generate",
+                "manifest",
                 str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
                 "--defaults",
                 str(RESOURCE_PATH / "no-topics-defaults"),
-                "--template",
                 "--environment",
                 "development",
             ],
             catch_exceptions=False,
        )
-
-        run_command.assert_called_with(
+        mock_execute.assert_called_with(
            [
                "helm",
                "template",
-                HELM_RELEASE_NAME,
+                "resources-custom-config-app2",
                "bakdata-streams-bootstrap/streams-app",
                "--namespace",
                "development-namespace",
@@ -55,33 +63,28 @@
                "--wait",
            ],
        )
-
        assert result.exit_code == 0, result.stdout
 
-    def test_template_config_with_flags(self, run_command: MagicMock):
-        run_command.return_value = "v3.12.0+gc9f554d"
-
+    def test_custom_config(self, mock_execute: MagicMock):
        result = runner.invoke(
            app,
            [
-                "generate",
+                "manifest",
                str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
                "--defaults",
                str(RESOURCE_PATH / "no-topics-defaults"),
                "--config",
                str(RESOURCE_PATH / "custom-config"),
-                "--template",
                "--environment",
                "development",
            ],
            catch_exceptions=False,
        )
-
-        run_command.assert_called_with(
+        mock_execute.assert_called_with(
            [
                "helm",
                "template",
-                HELM_RELEASE_NAME,
+                "resources-custom-config-app2",
                "bakdata-streams-bootstrap/streams-app",
                "--namespace",
                "development-namespace",
@@ -96,5 +99,16 @@
                "2.1.1",
            ],
        )
-
        assert result.exit_code == 0, result.stdout
+
+    def test_python_api(self, snapshot: SnapshotTest):
+        resources = kpops.manifest(
+            RESOURCE_PATH / "custom-config/pipeline.yaml",
+            defaults=RESOURCE_PATH / "no-topics-defaults",
+            output=False,
+            environment="development",
+        )
+        assert isinstance(resources, list)
+        assert len(resources) == 2
+        for i, resource in enumerate(resources):
+            snapshot.assert_match(resource, f"resource {i}")

From 1547ecaae840f9a7d9708131f7132fabcefc3cf8 Mon Sep 17 00:00:00 2001
From: Salomon Popp
Date: Mon, 8 Jan 2024 13:24:58 +0100
Subject: [PATCH 24/34] Generate defaults schema (#402)

Closes #180
---
 .../editor_integration/settings.json          |   14 +-
 docs/docs/schema/defaults.json                | 1587 +++++++++++++++++
 docs/docs/user/references/cli-commands.md     |    4 +-
 .../user/references/editor-integration.md     |    5 +-
 hooks/gen_schema.py                           |   10 +-
 kpops/cli/main.py                             |   12 +-
 kpops/utils/gen_schema.py                     |   84 +-
 tests/cli/test_schema_generation.py           |   69 +-
 8 files changed, 1732 insertions(+), 53 deletions(-)
 create mode 100644 docs/docs/schema/defaults.json

diff 
--git a/docs/docs/resources/editor_integration/settings.json b/docs/docs/resources/editor_integration/settings.json index bead179c9..1938692c3 100644 --- a/docs/docs/resources/editor_integration/settings.json +++ b/docs/docs/resources/editor_integration/settings.json @@ -1,6 +1,16 @@ { "yaml.schemas": { - "https://bakdata.github.io/kpops/2.0/schema/config.json": "config.yaml", - "https://bakdata.github.io/kpops/2.0/schema/pipeline.json": "pipeline.yaml" + "https://bakdata.github.io/kpops/3.0/schema/pipeline.json": [ + "pipeline.yaml", + "pipeline_*.yaml" + ], + "https://bakdata.github.io/kpops/3.0/schema/defaults.json": [ + "defaults.yaml", + "defaults_*.yaml" + ], + "https://bakdata.github.io/kpops/3.0/schema/config.json": [ + "config.yaml", + "config_*.yaml" + ] } } diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json new file mode 100644 index 000000000..137b547e9 --- /dev/null +++ b/docs/docs/schema/defaults.json @@ -0,0 +1,1587 @@ +{ + "$defs": { + "FromSection": { + "additionalProperties": false, + "description": "Holds multiple input topics.", + "properties": { + "components": { + "additionalProperties": { + "$ref": "#/$defs/FromTopic" + }, + "default": {}, + "description": "Components to read from", + "title": "Components", + "type": "object" + }, + "topics": { + "additionalProperties": { + "$ref": "#/$defs/FromTopic" + }, + "default": {}, + "description": "Input topics", + "title": "Topics", + "type": "object" + } + }, + "title": "FromSection", + "type": "object" + }, + "FromTopic": { + "additionalProperties": false, + "description": "Input topic.", + "properties": { + "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", + "title": "Role" + }, + "type": { + "anyOf": [ + { + "$ref": "#/$defs/InputTopicTypes" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic type" + } + }, + "title": "FromTopic", + "type": "object" + }, + "HelmApp": { + "additionalProperties": true, + "description": "Kubernetes app managed through Helm with an associated Helm chart.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Helm app values" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "anyOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "helm-app", + "title": "Type" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app", + "type" + ], + "title": "HelmApp", + "type": "object" + }, + "HelmAppValues": { + "additionalProperties": true, + "description": "Helm app values.", + "properties": { + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Override name with this value", + "title": "Nameoverride" + } + }, + "title": "HelmAppValues", + "type": "object" + }, + "HelmRepoConfig": { + "description": "Helm repository configuration.", + "properties": { + "repo_auth_flags": { + "allOf": [ + { + "$ref": "#/$defs/RepoAuthFlags" + } + ], + "default": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "description": "Authorisation-related flags" + }, + "repository_name": { + "description": "Name of the Helm repository", + "title": "Repository Name", + "type": "string" + }, + "url": { + "description": "URL to the Helm repository", + "title": "Url", + "type": "string" + } + }, + "required": [ + "repository_name", + "url" + ], + "title": "HelmRepoConfig", + "type": "object" + }, + "InputTopicTypes": { + "description": "Input topic types.\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", + "enum": [ + "input", + "pattern" + ], + "title": "InputTopicTypes", + "type": "string" + }, + "KafkaApp": { + "additionalProperties": true, + "description": "Base component for Kafka-based components.\nProducer or streaming apps should inherit from this class.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/KafkaAppValues" + } + ], + "description": "Application-specific settings" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app" + ], + "title": "KafkaApp", + "type": "object" + }, + "KafkaAppValues": { + "additionalProperties": true, + "description": "Settings specific to Kafka Apps.", + "properties": { + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Override name with this value", + "title": "Nameoverride" + }, + "streams": { + "allOf": [ + { + "$ref": "#/$defs/KafkaStreamsConfig" + } + ], + "description": "Kafka streams config" + } + }, + "required": [ + "streams" + ], + "title": "KafkaAppValues", + "type": "object" + }, + "KafkaConnector": { + "additionalProperties": true, + "description": "Base class for all Kafka connectors.\nShould only be used to set defaults", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/KafkaConnectorConfig" + } + ], + "description": "Application-specific settings" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "resetter_values": { + "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", + "title": "Resetter Values", + "type": "object" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "1.0.4", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app" + ], + "title": "KafkaConnector", + "type": "object" + }, + "KafkaConnectorConfig": { + "additionalProperties": true, + "additional_properties": { + "type": "string" + }, + "description": "Settings specific to Kafka Connectors.", + "properties": { + "connector.class": { + "title": "Connector.Class", + "type": "string" + } + }, + "required": [ + "connector.class" + ], + "title": "KafkaConnectorConfig", + "type": "object" + }, + "KafkaSinkConnector": { + "additionalProperties": true, + "description": "Kafka sink connector model.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/KafkaConnectorConfig" + } + ], + "description": "Application-specific settings" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "resetter_values": { + "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", + "title": "Resetter Values", + "type": "object" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "kafka-sink-connector", + "title": "Type" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "1.0.4", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app", + "type" + ], + "title": "KafkaSinkConnector", + "type": "object" + }, + "KafkaSourceConnector": { + "additionalProperties": true, + "description": "Kafka source connector model.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/KafkaConnectorConfig" + } + ], + "description": "Application-specific settings" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "offset_topic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running", + "title": "Offset Topic" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-kafka-connect-resetter", + "url": "https://bakdata.github.io/kafka-connect-resetter/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "resetter_values": { + "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", + "title": "Resetter Values", + "type": "object" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "kafka-source-connector", + "title": "Type" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "1.0.4", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app", + "type" + ], + "title": "KafkaSourceConnector", + "type": "object" + }, + "KafkaStreamsConfig": { + "additionalProperties": true, + "description": "Kafka Streams config.", + "properties": { + "brokers": { + "description": "Brokers", + "title": "Brokers", + "type": "string" + }, + "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "URL of the schema registry", + "title": "Schemaregistryurl" + } + }, + "required": [ + "brokers" + ], + "title": "KafkaStreamsConfig", + "type": "object" + }, + "KubernetesApp": { + "additionalProperties": true, + "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/KubernetesAppValues" + } + ], + "description": "Application-specific settings" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + } + }, + "required": [ + "name", + "namespace", + "app" + ], + "title": "KubernetesApp", + "type": "object" + }, + "KubernetesAppValues": { + "additionalProperties": true, + "description": "Settings specific to Kubernetes apps.", + "properties": {}, + "title": "KubernetesAppValues", + "type": "object" + }, + "OutputTopicTypes": { + "description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)", + "enum": [ + "output", + "error" + ], + "title": "OutputTopicTypes", + "type": "string" + }, + "PipelineComponent": { + "additionalProperties": true, + "description": "Base class for all components.", + "properties": { + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + } + }, + "required": [ + "name" + ], + "title": "PipelineComponent", + "type": "object" + }, + "ProducerApp": { + "additionalProperties": true, + "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/ProducerAppValues" + } + ], + "description": "Application-specific settings" + }, + "from": { + "default": null, + "description": "Producer doesn't support FromSection", + "title": "From", + "type": "null" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "producer-app", + "title": "Type" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app", + "type" + ], + "title": "ProducerApp", + "type": "object" + }, + "ProducerAppValues": { + "additionalProperties": true, + "description": "Settings specific to producers.", + "properties": { + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Override name with this value", + "title": "Nameoverride" + }, + "streams": { + "allOf": [ + { + "$ref": "#/$defs/ProducerStreamsConfig" + } + ], + "description": "Kafka Streams settings" + } + }, + "required": [ + "streams" + ], + "title": "ProducerAppValues", + "type": "object" + }, + "ProducerStreamsConfig": { + "additionalProperties": true, + "description": "Kafka Streams settings specific to Producer.", + "properties": { + "brokers": { + "description": "Brokers", + "title": "Brokers", + "type": "string" + }, + "extraOutputTopics": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra output topics", + "title": "Extraoutputtopics", + "type": "object" + }, + "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Output topic", + "title": "Outputtopic" + }, + "schemaRegistryUrl": { + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "URL of the schema registry", + "title": "Schemaregistryurl" + } + }, + "required": [ + "brokers" + ], + "title": "ProducerStreamsConfig", + "type": "object" + }, + "RepoAuthFlags": { + "description": "Authorisation-related flags for `helm repo`.", + "properties": { + "ca_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Path to CA bundle file to verify certificates of HTTPS-enabled servers", + "title": "Ca File" + }, + "cert_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Path to SSL certificate file to identify HTTPS client", + "title": "Cert File" + }, + "insecure_skip_tls_verify": { + "default": false, + "description": "If true, Kubernetes API server's certificate will not be checked for validity", + "title": "Insecure Skip Tls Verify", + "type": "boolean" + }, + "password": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Password", + "title": "Password" + }, + "username": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Username", + "title": "Username" + } + }, + "title": "RepoAuthFlags", + "type": "object" + }, + "StreamsApp": { + "additionalProperties": true, + "description": "StreamsApp component that configures a streams bootstrap app.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/StreamsAppValues" + } + ], + "description": "Application-specific settings" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "type": { + "const": "streams-app", + "title": "Type" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app", + "type" + ], + "title": "StreamsApp", + "type": "object" + }, + "StreamsAppAutoScaling": { + "additionalProperties": true, + "description": "Kubernetes Event-driven Autoscaling config.", + "properties": { + "consumerGroup": { + "description": "Name of the consumer group used for checking the offset on the topic and processing the related lag.", + "title": "Consumer group", + "type": "string" + }, + "cooldownPeriod": { + "default": 300, + "description": "The period to wait after the last trigger reported active before scaling the resource back to 0. https://keda.sh/docs/2.9/concepts/scaling-deployments/#cooldownperiod", + "title": "Cooldown period", + "type": "integer" + }, + "enabled": { + "default": false, + "description": "", + "title": "Enabled", + "type": "boolean" + }, + "idleReplicas": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, + "description": "If this property is set, KEDA will scale the resource down to this number of replicas. https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", + "title": "Idle replica count" + }, + "lagThreshold": { + "description": "Average target value to trigger scaling actions.", + "title": "Lag threshold", + "type": "integer" + }, + "maxReplicas": { + "default": 1, + "description": "This setting is passed to the HPA definition that KEDA will create for a given resource and holds the maximum number of replicas of the target resouce. https://keda.sh/docs/2.9/concepts/scaling-deployments/#maxreplicacount", + "title": "Max replica count", + "type": "integer" + }, + "minReplicas": { + "default": 0, + "description": "Minimum number of replicas KEDA will scale the resource down to. \"https://keda.sh/docs/2.9/concepts/scaling-deployments/#minreplicacount\"", + "title": "Min replica count", + "type": "integer" + }, + "offsetResetPolicy": { + "default": "earliest", + "description": "The offset reset policy for the consumer if the consumer group is not yet subscribed to a partition.", + "title": "Offset reset policy", + "type": "string" + }, + "pollingInterval": { + "default": 30, + "description": "This is the interval to check each trigger on. 
https://keda.sh/docs/2.9/concepts/scaling-deployments/#pollinginterval", + "title": "Polling interval", + "type": "integer" + }, + "topics": { + "default": [], + "description": "List of auto-generated Kafka Streams topics used by the streams app.", + "items": { + "type": "string" + }, + "title": "Topics", + "type": "array" + } + }, + "required": [ + "consumerGroup", + "lagThreshold" + ], + "title": "StreamsAppAutoScaling", + "type": "object" + }, + "StreamsAppValues": { + "additionalProperties": true, + "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "properties": { + "autoscaling": { + "anyOf": [ + { + "$ref": "#/$defs/StreamsAppAutoScaling" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Kubernetes Event-driven Autoscaling config" + }, + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Override name with this value", + "title": "Nameoverride" + }, + "streams": { + "allOf": [ + { + "$ref": "#/$defs/StreamsConfig" + } + ], + "description": "Streams Bootstrap streams section" + } + }, + "required": [ + "streams" + ], + "title": "StreamsAppValues", + "type": "object" + }, + "StreamsConfig": { + "additionalProperties": true, + "description": "Streams Bootstrap streams section.", + "properties": { + "brokers": { + "description": "Brokers", + "title": "Brokers", + "type": "string" + }, + "config": { + "default": {}, + "description": "Configuration", + "title": "Config", + "type": "object" + }, + "errorTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Error topic", + "title": "Errortopic" + }, + "extraInputPatterns": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra input patterns", + "title": "Extrainputpatterns", + "type": "object" + }, + "extraInputTopics": { + "additionalProperties": { + "items": { + "type": "string" + }, + "type": "array" + }, + "default": {}, + "description": "Extra input topics", + "title": "Extrainputtopics", + "type": "object" + }, + "extraOutputTopics": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra output topics", + "title": "Extraoutputtopics", + "type": "object" + }, + "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Input pattern", + "title": "Inputpattern" + }, + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, + "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Output topic", + "title": "Outputtopic" + }, + "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "URL of the schema registry", + "title": "Schemaregistryurl" + } + }, + "required": [ + "brokers" + ], + "title": "StreamsConfig", + "type": "object" + }, + "ToSection": { + "additionalProperties": false, + "description": "Holds multiple output topics.", + "properties": { + "models": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Data models", + "title": "Models", + "type": "object" + }, + "topics": { + "additionalProperties": { + "$ref": "#/$defs/TopicConfig" + }, + 
"default": {}, + "description": "Output topics", + "title": "Topics", + "type": "object" + } + }, + "title": "ToSection", + "type": "object" + }, + "TopicConfig": { + "additionalProperties": false, + "description": "Configure an output topic.", + "properties": { + "configs": { + "additionalProperties": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + }, + "default": {}, + "description": "Topic configs", + "title": "Configs", + "type": "object" + }, + "key_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Key schema class name", + "title": "Key schema" + }, + "partitions_count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Number of partitions into which the topic is divided", + "title": "Partitions count" + }, + "replication_factor": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Replication factor of the topic", + "title": "Replication factor" + }, + "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", + "title": "Role" + }, + "type": { + "anyOf": [ + { + "$ref": "#/$defs/OutputTopicTypes" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic type", + "title": "Topic type" + }, + "value_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Value schema class name", + "title": "Value schema" + } + }, + "title": "TopicConfig", + "type": "object" + } + }, + "properties": { + "helm-app": { + "$ref": "#/$defs/HelmApp" + }, + "kafka-app": { + "$ref": "#/$defs/KafkaApp" + }, + "kafka-connector": { + "$ref": "#/$defs/KafkaConnector" + }, + "kafka-sink-connector": { + "$ref": "#/$defs/KafkaSinkConnector" + }, + "kafka-source-connector": { + "$ref": "#/$defs/KafkaSourceConnector" + }, + "kubernetes-app": { + "$ref": "#/$defs/KubernetesApp" + }, + "pipeline-component": { + "$ref": "#/$defs/PipelineComponent" + }, + "producer-app": { + "$ref": "#/$defs/ProducerApp" + }, + "streams-app": { + "$ref": "#/$defs/StreamsApp" + } + }, + "required": [ + "helm-app", + "kafka-app", + "kafka-connector", + "kafka-sink-connector", + "kafka-source-connector", + "kubernetes-app", + "pipeline-component", + "producer-app", + "streams-app" + ], + "title": "DefaultsSchema", + "type": "object" +} diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index 09fe4f40e..0a7617224 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -186,12 +186,12 @@ The schemas can be used to enable support for KPOps files in a text editor. 
**Usage**: ```console -$ kpops schema [OPTIONS] SCOPE:{pipeline|config} +$ kpops schema [OPTIONS] SCOPE:{pipeline|defaults|config} ``` **Arguments**: -* `SCOPE:{pipeline|config}`: +* `SCOPE:{pipeline|defaults|config}`: Scope of the generated schema diff --git a/docs/docs/user/references/editor-integration.md b/docs/docs/user/references/editor-integration.md index 86b7e93d0..a5c91890b 100644 --- a/docs/docs/user/references/editor-integration.md +++ b/docs/docs/user/references/editor-integration.md @@ -1,10 +1,11 @@ # Editor integration -KPOps provides JSON schemas that enable autocompletion and validation for some of the files that the user must work with. +KPOps provides JSON schemas that enable autocompletion and validation for all YAML files that the user must work with. ## Supported files - [`pipeline.yaml`](../../resources/pipeline-components/pipeline.md) +- [`defaults.yaml`](../core-concepts/defaults.md) - [`config.yaml`](../core-concepts/config.md) ## Usage @@ -23,6 +24,6 @@ KPOps provides JSON schemas that enable autocompletion and validation for some o ``` !!! tip "Advanced usage" - It is possible to generate schemas with the [`kpops schema`](./cli-commands.md#kpops-schema) command. Useful when using custom components or when using a pre-release version of KPOps. + It is possible to generate schemas with the [`kpops schema`](./cli-commands.md#kpops-schema) command. Useful for including custom components or when using a pre-release version of KPOps. diff --git a/hooks/gen_schema.py b/hooks/gen_schema.py index 726da823f..3e3cdb0e8 100644 --- a/hooks/gen_schema.py +++ b/hooks/gen_schema.py @@ -4,7 +4,12 @@ from pathlib import Path from hooks import ROOT -from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema +from kpops.utils.gen_schema import ( + SchemaScope, + gen_config_schema, + gen_defaults_schema, + gen_pipeline_schema, +) PATH_TO_SCHEMA = ROOT / "docs/docs/schema" @@ -20,6 +25,8 @@ def gen_schema(scope: SchemaScope): match scope: case SchemaScope.PIPELINE: gen_pipeline_schema() + case SchemaScope.DEFAULTS: + gen_defaults_schema() case SchemaScope.CONFIG: gen_config_schema() Path(PATH_TO_SCHEMA / f"{scope.value}.json").write_text(f.getvalue()) @@ -27,4 +34,5 @@ def gen_schema(scope: SchemaScope): if __name__ == "__main__": gen_schema(SchemaScope.PIPELINE) + gen_schema(SchemaScope.DEFAULTS) gen_schema(SchemaScope.CONFIG) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 8603e6ac9..4c342cdac 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -22,7 +22,12 @@ from kpops.components.base_components.models.resource import Resource from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline import Pipeline, PipelineGenerator -from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema +from kpops.utils.gen_schema import ( + SchemaScope, + gen_config_schema, + gen_defaults_schema, + gen_pipeline_schema, +) from kpops.utils.pydantic import YamlConfigSettingsSource from kpops.utils.yaml import print_yaml @@ -249,6 +254,11 @@ def schema( gen_pipeline_schema( kpops_config.components_module, include_stock_components ) + case SchemaScope.DEFAULTS: + kpops_config = create_kpops_config(config) + gen_defaults_schema( + kpops_config.components_module, include_stock_components + ) case SchemaScope.CONFIG: gen_config_schema() diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 9c3448d90..034787ed9 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -4,9 +4,14 @@ from abc 
import ABC from collections.abc import Sequence from enum import Enum -from typing import Annotated, Literal, Union +from typing import Annotated, Any, Literal, Union -from pydantic import Field, RootModel +from pydantic import ( + BaseModel, + Field, + RootModel, + create_model, +) from pydantic.fields import FieldInfo from pydantic.json_schema import GenerateJsonSchema, model_json_schema from pydantic_core.core_schema import ( @@ -17,12 +22,15 @@ ) from kpops.cli.registry import _find_classes -from kpops.components import PipelineComponent +from kpops.components import ( + PipelineComponent, +) from kpops.config import KpopsConfig class SchemaScope(str, Enum): PIPELINE = "pipeline" + DEFAULTS = "defaults" CONFIG = "config" @@ -33,8 +41,15 @@ class MultiComponentGenerateJsonSchema(GenerateJsonSchema): log = logging.getLogger("") +def print_schema(model: type[BaseModel]) -> None: + schema = model_json_schema(model, by_alias=True) + print(json.dumps(schema, indent=4, sort_keys=True)) + + def _is_valid_component( - defined_component_types: set[str], component: type[PipelineComponent] + defined_component_types: set[str], + component: type[PipelineComponent], + allow_abstract: bool, ) -> bool: """Check whether a PipelineComponent subclass has a valid definition for the schema generation. @@ -42,7 +57,9 @@ def _is_valid_component( :param component: component type to be validated :return: Whether component is valid for schema generation """ - if inspect.isabstract(component) or ABC in component.__bases__: + if not allow_abstract and ( + inspect.isabstract(component) or ABC in component.__bases__ + ): log.warning(f"SKIPPED {component.__name__}, component is abstract.") return False if component.type in defined_component_types: @@ -54,6 +71,7 @@ def _is_valid_component( def _add_components( components_module: str, + allow_abstract: bool, components: tuple[type[PipelineComponent], ...] | None = None, ) -> tuple[type[PipelineComponent], ...]: """Add components to a components tuple. @@ -67,18 +85,39 @@ def _add_components( :return: Extended tuple """ if components is None: - components = tuple() # noqa: C408 + components = () # Set of existing types, against which to check the new ones defined_component_types = {component.type for component in components} custom_components = ( component for component in _find_classes(components_module, PipelineComponent) - if _is_valid_component(defined_component_types, component) + if _is_valid_component(defined_component_types, component, allow_abstract) ) components += tuple(custom_components) return components +def find_components( + components_module: str | None, + include_stock_components: bool, + include_abstract: bool = False, +) -> tuple[type[PipelineComponent], ...]: + if not (include_stock_components or components_module): + msg = "No components are provided, no schema is generated." + raise RuntimeError(msg) + # Add stock components if enabled + components: tuple[type[PipelineComponent], ...] = () + if include_stock_components: + components = _add_components("kpops.components", include_abstract) + # Add custom components if provided + if components_module: + components = _add_components(components_module, include_abstract, components) + if not components: + msg = "No valid components found." 
+ raise RuntimeError(msg) + return components + + def gen_pipeline_schema( components_module: str | None = None, include_stock_components: bool = True ) -> None: @@ -89,19 +128,7 @@ def gen_pipeline_schema( :param include_stock_components: Whether to include the stock components, defaults to True """ - if not (include_stock_components or components_module): - log.warning("No components are provided, no schema is generated.") - return - # Add stock components if enabled - components: tuple[type[PipelineComponent], ...] = () - if include_stock_components: - components = _add_components("kpops.components") - # Add custom components if provided - if components_module: - components = _add_components(components_module, components) - if not components: - msg = "No valid components found." - raise RuntimeError(msg) + components = find_components(components_module, include_stock_components) # re-assign component type as Literal to work as discriminator for component in components: @@ -129,11 +156,20 @@ class PipelineSchema(RootModel): AnnotatedPipelineComponents # pyright:ignore[reportGeneralTypeIssues] ] - schema = PipelineSchema.model_json_schema(by_alias=True) - print(json.dumps(schema, indent=4, sort_keys=True)) + print_schema(PipelineSchema) + + +def gen_defaults_schema( + components_module: str | None = None, include_stock_components: bool = True +) -> None: + components = find_components(components_module, include_stock_components, True) + components_mapping: dict[str, Any] = { + component.type: (component, ...) for component in components + } + DefaultsSchema = create_model("DefaultsSchema", **components_mapping) + print_schema(DefaultsSchema) def gen_config_schema() -> None: """Generate JSON schema from the model.""" - schema = model_json_schema(KpopsConfig) - print(json.dumps(schema, indent=4, sort_keys=True)) + print_schema(KpopsConfig) diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index bdd987ac6..ddc4977dd 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -1,6 +1,6 @@ from __future__ import annotations -import logging +import json from abc import ABC, abstractmethod from pathlib import Path from typing import TYPE_CHECKING @@ -10,6 +10,7 @@ from typer.testing import CliRunner from kpops.cli.main import app +from kpops.cli.registry import Registry from kpops.components.base_components import PipelineComponent from kpops.utils.docstring import describe_attr @@ -84,26 +85,27 @@ class SubPipelineComponentCorrectDocstr(SubPipelineComponent): "ignore:handlers", "ignore:config", "ignore:enrich", "ignore:validate" ) class TestGenSchema: - def test_gen_pipeline_schema_no_modules(self, caplog: pytest.LogCaptureFixture): - result = runner.invoke( - app, - [ - "schema", - "pipeline", - "--no-include-stock-components", - "--config", - str(RESOURCE_PATH / "no_module"), - ], - catch_exceptions=False, - ) - assert caplog.record_tuples == [ - ( - "root", - logging.WARNING, - "No components are provided, no schema is generated.", + @pytest.fixture + def stock_components(self) -> list[type[PipelineComponent]]: + registry = Registry() + registry.find_components("kpops.components") + return list(registry._classes.values()) + + def test_gen_pipeline_schema_no_modules(self): + with pytest.raises( + RuntimeError, match="^No components are provided, no schema is generated.$" + ): + runner.invoke( + app, + [ + "schema", + "pipeline", + "--no-include-stock-components", + "--config", + str(RESOURCE_PATH / "no_module"), + ], + 
catch_exceptions=False, ) - ] - assert result.exit_code == 0, result.stdout def test_gen_pipeline_schema_no_components(self): with pytest.raises(RuntimeError, match="^No valid components found.$"): @@ -145,7 +147,9 @@ def test_gen_pipeline_schema_only_stock_module(self): assert result.exit_code == 0, result.stdout assert result.stdout - def test_gen_pipeline_schema_only_custom_module(self, snapshot: SnapshotTest): + def test_gen_pipeline_schema_only_custom_module( + self, snapshot: SnapshotTest, stock_components: list[type[PipelineComponent]] + ): result = runner.invoke( app, [ @@ -161,6 +165,11 @@ def test_gen_pipeline_schema_only_custom_module(self, snapshot: SnapshotTest): assert result.exit_code == 0, result.stdout snapshot.assert_match(result.stdout, "test-schema-generation") + schema = json.loads(result.stdout) + assert schema["title"] == "PipelineSchema" + assert set(schema["items"]["discriminator"]["mapping"].keys()).isdisjoint( + {component.type for component in stock_components} + ) def test_gen_pipeline_schema_stock_and_custom_module(self): result = runner.invoke( @@ -175,6 +184,24 @@ def test_gen_pipeline_schema_stock_and_custom_module(self): assert result.exit_code == 0, result.stdout assert result.stdout + def test_gen_defaults_schema(self, stock_components: list[type[PipelineComponent]]): + result = runner.invoke( + app, + [ + "schema", + "defaults", + "--config", + str(RESOURCE_PATH / "no_module"), + ], + catch_exceptions=False, + ) + + assert result.exit_code == 0, result.stdout + assert result.stdout + schema = json.loads(result.stdout) + assert schema["title"] == "DefaultsSchema" + assert schema["required"] == [component.type for component in stock_components] + def test_gen_config_schema(self): result = runner.invoke( app, From 3a86e04b6d868814f0dfa0ca36b372d055a58308 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 11 Jan 2024 15:01:24 +0200 Subject: [PATCH 25/34] Update docs for substitution variable usage in v3 (#409) closes #403 --- docs/docs/resources/pipeline-components/kafka-app.yaml | 2 +- docs/docs/resources/pipeline-components/pipeline.yaml | 6 +++--- docs/docs/resources/pipeline-components/producer-app.yaml | 2 +- .../pipeline-components/sections/app-kafka-app.yaml | 2 +- .../pipeline-components/sections/app-producer-app.yaml | 2 +- .../pipeline-components/sections/app-streams-app.yaml | 2 +- docs/docs/resources/pipeline-components/streams-app.yaml | 2 +- .../resources/pipeline-defaults/defaults-kafka-app.yaml | 2 +- .../resources/pipeline-defaults/defaults-producer-app.yaml | 2 +- .../resources/pipeline-defaults/defaults-streams-app.yaml | 2 +- docs/docs/resources/pipeline-defaults/defaults.yaml | 6 +++--- 11 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml index cdc49ef28..5d5ad0bf6 100644 --- a/docs/docs/resources/pipeline-components/kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/kafka-app.yaml @@ -50,7 +50,7 @@ app: # required streams: # required brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app # Helm repository configuration (optional) diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index 1c6350fbc..4312a2f31 100644 
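The `DefaultsSchema` asserted on in the test above is assembled at runtime with pydantic's `create_model`, as `gen_defaults_schema` shows. A minimal, self-contained sketch of that mechanism; the two toy models are hypothetical stand-ins for real `PipelineComponent` subclasses:

```python
# Sketch of the create_model-based defaults schema from gen_defaults_schema.
# ToyStreamsApp and ToyProducerApp are hypothetical stand-ins for real
# PipelineComponent subclasses; the real code derives the keys from component.type.
import json
from typing import Any

from pydantic import BaseModel, create_model


class ToyStreamsApp(BaseModel):
    name: str


class ToyProducerApp(BaseModel):
    name: str


# One required field per component type, exactly like gen_defaults_schema.
components_mapping: dict[str, Any] = {
    "streams-app": (ToyStreamsApp, ...),
    "producer-app": (ToyProducerApp, ...),
}
DefaultsSchema = create_model("DefaultsSchema", **components_mapping)

schema = DefaultsSchema.model_json_schema(by_alias=True)
print(json.dumps(schema, indent=4, sort_keys=True))
# Every component type ends up under "required", as the test above asserts.
```
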
--- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -113,7 +113,7 @@ app: # required streams: # required brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app # Helm repository configuration (optional) @@ -328,7 +328,7 @@ app: # required streams: # required, producer-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 @@ -399,7 +399,7 @@ # Streams Bootstrap streams section streams: # required, streams-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 5be3551d8..49f0e4e43 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -33,7 +33,7 @@ app: # required streams: # required, producer-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml index 73b70c59e..6ea97790d 100644 --- a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml @@ -3,6 +3,6 @@ app: # required streams: # required brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml index 0cbe04ded..7eef930a9 100644 --- a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml @@ -3,7 +3,7 @@ app: # required streams: # required, producer-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml index 1c5f0849f..d57bb22b4 100644 --- a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml @@ -5,7 +5,7 @@ # Streams Bootstrap streams section streams: # required, streams-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml 
b/docs/docs/resources/pipeline-components/streams-app.yaml index f77edf80c..3f42b6096 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -52,7 +52,7 @@ # Streams Bootstrap streams section streams: # required, streams-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml index bd6c9e2d9..5d621c7fc 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml @@ -8,7 +8,7 @@ kafka-app: app: # required streams: # required brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml index bfa5521c4..5dbbb6ba6 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml @@ -11,7 +11,7 @@ producer-app: app: # required streams: # required, producer-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml index ae1adab98..15026fe1e 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml @@ -10,7 +10,7 @@ streams-app: # Streams Bootstrap streams section streams: # required, streams-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index 58b22d3f3..cfa5e275f 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -29,7 +29,7 @@ kafka-app: app: # required streams: # required brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app version: "2.12.0" # Helm chart version @@ -181,7 +181,7 @@ producer-app: app: # required streams: # required, producer-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 @@ -199,7 +199,7 @@ streams-app: # Streams Bootstrap streams section streams: # required, streams-app-specific brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + schemaRegistryUrl: ${schema_registry.url} inputTopics: - topic1 - topic2 From 
8f5af65f3a44376f97a2a52d4af1ddab80f13f28 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 11 Jan 2024 15:27:03 +0200 Subject: [PATCH 26/34] Namespace substitution vars (#408) closes #404 --- .../pipeline-components/helm-app.yaml | 16 +-- .../pipeline-components/kafka-app.yaml | 20 ++-- .../pipeline-components/kafka-connector.yaml | 16 +-- .../kafka-sink-connector.yaml | 16 +-- .../kafka-source-connector.yaml | 8 +- .../pipeline-components/kubernetes-app.yaml | 16 +-- .../pipeline-components/pipeline.yaml | 108 +++++++++--------- .../pipeline-components/producer-app.yaml | 12 +- .../sections/app-kafka-app.yaml | 4 +- .../sections/app-producer-app.yaml | 4 +- .../sections/app-streams-app.yaml | 4 +- .../pipeline-components/sections/from_.yaml | 8 +- .../pipeline-components/sections/prefix.yaml | 2 +- .../pipeline-components/sections/to.yaml | 6 +- .../pipeline-components/streams-app.yaml | 20 ++-- .../resources/pipeline-config/config.yaml | 4 +- .../pipeline-defaults/defaults-kafka-app.yaml | 4 +- .../defaults-kafka-connector.yaml | 16 +-- .../defaults-kubernetes-app.yaml | 16 +-- .../defaults-producer-app.yaml | 4 +- .../defaults-streams-app.yaml | 4 +- .../resources/pipeline-defaults/defaults.yaml | 44 +++---- .../resources/variables/config_env_vars.env | 4 +- .../resources/variables/config_env_vars.md | 4 +- docs/docs/schema/config.json | 8 +- docs/docs/schema/defaults.json | 18 +-- docs/docs/schema/pipeline.json | 10 +- .../core-concepts/variables/substitution.md | 2 +- docs/docs/user/migration-guide/v2-v3.md | 19 +++ .../bakdata/atm-fraud-detection/config.yaml | 4 +- .../bakdata/atm-fraud-detection/defaults.yaml | 6 +- .../bakdata/atm-fraud-detection/pipeline.yaml | 2 +- .../base_components/pipeline_component.py | 4 +- kpops/config.py | 4 +- kpops/pipeline.py | 11 +- .../snapshots/snap_test_schema_generation.py | 10 +- tests/cli/test_kpops_config.py | 4 +- tests/compiler/test_pipeline_name.py | 36 +++--- tests/components/test_helm_app.py | 14 +-- tests/components/test_kafka_app.py | 4 +- tests/components/test_kafka_connector.py | 4 +- tests/components/test_kubernetes_app.py | 2 +- tests/components/test_producer_app.py | 2 +- tests/components/test_streams_app.py | 2 +- tests/pipeline/resources/defaults.yaml | 4 +- .../no-topics-defaults/defaults.yaml | 6 +- .../defaults.yaml | 4 +- .../pipeline-with-env-defaults/defaults.yaml | 4 +- .../pipeline-with-short-topics/defaults.yaml | 2 +- .../read-from-component/pipeline.yaml | 2 +- .../temp-trim-release-name/defaults.yaml | 2 +- 51 files changed, 285 insertions(+), 265 deletions(-) diff --git a/docs/docs/resources/pipeline-components/helm-app.yaml b/docs/docs/resources/pipeline-components/helm-app.yaml index 8f0a59c86..1bd2ce3c0 100644 --- a/docs/docs/resources/pipeline-components/helm-app.yaml +++ b/docs/docs/resources/pipeline-components/helm-app.yaml @@ -3,16 +3,16 @@ name: helm-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
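Every hunk below applies one consistent rename: flat substitution variables such as `${pipeline_name}` become namespaced, dotted paths such as `${pipeline.name}` or `${config.kafka_brokers}` that are resolved against the object they belong to. A rough sketch of dotted-path resolution, for illustration only and not KPOps' actual substitution code:

```python
# Illustration only: resolve namespaced, dotted substitution variables such as
# ${pipeline.name} or ${config.kafka_brokers} against nested mappings. This is
# NOT KPOps' actual substitution implementation, just a sketch of the lookup.
import re
from typing import Any

PLACEHOLDER = re.compile(r"\$\{([a-z_]+(?:\.[a-z_]+)*)\}")


def resolve(template: str, context: dict[str, Any]) -> str:
    def lookup(match: re.Match[str]) -> str:
        value: Any = context
        for part in match.group(1).split("."):  # walk the dotted path
            value = value[part]
        return str(value)

    return PLACEHOLDER.sub(lookup, template)


context = {
    "pipeline": {"name": "word-count"},
    "config": {"kafka_brokers": "broker:9092"},
}
print(resolve("${pipeline.name}-output-topic", context))     # word-count-output-topic
print(resolve("brokers: ${config.kafka_brokers}", context))  # brokers: broker:9092
```
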
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml index 5d5ad0bf6..83a67b4cf 100644 --- a/docs/docs/resources/pipeline-components/kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/kafka-app.yaml @@ -4,16 +4,16 @@ name: kafka-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -29,11 +29,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -49,8 +49,8 @@ # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app # Helm repository configuration (optional) diff --git a/docs/docs/resources/pipeline-components/kafka-connector.yaml b/docs/docs/resources/pipeline-components/kafka-connector.yaml index d44aa7bce..ca6cfc6eb 100644 --- a/docs/docs/resources/pipeline-components/kafka-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-connector.yaml @@ -2,16 +2,16 @@ name: kafka-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -27,11 +27,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml index 017511e5b..06d14ffe1 100644 --- a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml @@ -3,16 +3,16 @@ name: kafka-sink-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml index d4cbcb24c..e38497b65 100644 --- a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml @@ -3,17 +3,17 @@ name: kafka-source-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # The source connector has no `from` section # from: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kubernetes-app.yaml b/docs/docs/resources/pipeline-components/kubernetes-app.yaml index 5170768c2..66ed21bb2 100644 --- a/docs/docs/resources/pipeline-components/kubernetes-app.yaml +++ b/docs/docs/resources/pipeline-components/kubernetes-app.yaml @@ -3,16 +3,16 @@ name: kubernetes-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index 4312a2f31..cdbd18d96 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -3,16 +3,16 @@ name: helm-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -67,16 +67,16 @@ name: kafka-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -92,11 +92,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -112,8 +112,8 @@ # add the key-value pairs they need. app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app # Helm repository configuration (optional) @@ -132,16 +132,16 @@ name: kafka-sink-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -157,11 +157,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -197,17 +197,17 @@ name: kafka-source-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # The source connector has no `from` section # from: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -246,16 +246,16 @@ name: kubernetes-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -271,11 +271,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -301,17 +301,17 @@ name: producer-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # from: # While the producer-app does inherit from kafka-app, it does not need a # `from` section, hence it does not support it. # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -327,8 +327,8 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 @@ -351,16 +351,16 @@ name: streams-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -376,11 +376,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -398,8 +398,8 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 49f0e4e43..784873617 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -6,17 +6,17 @@ name: producer-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # from: # While the producer-app does inherit from kafka-app, it does not need a # `from` section, hence it does not support it. # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -32,8 +32,8 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml index 6ea97790d..5ae8be6d6 100644 --- a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml @@ -2,7 +2,7 @@ # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml index 7eef930a9..0fe6680cd 100644 --- a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml @@ -2,8 +2,8 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml index d57bb22b4..e3577aa5f 100644 --- a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml @@ -4,8 +4,8 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-components/sections/from_.yaml b/docs/docs/resources/pipeline-components/sections/from_.yaml index 3f7f0dd22..777d10d0e 100644 --- a/docs/docs/resources/pipeline-components/sections/from_.yaml +++ b/docs/docs/resources/pipeline-components/sections/from_.yaml @@ -1,12 +1,12 @@ from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component diff --git a/docs/docs/resources/pipeline-components/sections/prefix.yaml b/docs/docs/resources/pipeline-components/sections/prefix.yaml index 91fbda223..b4d03f519 100644 --- a/docs/docs/resources/pipeline-components/sections/prefix.yaml +++ b/docs/docs/resources/pipeline-components/sections/prefix.yaml @@ -1,3 +1,3 @@ # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- diff --git a/docs/docs/resources/pipeline-components/sections/to.yaml b/docs/docs/resources/pipeline-components/sections/to.yaml index dd81be9ef..7ebaf60df 100644 --- a/docs/docs/resources/pipeline-components/sections/to.yaml +++ b/docs/docs/resources/pipeline-components/sections/to.yaml @@ -1,11 +1,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml index 3f42b6096..1e79eaf0b 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -4,16 +4,16 @@ name: streams-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -29,11 +29,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -51,8 +51,8 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-config/config.yaml b/docs/docs/resources/pipeline-config/config.yaml index 3b08c0708..275382d46 100644 --- a/docs/docs/resources/pipeline-config/config.yaml +++ b/docs/docs/resources/pipeline-config/config.yaml @@ -16,9 +16,9 @@ defaults_filename_prefix: defaults # Configures topic names. 
topic_name_config: # Configures the value for the variable ${output_topic_name} - default_output_topic_name: ${pipeline_name}-${component_name} + default_output_topic_name: ${pipeline.name}-${component_name} # Configures the value for the variable ${error_topic_name} - default_error_topic_name: ${pipeline_name}-${component_name}-error + default_error_topic_name: ${pipeline.name}-${component_name}-error # Address of the Schema Registry schema_registry_url: "http://localhost:8081" # Address of the Kafka REST Proxy. diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml index 5d621c7fc..7320042af 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml @@ -7,8 +7,8 @@ kafka-app: # add the key-value pairs they need. app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml index 8aa5e8ac2..489bf8bb1 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml @@ -5,16 +5,16 @@ kafka-connector: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -30,11 +30,11 @@ kafka-connector: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml index 5dd85e9ce..0780de384 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml @@ -5,16 +5,16 @@ kubernetes-app: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -30,11 +30,11 @@ kubernetes-app: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml index 5dbbb6ba6..a5b4a0f6f 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml @@ -10,8 +10,8 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml index 15026fe1e..4db627950 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml @@ -9,8 +9,8 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index cfa5e275f..3c1550af3 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -28,8 +28,8 @@ kafka-app: # add the key-value pairs they need. app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app version: "2.12.0" # Helm chart version @@ -40,16 +40,16 @@ kafka-app: kafka-connector: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -65,11 +65,11 @@ kafka-connector: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -121,16 +121,16 @@ kafka-source-connector: kubernetes-app: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -146,11 +146,11 @@ kubernetes-app: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. 
key_schema: key-schema # must implement SchemaProvider to use @@ -180,8 +180,8 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 @@ -198,8 +198,8 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index cc1f68943..42d2dead8 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -25,10 +25,10 @@ KPOPS_KAFKA_BROKERS # No default value, required KPOPS_DEFAULTS_FILENAME_PREFIX=defaults # topic_name_config.default_output_topic_name # Configures the value for the variable ${output_topic_name} -KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline_name}-${component.name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline.name}-${component.name} # topic_name_config.default_error_topic_name # Configures the value for the variable ${error_topic_name} -KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline_name}-${component.name}-error +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline.name}-${component.name}-error # schema_registry.enabled # Whether the Schema Registry handler should be initialized. KPOPS_SCHEMA_REGISTRY__ENABLED=False diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index fd635278a..ef0a7726f 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -7,8 +7,8 @@ These variables are a lower priority alternative to the settings in `config.yaml |KPOPS_PIPELINE_BASE_DIR |. |False |Base directory to the pipelines (default is current working directory) |pipeline_base_dir | |KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | |KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix | -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component.name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component.name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline.name}-${component.name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline.name}-${component.name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | |KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. 
|schema_registry.enabled | |KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url | |KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index c4ed0b1d4..98056fca0 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -119,13 +119,13 @@ "description": "Configure the topic name variables you can use in the pipeline definition.", "properties": { "default_error_topic_name": { - "default": "${pipeline_name}-${component.name}-error", + "default": "${pipeline.name}-${component.name}-error", "description": "Configures the value for the variable ${error_topic_name}", "title": "Default Error Topic Name", "type": "string" }, "default_output_topic_name": { - "default": "${pipeline_name}-${component.name}", + "default": "${pipeline.name}-${component.name}", "description": "Configures the value for the variable ${output_topic_name}", "title": "Default Output Topic Name", "type": "string" @@ -266,8 +266,8 @@ } ], "default": { - "default_error_topic_name": "${pipeline_name}-${component.name}-error", - "default_output_topic_name": "${pipeline_name}-${component.name}" + "default_error_topic_name": "${pipeline.name}-${component.name}-error", + "default_output_topic_name": "${pipeline.name}-${component.name}" }, "description": "Configure the topic name variables you can use in the pipeline definition." } diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json index 137b547e9..d81314997 100644 --- a/docs/docs/schema/defaults.json +++ b/docs/docs/schema/defaults.json @@ -95,7 +95,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -253,7 +253,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -379,7 +379,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -496,7 +496,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -613,7 +613,7 @@ "title": "Offset Topic" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -746,7 +746,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -811,7 +811,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -864,7 +864,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -1113,7 +1113,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index da2f31fcb..f6bd2eeff 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -95,7 +95,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -271,7 +271,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -388,7 +388,7 @@ "title": "Offset Topic" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -494,7 +494,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -743,7 +743,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" diff --git a/docs/docs/user/core-concepts/variables/substitution.md b/docs/docs/user/core-concepts/variables/substitution.md index 71782180d..b1bfa97e3 100644 --- a/docs/docs/user/core-concepts/variables/substitution.md +++ b/docs/docs/user/core-concepts/variables/substitution.md @@ -41,7 +41,7 @@ Environment variables such as `$PATH` can be used in the pipeline definition and These are special variables that refer to the name and path of a pipeline. -- `${pipeline_name}`: Concatenated path of the parent directory where pipeline.yaml is defined in. +- `${pipeline.name}`: Concatenated path of the parent directory where pipeline.yaml is defined in. For instance, `./data/pipelines/v1/pipeline.yaml`, here the value for the variable would be `data-pipelines-v1`. 
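For example, a minimal, purely illustrative pipeline definition snippet (the topic name is hypothetical):

```yaml
# For ./data/pipelines/v1/pipeline.yaml, the topic below resolves to
# "data-pipelines-v1-output-topic"
to:
  topics:
    ${pipeline.name}-output-topic:
      type: output
```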
- `${pipeline_name_<level>}`: Similar to the previous variable, each `<level>` contains a part of the path to the `pipeline.yaml` file.
diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
index 9d94af628..c4b42c3fa 100644
--- a/docs/docs/user/migration-guide/v2-v3.md
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -188,3 +188,22 @@ If you're using this functionality in your custom components, it needs to be upd
 """Render final component resources, e.g. Kubernetes manifests."""
 return [] # list of manifests
+
+## [Namespace substitution vars](https://github.com/bakdata/kpops/pull/408)
+
+The global configuration variables are now namespaced under the `config` key, such as `${config.kafka_brokers}`, `${config.schema_registry.url}`. The same applies to pipeline variables, e.g. `${pipeline_name} → ${pipeline.name}`.
+This makes them more uniform with the existing `${component.<name>}` variables.
+
+### pipeline.yaml
+
+```diff
+ name: kafka-app
+- prefix: ${pipeline_name}-
++ prefix: ${pipeline.name}-
+ app:
+   streams:
+-    brokers: ${kafka_brokers}
+-    schemaRegistryUrl: ${schema_registry.url}
++    brokers: ${config.kafka_brokers}
++    schemaRegistryUrl: ${config.schema_registry.url}
+```
diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml
index c3195147b..c20493eb7 100644
--- a/examples/bakdata/atm-fraud-detection/config.yaml
+++ b/examples/bakdata/atm-fraud-detection/config.yaml
@@ -1,6 +1,6 @@
 topic_name_config:
-  default_error_topic_name: "${pipeline_name}-${component.name}-dead-letter-topic"
-  default_output_topic_name: "${pipeline_name}-${component.name}-topic"
+  default_error_topic_name: "${pipeline.name}-${component.name}-dead-letter-topic"
+  default_output_topic_name: "${pipeline.name}-${component.name}-topic"
 kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
diff --git a/examples/bakdata/atm-fraud-detection/defaults.yaml b/examples/bakdata/atm-fraud-detection/defaults.yaml
index 2e9079f4f..a5a060378 100644
--- a/examples/bakdata/atm-fraud-detection/defaults.yaml
+++ b/examples/bakdata/atm-fraud-detection/defaults.yaml
@@ -10,8 +10,8 @@ kafka-connector:
 kafka-app:
   app:
     streams:
-      brokers: ${kafka_brokers}
-      schemaRegistryUrl: ${schema_registry.url}
+      brokers: ${config.kafka_brokers}
+      schemaRegistryUrl: ${config.schema_registry.url}
       optimizeLeaveGroupBehavior: false

 producer-app:
@@ -23,7 +23,7 @@ producer-app:
 streams-app:
   app:
     labels:
-      pipeline: ${pipeline_name}
+      pipeline: ${pipeline.name}
     streams:
       optimizeLeaveGroupBehavior: false
     to:
diff --git a/examples/bakdata/atm-fraud-detection/pipeline.yaml b/examples/bakdata/atm-fraud-detection/pipeline.yaml
index 9982aa0a7..d166a21f4 100644
--- a/examples/bakdata/atm-fraud-detection/pipeline.yaml
+++ b/examples/bakdata/atm-fraud-detection/pipeline.yaml
@@ -83,7 +83,7 @@
     app:
       connector.class: io.confluent.connect.jdbc.JdbcSinkConnector
       tasks.max: 1
-      topics: ${pipeline_name}-account-linker-topic
+      topics: ${pipeline.name}-account-linker-topic
       connection.url: jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db
       connection.user: app1
       connection.password: AppPassword
diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py
index b53e03d43..e37e9dcc5 100644
--- a/kpops/components/base_components/pipeline_component.py
+++ b/kpops/components/base_components/pipeline_component.py
@@ -27,7 +27,7 @@ class PipelineComponent(BaseDefaultsComponent, ABC):
:param name: Component name :param prefix: Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string., - defaults to "${pipeline_name}-" + defaults to "${pipeline.name}-" :param from_: Topic(s) and/or components from which the component will read input, defaults to None :param to: Topic(s) into which the component will write output, @@ -36,7 +36,7 @@ class PipelineComponent(BaseDefaultsComponent, ABC): name: str = Field(default=..., description=describe_attr("name", __doc__)) prefix: str = Field( - default="${pipeline_name}-", + default="${pipeline.name}-", description=describe_attr("prefix", __doc__), ) from_: FromSection | None = Field( diff --git a/kpops/config.py b/kpops/config.py index f71444a43..9bb57e104 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -21,11 +21,11 @@ class TopicNameConfig(BaseSettings): """Configure the topic name variables you can use in the pipeline definition.""" default_output_topic_name: str = Field( - default="${pipeline_name}-${component.name}", + default="${pipeline.name}-${component.name}", description="Configures the value for the variable ${output_topic_name}", ) default_error_topic_name: str = Field( - default="${pipeline_name}-${component.name}-error", + default="${pipeline.name}-${component.name}-error", description="Configures the value for the variable ${error_topic_name}", ) diff --git a/kpops/pipeline.py b/kpops/pipeline.py index 45a39c232..26629686a 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -266,6 +266,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: ) substitution = generate_substitution( config.model_dump(mode="json"), + "config", existing_substitution=component_substitution, separator=".", ) @@ -295,9 +296,9 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: For example, for a given path ./data/v1/dev/pipeline.yaml the pipeline_name would be set to data-v1-dev. Then the sub environment variables are set: - pipeline_name_0 = data - pipeline_name_1 = v1 - pipeline_name_2 = dev + pipeline.name_0 = data + pipeline.name_1 = v1 + pipeline.name_2 = dev :param base_dir: Base directory to the pipeline files :param path: Path to pipeline.yaml file @@ -307,9 +308,9 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: msg = "The pipeline-base-dir should not equal the pipeline-path" raise ValueError(msg) pipeline_name = "-".join(path_without_file) - ENV["pipeline_name"] = pipeline_name + ENV["pipeline.name"] = pipeline_name for level, parent in enumerate(path_without_file): - ENV[f"pipeline_name_{level}"] = parent + ENV[f"pipeline.name_{level}"] = parent @staticmethod def set_environment_name(environment: str | None) -> None: diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index 4875c610b..f23e77422 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -32,7 +32,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -161,7 +161,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -213,7 +213,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -270,7 +270,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -323,7 +323,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index e52b2345a..5c9655ca3 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -18,11 +18,11 @@ def test_kpops_config_with_default_values(): assert default_config.defaults_filename_prefix == "defaults" assert ( default_config.topic_name_config.default_output_topic_name - == "${pipeline_name}-${component.name}" + == "${pipeline.name}-${component.name}" ) assert ( default_config.topic_name_config.default_error_topic_name - == "${pipeline_name}-${component.name}-error" + == "${pipeline.name}-${component.name}-error" ) assert default_config.schema_registry.enabled is False assert default_config.schema_registry.url == AnyHttpUrl("http://localhost:8081") diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index cca9fe88c..99a228cfe 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -13,12 +13,12 @@ def test_should_set_pipeline_name_with_default_base_dir(): PipelineGenerator.set_pipeline_name_env_vars(DEFAULT_BASE_DIR, PIPELINE_PATH) - assert ENV["pipeline_name"] == "some-random-path-for-testing" - assert ENV["pipeline_name_0"] == "some" - assert ENV["pipeline_name_1"] == "random" - assert ENV["pipeline_name_2"] == "path" - assert ENV["pipeline_name_3"] == "for" - assert ENV["pipeline_name_4"] == "testing" + assert ENV["pipeline.name"] == "some-random-path-for-testing" + assert ENV["pipeline.name_0"] == "some" + assert ENV["pipeline.name_1"] == "random" + assert ENV["pipeline.name_2"] == "path" + assert ENV["pipeline.name_3"] == "for" + assert ENV["pipeline.name_4"] == "testing" def test_should_set_pipeline_name_with_specific_relative_base_dir(): @@ -26,9 +26,9 @@ def test_should_set_pipeline_name_with_specific_relative_base_dir(): Path("./some/random/path"), PIPELINE_PATH ) - assert ENV["pipeline_name"] == "for-testing" - assert ENV["pipeline_name_0"] == "for" - assert ENV["pipeline_name_1"] == "testing" + assert ENV["pipeline.name"] == "for-testing" + assert ENV["pipeline.name_0"] == "for" + assert ENV["pipeline.name_1"] == "testing" def test_should_set_pipeline_name_with_specific_absolute_base_dir(): @@ -36,20 +36,20 @@ def test_should_set_pipeline_name_with_specific_absolute_base_dir(): Path("some/random/path"), PIPELINE_PATH ) - assert ENV["pipeline_name"] == "for-testing" - assert ENV["pipeline_name_0"] == "for" - assert ENV["pipeline_name_1"] == "testing" + assert ENV["pipeline.name"] == "for-testing" + assert 
ENV["pipeline.name_0"] == "for" + assert ENV["pipeline.name_1"] == "testing" def test_should_set_pipeline_name_with_absolute_base_dir(): PipelineGenerator.set_pipeline_name_env_vars(Path.cwd(), PIPELINE_PATH) - assert ENV["pipeline_name"] == "some-random-path-for-testing" - assert ENV["pipeline_name_0"] == "some" - assert ENV["pipeline_name_1"] == "random" - assert ENV["pipeline_name_2"] == "path" - assert ENV["pipeline_name_3"] == "for" - assert ENV["pipeline_name_4"] == "testing" + assert ENV["pipeline.name"] == "some-random-path-for-testing" + assert ENV["pipeline.name_0"] == "some" + assert ENV["pipeline.name_1"] == "random" + assert ENV["pipeline.name_2"] == "path" + assert ENV["pipeline.name_3"] == "for" + assert ENV["pipeline.name_4"] == "testing" def test_should_not_set_pipeline_name_with_the_same_base_dir(): diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index e43c9de41..f01f30d10 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -88,12 +88,12 @@ def test_should_lazy_load_helm_wrapper_and_not_repo_add( helm_app.deploy(False) helm_mock.upgrade_install.assert_called_once_with( - "${pipeline_name}-test-helm-app", + "${pipeline.name}-test-helm-app", "test/test-chart", False, "test-namespace", { - "nameOverride": "${pipeline_name}-test-helm-app", + "nameOverride": "${pipeline.name}-test-helm-app", "foo": "test-value", }, HelmUpgradeInstallFlags(), @@ -136,12 +136,12 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( RepoAuthFlags(), ), mocker.call.upgrade_install( - "${pipeline_name}-test-helm-app", + "${pipeline.name}-test-helm-app", "test/test-chart", False, "test-namespace", { - "nameOverride": "${pipeline_name}-test-helm-app", + "nameOverride": "${pipeline.name}-test-helm-app", "foo": "test-value", }, HelmUpgradeInstallFlags(version="3.4.5"), @@ -176,12 +176,12 @@ def helm_chart(self) -> str: helm_mock.add_repo.assert_not_called() helm_mock.upgrade_install.assert_called_once_with( - "${pipeline_name}-test-app-with-local-chart", + "${pipeline.name}-test-app-with-local-chart", "path/to/helm/charts/", False, "test-namespace", { - "nameOverride": "${pipeline_name}-test-app-with-local-chart", + "nameOverride": "${pipeline.name}-test-app-with-local-chart", "foo": "test-value", }, HelmUpgradeInstallFlags(), @@ -212,7 +212,7 @@ def test_should_call_helm_uninstall_when_destroying_helm_app( helm_app.destroy(True) helm_mock.uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-test-helm-app", True + "test-namespace", "${pipeline.name}-test-helm-app", True ) log_info_mock.assert_called_once_with(magentaify(stdout)) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index d7e8fd5d4..21c9072f8 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -93,12 +93,12 @@ def test_should_deploy_kafka_app( print_helm_diff.assert_called_once() helm_upgrade_install.assert_called_once_with( - create_helm_release_name("${pipeline_name}-example-name"), + create_helm_release_name("${pipeline.name}-example-name"), "test/test-chart", True, "test-namespace", { - "nameOverride": "${pipeline_name}-example-name", + "nameOverride": "${pipeline.name}-example-name", "streams": {"brokers": "fake-broker:9092", "outputTopic": "test"}, }, HelmUpgradeInstallFlags(version="1.2.3"), diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 6c0e0dcc3..d352a6d8a 100644 --- 
a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -13,9 +13,9 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" CONNECTOR_NAME = "test-connector-with-long-name-0123456789abcdefghijklmnop" -CONNECTOR_FULL_NAME = "${pipeline_name}-" + CONNECTOR_NAME +CONNECTOR_FULL_NAME = "${pipeline.name}-" + CONNECTOR_NAME CONNECTOR_CLEAN_FULL_NAME = CONNECTOR_FULL_NAME + "-clean" -CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline_name}-test-connector-with-lon-449ec-clean" +CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline.name}-test-connector-with-lon-612f3-clean" CONNECTOR_CLASS = "com.bakdata.connect.TestConnector" diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index ebc2701fa..c949f9832 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -15,7 +15,7 @@ ) from kpops.config import KpopsConfig -HELM_RELEASE_NAME = create_helm_release_name("${pipeline_name}-test-kubernetes-app") +HELM_RELEASE_NAME = create_helm_release_name("${pipeline.name}-test-kubernetes-app") DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 07e78bb6a..2038c6909 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -18,7 +18,7 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" -PRODUCER_APP_FULL_NAME = "${pipeline_name}-" + PRODUCER_APP_NAME +PRODUCER_APP_FULL_NAME = "${pipeline.name}-" + PRODUCER_APP_NAME PRODUCER_APP_RELEASE_NAME = create_helm_release_name(PRODUCER_APP_FULL_NAME) PRODUCER_APP_CLEAN_FULL_NAME = PRODUCER_APP_FULL_NAME + "-clean" PRODUCER_APP_CLEAN_RELEASE_NAME = create_helm_release_name( diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index ebd5cf7d9..2de276643 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -22,7 +22,7 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" -STREAMS_APP_FULL_NAME = "${pipeline_name}-" + STREAMS_APP_NAME +STREAMS_APP_FULL_NAME = "${pipeline.name}-" + STREAMS_APP_NAME STREAMS_APP_RELEASE_NAME = create_helm_release_name(STREAMS_APP_FULL_NAME) STREAMS_APP_CLEAN_FULL_NAME = STREAMS_APP_FULL_NAME + "-clean" STREAMS_APP_CLEAN_RELEASE_NAME = create_helm_release_name( diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml index b78293627..101e3e175 100644 --- a/tests/pipeline/resources/defaults.yaml +++ b/tests/pipeline/resources/defaults.yaml @@ -5,8 +5,8 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${kafka_brokers}" - schema_registry_url: "${schema_registry.url}" + brokers: "${config.kafka_brokers}" + schema_registry_url: "${config.schema_registry.url}" version: "2.4.2" producer-app: {} # inherits from kafka-app diff --git a/tests/pipeline/resources/no-topics-defaults/defaults.yaml b/tests/pipeline/resources/no-topics-defaults/defaults.yaml index ea3dd7d9e..7820898a3 100644 --- a/tests/pipeline/resources/no-topics-defaults/defaults.yaml +++ b/tests/pipeline/resources/no-topics-defaults/defaults.yaml @@ -1,8 +1,8 @@ kafka-app: app: streams: - brokers: "${kafka_brokers}" - schemaRegistryUrl: "${schema_registry.url}" + brokers: "${config.kafka_brokers}" + schemaRegistryUrl: "${config.schema_registry.url}" producer-app: to: @@ 
-14,7 +14,7 @@ producer-app:
 streams-app:
   app:
     labels:
-      pipeline: ${pipeline_name}
+      pipeline: ${pipeline.name}
     to:
       topics:
         ${error_topic_name}:
diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml
index b5954da19..ff053e990 100644
--- a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml
+++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml
@@ -7,5 +7,5 @@ kubernetes-app:
 kafka-app:
   app:
     streams:
-      brokers: ${kafka_brokers}
-      schemaRegistryUrl: ${schema_registry.url}
+      brokers: ${config.kafka_brokers}
+      schemaRegistryUrl: ${config.schema_registry.url}
diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml
index f9505c0ab..b8aeb6137 100644
--- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml
+++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml
@@ -4,8 +4,8 @@ kubernetes-app:
 kafka-app:
   app:
     streams:
-      brokers: "${kafka_brokers}"
-      schemaRegistryUrl: "${schema_registry.url}"
+      brokers: "${config.kafka_brokers}"
+      schemaRegistryUrl: "${config.schema_registry.url}"
 producer-app: {} # inherits from kafka-app
diff --git a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml
index 3b9e93eb7..cf3b4831b 100644
--- a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml
+++ b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml
@@ -5,7 +5,7 @@ kubernetes-app:
 kafka-app:
   app:
     streams:
-      brokers: "${kafka_brokers}"
+      brokers: "${config.kafka_brokers}"
       schema_registry_url: "${schema_registry_url}"
       version: "2.4.2"
diff --git a/tests/pipeline/resources/read-from-component/pipeline.yaml b/tests/pipeline/resources/read-from-component/pipeline.yaml
index 902e8edd9..cc6bf72c7 100644
--- a/tests/pipeline/resources/read-from-component/pipeline.yaml
+++ b/tests/pipeline/resources/read-from-component/pipeline.yaml
@@ -44,7 +44,7 @@
     name: consumer3
     from:
       topics:
-        ${pipeline_name}-producer1:
+        ${pipeline.name}-producer1:
           type: input
     components:
       producer2:
diff --git a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml
index 55754eba1..c895105b7 100644
--- a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml
+++ b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml
@@ -4,7 +4,7 @@ kubernetes-app:
 kafka-app:
   app:
     streams:
-      brokers: "${kafka_brokers}"
+      brokers: "${config.kafka_brokers}"
       schema_registry_url: "${schema_registry_url}"
       version: "2.4.2"

From 6bd6e7c90665b7b61ca6801b593594943aa50879 Mon Sep 17 00:00:00 2001
From: Ivan Yordanov
Date: Thu, 11 Jan 2024 18:14:10 +0200
Subject: [PATCH 27/34] Support multiple inheritance for doc generation (#406)

Precedes #398

---------

Co-authored-by: Salomon Popp
---
 .../dependencies/kpops_structure.yaml         | 63 ++++++++++++++++---
 hooks/gen_docs/gen_docs_components.py         | 48 +++++++++-----
 hooks/gen_docs/gen_docs_env_vars.py           | 23 +------
 .../base_components/pipeline_component.py     | 23 +++++++
 kpops/utils/pydantic.py                       | 21 +++++++
 5 files changed, 131 insertions(+), 47 deletions(-)

diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml
index 70dc43870..784d9ccc4 100644
---
a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -79,12 +79,57 @@ kpops_components_fields: - repo_config - version kpops_components_inheritance_ref: - helm-app: kubernetes-app - kafka-app: helm-app - kafka-connector: pipeline-component - kafka-sink-connector: kafka-connector - kafka-source-connector: kafka-connector - kubernetes-app: pipeline-component - pipeline-component: base-defaults-component - producer-app: kafka-app - streams-app: kafka-app + helm-app: + bases: + - kubernetes-app + parents: + - kubernetes-app + - pipeline-component + kafka-app: + bases: + - helm-app + parents: + - helm-app + - kubernetes-app + - pipeline-component + kafka-connector: + bases: + - pipeline-component + parents: + - pipeline-component + kafka-sink-connector: + bases: + - kafka-connector + parents: + - kafka-connector + - pipeline-component + kafka-source-connector: + bases: + - kafka-connector + parents: + - kafka-connector + - pipeline-component + kubernetes-app: + bases: + - pipeline-component + parents: + - pipeline-component + pipeline-component: + bases: [] + parents: [] + producer-app: + bases: + - kafka-app + parents: + - kafka-app + - helm-app + - kubernetes-app + - pipeline-component + streams-app: + bases: + - kafka-app + parents: + - kafka-app + - helm-app + - kubernetes-app + - pipeline-component diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index f1acf9973..58edfcf34 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -11,6 +11,7 @@ from kpops.cli.registry import _find_classes from kpops.components import KafkaConnector, PipelineComponent from kpops.utils.colorify import redify, yellowify +from kpops.utils.pydantic import issubclass_patched from kpops.utils.yaml import load_yaml_file PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py" @@ -33,14 +34,6 @@ ) KPOPS_COMPONENTS = tuple(_find_classes("kpops.components", PipelineComponent)) -KPOPS_COMPONENTS_INHERITANCE_REF = { - component.type: cast( - type[PipelineComponent], - component.__base__, - ).type - for component in KPOPS_COMPONENTS -} - KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name @@ -49,6 +42,27 @@ ] for component in KPOPS_COMPONENTS } +KPOPS_COMPONENTS_INHERITANCE_REF = { + component.type: { + "bases": [ + cast( + type[PipelineComponent], + base, + ).type + for base in component.__bases__ + if issubclass_patched(base, PipelineComponent) + ], + "parents": [ + cast( + type[PipelineComponent], + parent, + ).type + for parent in component.parents + ], + } + for component in KPOPS_COMPONENTS +} + # Dependency files should not be changed manually DANGEROUS_FILES_TO_CHANGE = { PATH_DOCS_COMPONENTS_DEPENDENCIES, @@ -92,14 +106,13 @@ def filter_sections( if section := filter_section(component_name, sections, target_section): component_sections.append(section) elif include_inherited: - temp_component_name = component_name - while ( - temp_component_name := KPOPS_COMPONENTS_INHERITANCE_REF[ - temp_component_name - ] - ) != PipelineComponent.type: + for component in KPOPS_COMPONENTS_INHERITANCE_REF[component_name][ + "parents" + ]: + if component == PipelineComponent.type: + break if section := filter_section( - temp_component_name, + component, sections, target_section, ): @@ -123,11 +136,12 @@ def filter_section( section = target_section + "-" + component_name + ".yaml" if section in sections: return section - if 
KPOPS_COMPONENTS_INHERITANCE_REF[component_name] == PipelineComponent.type: + if KPOPS_COMPONENTS_INHERITANCE_REF[component_name]["bases"] == [ + PipelineComponent.type + ]: section = target_section + ".yaml" if section in sections: return section - return None return None diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 8f5fe5646..aea4b6af2 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -25,6 +25,7 @@ from hooks.gen_docs import IterableStrEnum from kpops.cli import main from kpops.config import KpopsConfig +from kpops.utils.pydantic import issubclass_patched PATH_DOCS_RESOURCES = ROOT / "docs/docs/resources" PATH_DOCS_VARIABLES = PATH_DOCS_RESOURCES / "variables" @@ -284,29 +285,9 @@ def collect_fields(model: type[BaseModel]) -> dict[str, Any]: :param model: settings class :return: ``dict`` of all fields in a settings class """ - - def patched_issubclass_of_basemodel(cls): - """Pydantic breaks issubclass. - - ``issubclass(set[str], set) # True`` - ``issubclass(BaseSettings, BaseModel) # True`` - ``issubclass(set[str], BaseModel) # raises exception`` - - :param cls: class to check - :return: Whether cls is subclass of ``BaseModel`` - """ - try: - return issubclass(cls, BaseModel) - except TypeError as e: - if str(e) == "issubclass() arg 1 must be a class": - return False - raise - seen_fields = {} for field_name, field_value in model.model_fields.items(): - if field_value.annotation and patched_issubclass_of_basemodel( - field_value.annotation - ): + if field_value.annotation and issubclass_patched(field_value.annotation): seen_fields[field_name] = collect_fields(field_value.annotation) else: seen_fields[field_name] = field_value diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index e37e9dcc5..4b09b35de 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -18,7 +18,14 @@ TopicConfig, ToSection, ) +from kpops.utils import cached_classproperty from kpops.utils.docstring import describe_attr +from kpops.utils.pydantic import issubclass_patched + +try: + from typing import Self +except ImportError: + from typing_extensions import Self class PipelineComponent(BaseDefaultsComponent, ABC): @@ -64,6 +71,22 @@ def __init__(self, **kwargs) -> None: def full_name(self) -> str: return self.prefix + self.name + @cached_classproperty + def parents(cls: type[Self]) -> tuple[type[PipelineComponent], ...]: # pyright: ignore[reportGeneralTypeIssues] + """Get parent components. + + :return: All ancestor KPOps components + """ + + def gen_parents(): + for base in cls.mro(): + # skip class itself and non-component ancestors + if base is cls or not issubclass_patched(base, PipelineComponent): + continue + yield base + + return tuple(gen_parents()) + def add_input_topics(self, topics: list[str]) -> None: """Add given topics to the list of input topics. diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 3b643af51..10c4b9415 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -95,6 +95,27 @@ def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> dict[str, } +def issubclass_patched( + __cls: type, __class_or_tuple: type | tuple[type, ...] = BaseModel +) -> bool: + """Pydantic breaks ``issubclass``. 
+
+    ``issubclass(set[str], set) # True``
+    ``issubclass(BaseSettings, BaseModel) # True``
+    ``issubclass(set[str], BaseModel) # raises exception``
+
+    :param __cls: class to check
+    :param __class_or_tuple: class(es) to check against, defaults to ``BaseModel``
+    :return: Whether '__cls' is derived from another class or is the same class.
+    """
+    try:
+        return issubclass(__cls, __class_or_tuple)
+    except TypeError as e:
+        if str(e) == "issubclass() arg 1 must be a class":
+            return False
+        raise
+
+
 class CamelCaseConfigModel(BaseModel):
     model_config = ConfigDict(
         alias_generator=to_camel,

From bd9e1351d861d72c1964247d5d0ac6530baa9e16 Mon Sep 17 00:00:00 2001
From: Salomon Popp
Date: Mon, 15 Jan 2024 09:18:40 +0100
Subject: [PATCH 28/34] Refactor streams-bootstrap cleanup jobs as individual
 HelmApp (#398)

Depends on #397

Extracted the cleanup job needed for streams-bootstrap producer and streams apps to its own HelmApp nested inside the parent component

- [x] Create docs for `StreamsBootstrap` base
- [x] Update KPOps component inheritance graph
- [x] Update docs gen for multiple component inheritance (#406)
---
 .../architecture/components-hierarchy.md      |   6 +-
 ...aults_pipeline_component_dependencies.yaml |   4 +-
 .../dependencies/kpops_structure.yaml         |  27 ++-
 .../pipeline_component_dependencies.yaml      |   3 -
 .../pipeline-components/kafka-app.yaml        |  12 --
 .../pipeline-components/pipeline.yaml         |  12 --
 .../pipeline-defaults/defaults-kafka-app.yaml |  43 ++++-
 .../resources/pipeline-defaults/defaults.yaml |  43 ++++-
 docs/docs/schema/defaults.json                | 161 +++++++++++++-----
 docs/docs/schema/pipeline.json                |  23 ++-
 .../core-concepts/components/producer-app.md  |   2 +-
 .../core-concepts/components/streams-app.md   |   2 +-
 .../components/streams-bootstrap.md           |  25 +++
 docs/docs/user/migration-guide/v2-v3.md       |  16 ++
 docs/mkdocs.yml                               |   1 +
 kpops/components/__init__.py                  |   5 +-
 kpops/components/base_components/kafka_app.py | 134 +++++----------
 .../components/streams_bootstrap/__init__.py  |  36 +++-
 .../producer/producer_app.py                  |  46 +++--
 .../streams_bootstrap/streams/model.py        |  10 +-
 .../streams_bootstrap/streams/streams_app.py  |  54 +++---
 tests/cli/test_registry.py                    |   3 +-
 tests/components/test_producer_app.py         | 140 ++++++++-------
 tests/components/test_streams_app.py          | 138 ++++++++-------
 ...kafka_app.py => test_streams_bootstrap.py} |  35 ++--
 tests/pipeline/test_components/components.py  |   9 +-
 .../components.py                             |   9 +-
 27 files changed, 627 insertions(+), 372 deletions(-)
 create mode 100644 docs/docs/user/core-concepts/components/streams-bootstrap.md
 rename tests/components/{test_kafka_app.py => test_streams_bootstrap.py} (75%)

diff --git a/docs/docs/resources/architecture/components-hierarchy.md b/docs/docs/resources/architecture/components-hierarchy.md
index 190c44f82..ce24acc46 100644
--- a/docs/docs/resources/architecture/components-hierarchy.md
+++ b/docs/docs/resources/architecture/components-hierarchy.md
@@ -1,10 +1,13 @@
 ```mermaid
 flowchart BT
     KubernetesApp --> PipelineComponent
+    KafkaApp --> PipelineComponent
     HelmApp --> KubernetesApp
-    KafkaApp --> HelmApp
+    StreamsBootstrap --> HelmApp
     StreamsApp --> KafkaApp
+    StreamsApp --> StreamsBootstrap
     ProducerApp --> KafkaApp
+    ProducerApp --> StreamsBootstrap
     KafkaConnector --> PipelineComponent
     KafkaSourceConnector --> KafkaConnector
     KafkaSinkConnector --> KafkaConnector

     click KubernetesApp "/kpops/user/core-concepts/components/kubernetes-app"
     click HelmApp "/kpops/user/core-concepts/components/helm-app"
     click KafkaApp 
"/kpops/user/core-concepts/components/kafka-app" + click StreamsBootstrap "/kpops/user/core-concepts/components/streams-bootstrap" click StreamsApp "/kpops/user/core-concepts/components/streams-app" click ProducerApp "/kpops/user/core-concepts/components/producer-app" click KafkaConnector "/kpops/user/core-concepts/components/kafka-connector" diff --git a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml index 4e12885af..959596df0 100644 --- a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml @@ -2,8 +2,10 @@ helm-app.yaml: - app-helm-app.yaml - repo_config-helm-app.yaml kafka-app.yaml: +- prefix.yaml +- from_.yaml +- to.yaml - app-kafka-app.yaml -- version-kafka-app.yaml kafka-connector.yaml: - prefix.yaml - from_.yaml diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 784d9ccc4..52192cb22 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -13,10 +13,7 @@ kpops_components_fields: - prefix - from_ - to - - namespace - app - - repo_config - - version kafka-connector: - name - prefix @@ -78,6 +75,15 @@ kpops_components_fields: - app - repo_config - version + streams-bootstrap: + - name + - prefix + - from_ + - to + - namespace + - app + - repo_config + - version kpops_components_inheritance_ref: helm-app: bases: @@ -87,10 +93,8 @@ kpops_components_inheritance_ref: - pipeline-component kafka-app: bases: - - helm-app + - pipeline-component parents: - - helm-app - - kubernetes-app - pipeline-component kafka-connector: bases: @@ -120,16 +124,27 @@ kpops_components_inheritance_ref: producer-app: bases: - kafka-app + - streams-bootstrap parents: - kafka-app + - streams-bootstrap - helm-app - kubernetes-app - pipeline-component streams-app: bases: - kafka-app + - streams-bootstrap parents: - kafka-app + - streams-bootstrap + - helm-app + - kubernetes-app + - pipeline-component + streams-bootstrap: + bases: + - helm-app + parents: - helm-app - kubernetes-app - pipeline-component diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml index 8504a0135..b633db907 100644 --- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml @@ -10,10 +10,7 @@ kafka-app.yaml: - prefix.yaml - from_.yaml - to.yaml -- namespace.yaml - app-kafka-app.yaml -- repo_config-helm-app.yaml -- version-kafka-app.yaml kafka-connector.yaml: - prefix.yaml - from_.yaml diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml index 83a67b4cf..60fbbfb13 100644 --- a/docs/docs/resources/pipeline-components/kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/kafka-app.yaml @@ -44,7 +44,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` can contain application-specific 
settings, hence the user is free to # add the key-value pairs they need. app: # required @@ -53,14 +52,3 @@ schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index cdbd18d96..12183b8e6 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -107,7 +107,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required @@ -116,17 +115,6 @@ schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version # Kafka sink connector - type: kafka-sink-connector name: kafka-sink-connector # required diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml index 7320042af..a27bb38d1 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml @@ -3,6 +3,48 @@ # Parent of: ProducerApp, StreamsApp # Child of: KubernetesApp kafka-app: + # Pipeline prefix that will prefix every component name. If you wish to not + # have any prefix you can specify an empty string. 
+ prefix: ${pipeline.name}- + from: # Must not be null + topics: # read from topic + ${pipeline.name}-input-topic: + type: input # Implied when role is NOT specified + ${pipeline.name}-extra-topic: + role: topic-role # Implies `type` to be extra + ${pipeline.name}-input-pattern-topic: + type: pattern # Implied to be an input pattern if `role` is undefined + ${pipeline.name}-extra-pattern-topic: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + components: # read from specific component + account-producer: + type: output # Implied when role is NOT specified + other-producer: + role: some-role # Implies `type` to be extra + component-as-input-pattern: + type: pattern # Implied to be an input pattern if `role` is undefined + component-as-extra-pattern: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + # Topic(s) into which the component will write output + to: + topics: + ${pipeline.name}-output-topic: + type: output # Implied when role is NOT specified + ${pipeline.name}-extra-topic: + role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined + ${pipeline.name}-error-topic: + type: error + # Currently KPOps supports Avro and JSON schemas. + key_schema: key-schema # must implement SchemaProvider to use + value_schema: value-schema + partitions_count: 1 + replication_factor: 1 + configs: # https://kafka.apache.org/documentation/#topicconfigs + cleanup.policy: compact + models: # SchemaProvider is initiated with the values given here + model: model # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required @@ -11,4 +53,3 @@ kafka-app: schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index 3c1550af3..05487c7c0 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -24,6 +24,48 @@ helm-app: # Parent of: ProducerApp, StreamsApp # Child of: KubernetesApp kafka-app: + # Pipeline prefix that will prefix every component name. If you wish to not + # have any prefix you can specify an empty string. 
+ prefix: ${pipeline.name}- + from: # Must not be null + topics: # read from topic + ${pipeline.name}-input-topic: + type: input # Implied when role is NOT specified + ${pipeline.name}-extra-topic: + role: topic-role # Implies `type` to be extra + ${pipeline.name}-input-pattern-topic: + type: pattern # Implied to be an input pattern if `role` is undefined + ${pipeline.name}-extra-pattern-topic: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + components: # read from specific component + account-producer: + type: output # Implied when role is NOT specified + other-producer: + role: some-role # Implies `type` to be extra + component-as-input-pattern: + type: pattern # Implied to be an input pattern if `role` is undefined + component-as-extra-pattern: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + # Topic(s) into which the component will write output + to: + topics: + ${pipeline.name}-output-topic: + type: output # Implied when role is NOT specified + ${pipeline.name}-extra-topic: + role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined + ${pipeline.name}-error-topic: + type: error + # Currently KPOps supports Avro and JSON schemas. + key_schema: key-schema # must implement SchemaProvider to use + value_schema: value-schema + partitions_count: 1 + replication_factor: 1 + configs: # https://kafka.apache.org/documentation/#topicconfigs + cleanup.policy: compact + models: # SchemaProvider is initiated with the values given here + model: model # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required @@ -32,7 +74,6 @@ kafka-app: schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - version: "2.12.0" # Helm chart version # Kafka connector # # Parent of: KafkaSinkConnector, KafkaSourceConnector diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json index d81314997..06ec5fdc0 100644 --- a/docs/docs/schema/defaults.json +++ b/docs/docs/schema/defaults.json @@ -247,36 +247,12 @@ "title": "Name", "type": "string" }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, "prefix": { "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" }, - "repo_config": { - "allOf": [ - { - "$ref": "#/$defs/HelmRepoConfig" - } - ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" - }, "to": { "anyOf": [ { @@ -288,24 +264,10 @@ ], "default": null, "description": "Topic(s) into which the component will write output" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "2.9.0", - "description": "Helm chart version", - "title": "Version" } }, "required": [ "name", - "namespace", "app" ], "title": "KafkaApp", @@ -837,7 +799,7 @@ }, "ProducerApp": { "additionalProperties": true, - "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ @@ -1079,7 +1041,7 @@ }, "StreamsApp": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams bootstrap app.", + "description": "StreamsApp component that configures a streams-bootstrap app.", "properties": { "app": { "allOf": [ @@ -1258,7 +1220,7 @@ }, "StreamsAppValues": { "additionalProperties": true, - "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { "anyOf": [ @@ -1270,7 +1232,7 @@ } ], "default": null, - "description": "Kubernetes Event-driven Autoscaling config" + "description": "Kubernetes event-driven autoscaling config" }, "nameOverride": { "anyOf": [ @@ -1291,7 +1253,7 @@ "$ref": "#/$defs/StreamsConfig" } ], - "description": "Streams Bootstrap streams section" + "description": "streams-bootstrap streams section" } }, "required": [ @@ -1300,6 +1262,100 @@ "title": "StreamsAppValues", "type": "object" }, + "StreamsBootstrap": { + "additionalProperties": true, + "description": "Base for components with a streams-bootstrap Helm chart.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Helm app values" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline.name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app" + ], + "title": "StreamsBootstrap", + "type": "object" + }, "StreamsConfig": { "additionalProperties": true, "description": "Streams Bootstrap streams section.", @@ -1315,6 +1371,19 @@ "title": "Config", "type": "object" }, + "deleteOutput": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", + "title": "Deleteoutput" + }, "errorTopic": { "anyOf": [ { @@ -1569,6 +1638,9 @@ }, "streams-app": { "$ref": "#/$defs/StreamsApp" + }, + "streams-bootstrap": { + "$ref": "#/$defs/StreamsBootstrap" } }, "required": [ @@ -1580,7 +1652,8 @@ "kubernetes-app", "pipeline-component", "producer-app", - "streams-app" + "streams-app", + "streams-bootstrap" ], "title": "DefaultsSchema", "type": "object" diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index f6bd2eeff..557dbc486 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -467,7 +467,7 @@ }, "ProducerApp": { "additionalProperties": true, - "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. 
Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ @@ -709,7 +709,7 @@ }, "StreamsApp": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams bootstrap app.", + "description": "StreamsApp component that configures a streams-bootstrap app.", "properties": { "app": { "allOf": [ @@ -888,7 +888,7 @@ }, "StreamsAppValues": { "additionalProperties": true, - "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { "anyOf": [ @@ -900,7 +900,7 @@ } ], "default": null, - "description": "Kubernetes Event-driven Autoscaling config" + "description": "Kubernetes event-driven autoscaling config" }, "nameOverride": { "anyOf": [ @@ -921,7 +921,7 @@ "$ref": "#/$defs/StreamsConfig" } ], - "description": "Streams Bootstrap streams section" + "description": "streams-bootstrap streams section" } }, "required": [ @@ -945,6 +945,19 @@ "title": "Config", "type": "object" }, + "deleteOutput": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", + "title": "Deleteoutput" + }, "errorTopic": { "anyOf": [ { diff --git a/docs/docs/user/core-concepts/components/producer-app.md b/docs/docs/user/core-concepts/components/producer-app.md index 1f55fa6d9..bff598d53 100644 --- a/docs/docs/user/core-concepts/components/producer-app.md +++ b/docs/docs/user/core-concepts/components/producer-app.md @@ -1,6 +1,6 @@ # ProducerApp -Subclass of [_KafkaApp_](kafka-app.md). +Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md). ### Usage diff --git a/docs/docs/user/core-concepts/components/streams-app.md b/docs/docs/user/core-concepts/components/streams-app.md index ac881ade2..d34705062 100644 --- a/docs/docs/user/core-concepts/components/streams-app.md +++ b/docs/docs/user/core-concepts/components/streams-app.md @@ -1,6 +1,6 @@ # StreamsApp -Subclass of [_KafkaApp_](kafka-app.md). +Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md). ### Usage diff --git a/docs/docs/user/core-concepts/components/streams-bootstrap.md b/docs/docs/user/core-concepts/components/streams-bootstrap.md new file mode 100644 index 000000000..52bb5fa0e --- /dev/null +++ b/docs/docs/user/core-concepts/components/streams-bootstrap.md @@ -0,0 +1,25 @@ +# StreamsBootstrap + +Subclass of [_HelmApp_](helm-app.md). + +### Usage + +Configures a Helm app with [streams-bootstrap Helm charts](https://github.com/bakdata/streams-bootstrap){target=_blank}. + +### Operations + +#### deploy + +Deploy using Helm. + +#### destroy + +Uninstall Helm release. + +#### reset + +Do nothing. + +#### clean + +Do nothing. diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index c4b42c3fa..2c1eef100 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -40,6 +40,22 @@ All Helm-specific parts of the built-in [`KubernetesApp`](../core-concepts/compo ... 
``` + +## [Create StreamsBootstrap component & refactor cleanup jobs as individual HelmApp](https://github.com/bakdata/kpops/pull/398) + +Previously the default `KafkaApp` component configured the [streams-bootstrap](https://bakdata.github.io/streams-bootstrap/) Helm charts. Now, this component is no longer tied to Helm (or Kubernetes). Instead, there is a new `StreamsBootstrap` component that configures the Helm chart repository for the components that use it, e.g. `StreamsApp` and `ProducerApp`. If you are using non-default values for the Helm chart repository or version, they have to be updated as shown below. + +#### defaults.yaml + +```diff + kafka-app: + app: + streams: ... + ++ streams-bootstrap: + repo_config: ... + version: ... +``` + ## [Make Kafka REST Proxy & Kafka Connect hosts default and improve Schema Registry config](https://github.com/bakdata/kpops/pull/354) The breaking changes target the `config.yaml` file: diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index c6ef09c16..d436c94a5 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -95,6 +95,7 @@ nav: - Overview: user/core-concepts/components/overview.md - KubernetesApp: user/core-concepts/components/kubernetes-app.md - HelmApp: user/core-concepts/components/helm-app.md + - StreamsBootstrap: user/core-concepts/components/streams-bootstrap.md - KafkaApp: user/core-concepts/components/kafka-app.md - StreamsApp: user/core-concepts/components/streams-app.md - ProducerApp: user/core-concepts/components/producer-app.md diff --git a/kpops/components/__init__.py b/kpops/components/__init__.py index 98e1d3530..dc5fcee9c 100644 --- a/kpops/components/__init__.py +++ b/kpops/components/__init__.py @@ -7,7 +7,9 @@ PipelineComponent, ) from kpops.components.base_components.kafka_connector import KafkaConnector -from kpops.components.streams_bootstrap import ProducerApp, StreamsApp +from kpops.components.streams_bootstrap import StreamsBootstrap +from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp +from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp __all__ = ( "HelmApp", @@ -16,6 +18,7 @@ "KafkaSinkConnector", "KafkaSourceConnector", "KubernetesApp", + "StreamsBootstrap", "ProducerApp", "StreamsApp", "PipelineComponent", diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index c7c983e0d..7ee67b09c 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -7,10 +7,12 @@ from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( - HelmRepoConfig, - HelmUpgradeInstallFlags, + HelmFlags, ) -from kpops.components.base_components.helm_app import HelmApp, HelmAppValues +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name +from kpops.components.base_components.helm_app import HelmAppValues +from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel @@ -45,38 +47,59 @@ class KafkaAppValues(HelmAppValues): ) -class KafkaApp(HelmApp, ABC): +class KafkaAppCleaner(StreamsBootstrap): + """Helm app for resetting and cleaning a streams-bootstrap app.""" + + @property + @override + def helm_chart(self) -> str: + raise NotImplementedError + + @property + @override + def helm_release_name(self) -> str: + suffix = "-clean" 
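+ # the suffix is passed to create_helm_release_name separately so it can be kept intact if the combined name has to be shortened to a valid Helm release name (an assumption about this helper's behavior, not shown in this diff)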
+ return create_helm_release_name(self.full_name + suffix, suffix) + + @property + @override + def helm_flags(self) -> HelmFlags: + return HelmFlags( + create_namespace=self.config.create_namespace, + version=self.version, + wait=True, + wait_for_jobs=True, + ) + + @override + def clean(self, dry_run: bool) -> None: + """Clean an app using a cleanup job. + + :param dry_run: Dry run command + """ + log.info(f"Uninstall old cleanup job for {self.helm_release_name}") + self.destroy(dry_run) + + log.info(f"Init cleanup job for {self.helm_release_name}") + self.deploy(dry_run) + + if not self.config.retain_clean_jobs: + log.info(f"Uninstall cleanup job for {self.helm_release_name}") + self.destroy(dry_run) + + +class KafkaApp(PipelineComponent, ABC): """Base component for Kafka-based components. Producer or streaming apps should inherit from this class. :param app: Application-specific settings - :param repo_config: Configuration of the Helm chart repo to be used for - deploying the component, - defaults to HelmRepoConfig(repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/") - :param version: Helm chart version, defaults to "2.9.0" """ app: KafkaAppValues = Field( default=..., description=describe_attr("app", __doc__), ) - repo_config: HelmRepoConfig = Field( - default=HelmRepoConfig( - repository_name="bakdata-streams-bootstrap", - url="https://bakdata.github.io/streams-bootstrap/", - ), - description=describe_attr("repo_config", __doc__), - ) - version: str | None = Field( - default="2.9.0", - description=describe_attr("version", __doc__), - ) - - @property - def clean_up_helm_chart(self) -> str: - """Helm chart used to destroy and clean this component.""" - raise NotImplementedError @override def deploy(self, dry_run: bool) -> None: @@ -90,66 +113,3 @@ def deploy(self, dry_run: bool) -> None: to_section=self.to, dry_run=dry_run ) super().deploy(dry_run) - - def _run_clean_up_job( - self, - values: dict, - dry_run: bool, - retain_clean_jobs: bool = False, - ) -> None: - """Clean an app using the respective cleanup job. - - :param values: The value YAML for the chart - :param dry_run: Dry run command - :param retain_clean_jobs: Whether to retain the cleanup job, defaults to False - """ - log.info(f"Uninstall old cleanup job for {self.clean_release_name}") - - self.__uninstall_clean_up_job(self.clean_release_name, dry_run) - - log.info(f"Init cleanup job for {self.clean_release_name}") - - stdout = self.__install_clean_up_job(self.clean_release_name, values, dry_run) - - if dry_run: - self.dry_run_handler.print_helm_diff(stdout, self.clean_release_name, log) - - if not retain_clean_jobs: - log.info(f"Uninstall cleanup job for {self.clean_release_name}") - self.__uninstall_clean_up_job(self.clean_release_name, dry_run) - - def __uninstall_clean_up_job(self, release_name: str, dry_run: bool) -> None: - """Uninstall clean up job. - - :param release_name: Name of the Helm release - :param dry_run: Whether to do a dry run of the command - """ - self.helm.uninstall(self.namespace, release_name, dry_run) - - def __install_clean_up_job( - self, - release_name: str, - values: dict, - dry_run: bool, - ) -> str: - """Install clean up job. - - :param release_name: Name of the Helm release - :param suffix: Suffix to add to the release name, e.g. 
"-clean" - :param values: The Helm values for the chart - :param dry_run: Whether to do a dry run of the command - :return: Return the output of the installation - """ - return self.helm.upgrade_install( - release_name, - self.clean_up_helm_chart, - dry_run, - self.namespace, - values, - HelmUpgradeInstallFlags( - create_namespace=self.config.create_namespace, - version=self.version, - wait=True, - wait_for_jobs=True, - ), - ) diff --git a/kpops/components/streams_bootstrap/__init__.py b/kpops/components/streams_bootstrap/__init__.py index 097d85b13..1b02b091b 100644 --- a/kpops/components/streams_bootstrap/__init__.py +++ b/kpops/components/streams_bootstrap/__init__.py @@ -1,7 +1,31 @@ -from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp -from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp +from abc import ABC -__all__ = [ - "ProducerApp", - "StreamsApp", -] +from pydantic import Field + +from kpops.component_handlers.helm_wrapper.model import HelmRepoConfig +from kpops.components.base_components.helm_app import HelmApp +from kpops.utils.docstring import describe_attr + +STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig( + repository_name="bakdata-streams-bootstrap", + url="https://bakdata.github.io/streams-bootstrap/", +) +STREAMS_BOOTSTRAP_VERSION = "2.9.0" + + +class StreamsBootstrap(HelmApp, ABC): + """Base for components with a streams-bootstrap Helm chart. + + :param repo_config: Configuration of the Helm chart repo to be used for + deploying the component, defaults to streams-bootstrap Helm repo + :param version: Helm chart version, defaults to "2.9.0" + """ + + repo_config: HelmRepoConfig = Field( + default=STREAMS_BOOTSTRAP_HELM_REPO, + description=describe_attr("repo_config", __doc__), + ) + version: str | None = Field( + default=STREAMS_BOOTSTRAP_VERSION, + description=describe_attr("version", __doc__), + ) diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index e37529bae..2d6a586b2 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -1,23 +1,38 @@ -# from __future__ import annotations +from functools import cached_property from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import KafkaApp +from kpops.components.base_components.kafka_app import ( + KafkaApp, + KafkaAppCleaner, +) from kpops.components.base_components.models.to_section import ( OutputTopicTypes, TopicConfig, ) +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.producer.model import ProducerAppValues from kpops.utils.docstring import describe_attr -class ProducerApp(KafkaApp): +class ProducerAppCleaner(KafkaAppCleaner): + app: ProducerAppValues + + @property + @override + def helm_chart(self) -> str: + return ( + f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}" + ) + + +class ProducerApp(KafkaApp, StreamsBootstrap): """Producer component. - This producer holds configuration to use as values for the streams bootstrap - producer helm chart. + This producer holds configuration to use as values for the streams-bootstrap + producer Helm chart. Note that the producer does not support error topics. 
@@ -36,6 +51,14 @@ class ProducerApp(KafkaApp): description=describe_attr("from_", __doc__), ) + @cached_property + def _cleaner(self) -> ProducerAppCleaner: + return ProducerAppCleaner( + config=self.config, + handlers=self.handlers, + **self.model_dump(), + ) + @override def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: match topic.type: @@ -58,17 +81,6 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None: def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.PRODUCER_APP.value}" - @property - @override - def clean_up_helm_chart(self) -> str: - return ( - f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}" - ) - @override def clean(self, dry_run: bool) -> None: - self._run_clean_up_job( - values=self.to_helm_values(), - dry_run=dry_run, - retain_clean_jobs=self.config.retain_clean_jobs, - ) + self._cleaner.clean(dry_run) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index b52bc162c..95100b966 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -28,6 +28,7 @@ class StreamsConfig(KafkaStreamsConfig): :param output_topic: Output topic, defaults to None :param error_topic: Error topic, defaults to None :param config: Configuration, defaults to {} + :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to None """ input_topics: list[str] = Field( @@ -54,6 +55,9 @@ class StreamsConfig(KafkaStreamsConfig): config: dict[str, Any] = Field( default={}, description=describe_attr("config", __doc__) ) + delete_output: bool | None = Field( + default=None, description=describe_attr("delete_output", __doc__) + ) def add_input_topics(self, topics: list[str]) -> None: """Add given topics to the list of input topics. @@ -167,12 +171,12 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): class StreamsAppValues(KafkaAppValues): - """StreamsBoostrap app configurations. + """streams-bootstrap app configurations. The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart. - :param streams: Streams Bootstrap streams section - :param autoscaling: Kubernetes Event-driven Autoscaling config, defaults to None + :param streams: streams-bootstrap streams section + :param autoscaling: Kubernetes event-driven autoscaling config, defaults to None """ streams: StreamsConfig = Field( diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index e8a434b70..2c632e882 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -1,14 +1,29 @@ +from functools import cached_property + from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import KafkaApp +from kpops.components.base_components.kafka_app import ( + KafkaApp, + KafkaAppCleaner, +) +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.streams.model import StreamsAppValues from kpops.utils.docstring import describe_attr -class StreamsApp(KafkaApp): - """StreamsApp component that configures a streams bootstrap app. 
+class StreamsAppCleaner(KafkaAppCleaner): + app: StreamsAppValues + + @property + @override + def helm_chart(self) -> str: + return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" + + +class StreamsApp(KafkaApp, StreamsBootstrap): + """StreamsApp component that configures a streams-bootstrap app. :param app: Application-specific settings """ @@ -18,6 +33,14 @@ class StreamsApp(KafkaApp): description=describe_attr("app", __doc__), ) + @cached_property + def _cleaner(self) -> StreamsAppCleaner: + return StreamsAppCleaner( + config=self.config, + handlers=self.handlers, + **self.model_dump(), + ) + @override def add_input_topics(self, topics: list[str]) -> None: self.app.streams.add_input_topics(topics) @@ -51,29 +74,12 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None: def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.STREAMS_APP.value}" - @property - @override - def clean_up_helm_chart(self) -> str: - return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" - @override def reset(self, dry_run: bool) -> None: - self.__run_streams_clean_up_job(dry_run, delete_output=False) + self._cleaner.app.streams.delete_output = False + self._cleaner.clean(dry_run) @override def clean(self, dry_run: bool) -> None: - self.__run_streams_clean_up_job(dry_run, delete_output=True) - - def __run_streams_clean_up_job(self, dry_run: bool, delete_output: bool) -> None: - """Run clean job for this Streams app. - - :param dry_run: Whether to do a dry run of the command - :param delete_output: Whether to delete the output of the app that is being cleaned - """ - values = self.to_helm_values() - values["streams"]["deleteOutput"] = delete_output - self._run_clean_up_job( - values=values, - dry_run=dry_run, - retain_clean_jobs=self.config.retain_clean_jobs, - ) + self._cleaner.app.streams.delete_output = True + self._cleaner.clean(dry_run) diff --git a/tests/cli/test_registry.py b/tests/cli/test_registry.py index bc6a7a2f9..473c340c4 100644 --- a/tests/cli/test_registry.py +++ b/tests/cli/test_registry.py @@ -36,7 +36,7 @@ def test_find_builtin_classes(): class_.__name__ for class_ in _find_classes("kpops.components", PipelineComponent) ] - assert len(components) == 9 + assert len(components) == 10 assert components == [ "HelmApp", "KafkaApp", @@ -47,6 +47,7 @@ def test_find_builtin_classes(): "PipelineComponent", "ProducerApp", "StreamsApp", + "StreamsBootstrap", ] diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 2038c6909..e143e3f74 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -168,11 +168,13 @@ def test_should_not_reset_producer_app( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install" + producer_app._cleaner.helm, "upgrade_install" + ) + mock_helm_uninstall = mocker.patch.object( + producer_app._cleaner.helm, "uninstall" ) - mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock_helm_print_helm_diff = mocker.patch.object( - producer_app.dry_run_handler, "print_helm_diff" + producer_app._cleaner.dry_run_handler, "print_helm_diff" ) mock = mocker.MagicMock() @@ -182,45 +184,55 @@ def test_should_not_reset_producer_app( producer_app.clean(dry_run=True) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - True, - ), - mocker.call.helm_upgrade_install( - 
PRODUCER_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/producer-app-cleanup-job", - True, - "test-namespace", - { - "nameOverride": PRODUCER_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + True, + ), + ANY, # __bool__ + ANY, # __str__ + mocker.call.helm_upgrade_install( + PRODUCER_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/producer-app-cleanup-job", + True, + "test-namespace", + { + "nameOverride": PRODUCER_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + }, }, - }, - HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True), - ), - mocker.call.print_helm_diff( - ANY, - PRODUCER_APP_CLEAN_RELEASE_NAME, - logging.getLogger("KafkaApp"), - ), - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - True, - ), - ] + HelmUpgradeInstallFlags( + version="2.4.2", wait=True, wait_for_jobs=True + ), + ), + mocker.call.print_helm_diff( + ANY, + PRODUCER_APP_CLEAN_RELEASE_NAME, + logging.getLogger("HelmApp"), + ), + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + True, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( self, mocker: MockerFixture, producer_app: ProducerApp ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install" + producer_app._cleaner.helm, "upgrade_install" + ) + mock_helm_uninstall = mocker.patch.object( + producer_app._cleaner.helm, "uninstall" ) - mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -228,29 +240,37 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w producer_app.clean(dry_run=False) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - False, - ), - mocker.call.helm_upgrade_install( - PRODUCER_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/producer-app-cleanup-job", - False, - "test-namespace", - { - "nameOverride": PRODUCER_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + False, + ), + ANY, # __bool__ + ANY, # __str__ + mocker.call.helm_upgrade_install( + PRODUCER_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/producer-app-cleanup-job", + False, + "test-namespace", + { + "nameOverride": PRODUCER_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + }, }, - }, - HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True), - ), - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - False, - ), - ] + HelmUpgradeInstallFlags( + version="2.4.2", wait=True, wait_for_jobs=True + ), + ), + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + False, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 2de276643..e76973773 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -1,5 
+1,5 @@ from pathlib import Path -from unittest.mock import MagicMock +from unittest.mock import ANY, MagicMock import pytest from pytest_mock import MockerFixture @@ -17,6 +17,7 @@ TopicConfig, ToSection, ) +from kpops.components.streams_bootstrap.streams.streams_app import StreamsAppCleaner from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -370,10 +371,11 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): def test_reset_when_dry_run_is_false( self, streams_app: StreamsApp, mocker: MockerFixture ): - mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install" - ) - mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") + cleaner = streams_app._cleaner + assert isinstance(cleaner, StreamsAppCleaner) + + mock_helm_upgrade_install = mocker.patch.object(cleaner.helm, "upgrade_install") + mock_helm_uninstall = mocker.patch.object(cleaner.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -382,33 +384,41 @@ def test_reset_when_dry_run_is_false( dry_run = False streams_app.reset(dry_run=dry_run) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - mocker.call.helm_upgrade_install( - STREAMS_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/streams-app-cleanup-job", - dry_run, - "test-namespace", - { - "nameOverride": STREAMS_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - "deleteOutput": False, + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ # FIXME: why is this in the call stack? 
+ ANY, # __str__ + mocker.call.helm_upgrade_install( + STREAMS_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/streams-app-cleanup-job", + dry_run, + "test-namespace", + { + "nameOverride": STREAMS_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + "deleteOutput": False, + }, }, - }, - HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True), - ), - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ] + HelmUpgradeInstallFlags( + version="2.9.0", wait=True, wait_for_jobs=True + ), + ), + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( self, @@ -416,9 +426,11 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install" + streams_app._cleaner.helm, "upgrade_install" + ) + mock_helm_uninstall = mocker.patch.object( + streams_app._cleaner.helm, "uninstall" ) - mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -427,30 +439,38 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( dry_run = False streams_app.clean(dry_run=dry_run) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - mocker.call.helm_upgrade_install( - STREAMS_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/streams-app-cleanup-job", - dry_run, - "test-namespace", - { - "nameOverride": STREAMS_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - "deleteOutput": True, + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + mocker.call.helm_upgrade_install( + STREAMS_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/streams-app-cleanup-job", + dry_run, + "test-namespace", + { + "nameOverride": STREAMS_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + "deleteOutput": True, + }, }, - }, - HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True), - ), - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ] + HelmUpgradeInstallFlags( + version="2.9.0", wait=True, wait_for_jobs=True + ), + ), + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_streams_bootstrap.py similarity index 75% rename from tests/components/test_kafka_app.py rename to tests/components/test_streams_bootstrap.py index 21c9072f8..127485e30 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_streams_bootstrap.py @@ -11,13 +11,13 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.base_components import KafkaApp +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.config import KpopsConfig DEFAULTS_PATH = Path(__file__).parent / "resources" -class 
TestKafkaApp: +class TestStreamsBootstrap: @pytest.fixture() def config(self) -> KpopsConfig: return KpopsConfig( @@ -34,36 +34,29 @@ def handlers(self) -> ComponentHandlers: ) def test_default_configs(self, config: KpopsConfig, handlers: ComponentHandlers): - kafka_app = KafkaApp( + streams_bootstrap = StreamsBootstrap( name="example-name", config=config, handlers=handlers, **{ "namespace": "test-namespace", - "app": { - "streams": { - "outputTopic": "test", - "brokers": "fake-broker:9092", - }, - }, + "app": {}, }, ) - assert kafka_app.app.streams.brokers == "fake-broker:9092" - - assert kafka_app.repo_config == HelmRepoConfig( + assert streams_bootstrap.repo_config == HelmRepoConfig( repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/", ) - assert kafka_app.version == "2.9.0" - assert kafka_app.namespace == "test-namespace" + assert streams_bootstrap.version == "2.9.0" + assert streams_bootstrap.namespace == "test-namespace" - def test_should_deploy_kafka_app( + def test_should_deploy_streams_bootstrap_app( self, config: KpopsConfig, handlers: ComponentHandlers, mocker: MockerFixture, ): - kafka_app = KafkaApp( + streams_bootstrap = StreamsBootstrap( name="example-name", config=config, handlers=handlers, @@ -78,18 +71,20 @@ def test_should_deploy_kafka_app( "version": "1.2.3", }, ) - helm_upgrade_install = mocker.patch.object(kafka_app.helm, "upgrade_install") + helm_upgrade_install = mocker.patch.object( + streams_bootstrap.helm, "upgrade_install" + ) print_helm_diff = mocker.patch.object( - kafka_app.dry_run_handler, "print_helm_diff" + streams_bootstrap.dry_run_handler, "print_helm_diff" ) mocker.patch.object( - KafkaApp, + StreamsBootstrap, "helm_chart", return_value="test/test-chart", new_callable=mocker.PropertyMock, ) - kafka_app.deploy(dry_run=True) + streams_bootstrap.deploy(dry_run=True) print_helm_diff.assert_called_once() helm_upgrade_install.assert_called_once_with( diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index d45882ea1..20f781545 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -5,15 +5,18 @@ Schema, SchemaProvider, ) -from kpops.components import KafkaSinkConnector -from kpops.components.base_components import PipelineComponent +from kpops.components import ( + KafkaSinkConnector, + PipelineComponent, + ProducerApp, + StreamsApp, +) from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.components.base_components.models.to_section import ( OutputTopicTypes, TopicConfig, ToSection, ) -from kpops.components.streams_bootstrap import ProducerApp, StreamsApp class ScheduledProducer(ProducerApp): diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py index d5684178c..686aac26c 100644 --- a/tests/pipeline/test_components_without_schema_handler/components.py +++ b/tests/pipeline/test_components_without_schema_handler/components.py @@ -1,10 +1,13 @@ from typing_extensions import override from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig -from kpops.components import KafkaSinkConnector -from kpops.components.base_components import PipelineComponent +from kpops.components import ( + KafkaSinkConnector, + PipelineComponent, + ProducerApp, + StreamsApp, +) from kpops.components.base_components.models.to_section import OutputTopicTypes -from 
kpops.components.streams_bootstrap import ProducerApp, StreamsApp class ScheduledProducer(ProducerApp): From 0c2feaae46712fa6db12638f13d8a26a9ae3d716 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 16 Jan 2024 14:42:25 +0200 Subject: [PATCH 29/34] Update docs for v3 (#416) Closes #410 --- .../resources/pipeline-config/config.yaml | 43 +++++++++++-------- .../core-concepts/variables/substitution.md | 6 ++- 2 files changed, 30 insertions(+), 19 deletions(-) diff --git a/docs/docs/resources/pipeline-config/config.yaml b/docs/docs/resources/pipeline-config/config.yaml index 275382d46..0707280c7 100644 --- a/docs/docs/resources/pipeline-config/config.yaml +++ b/docs/docs/resources/pipeline-config/config.yaml @@ -3,28 +3,35 @@ # The path to the folder containing the defaults.yaml file and the environment # defaults files. defaults_path: . -# The environment you want to generate and deploy the pipeline to. Suffix your -# environment files with this value (e.g. defaults_development.yaml and -# pipeline_development.yaml for environment=development). -# REQUIRED -environment: development +# Custom Python module defining project-specific KPOps components +components_module: null +# Base directory to the pipelines (default is current working directory) +pipeline_base_dir: . # The Kafka brokers address. # REQUIRED -brokers: "http://broker1:9092,http://broker2:9092" +kafka_brokers: "http://broker1:9092,http://broker2:9092" # The name of the defaults file and the prefix of the defaults environment file. defaults_filename_prefix: defaults -# Configures topic names. +# Configure the topic name variables you can use in the pipeline definition. topic_name_config: # Configures the value for the variable ${output_topic_name} - default_output_topic_name: ${pipeline.name}-${component_name} + default_output_topic_name: ${pipeline.name}-${component.name} # Configures the value for the variable ${error_topic_name} - default_error_topic_name: ${pipeline.name}-${component_name}-error -# Address of the Schema Registry -schema_registry_url: "http://localhost:8081" -# Address of the Kafka REST Proxy. -kafka_rest_host: "http://localhost:8082" -# Address of Kafka Connect. -kafka_connect_host: "http://localhost:8083" + default_error_topic_name: ${pipeline.name}-${component.name}-error +# Configuration for Schema Registry. +schema_registry: + # Whether the Schema Registry handler should be initialized. + enabled: false + # Address of the Schema Registry. + url: "http://localhost:8081" +# Configuration for the Kafka REST Proxy. +kafka_rest: + # Address of the Kafka REST Proxy. + url: "http://localhost:8082" +# Configuration for Kafka Connect. +kafka_connect: + # Address of Kafka Connect. + url: "http://localhost:8083" # The timeout in seconds that specifies when actions like deletion or deploy # timeout. timeout: 300 @@ -33,14 +40,16 @@ timeout: 300 create_namespace: false # Global flags for Helm. helm_config: - # Set the name of the kubeconfig context. (--kube-context) + # Name of kubeconfig context (`--kube-context`) context: name # Run Helm in Debug mode. debug: false + # Kubernetes API version used for Capabilities.APIVersions + api_version: null # Configure Helm Diff. helm_diff_config: # Set of keys that should not be checked. 
- ignore: + ignore: - name - imageTag # Whether to retain clean up jobs in the cluster or uninstall the, after diff --git a/docs/docs/user/core-concepts/variables/substitution.md b/docs/docs/user/core-concepts/variables/substitution.md index b1bfa97e3..eb4076c79 100644 --- a/docs/docs/user/core-concepts/variables/substitution.md +++ b/docs/docs/user/core-concepts/variables/substitution.md @@ -23,11 +23,13 @@ All of them are prefixed with `component.` and follow the following form: `compo These variables include all fields in the [config](../config.md) and refer to the pipeline configuration that is independent of the components. +All such variables are prefixed with `config.` and are of the same form as the [component-specific variables](#component-specific-variables). + !!! info Aliases - `error_topic_name` is an alias for `topic_name_config.default_error_topic_name` - `output_topic_name` is an alias for `topic_name_config.default_output_topic_name` + `error_topic_name` is an alias for `config.topic_name_config.default_error_topic_name` + `output_topic_name` is an alias for `config.topic_name_config.default_output_topic_name` From 4b40e666d05cb024248316a5e34103d11f57c6eb Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 16 Jan 2024 13:45:27 +0100 Subject: [PATCH 30/34] Refactor Kafka Connector resetter as individual HelmApp (#400) Depends on #398 --- ...aults_pipeline_component_dependencies.yaml | 3 - .../dependencies/kpops_structure.yaml | 12 +- .../pipeline_component_dependencies.yaml | 9 - .../pipeline-components/kafka-connector.yaml | 11 - .../kafka-sink-connector.yaml | 11 - .../kafka-source-connector.yaml | 11 - .../pipeline-components/pipeline.yaml | 22 -- .../defaults-kafka-connector.yaml | 11 - .../resources/pipeline-defaults/defaults.yaml | 11 - docs/docs/schema/defaults.json | 166 +++------ docs/docs/schema/pipeline.json | 110 ++---- docs/docs/user/migration-guide/v2-v3.md | 13 + .../bakdata/atm-fraud-detection/defaults.yaml | 3 - .../base_defaults_component.py | 4 +- kpops/components/base_components/helm_app.py | 6 - .../base_components/kafka_connector.py | 338 ++++++------------ .../base_components/kubernetes_app.py | 2 +- tests/components/test_kafka_connector.py | 53 ++- tests/components/test_kafka_sink_connector.py | 213 ++++++----- .../components/test_kafka_source_connector.py | 193 +++++----- tests/pipeline/resources/defaults.yaml | 18 +- .../kafka-connect-sink/pipeline.yaml | 1 - tests/pipeline/snapshots/snap_test_example.py | 33 +- .../pipeline/snapshots/snap_test_generate.py | 216 ++++++++--- tests/pipeline/test_components/components.py | 2 +- .../components.py | 1 - 26 files changed, 669 insertions(+), 804 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml index 959596df0..a47ad6b50 100644 --- a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml @@ -10,10 +10,7 @@ kafka-connector.yaml: - prefix.yaml - from_.yaml - to.yaml -- namespace.yaml - app-kafka-connector.yaml -- repo_config-kafka-connector.yaml -- version-kafka-connector.yaml - resetter_values.yaml kafka-sink-connector.yaml: [] kafka-source-connector.yaml: diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml 
b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 52192cb22..2ac8b59b2 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -19,30 +19,24 @@ kpops_components_fields: - prefix - from_ - to - - namespace - app - - repo_config - - version + - resetter_namespace - resetter_values kafka-sink-connector: - name - prefix - from_ - to - - namespace - app - - repo_config - - version + - resetter_namespace - resetter_values kafka-source-connector: - name - prefix - from_ - to - - namespace - app - - repo_config - - version + - resetter_namespace - resetter_values - offset_topic kubernetes-app: diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml index b633db907..c7d08112c 100644 --- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml @@ -15,28 +15,19 @@ kafka-connector.yaml: - prefix.yaml - from_.yaml - to.yaml -- namespace.yaml - app-kafka-connector.yaml -- repo_config-kafka-connector.yaml -- version-kafka-connector.yaml - resetter_values.yaml kafka-sink-connector.yaml: - prefix.yaml - from_.yaml - to.yaml -- namespace.yaml - app-kafka-connector.yaml -- repo_config-kafka-connector.yaml -- version-kafka-connector.yaml - resetter_values.yaml kafka-source-connector.yaml: - prefix.yaml - from_-kafka-source-connector.yaml - to.yaml -- namespace.yaml - app-kafka-connector.yaml -- repo_config-kafka-connector.yaml -- version-kafka-connector.yaml - resetter_values.yaml - offset_topic-kafka-source-connector.yaml kubernetes-app.yaml: diff --git a/docs/docs/resources/pipeline-components/kafka-connector.yaml b/docs/docs/resources/pipeline-components/kafka-connector.yaml index ca6cfc6eb..b231ae4cc 100644 --- a/docs/docs/resources/pipeline-components/kafka-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-connector.yaml @@ -42,22 +42,11 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` contains application-specific settings, hence it does not have a rigid # structure. The fields below are just an example. Extensive documentation on # connectors: https://kafka.apache.org/documentation/#connectconfigs app: # required tasks.max: 1 - # Helm repository configuration for resetter - repo_config: - repository_name: my-repo # required - url: https://bakdata.github.io/kafka-connect-resetter/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.6" # Helm chart version # Overriding Kafka Connect Resetter Helm values. E.g. to override the # Image Tag etc. 
resetter_values: diff --git a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml index 06d14ffe1..8e100d1b3 100644 --- a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml @@ -43,22 +43,11 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` contains application-specific settings, hence it does not have a rigid # structure. The fields below are just an example. Extensive documentation on # connectors: https://kafka.apache.org/documentation/#connectconfigs app: # required tasks.max: 1 - # Helm repository configuration for resetter - repo_config: - repository_name: my-repo # required - url: https://bakdata.github.io/kafka-connect-resetter/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.6" # Helm chart version # Overriding Kafka Connect Resetter Helm values. E.g. to override the # Image Tag etc. resetter_values: diff --git a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml index e38497b65..fc1f4e8c4 100644 --- a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml @@ -24,22 +24,11 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` contains application-specific settings, hence it does not have a rigid # structure. The fields below are just an example. Extensive documentation on # connectors: https://kafka.apache.org/documentation/#connectconfigs app: # required tasks.max: 1 - # Helm repository configuration for resetter - repo_config: - repository_name: my-repo # required - url: https://bakdata.github.io/kafka-connect-resetter/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.6" # Helm chart version # Overriding Kafka Connect Resetter Helm values. E.g. to override the # Image Tag etc. resetter_values: diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index 12183b8e6..9a3f93a9e 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -160,22 +160,11 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` contains application-specific settings, hence it does not have a rigid # structure. The fields below are just an example. Extensive documentation on # connectors: https://kafka.apache.org/documentation/#connectconfigs app: # required tasks.max: 1 - # Helm repository configuration for resetter - repo_config: - repository_name: my-repo # required - url: https://bakdata.github.io/kafka-connect-resetter/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.6" # Helm chart version # Overriding Kafka Connect Resetter Helm values. E.g. to override the # Image Tag etc. 
resetter_values: @@ -206,22 +195,11 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` contains application-specific settings, hence it does not have a rigid # structure. The fields below are just an example. Extensive documentation on # connectors: https://kafka.apache.org/documentation/#connectconfigs app: # required tasks.max: 1 - # Helm repository configuration for resetter - repo_config: - repository_name: my-repo # required - url: https://bakdata.github.io/kafka-connect-resetter/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.6" # Helm chart version # Overriding Kafka Connect Resetter Helm values. E.g. to override the # Image Tag etc. resetter_values: diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml index 489bf8bb1..40a8c117d 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml @@ -45,22 +45,11 @@ kafka-connector: cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` contains application-specific settings, hence it does not have a rigid # structure. The fields below are just an example. Extensive documentation on # connectors: https://kafka.apache.org/documentation/#connectconfigs app: # required tasks.max: 1 - # Helm repository configuration for resetter - repo_config: - repository_name: my-repo # required - url: https://bakdata.github.io/kafka-connect-resetter/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.6" # Helm chart version # Overriding Kafka Connect Resetter Helm values. E.g. to override the # Image Tag etc. resetter_values: diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index 05487c7c0..9711a8c6f 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -121,22 +121,11 @@ kafka-connector: cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` contains application-specific settings, hence it does not have a rigid # structure. The fields below are just an example. Extensive documentation on # connectors: https://kafka.apache.org/documentation/#connectconfigs app: # required tasks.max: 1 - # Helm repository configuration for resetter - repo_config: - repository_name: my-repo # required - url: https://bakdata.github.io/kafka-connect-resetter/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.6" # Helm chart version # Overriding Kafka Connect Resetter Helm values. E.g. to override the # Image Tag etc. 
resetter_values: diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json index 06ec5fdc0..aa5db63da 100644 --- a/docs/docs/schema/defaults.json +++ b/docs/docs/schema/defaults.json @@ -90,7 +90,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, @@ -335,40 +335,32 @@ "title": "Name", "type": "string" }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, "prefix": { "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" }, - "repo_config": { - "allOf": [ + "resetter_namespace": { + "anyOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "type": "string" + }, + { + "type": "null" } ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" + "default": null, + "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed", + "title": "Resetter Namespace" }, "resetter_values": { - "description": "Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc.", - "title": "Resetter Values", - "type": "object" + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc." }, "to": { "anyOf": [ @@ -381,24 +373,10 @@ ], "default": null, "description": "Topic(s) into which the component will write output" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "1.0.4", - "description": "Helm chart version", - "title": "Version" } }, "required": [ "name", - "namespace", "app" ], "title": "KafkaConnector", @@ -452,40 +430,32 @@ "title": "Name", "type": "string" }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, "prefix": { "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" }, - "repo_config": { - "allOf": [ + "resetter_namespace": { + "anyOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "type": "string" + }, + { + "type": "null" } ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" + "default": null, + "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed", + "title": "Resetter Namespace" }, "resetter_values": { - "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", - "title": "Resetter Values", - "type": "object" + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc." }, "to": { "anyOf": [ @@ -502,24 +472,10 @@ "type": { "const": "kafka-sink-connector", "title": "Type" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "1.0.4", - "description": "Helm chart version", - "title": "Version" } }, "required": [ "name", - "namespace", "app", "type" ], @@ -556,11 +512,6 @@ "title": "Name", "type": "string" }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, "offset_topic": { "anyOf": [ { @@ -580,29 +531,26 @@ "title": "Prefix", "type": "string" }, - "repo_config": { - "allOf": [ + "resetter_namespace": { + "anyOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "type": "string" + }, + { + "type": "null" } ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" + "default": null, + "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed", + "title": "Resetter Namespace" }, "resetter_values": { - "description": "Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc.", - "title": "Resetter Values", - "type": "object" + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc." 
}, "to": { "anyOf": [ @@ -619,24 +567,10 @@ "type": { "const": "kafka-source-connector", "title": "Type" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "1.0.4", - "description": "Helm chart version", - "title": "Version" } }, "required": [ "name", - "namespace", "app", "type" ], @@ -703,7 +637,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, @@ -821,7 +755,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, @@ -1070,7 +1004,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, @@ -1293,7 +1227,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 557dbc486..186863f62 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -90,7 +90,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, @@ -265,40 +265,32 @@ "title": "Name", "type": "string" }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, "prefix": { "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" }, - "repo_config": { - "allOf": [ + "resetter_namespace": { + "anyOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "type": "string" + }, + { + "type": "null" } ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" + "default": null, + "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed", + "title": "Resetter Namespace" }, "resetter_values": { - "description": "Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc.", - "title": "Resetter Values", - "type": "object" + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc." 
}, "to": { "anyOf": [ @@ -315,24 +307,10 @@ "type": { "const": "kafka-sink-connector", "title": "Type" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "1.0.4", - "description": "Helm chart version", - "title": "Version" } }, "required": [ "name", - "namespace", "app", "type" ], @@ -369,11 +347,6 @@ "title": "Name", "type": "string" }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, "offset_topic": { "anyOf": [ { @@ -393,29 +366,26 @@ "title": "Prefix", "type": "string" }, - "repo_config": { - "allOf": [ + "resetter_namespace": { + "anyOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "type": "string" + }, + { + "type": "null" } ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-kafka-connect-resetter", - "url": "https://bakdata.github.io/kafka-connect-resetter/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" + "default": null, + "description": "Kubernetes namespace in which the Kafka Connect resetter shall be deployed", + "title": "Resetter Namespace" }, "resetter_values": { - "description": "Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc.", - "title": "Resetter Values", - "type": "object" + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Overriding Kafka Connect resetter Helm values, e.g. to override the image tag etc." }, "to": { "anyOf": [ @@ -432,24 +402,10 @@ "type": { "const": "kafka-source-connector", "title": "Type" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "1.0.4", - "description": "Helm chart version", - "title": "Version" } }, "required": [ "name", - "namespace", "app", "type" ], @@ -489,7 +445,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, @@ -738,7 +694,7 @@ "type": "string" }, "namespace": { - "description": "Namespace in which the component shall be deployed", + "description": "Kubernetes namespace in which the component shall be deployed", "title": "Namespace", "type": "string" }, diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index 2c1eef100..c7b5bfc99 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -56,6 +56,19 @@ Previously the default `KafkaApp` component configured the [streams-bootstrap](h version: ... ``` +## [Refactor Kafka Connector resetter as individual HelmApp](https://github.com/bakdata/kpops/pull/400) + +Internally, the [Kafka Connector resetter](bakdata-kafka-connect-resetter/kafka-connect-resetter) is now its own standard `HelmApp`, removing a lot of the shared code. +It is configured using the `resetter_namespace` (formerly `namespace`) and `resetter_values` attributes. 
+
+#### defaults.yaml
+
+```diff
+ kafka-connector:
+-  namespace: my-namespace
++  resetter_namespace: my-namespace
+```
+
 ## [Make Kafka REST Proxy & Kafka Connect hosts default and improve Schema Registry config](https://github.com/bakdata/kpops/pull/354)
 
 The breaking changes target the `config.yaml` file:
diff --git a/examples/bakdata/atm-fraud-detection/defaults.yaml b/examples/bakdata/atm-fraud-detection/defaults.yaml
index a5a060378..2183f91d6 100644
--- a/examples/bakdata/atm-fraud-detection/defaults.yaml
+++ b/examples/bakdata/atm-fraud-detection/defaults.yaml
@@ -4,9 +4,6 @@ pipeline-component:
 kubernetes-app:
   namespace: ${NAMESPACE}
 
-kafka-connector:
-  namespace: ${NAMESPACE}
-
 kafka-app:
   app:
     streams:
diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py
index e286b3f1f..aab0a3484 100644
--- a/kpops/components/base_components/base_defaults_component.py
+++ b/kpops/components/base_components/base_defaults_component.py
@@ -95,9 +95,7 @@ def extend_with_defaults(self, **kwargs) -> dict:
             typer.style(
                 "Enriching component of type ", fg=typer.colors.GREEN, bold=False
             )
-            + typer.style(
-                kwargs.get("type"), fg=typer.colors.GREEN, bold=True, underline=True
-            )
+            + typer.style(self.type, fg=typer.colors.GREEN, bold=True, underline=True)
         )
         main_default_file_path, environment_default_file_path = get_defaults_file_paths(
             config, ENV.get("environment")
diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py
index 07c3c6831..b8978c5af 100644
--- a/kpops/components/base_components/helm_app.py
+++ b/kpops/components/base_components/helm_app.py
@@ -101,12 +101,6 @@ def helm_release_name(self) -> str:
         """The name for the Helm release. Can be overridden."""
         return create_helm_release_name(self.full_name)
 
-    @property
-    def clean_release_name(self) -> str:
-        """The name for the Helm release for cleanup jobs.
Can be overridden.""" - suffix = "-clean" - return create_helm_release_name(self.full_name + suffix, suffix) - @property def helm_chart(self) -> str: """Return component's Helm chart.""" diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 246e30b11..38f490458 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -3,19 +3,14 @@ import logging from abc import ABC from functools import cached_property -from typing import NoReturn +from typing import Any, NoReturn -from pydantic import Field, PrivateAttr, ValidationInfo, field_validator +from pydantic import Field, PrivateAttr, ValidationInfo, computed_field, field_validator from typing_extensions import override -from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler -from kpops.component_handlers.helm_wrapper.helm import Helm -from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff from kpops.component_handlers.helm_wrapper.model import ( HelmFlags, HelmRepoConfig, - HelmTemplateFlags, - HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.component_handlers.kafka_connect.model import ( @@ -25,8 +20,8 @@ KafkaConnectorType, ) from kpops.components.base_components.base_defaults_component import deduplicate +from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.components.base_components.models.from_section import FromTopic -from kpops.components.base_components.models.resource import Resource from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr @@ -34,41 +29,104 @@ log = logging.getLogger("KafkaConnector") -class KafkaConnector(PipelineComponent, ABC): - """Base class for all Kafka connectors. +class KafkaConnectorResetter(HelmApp): + """Helm app for resetting and cleaning a Kafka Connector. - Should only be used to set defaults - - :param app: Application-specific settings :param repo_config: Configuration of the Helm chart repo to be used for - deploying the component, - defaults to HelmRepoConfig(repository_name="bakdata-kafka-connect-resetter", url="https://bakdata.github.io/kafka-connect-resetter/") - :param namespace: Namespace in which the component shall be deployed + deploying the component, defaults to kafka-connect-resetter Helm repo :param version: Helm chart version, defaults to "1.0.4" - :param resetter_values: Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc., - defaults to dict """ - namespace: str = Field( - default=..., - description=describe_attr("namespace", __doc__), - ) - app: KafkaConnectorConfig = Field( - default=..., - description=describe_attr("app", __doc__), - ) + app: KafkaConnectorResetterValues repo_config: HelmRepoConfig = Field( default=HelmRepoConfig( repository_name="bakdata-kafka-connect-resetter", url="https://bakdata.github.io/kafka-connect-resetter/", - ), - description=describe_attr("repo_config", __doc__), + ) ) version: str | None = Field( default="1.0.4", description=describe_attr("version", __doc__) ) - resetter_values: dict = Field( - default_factory=dict, + suffix: str = "-clean" + + @property + @override + def full_name(self) -> str: + return super().full_name + self.suffix + + @property + @override + def helm_chart(self) -> str: + return f"{self.repo_config.repository_name}/kafka-connect-resetter" + + @property + @override + def helm_release_name(self) -> str: + return create_helm_release_name(self.full_name, self.suffix) + + @property + @override + def helm_flags(self) -> HelmFlags: + return HelmFlags( + create_namespace=self.config.create_namespace, + version=self.version, + wait_for_jobs=True, + wait=True, + ) + + @override + def reset(self, dry_run: bool) -> None: + """Reset connector. + + At first, it deletes the previous cleanup job (connector resetter) + to make sure that there is no running clean job in the cluster. Then it releases a cleanup job. + If retain_clean_jobs config is set to false the cleanup job will be deleted subsequently. + + :param dry_run: If the cleanup should be run in dry run mode or not + """ + log.info( + magentaify( + f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.app.config.connector}" + ) + ) + self.destroy(dry_run) + + log.info( + magentaify( + f"Connector Cleanup: deploy Connect {self.app.connector_type} resetter for {self.app.config.connector}" + ) + ) + self.deploy(dry_run) + + if not self.config.retain_clean_jobs: + log.info(magentaify("Connector Cleanup: uninstall Kafka Resetter.")) + self.destroy(dry_run) + + @override + def clean(self, dry_run: bool) -> None: + self.reset(dry_run) + + +class KafkaConnector(PipelineComponent, ABC): + """Base class for all Kafka connectors. + + Should only be used to set defaults + + :param app: Application-specific settings + :param resetter_namespace: Kubernetes namespace in which the Kafka Connect resetter shall be deployed + :param resetter_values: Overriding Kafka Connect resetter Helm values, e.g. 
to override the image tag etc., + defaults to empty HelmAppValues + """ + + app: KafkaConnectorConfig = Field( + default=..., + description=describe_attr("app", __doc__), + ) + resetter_namespace: str | None = Field( + default=None, description=describe_attr("resetter_namespace", __doc__) + ) + resetter_values: HelmAppValues = Field( + default_factory=HelmAppValues, description=describe_attr("resetter_values", __doc__), ) _connector_type: KafkaConnectorType = PrivateAttr() @@ -90,47 +148,27 @@ def connector_config_should_have_component_name( app["name"] = component_name return KafkaConnectorConfig(**app) + @computed_field @cached_property - def helm(self) -> Helm: - """Helm object that contains component-specific config such as repo.""" - helm_repo_config = self.repo_config - helm = Helm(self.config.helm_config) - helm.add_repo( - helm_repo_config.repository_name, - helm_repo_config.url, - helm_repo_config.repo_auth_flags, - ) - return helm - - @property - def _resetter_release_name(self) -> str: - suffix = "-clean" - return create_helm_release_name(self.full_name + suffix, suffix) - - @property - def _resetter_helm_chart(self) -> str: - return f"{self.repo_config.repository_name}/kafka-connect-resetter" - - @cached_property - def dry_run_handler(self) -> DryRunHandler: - helm_diff = HelmDiff(self.config.helm_diff_config) - return DryRunHandler(self.helm, helm_diff, self.namespace) - - @property - def helm_flags(self) -> HelmFlags: - """Return shared flags for Helm commands.""" - return HelmFlags( - **self.repo_config.repo_auth_flags.model_dump(), - version=self.version, - create_namespace=self.config.create_namespace, - ) - - @property - def template_flags(self) -> HelmTemplateFlags: - """Return flags for Helm template command.""" - return HelmTemplateFlags( - **self.helm_flags.model_dump(), - api_version=self.config.helm_config.api_version, + def _resetter(self) -> KafkaConnectorResetter: + kwargs: dict[str, Any] = {} + if self.resetter_namespace: + kwargs["namespace"] = self.resetter_namespace + return KafkaConnectorResetter( + config=self.config, + handlers=self.handlers, + **kwargs, + **self.model_dump( + exclude={"_resetter", "resetter_values", "resetter_namespace", "app"} + ), + app=KafkaConnectorResetterValues( + connector_type=self._connector_type.value, + config=KafkaConnectorResetterConfig( + connector=self.full_name, + brokers=self.config.kafka_brokers, + ), + **self.resetter_values.model_dump(), + ), ) @override @@ -162,105 +200,6 @@ def clean(self, dry_run: bool) -> None: ) self.handlers.topic_handler.delete_topics(self.to, dry_run=dry_run) - def _run_connect_resetter( - self, - dry_run: bool, - retain_clean_jobs: bool, - **kwargs, - ) -> None: - """Clean the connector from the cluster. - - At first, it deletes the previous cleanup job (connector resetter) - to make sure that there is no running clean job in the cluster. Then it releases a cleanup job. - If the retain_clean_jobs flag is set to false the cleanup job will be deleted. 
- - :param dry_run: If the cleanup should be run in dry run mode or not - :param retain_clean_jobs: If the cleanup job should be kept - :param kwargs: Other values for the KafkaConnectorResetter - """ - log.info( - magentaify( - f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.full_name}" - ) - ) - self.__uninstall_connect_resetter(self._resetter_release_name, dry_run) - - log.info( - magentaify( - f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}" - ) - ) - - stdout = self.__install_connect_resetter(dry_run, **kwargs) - - if dry_run: - self.dry_run_handler.print_helm_diff( - stdout, self._resetter_release_name, log - ) - - if not retain_clean_jobs: - log.info(magentaify("Connector Cleanup: uninstall Kafka Resetter.")) - self.__uninstall_connect_resetter(self._resetter_release_name, dry_run) - - def __install_connect_resetter( - self, - dry_run: bool, - **kwargs, - ) -> str: - """Install connector resetter. - - :param dry_run: Whether to dry run the command - :return: The output of `helm upgrade --install` - """ - return self.helm.upgrade_install( - release_name=self._resetter_release_name, - namespace=self.namespace, - chart=self._resetter_helm_chart, - dry_run=dry_run, - flags=HelmUpgradeInstallFlags( - create_namespace=self.config.create_namespace, - version=self.version, - wait_for_jobs=True, - wait=True, - ), - values=self._get_kafka_connect_resetter_values( - **kwargs, - ), - ) - - def _get_kafka_connect_resetter_values( - self, - **kwargs, - ) -> dict: - """Get connector resetter helm chart values. - - :return: The Helm chart values of the connector resetter - """ - return { - **KafkaConnectorResetterValues( - config=KafkaConnectorResetterConfig( - connector=self.full_name, - brokers=self.config.kafka_brokers, - **kwargs, - ), - connector_type=self._connector_type.value, - name_override=self.full_name + "-clean", - ).model_dump(), - **self.resetter_values, - } - - def __uninstall_connect_resetter(self, release_name: str, dry_run: bool) -> None: - """Uninstall connector resetter. - - :param release_name: Name of the release to be uninstalled - :param dry_run: Whether to do a dry run of the command - """ - self.helm.uninstall( - namespace=self.namespace, - release_name=release_name, - dry_run=dry_run, - ) - class KafkaSourceConnector(KafkaConnector): """Kafka source connector model. @@ -282,38 +221,16 @@ def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: msg = "Kafka source connector doesn't support FromSection" raise NotImplementedError(msg) - @override - def manifest(self) -> Resource: - values = self._get_kafka_connect_resetter_values( - offset_topic=self.offset_topic, - ) - return self.helm.template( - self._resetter_release_name, - self._resetter_helm_chart, - self.namespace, - values, - self.template_flags, - ) - @override def reset(self, dry_run: bool) -> None: - self.__run_kafka_connect_resetter(dry_run) + self._resetter.app.config.offset_topic = self.offset_topic + self._resetter.reset(dry_run) @override def clean(self, dry_run: bool) -> None: super().clean(dry_run) - self.__run_kafka_connect_resetter(dry_run) - - def __run_kafka_connect_resetter(self, dry_run: bool) -> None: - """Run the connector resetter. 
- - :param dry_run: Whether to do a dry run of the command - """ - self._run_connect_resetter( - dry_run=dry_run, - retain_clean_jobs=self.config.retain_clean_jobs, - offset_topic=self.offset_topic, - ) + self._resetter.app.config.offset_topic = self.offset_topic + self._resetter.clean(dry_run) class KafkaSinkConnector(KafkaConnector): @@ -328,17 +245,6 @@ def add_input_topics(self, topics: list[str]) -> None: topics = deduplicate(topics) setattr(self.app, "topics", ",".join(topics)) - @override - def manifest(self) -> Resource: - values = self._get_kafka_connect_resetter_values() - return self.helm.template( - self._resetter_release_name, - self._resetter_helm_chart, - self.namespace, - values, - self.template_flags, - ) - @override def set_input_pattern(self, name: str) -> None: setattr(self.app, "topics.regex", name) @@ -349,23 +255,11 @@ def set_error_topic(self, topic_name: str) -> None: @override def reset(self, dry_run: bool) -> None: - self.__run_kafka_connect_resetter(dry_run, delete_consumer_group=False) + self._resetter.app.config.delete_consumer_group = False + self._resetter.reset(dry_run) @override def clean(self, dry_run: bool) -> None: super().clean(dry_run) - self.__run_kafka_connect_resetter(dry_run, delete_consumer_group=True) - - def __run_kafka_connect_resetter( - self, dry_run: bool, delete_consumer_group: bool - ) -> None: - """Run the connector resetter. - - :param dry_run: Whether to do a dry run of the command - :param delete_consumer_group: Whether the consumer group should be deleted or not - """ - self._run_connect_resetter( - dry_run=dry_run, - retain_clean_jobs=self.config.retain_clean_jobs, - delete_consumer_group=delete_consumer_group, - ) + self._resetter.app.config.delete_consumer_group = True + self._resetter.clean(dry_run) diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 2b4065191..5f3c3e67d 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -31,7 +31,7 @@ class KubernetesApp(PipelineComponent, ABC): All built-in components are Kubernetes apps, except for the Kafka connectors. 
- :param namespace: Namespace in which the component shall be deployed + :param namespace: Kubernetes namespace in which the component shall be deployed :param app: Application-specific settings """ diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index d352a6d8a..3e01259a2 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -8,7 +8,9 @@ from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig -from kpops.components.base_components.kafka_connector import KafkaConnector +from kpops.components.base_components.kafka_connector import ( + KafkaConnector, +) from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -17,6 +19,7 @@ CONNECTOR_CLEAN_FULL_NAME = CONNECTOR_FULL_NAME + "-clean" CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline.name}-test-connector-with-lon-612f3-clean" CONNECTOR_CLASS = "com.bakdata.connect.TestConnector" +RESETTER_NAMESPACE = "test-namespace" class TestKafkaConnector: @@ -43,13 +46,13 @@ def handlers(self) -> ComponentHandlers: @pytest.fixture(autouse=True) def helm_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kafka_connector.Helm" + "kpops.components.base_components.helm_app.Helm" ).return_value @pytest.fixture() - def dry_run_handler(self, mocker: MockerFixture) -> MagicMock: + def dry_run_handler_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( - "kpops.components.base_components.kafka_connector.DryRunHandler" + "kpops.components.base_components.helm_app.DryRunHandler" ).return_value @pytest.fixture() @@ -61,27 +64,35 @@ def connector_config(self) -> KafkaConnectorConfig: } ) - def test_connector_config_name_override( + @pytest.fixture() + def connector( self, config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, - ): - connector = KafkaConnector( + ) -> KafkaConnector: + return KafkaConnector( name=CONNECTOR_NAME, config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, ) + + def test_connector_config_name_override( + self, + connector: KafkaConnector, + config: KpopsConfig, + handlers: ComponentHandlers, + ): assert connector.app.name == CONNECTOR_FULL_NAME connector = KafkaConnector( name=CONNECTOR_NAME, config=config, handlers=handlers, - app={"connector.class": CONNECTOR_CLASS}, # type: ignore[reportGeneralTypeIssues] - namespace="test-namespace", + app={"connector.class": CONNECTOR_CLASS}, # type: ignore[reportGeneralTypeIssues], gets enriched + resetter_namespace=RESETTER_NAMESPACE, ) assert connector.app.name == CONNECTOR_FULL_NAME @@ -95,8 +106,7 @@ def test_connector_config_name_override( name=CONNECTOR_NAME, config=config, handlers=handlers, - app={"connector.class": CONNECTOR_CLASS, "name": "different-name"}, # type: ignore[reportGeneralTypeIssues] - namespace="test-namespace", + app={"connector.class": CONNECTOR_CLASS, "name": "different-name"}, # type: ignore[reportGeneralTypeIssues], gets enriched ) with pytest.raises( @@ -109,22 +119,5 @@ def test_connector_config_name_override( name=CONNECTOR_NAME, config=config, handlers=handlers, - app={"connector.class": CONNECTOR_CLASS, "name": ""}, # type: ignore[reportGeneralTypeIssues] - namespace="test-namespace", + app={"connector.class": 
CONNECTOR_CLASS, "name": ""}, # type: ignore[reportGeneralTypeIssues], gets enriched ) - - def test_resetter_release_name( - self, - config: KpopsConfig, - handlers: ComponentHandlers, - connector_config: KafkaConnectorConfig, - ): - connector = KafkaConnector( - name=CONNECTOR_NAME, - config=config, - handlers=handlers, - app=connector_config, - namespace="test-namespace", - ) - assert connector.app.name == CONNECTOR_FULL_NAME - assert connector._resetter_release_name == CONNECTOR_CLEAN_RELEASE_NAME diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 81b5049d9..ef4f7caa3 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -1,4 +1,4 @@ -from unittest.mock import MagicMock, call +from unittest.mock import ANY, MagicMock, call import pytest from pytest_mock import MockerFixture @@ -13,6 +13,7 @@ KafkaConnectorType, ) from kpops.components import KafkaSinkConnector +from kpops.components.base_components.kafka_connector import KafkaConnectorResetter from kpops.components.base_components.models.from_section import ( FromSection, FromTopic, @@ -31,9 +32,12 @@ CONNECTOR_CLEAN_RELEASE_NAME, CONNECTOR_FULL_NAME, CONNECTOR_NAME, + RESETTER_NAMESPACE, TestKafkaConnector, ) +CONNECTOR_TYPE = KafkaConnectorType.SINK.value + class TestKafkaSinkConnector(TestKafkaConnector): @pytest.fixture() @@ -52,7 +56,7 @@ def connector( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( @@ -62,6 +66,12 @@ def connector( ), ) + def test_resetter_release_name(self, connector: KafkaSinkConnector): + assert connector.app.name == CONNECTOR_FULL_NAME + resetter = connector._resetter + assert isinstance(resetter, KafkaConnectorResetter) + assert connector._resetter.helm_release_name == CONNECTOR_CLEAN_RELEASE_NAME + def test_connector_config_parsing( self, config: KpopsConfig, @@ -76,7 +86,7 @@ def test_connector_config_parsing( app=KafkaConnectorConfig( **{**connector_config.model_dump(), "topics": topic_name} ), - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, ) assert getattr(connector.app, "topics") == topic_name @@ -88,7 +98,7 @@ def test_connector_config_parsing( app=KafkaConnectorConfig( **{**connector_config.model_dump(), "topics.regex": topic_pattern} ), - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, ) assert getattr(connector.app, "topics.regex") == topic_pattern @@ -105,7 +115,7 @@ def test_from_section_parsing_input_topic( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type topics={ topic1: FromTopic(type=InputTopicTypes.INPUT), @@ -131,7 +141,7 @@ def test_from_section_parsing_input_pattern( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type topics={topic_pattern: FromTopic(type=InputTopicTypes.PATTERN)} ), @@ -177,18 +187,18 @@ def test_destroy( def test_reset_when_dry_run_is_true( self, connector: KafkaSinkConnector, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, ): dry_run = True 
connector.reset(dry_run=dry_run) - dry_run_handler.print_helm_diff.assert_called_once() + dry_run_handler_mock.print_helm_diff.assert_called_once() def test_reset_when_dry_run_is_false( self, connector: KafkaSinkConnector, + dry_run_handler_mock: MagicMock, helm_mock: MagicMock, - dry_run_handler: MagicMock, mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( @@ -197,71 +207,78 @@ def test_reset_when_dry_run_is_false( mock_clean_connector = mocker.patch.object( connector.handlers.connector_handler, "clean_connector" ) + mock_resetter_reset = mocker.spy(connector._resetter, "reset") + mock = mocker.MagicMock() mock.attach_mock(mock_clean_connector, "mock_clean_connector") mock.attach_mock(helm_mock, "helm") dry_run = False connector.reset(dry_run=dry_run) - - assert mock.mock_calls == [ - mocker.call.helm.add_repo( - "bakdata-kafka-connect-resetter", - "https://bakdata.github.io/kafka-connect-resetter/", - RepoAuthFlags(), - ), - mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=dry_run, - ), - mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - namespace="test-namespace", - chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", - dry_run=dry_run, - flags=HelmUpgradeInstallFlags( - version="1.0.4", - wait=True, - wait_for_jobs=True, + mock_resetter_reset.assert_called_once_with(dry_run) + + mock.assert_has_calls( + [ + mocker.call.helm.add_repo( + "bakdata-kafka-connect-resetter", + "https://bakdata.github.io/kafka-connect-resetter/", + RepoAuthFlags(), ), - values={ - "connectorType": "sink", - "config": { - "brokers": "broker:9092", - "connector": CONNECTOR_FULL_NAME, - "deleteConsumerGroup": False, + mocker.call.helm.uninstall( + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + mocker.call.helm.upgrade_install( + CONNECTOR_CLEAN_RELEASE_NAME, + "bakdata-kafka-connect-resetter/kafka-connect-resetter", + dry_run, + RESETTER_NAMESPACE, + { + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, + "connectorType": CONNECTOR_TYPE, + "config": { + "brokers": "broker:9092", + "connector": CONNECTOR_FULL_NAME, + "deleteConsumerGroup": False, + }, }, - "nameOverride": CONNECTOR_CLEAN_FULL_NAME, - }, - ), - mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=dry_run, - ), - ] + HelmUpgradeInstallFlags( + version="1.0.4", + wait=True, + wait_for_jobs=True, + ), + ), + mocker.call.helm.uninstall( + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) - dry_run_handler.print_helm_diff.assert_not_called() + dry_run_handler_mock.print_helm_diff.assert_not_called() mock_delete_topics.assert_not_called() def test_clean_when_dry_run_is_true( self, connector: KafkaSinkConnector, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, ): dry_run = True connector.clean(dry_run=dry_run) - dry_run_handler.print_helm_diff.assert_called_once() + dry_run_handler_mock.print_helm_diff.assert_called_once() def test_clean_when_dry_run_is_false( self, connector: KafkaSinkConnector, - config: KpopsConfig, - handlers: ComponentHandlers, helm_mock: MagicMock, log_info_mock: MagicMock, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, mocker: MockerFixture, ): mock_delete_topics = mocker.patch.object( @@ -301,43 +318,47 @@ def test_clean_when_dry_run_is_false( RepoAuthFlags(), ), mocker.call.helm.uninstall( - 
namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=dry_run, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - namespace="test-namespace", - chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", - dry_run=dry_run, - flags=HelmUpgradeInstallFlags( - version="1.0.4", - wait=True, - wait_for_jobs=True, - ), - values={ - "connectorType": "sink", + CONNECTOR_CLEAN_RELEASE_NAME, + "bakdata-kafka-connect-resetter/kafka-connect-resetter", + dry_run, + RESETTER_NAMESPACE, + { + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, + "connectorType": CONNECTOR_TYPE, "config": { "brokers": "broker:9092", "connector": CONNECTOR_FULL_NAME, "deleteConsumerGroup": True, }, - "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, + HelmUpgradeInstallFlags( + version="1.0.4", + wait=True, + wait_for_jobs=True, + ), ), mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=dry_run, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ ] - dry_run_handler.print_helm_diff.assert_not_called() + dry_run_handler_mock.print_helm_diff.assert_not_called() def test_clean_without_to_when_dry_run_is_true( self, config: KpopsConfig, handlers: ComponentHandlers, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, connector_config: KafkaConnectorConfig, ): connector = KafkaSinkConnector( @@ -345,19 +366,19 @@ def test_clean_without_to_when_dry_run_is_true( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, ) dry_run = True connector.clean(dry_run) - dry_run_handler.print_helm_diff.assert_called_once() + dry_run_handler_mock.print_helm_diff.assert_called_once() def test_clean_without_to_when_dry_run_is_false( self, config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, mocker: MockerFixture, connector_config: KafkaConnectorConfig, ): @@ -366,7 +387,7 @@ def test_clean_without_to_when_dry_run_is_false( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, ) mock_delete_topics = mocker.patch.object( @@ -395,36 +416,40 @@ def test_clean_without_to_when_dry_run_is_false( ), ), mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=dry_run, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - namespace="test-namespace", - chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", - dry_run=dry_run, - flags=HelmUpgradeInstallFlags( - version="1.0.4", - wait=True, - wait_for_jobs=True, - ), - values={ - "connectorType": "sink", + CONNECTOR_CLEAN_RELEASE_NAME, + "bakdata-kafka-connect-resetter/kafka-connect-resetter", + dry_run, + RESETTER_NAMESPACE, + { + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, + "connectorType": CONNECTOR_TYPE, "config": { "brokers": "broker:9092", "connector": CONNECTOR_FULL_NAME, "deleteConsumerGroup": True, }, - "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, + HelmUpgradeInstallFlags( + version="1.0.4", + wait=True, + wait_for_jobs=True, + ), ), mocker.call.helm.uninstall( - namespace="test-namespace", - 
release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=dry_run, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ ] - dry_run_handler.print_helm_diff.assert_not_called() + dry_run_handler_mock.print_helm_diff.assert_not_called() mock_delete_topics.assert_not_called() diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index a34efc364..31511e81f 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -1,4 +1,4 @@ -from unittest.mock import MagicMock +from unittest.mock import ANY, MagicMock import pytest from pytest_mock import MockerFixture @@ -8,8 +8,14 @@ HelmUpgradeInstallFlags, RepoAuthFlags, ) -from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig -from kpops.components.base_components.kafka_connector import KafkaSourceConnector +from kpops.component_handlers.kafka_connect.model import ( + KafkaConnectorConfig, + KafkaConnectorType, +) +from kpops.components.base_components.kafka_connector import ( + KafkaConnectorResetter, + KafkaSourceConnector, +) from kpops.components.base_components.models.from_section import ( FromSection, FromTopic, @@ -28,10 +34,13 @@ CONNECTOR_CLEAN_RELEASE_NAME, CONNECTOR_FULL_NAME, CONNECTOR_NAME, + RESETTER_NAMESPACE, TestKafkaConnector, ) +CONNECTOR_TYPE = KafkaConnectorType.SOURCE.value CLEAN_SUFFIX = "-clean" +OFFSETS_TOPIC = "kafka-connect-offsets" class TestKafkaSourceConnector(TestKafkaConnector): @@ -47,7 +56,7 @@ def connector( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, to=ToSection( topics={ TopicName("${output_topic_name}"): TopicConfig( @@ -55,9 +64,15 @@ def connector( ), } ), - offset_topic="kafka-connect-offsets", + offset_topic=OFFSETS_TOPIC, ) + def test_resetter_release_name(self, connector: KafkaSourceConnector): + assert connector.app.name == CONNECTOR_FULL_NAME + resetter = connector._resetter + assert isinstance(resetter, KafkaConnectorResetter) + assert connector._resetter.helm_release_name == CONNECTOR_CLEAN_RELEASE_NAME + def test_from_section_raises_exception( self, config: KpopsConfig, @@ -70,7 +85,7 @@ def test_from_section_raises_exception( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", + resetter_namespace=RESETTER_NAMESPACE, from_=FromSection( # pyright: ignore[reportGeneralTypeIssues] wrong diagnostic when using TopicName as topics key type topics={ TopicName("connector-topic"): FromTopic( @@ -107,7 +122,7 @@ def test_destroy( connector: KafkaSourceConnector, mocker: MockerFixture, ): - ENV["KPOPS_KAFKA_CONNECT_RESETTER_OFFSET_TOPIC"] = "kafka-connect-offsets" + ENV["KPOPS_KAFKA_CONNECT_RESETTER_OFFSET_TOPIC"] = OFFSETS_TOPIC assert connector.handlers.connector_handler mock_destroy_connector = mocker.patch.object( @@ -123,18 +138,18 @@ def test_destroy( def test_reset_when_dry_run_is_true( self, connector: KafkaSourceConnector, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, ): assert connector.handlers.connector_handler connector.reset(dry_run=True) - dry_run_handler.print_helm_diff.assert_called_once() + dry_run_handler_mock.print_helm_diff.assert_called_once() def test_reset_when_dry_run_is_false( self, connector: KafkaSourceConnector, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, helm_mock: MagicMock, mocker: MockerFixture, ): @@ -159,55 +174,59 @@ def 
test_reset_when_dry_run_is_false( RepoAuthFlags(), ), mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=False, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + False, ), + ANY, # __bool__ + ANY, # __str__ mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - namespace="test-namespace", - chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", - dry_run=False, - flags=HelmUpgradeInstallFlags( - version="1.0.4", - wait=True, - wait_for_jobs=True, - ), - values={ - "connectorType": "source", + CONNECTOR_CLEAN_RELEASE_NAME, + "bakdata-kafka-connect-resetter/kafka-connect-resetter", + False, + RESETTER_NAMESPACE, + { + "connectorType": CONNECTOR_TYPE, "config": { "brokers": "broker:9092", "connector": CONNECTOR_FULL_NAME, - "offsetTopic": "kafka-connect-offsets", + "offsetTopic": OFFSETS_TOPIC, }, "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, + HelmUpgradeInstallFlags( + version="1.0.4", + wait=True, + wait_for_jobs=True, + ), ), mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=False, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + False, ), + ANY, # __bool__ + ANY, # __str__ ] mock_delete_topics.assert_not_called() - dry_run_handler.print_helm_diff.assert_not_called() + dry_run_handler_mock.print_helm_diff.assert_not_called() def test_clean_when_dry_run_is_true( self, connector: KafkaSourceConnector, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, ): assert connector.handlers.connector_handler connector.clean(dry_run=True) - dry_run_handler.print_helm_diff.assert_called_once() + dry_run_handler_mock.print_helm_diff.assert_called_once() def test_clean_when_dry_run_is_false( self, connector: KafkaSourceConnector, helm_mock: MagicMock, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, mocker: MockerFixture, ): assert connector.handlers.connector_handler @@ -224,55 +243,60 @@ def test_clean_when_dry_run_is_false( mock.attach_mock(mock_clean_connector, "mock_clean_connector") mock.attach_mock(helm_mock, "helm") - connector.clean(dry_run=False) + dry_run = False + connector.clean(dry_run) assert mock.mock_calls == [ - mocker.call.mock_delete_topics(connector.to, dry_run=False), + mocker.call.mock_delete_topics(connector.to, dry_run=dry_run), mocker.call.helm.add_repo( "bakdata-kafka-connect-resetter", "https://bakdata.github.io/kafka-connect-resetter/", RepoAuthFlags(), ), mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=False, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - namespace="test-namespace", - chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", - dry_run=False, - flags=HelmUpgradeInstallFlags( - version="1.0.4", - wait=True, - wait_for_jobs=True, - ), - values={ - "connectorType": "source", + CONNECTOR_CLEAN_RELEASE_NAME, + "bakdata-kafka-connect-resetter/kafka-connect-resetter", + dry_run, + RESETTER_NAMESPACE, + { + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, + "connectorType": CONNECTOR_TYPE, "config": { "brokers": "broker:9092", "connector": CONNECTOR_FULL_NAME, - "offsetTopic": "kafka-connect-offsets", + "offsetTopic": OFFSETS_TOPIC, }, - "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, + HelmUpgradeInstallFlags( + version="1.0.4", + wait=True, + wait_for_jobs=True, + ), ), 
mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=False, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ ] - dry_run_handler.print_helm_diff.assert_not_called() + dry_run_handler_mock.print_helm_diff.assert_not_called() def test_clean_without_to_when_dry_run_is_false( self, config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, mocker: MockerFixture, connector_config: KafkaConnectorConfig, ): @@ -281,8 +305,8 @@ def test_clean_without_to_when_dry_run_is_false( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", - offset_topic="kafka-connect-offsets", + resetter_namespace=RESETTER_NAMESPACE, + offset_topic=OFFSETS_TOPIC, ) assert connector.to is None @@ -300,7 +324,8 @@ def test_clean_without_to_when_dry_run_is_false( mock.attach_mock(mock_clean_connector, "mock_clean_connector") mock.attach_mock(helm_mock, "helm") - connector.clean(dry_run=False) + dry_run = False + connector.clean(dry_run) assert mock.mock_calls == [ mocker.call.helm.add_repo( @@ -309,45 +334,49 @@ def test_clean_without_to_when_dry_run_is_false( RepoAuthFlags(), ), mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=False, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ mocker.call.helm.upgrade_install( - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - namespace="test-namespace", - chart="bakdata-kafka-connect-resetter/kafka-connect-resetter", - dry_run=False, - flags=HelmUpgradeInstallFlags( - version="1.0.4", - wait=True, - wait_for_jobs=True, - ), - values={ - "connectorType": "source", + CONNECTOR_CLEAN_RELEASE_NAME, + "bakdata-kafka-connect-resetter/kafka-connect-resetter", + dry_run, + RESETTER_NAMESPACE, + { + "nameOverride": CONNECTOR_CLEAN_FULL_NAME, + "connectorType": CONNECTOR_TYPE, "config": { "brokers": "broker:9092", "connector": CONNECTOR_FULL_NAME, - "offsetTopic": "kafka-connect-offsets", + "offsetTopic": OFFSETS_TOPIC, }, - "nameOverride": CONNECTOR_CLEAN_FULL_NAME, }, + HelmUpgradeInstallFlags( + version="1.0.4", + wait=True, + wait_for_jobs=True, + ), ), mocker.call.helm.uninstall( - namespace="test-namespace", - release_name=CONNECTOR_CLEAN_RELEASE_NAME, - dry_run=False, + RESETTER_NAMESPACE, + CONNECTOR_CLEAN_RELEASE_NAME, + dry_run, ), + ANY, # __bool__ + ANY, # __str__ ] mock_delete_topics.assert_not_called() - dry_run_handler.print_helm_diff.assert_not_called() + dry_run_handler_mock.print_helm_diff.assert_not_called() def test_clean_without_to_when_dry_run_is_true( self, config: KpopsConfig, handlers: ComponentHandlers, - dry_run_handler: MagicMock, + dry_run_handler_mock: MagicMock, connector_config: KafkaConnectorConfig, ): connector = KafkaSourceConnector( @@ -355,8 +384,8 @@ def test_clean_without_to_when_dry_run_is_true( config=config, handlers=handlers, app=connector_config, - namespace="test-namespace", - offset_topic="kafka-connect-offsets", + resetter_namespace=RESETTER_NAMESPACE, + offset_topic=OFFSETS_TOPIC, ) assert connector.to is None @@ -364,4 +393,4 @@ def test_clean_without_to_when_dry_run_is_true( connector.clean(dry_run=True) - dry_run_handler.print_helm_diff.assert_called_once() + dry_run_handler_mock.print_helm_diff.assert_called_once() diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml index 
101e3e175..810e6b5f8 100644 --- a/tests/pipeline/resources/defaults.yaml +++ b/tests/pipeline/resources/defaults.yaml @@ -1,12 +1,14 @@ +pipeline-component: + name: ${component.type} + kubernetes-app: - name: "${component.type}" namespace: example-namespace kafka-app: app: streams: - brokers: "${config.kafka_brokers}" - schema_registry_url: "${config.schema_registry.url}" + brokers: ${config.kafka_brokers} + schema_registry_url: ${config.schema_registry.url} version: "2.4.2" producer-app: {} # inherits from kafka-app @@ -49,7 +51,7 @@ converter: enabled: true consumerGroup: converter-${output_topic_name} maxReplicas: 1 - lagThreshold: "10000" + lagThreshold: 10000 to: topics: ${output_topic_name}: @@ -72,7 +74,7 @@ filter: autoscaling: enabled: true maxReplicas: 1 - lagThreshold: "10000" + lagThreshold: 10000 consumerGroup: filter-${output_topic_name} topics: - "${output_topic_name}" @@ -91,7 +93,7 @@ should-inflate: autoscaling: enabled: true maxReplicas: 1 - lagThreshold: "10000" + lagThreshold: 10000 consumerGroup: filter-${output_topic_name} topics: - "${output_topic_name}" @@ -103,9 +105,7 @@ should-inflate: configs: retention.ms: "-1" -kafka-connector: - name: "sink-connector" - namespace: "example-namespace" +kafka-sink-connector: app: batch.size: "2000" behavior.on.malformed.documents: "warn" diff --git a/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml b/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml index 02a28015a..fc012737a 100644 --- a/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml +++ b/tests/pipeline/resources/kafka-connect-sink/pipeline.yaml @@ -12,7 +12,6 @@ type: output - type: kafka-sink-connector - namespace: example-namespace name: es-sink-connector app: connector.class: io.confluent.connect.elasticsearch.ElasticsearchSinkConnector diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index a88a7ee4a..77ba66496 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -305,6 +305,28 @@ 'version': '2.9.0' }, { + '_resetter': { + 'app': { + 'config': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'connector': 'postgresql-connector' + }, + 'connectorType': 'sink' + }, + 'name': 'postgresql-connector', + 'namespace': '${NAMESPACE}', + 'prefix': '', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'suffix': '-clean', + 'type': 'kafka-connector-resetter', + 'version': '1.0.4' + }, 'app': { 'auto.create': True, 'connection.ds.pool.size': 5, @@ -330,18 +352,9 @@ 'value.converter.schema.registry.url': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' }, 'name': 'postgresql-connector', - 'namespace': '${NAMESPACE}', 'prefix': '', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, 'resetter_values': { }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + 'type': 'kafka-sink-connector' } ] diff --git a/tests/pipeline/snapshots/snap_test_generate.py b/tests/pipeline/snapshots/snap_test_generate.py index f6d75f3e0..9a3302e40 100644 --- a/tests/pipeline/snapshots/snap_test_generate.py +++ b/tests/pipeline/snapshots/snap_test_generate.py @@ -292,6 +292,44 @@ 'version': '2.4.2' }, { 
+ '_resetter': { + 'app': { + 'config': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'connector': 'resources-pipeline-with-inflate-should-inflate-inflated-sink-connector' + }, + 'connectorType': 'sink' + }, + 'name': 'should-inflate-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-pipeline-with-inflate-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'suffix': '-clean', + 'to': { + 'models': { + }, + 'topics': { + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + }, + 'should-inflate-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + } + } + }, + 'type': 'kafka-connector-resetter', + 'version': '1.0.4' + }, 'app': { 'batch.size': '2000', 'behavior.on.malformed.documents': 'warn', @@ -308,15 +346,8 @@ 'transforms.changeTopic.replacement': 'resources-pipeline-with-inflate-should-inflate-index-v1' }, 'name': 'should-inflate-inflated-sink-connector', - 'namespace': 'example-namespace', 'prefix': 'resources-pipeline-with-inflate-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, + 'resetter_namespace': 'example-namespace', 'resetter_values': { }, 'to': { @@ -335,8 +366,7 @@ } } }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + 'type': 'kafka-sink-connector' }, { 'app': { @@ -446,6 +476,28 @@ 'version': '2.4.2' }, { + '_resetter': { + 'app': { + 'config': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'connector': 'resources-kafka-connect-sink-es-sink-connector' + }, + 'connectorType': 'sink' + }, + 'name': 'es-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'suffix': '-clean', + 'type': 'kafka-connector-resetter', + 'version': '1.0.4' + }, 'app': { 'batch.size': '2000', 'behavior.on.malformed.documents': 'warn', @@ -461,19 +513,10 @@ 'topics': 'example-output' }, 'name': 'es-sink-connector', - 'namespace': 'example-namespace', 'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, 'resetter_values': { }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + 'type': 'kafka-sink-connector' } ] @@ -1245,6 +1288,44 @@ 'version': '2.4.2' }, { + '_resetter': { + 'app': { + 'config': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'connector': 'resources-read-from-component-inflate-step-inflated-sink-connector' + }, + 'connectorType': 'sink' + }, + 'name': 'inflate-step-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'suffix': '-clean', + 'to': { + 'models': { + }, + 'topics': { + 
'inflate-step-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'kafka-connector-resetter', + 'version': '1.0.4' + }, 'app': { 'batch.size': '2000', 'behavior.on.malformed.documents': 'warn', @@ -1261,15 +1342,8 @@ 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-index-v1' }, 'name': 'inflate-step-inflated-sink-connector', - 'namespace': 'example-namespace', 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, + 'resetter_namespace': 'example-namespace', 'resetter_values': { }, 'to': { @@ -1288,8 +1362,7 @@ } } }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + 'type': 'kafka-sink-connector' }, { 'app': { @@ -1403,6 +1476,44 @@ 'version': '2.4.2' }, { + '_resetter': { + 'app': { + 'config': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'connector': 'resources-read-from-component-inflate-step-without-prefix-inflated-sink-connector' + }, + 'connectorType': 'sink' + }, + 'name': 'inflate-step-without-prefix-inflated-sink-connector', + 'namespace': 'example-namespace', + 'prefix': 'resources-read-from-component-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'suffix': '-clean', + 'to': { + 'models': { + }, + 'topics': { + 'inflate-step-without-prefix-inflated-sink-connector': { + 'configs': { + }, + 'role': 'test' + }, + 'kafka-sink-connector': { + 'configs': { + }, + 'type': 'output' + } + } + }, + 'type': 'kafka-connector-resetter', + 'version': '1.0.4' + }, 'app': { 'batch.size': '2000', 'behavior.on.malformed.documents': 'warn', @@ -1419,15 +1530,8 @@ 'transforms.changeTopic.replacement': 'resources-read-from-component-inflate-step-without-prefix-index-v1' }, 'name': 'inflate-step-without-prefix-inflated-sink-connector', - 'namespace': 'example-namespace', 'prefix': 'resources-read-from-component-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, + 'resetter_namespace': 'example-namespace', 'resetter_values': { }, 'to': { @@ -1446,8 +1550,7 @@ } } }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + 'type': 'kafka-sink-connector' }, { 'app': { @@ -2212,6 +2315,28 @@ 'version': '2.9.0' }, { + '_resetter': { + 'app': { + 'config': { + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', + 'connector': 'resources-kafka-connect-sink-es-sink-connector' + }, + 'connectorType': 'sink' + }, + 'name': 'es-sink-connector', + 'namespace': 'development-namespace', + 'prefix': 'resources-kafka-connect-sink-', + 'repo_config': { + 'repo_auth_flags': { + 'insecure_skip_tls_verify': False + }, + 'repository_name': 'bakdata-kafka-connect-resetter', + 'url': 'https://bakdata.github.io/kafka-connect-resetter/' + }, + 'suffix': '-clean', + 'type': 'kafka-connector-resetter', + 'version': '1.0.4' + }, 'app': { 'batch.size': '2000', 'behavior.on.malformed.documents': 'warn', @@ -2227,18 +2352,9 @@ 'topics': 'example-output' }, 'name': 'es-sink-connector', - 'namespace': 'example-namespace', 
'prefix': 'resources-kafka-connect-sink-', - 'repo_config': { - 'repo_auth_flags': { - 'insecure_skip_tls_verify': False - }, - 'repository_name': 'bakdata-kafka-connect-resetter', - 'url': 'https://bakdata.github.io/kafka-connect-resetter/' - }, 'resetter_values': { }, - 'type': 'kafka-sink-connector', - 'version': '1.0.4' + 'type': 'kafka-sink-connector' } ] diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index 20f781545..e8c3e2c83 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -46,7 +46,7 @@ def inflate(self) -> list[PipelineComponent]: name=f"{self.name}-inflated-sink-connector", config=self.config, handlers=self.handlers, - namespace="example-namespace", + resetter_namespace="example-namespace", # FIXME app={ # type: ignore[reportGeneralTypeIssues] "topics": topic_name, diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py index 686aac26c..c87c668a0 100644 --- a/tests/pipeline/test_components_without_schema_handler/components.py +++ b/tests/pipeline/test_components_without_schema_handler/components.py @@ -31,7 +31,6 @@ def inflate(self) -> list[PipelineComponent]: name="sink-connector", config=self.config, handlers=self.handlers, - namespace="example-namespace", app=KafkaConnectorConfig( **{ "topics": topic_name, From 61bcecae5290b9a1954bbdffcf6acc8543330f2c Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 16 Jan 2024 15:11:06 +0200 Subject: [PATCH 31/34] Update tests resources (#417) closes #411 --- tests/pipeline/resources/dotenv/config.yaml | 3 --- .../resources/env-specific-config-only/config_production.yaml | 3 --- tests/pipeline/resources/multi-config/config.yaml | 3 --- 3 files changed, 9 deletions(-) diff --git a/tests/pipeline/resources/dotenv/config.yaml b/tests/pipeline/resources/dotenv/config.yaml index 66fb3e410..3abfdffd4 100644 --- a/tests/pipeline/resources/dotenv/config.yaml +++ b/tests/pipeline/resources/dotenv/config.yaml @@ -1,7 +1,4 @@ defaults_path: ../defaults.yaml -topic_name_config: - default_error_topic_name: "${component_name}-dead-letter-topic" - default_output_topic_name: "${component_name}-test-topic" kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" kafka_connect: url: "http://localhost:8083" diff --git a/tests/pipeline/resources/env-specific-config-only/config_production.yaml b/tests/pipeline/resources/env-specific-config-only/config_production.yaml index 74910f62d..2e40128d4 100644 --- a/tests/pipeline/resources/env-specific-config-only/config_production.yaml +++ b/tests/pipeline/resources/env-specific-config-only/config_production.yaml @@ -1,7 +1,4 @@ defaults_path: ../no-topics-defaults -topic_name_config: - default_error_topic_name: "${component_name}-dead-letter-topic" - default_output_topic_name: "${component_name}-test-topic" kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" kafka_connect: url: "http://localhost:8083" diff --git a/tests/pipeline/resources/multi-config/config.yaml b/tests/pipeline/resources/multi-config/config.yaml index 74910f62d..2e40128d4 100644 --- a/tests/pipeline/resources/multi-config/config.yaml +++ b/tests/pipeline/resources/multi-config/config.yaml @@ -1,7 +1,4 @@ defaults_path: ../no-topics-defaults -topic_name_config: - default_error_topic_name: "${component_name}-dead-letter-topic" - default_output_topic_name: 
"${component_name}-test-topic" kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" kafka_connect: url: "http://localhost:8083" From 10adb816555cca74ea1e803e42f84079f00fd0ba Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 16 Jan 2024 16:43:36 +0100 Subject: [PATCH 32/34] Fix enrichment of nested Pydantic BaseModel (#415) --- kpops/component_handlers/kubernetes/model.py | 13 +++----- .../base_defaults_component.py | 32 ++++++++++++++----- kpops/pipeline.py | 3 +- kpops/utils/dict_ops.py | 28 ++++++++-------- kpops/utils/types.py | 9 ++++++ tests/components/resources/defaults.yaml | 2 ++ .../test_base_defaults_component.py | 17 ++++++++++ .../pipeline/snapshots/snap_test_generate.py | 3 -- tests/pipeline/test_components/components.py | 4 +-- tests/utils/test_dict_ops.py | 8 ++++- 10 files changed, 82 insertions(+), 37 deletions(-) create mode 100644 kpops/utils/types.py diff --git a/kpops/component_handlers/kubernetes/model.py b/kpops/component_handlers/kubernetes/model.py index a88063e0a..bc9ff2730 100644 --- a/kpops/component_handlers/kubernetes/model.py +++ b/kpops/component_handlers/kubernetes/model.py @@ -1,30 +1,27 @@ import json from collections import UserDict from collections.abc import Iterator -from typing import TypeAlias import yaml +from kpops.utils.types import JsonType + try: from typing import Self except ImportError: from typing_extensions import Self -# JSON values -Json: TypeAlias = dict[str, "Json"] | list["Json"] | str | int | float | bool | None - - -class KubernetesManifest(UserDict[str, Json]): +class KubernetesManifest(UserDict[str, JsonType]): """Representation of a Kubernetes API object as YAML/JSON mapping.""" @classmethod def from_yaml(cls, /, content: str) -> Iterator[Self]: - manifests: Iterator[dict[str, Json]] = yaml.load_all(content, yaml.Loader) + manifests: Iterator[dict[str, JsonType]] = yaml.load_all(content, yaml.Loader) for manifest in manifests: yield cls(manifest) @classmethod def from_json(cls, /, content: str) -> Self: - manifest: dict[str, Json] = json.loads(content) + manifest: dict[str, JsonType] = json.loads(content) return cls(manifest) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index aab0a3484..37c5851c9 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -3,18 +3,25 @@ from abc import ABC from collections import deque from collections.abc import Sequence +from dataclasses import asdict, is_dataclass from functools import cached_property from pathlib import Path -from typing import TypeVar +from typing import Any, TypeVar +import pydantic import typer -from pydantic import AliasChoices, ConfigDict, Field, computed_field +from pydantic import ( + AliasChoices, + ConfigDict, + Field, + computed_field, +) from pydantic.json_schema import SkipJsonSchema from kpops.component_handlers import ComponentHandlers from kpops.config import KpopsConfig from kpops.utils import cached_classproperty -from kpops.utils.dict_ops import update_nested +from kpops.utils.dict_ops import update_nested, update_nested_pair from kpops.utils.docstring import describe_attr from kpops.utils.environment import ENV from kpops.utils.pydantic import DescConfigModel, to_dash @@ -84,26 +91,35 @@ def type(cls: type[Self]) -> str: # pyright: ignore[reportGeneralTypeIssues] """ return to_dash(cls.__name__) - def extend_with_defaults(self, **kwargs) -> dict: + 
@classmethod
+    def extend_with_defaults(cls, **kwargs: Any) -> dict[str, Any]:
         """Merge parent components' defaults with own.
 
         :param kwargs: The init kwargs for pydantic
         :returns: Enriched kwargs with inherited defaults
         """
-        config: KpopsConfig = kwargs["config"]
+        config = kwargs["config"]
+        assert isinstance(config, KpopsConfig)
+
+        for k, v in kwargs.items():
+            if isinstance(v, pydantic.BaseModel):
+                kwargs[k] = v.model_dump(exclude_unset=True)
+            elif is_dataclass(v):
+                kwargs[k] = asdict(v)
+
         log.debug(
             typer.style(
                 "Enriching component of type ", fg=typer.colors.GREEN, bold=False
             )
-            + typer.style(self.type, fg=typer.colors.GREEN, bold=True, underline=True)
+            + typer.style(cls.type, fg=typer.colors.GREEN, bold=True, underline=True)
         )
         main_default_file_path, environment_default_file_path = get_defaults_file_paths(
             config, ENV.get("environment")
         )
         defaults = load_defaults(
-            self.__class__, main_default_file_path, environment_default_file_path
+            cls, main_default_file_path, environment_default_file_path
         )
-        return update_nested(kwargs, defaults)
+        return update_nested_pair(kwargs, defaults)
 
     def _validate_custom(self, **kwargs) -> None:
         """Run custom validation on component.
diff --git a/kpops/pipeline.py b/kpops/pipeline.py
index 26629686a..aff9ca475 100644
--- a/kpops/pipeline.py
+++ b/kpops/pipeline.py
@@ -12,6 +12,7 @@
 from kpops.components.base_components.pipeline_component import PipelineComponent
 from kpops.utils.dict_ops import generate_substitution, update_nested_pair
 from kpops.utils.environment import ENV
+from kpops.utils.types import JsonType
 from kpops.utils.yaml import load_yaml_file, substitute_nested
 
 if TYPE_CHECKING:
@@ -254,7 +255,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict:
         # Leftover variables that were previously introduced in the component by the substitution
         # functions, still hardcoded, because of their names.
         # TODO(Ivan Yordanov): Get rid of them
-        substitution_hardcoded = {
+        substitution_hardcoded: dict[str, JsonType] = {
             "error_topic_name": config.topic_name_config.default_error_topic_name,
             "output_topic_name": config.topic_name_config.default_output_topic_name,
         }
diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py
index fa8aecd7b..fa5f73997 100644
--- a/kpops/utils/dict_ops.py
+++ b/kpops/utils/dict_ops.py
@@ -6,8 +6,12 @@
 from typing_extensions import override
 
+_V = TypeVar("_V", bound=object)
+
 
-def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict:
+def update_nested_pair(
+    original_dict: dict[str, _V], other_dict: Mapping[str, _V]
+) -> dict[str, _V]:
     """Nested update for 2 dictionaries.
 
     Adds all new fields in ``other_dict`` to ``original_dict``.
@@ -30,7 +34,7 @@ def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict:
     return original_dict
 
 
-def update_nested(*argv: dict) -> dict:
+def update_nested(*argv: dict[str, _V]) -> dict[str, _V]:
     """Merge multiple configuration dicts.
 
     The dicts have multiple layers. These layers will be merged recursively.
@@ -46,13 +50,15 @@
     if len(argv) == 1:
         return argv[0]
     if len(argv) == 2:
-        return update_nested_pair(argv[0], argv[1])
+        return update_nested_pair(*argv)
     return update_nested(update_nested_pair(argv[0], argv[1]), *argv[2:])
 
 
 def flatten_mapping(
-    nested_mapping: Mapping[str, Any], prefix: str | None = None, separator: str = "_"
-) -> dict[str, Any]:
+    nested_mapping: Mapping[str, _V],
+    prefix: str | None = None,
+    separator: str = "_",
+) -> dict[str, _V]:
     """Flattens a Mapping.
 
     :param nested_mapping: Nested mapping that is to be flattened
@@ -78,13 +84,10 @@
     return top
 
 
-_V = TypeVar("_V")
-
-
 def generate_substitution(
     input: dict[str, _V],
     prefix: str | None = None,
-    existing_substitution: dict | None = None,
+    existing_substitution: dict[str, _V] | None = None,
     separator: str | None = None,
 ) -> dict[str, _V]:
     """Generate a complete substitution dict from a given dict.
@@ -97,11 +100,10 @@
     :param existing_substitution: existing substitution to include
     :returns: Substitution dict of all variables related to the model.
     """
+    existing_substitution = existing_substitution or {}
     if separator is None:
-        return update_nested(
-            existing_substitution or {}, flatten_mapping(input, prefix)
-        )
-    return update_nested(
+        return update_nested_pair(existing_substitution, flatten_mapping(input, prefix))
+    return update_nested_pair(
         existing_substitution or {}, flatten_mapping(input, prefix, separator)
     )
diff --git a/kpops/utils/types.py b/kpops/utils/types.py
new file mode 100644
index 000000000..d41225e35
--- /dev/null
+++ b/kpops/utils/types.py
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+from collections.abc import Mapping, Sequence
+from typing import TypeAlias
+
+# JSON values
+JsonType: TypeAlias = (
+    Mapping[str, "JsonType"] | Sequence["JsonType"] | str | int | float | bool | None
+)
diff --git a/tests/components/resources/defaults.yaml b/tests/components/resources/defaults.yaml
index 40ed52a21..a854a6d95 100644
--- a/tests/components/resources/defaults.yaml
+++ b/tests/components/resources/defaults.yaml
@@ -5,5 +5,7 @@ child:
   name: fake-child-name
   nice:
     fake-value: must-be-overwritten
+  nested:
+    foo: foo
 env-var-test:
   name: $pipeline_name
diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py
index e449da9a5..360d40b6c 100644
--- a/tests/components/test_base_defaults_component.py
+++ b/tests/components/test_base_defaults_component.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
 from pathlib import Path
 from unittest.mock import MagicMock
 
+import pydantic
 import pytest
 
 from kpops.component_handlers import ComponentHandlers
@@ -21,10 +24,15 @@ class Parent(BaseDefaultsComponent):
     hard_coded: str = "hard_coded_value"
 
 
+class Nested(pydantic.BaseModel):
+    model_config = pydantic.ConfigDict(extra="allow")
+
+
 class Child(Parent):
     __test__ = False
     nice: dict | None = None
     another_hard_coded: str = "another_hard_coded_value"
+    nested: Nested | None = None
 
 
 class GrandChild(Child):
@@ -69,6 +77,7 @@ class TestBaseDefaultsComponent:
                     "name": "fake-child-name",
                     "nice": {"fake-value": "must-be-overwritten"},
                     "value": 1.0,
+                    "nested": {"foo": "foo"},
                 },
             ),
         ],
@@ -97,6 +106,7 @@ def test_load_defaults(
                     "name": "fake-child-name",
                     "nice": {"fake-value": "fake"},
                     "value": 2.0,
+                    "nested": {"foo": "foo"},
                 },
             ),
         ],
@@ -187,3 +197,10 @@ def test_env_var_substitution(
         assert component.name == str(
             DEFAULTS_PATH
         ), "Environment variables should be substituted"
+
+    def test_merge_defaults(self, config: KpopsConfig, handlers: ComponentHandlers):
+        component = GrandChild(
+            config=config, handlers=handlers, nested=Nested(**{"bar": False})
+        )
+        assert isinstance(component.nested, Nested)
+        assert component.nested == Nested(**{"foo": "foo", "bar": False})
diff --git a/tests/pipeline/snapshots/snap_test_generate.py b/tests/pipeline/snapshots/snap_test_generate.py
index 9a3302e40..436d7e9a2 100644
--- a/tests/pipeline/snapshots/snap_test_generate.py
+++ b/tests/pipeline/snapshots/snap_test_generate.py
@@ -347,7 +346,6 @@
         },
         'name': 'should-inflate-inflated-sink-connector',
         'prefix': 'resources-pipeline-with-inflate-',
-        'resetter_namespace': 'example-namespace',
         'resetter_values': {
         },
         'to': {
@@ -1343,7 +1342,6 @@
         },
         'name': 'inflate-step-inflated-sink-connector',
         'prefix': 'resources-read-from-component-',
-        'resetter_namespace': 'example-namespace',
         'resetter_values': {
         },
         'to': {
@@ -1531,7 +1529,6 @@
         },
         'name': 'inflate-step-without-prefix-inflated-sink-connector',
         'prefix': 'resources-read-from-component-',
-        'resetter_namespace': 'example-namespace',
         'resetter_values': {
         },
         'to': {
diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py
index e8c3e2c83..7964b2102 100644
--- a/tests/pipeline/test_components/components.py
+++ b/tests/pipeline/test_components/components.py
@@ -46,9 +46,7 @@ def inflate(self) -> list[PipelineComponent]:
             name=f"{self.name}-inflated-sink-connector",
             config=self.config,
             handlers=self.handlers,
-            resetter_namespace="example-namespace",
-            # FIXME
-            app={  # type: ignore[reportGeneralTypeIssues]
+            app={  # type: ignore[reportGeneralTypeIssues], required `connector.class` comes from defaults during enrichment
                 "topics": topic_name,
                 "transforms.changeTopic.replacement": f"{topic_name}-index-v1",
             },
diff --git a/tests/utils/test_dict_ops.py b/tests/utils/test_dict_ops.py
index 224934d87..197c1013a 100644
--- a/tests/utils/test_dict_ops.py
+++ b/tests/utils/test_dict_ops.py
@@ -4,6 +4,7 @@
 from pydantic import BaseModel
 
 from kpops.utils.dict_ops import generate_substitution, update_nested_pair
+from kpops.utils.types import JsonType
 
 
 class TestDictOps:
@@ -47,7 +48,12 @@
             ),
         ],
     )
-    def test_update_nested_pair(self, d1: dict, d2: dict, expected: dict):
+    def test_update_nested_pair(
+        self,
+        d1: dict[str, JsonType],
+        d2: dict[str, JsonType],
+        expected: dict[str, JsonType],
+    ):
         assert update_nested_pair(d1, d2) == expected
 
     def test_substitution_generation(self):

From 15c4667d0ea3b9a495f3b8c31bc7eef899a78444 Mon Sep 17 00:00:00 2001
From: Ivan Yordanov
Date: Wed, 17 Jan 2024 10:21:57 +0200
Subject: [PATCH 33/34] Summarize all breaking changes in diffs at the top of the migration guide (#419)

---
 docs/docs/user/migration-guide/v2-v3.md | 105 +++++++++++++++++++++++-
 1 file changed, 104 insertions(+), 1 deletion(-)

diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
index c7b5bfc99..2d8d2cf42 100644
--- a/docs/docs/user/migration-guide/v2-v3.md
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -1,5 +1,7 @@
 # Migrate from V2 to V3
 
+[**Jump to the summary**](#summary)
+
 ## [Use hash and trim long Helm release names instead of only trimming](https://github.com/bakdata/kpops/pull/390)
 
 KPOps handles long (more than 53 characters) Helm release names differently. Helm will not find your (long) old release names anymore. Therefore, it is recommended to destroy your pipeline once with KPOps v2 to remove old Helm release names. After a clean destroy, re-deploy your pipeline with KPOps v3.
@@ -223,7 +225,7 @@ If you're using this functionality in your custom components, it needs to be upd
 
 The global configuration variables are now namespaced under the config key, such as `${config.kafka_brokers}`, `${config.schema_registry.url}`. Same with pipeline variables, e.g. `${pipeline_name} → ${pipeline.name}`. This would make it more uniform with the existing `${component.}` variables.
-### pipeline.yaml
+#### pipeline.yaml
 
 ```diff
  name: kafka-app
@@ -236,3 +238,104 @@ This would make it more uniform with the existing `${component.}` variables
 +   brokers: ${config.kafka_brokers}
 +   schemaRegistryUrl: ${config.schema_registry.url}
 ```
+
+## Summary
+
+!!! warning
+
+    [**Helm will not find your (long) old release names anymore.**](#use-hash-and-trim-long-helm-release-names-instead-of-only-trimming)
+
+??? example "defaults.yaml"
+
+    ```diff
+     kafka-app:
+       app:
+         streams: ...
+
+    + streams-bootstrap:
+        repo_config: ...
+        version: ...
+    ```
+
+??? example "pipeline.yaml"
+
+    ```diff
+    - - type: kubernetes-app
+    + - type: helm-app
+      ...
+    - type: kafka-app
+      app:
+    -   brokers: ${brokers}
+    +   brokers: ${config.kafka_brokers}
+        labels:
+    -     app_schedule: "${component_app_schedule}"
+    +     app_schedule: "${component.app.schedule}"
+      ...
+    - type: kafka-connector
+    -   namespace: my-namespace
+    +   resetter_namespace: my-namespace
+      ...
+    ```
+
+??? example "config.yaml"
+
+    ```diff
+    - environment: development
+
+    + components_module: components
+
+    + pipeline_base_dir: pipelines
+
+    - brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+    + kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092"
+
+    - kafka_rest_host: "http://my-custom-rest.url:8082"
+    + kafka_rest:
+    +   url: "http://my-custom-rest.url:8082"
+
+    - kafka_connect_host: "http://my-custom-connect.url:8083"
+    + kafka_connect:
+    +   url: "http://my-custom-connect.url:8083"
+
+    - schema_registry_url: "http://my-custom-sr.url:8081"
+    + schema_registry:
+    +   enabled: true
+    +   url: "http://my-custom-sr.url:8081"
+
+      topic_name_config:
+    -   default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic"
+    +   default_error_topic_name: "${pipeline.name}-${component.name}-dead-letter-topic"
+      ...
+    ```
+
+??? example "custom_module.py"
+
+    ```diff
+    - from kpops.components import KubernetesApp
+    + from kpops.components import HelmApp
+    + from kpops.components.base_components.models.resource import Resource
+
+    - class CustomHelmApp(KubernetesApp):
+    + class CustomHelmApp(HelmApp):
+
+      @override
+    - def template(self) -> None:
+    + def manifest(self) -> Resource:
+        """Render final component resources, e.g. Kubernetes manifests."""
+        return []  # list of manifests
+      ...
+    ```
+
+??? example "github_ci_workflow.yaml"
+
+    ```diff
+    steps:
+      - name: ...
+    -   uses: bakdata/kpops/actions/kpops-runner@main
+    +   uses: bakdata/kpops@main
+      ...
+    ```
+

From a90984b19671d3266ca61e023a21f0dc517b75e1 Mon Sep 17 00:00:00 2001
From: Salomon Popp
Date: Wed, 17 Jan 2024 10:04:01 +0100
Subject: [PATCH 34/34] Fix wrong Helm release name character limit (#418)

---
 docs/docs/user/migration-guide/v2-v3.md              |  4 ++--
 kpops/component_handlers/helm_wrapper/utils.py       | 11 ++++++-----
 tests/component_handlers/helm_wrapper/test_utils.py  |  6 +++---
 tests/components/test_kafka_connector.py             |  2 +-
 4 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md
index 2d8d2cf42..1556c0339 100644
--- a/docs/docs/user/migration-guide/v2-v3.md
+++ b/docs/docs/user/migration-guide/v2-v3.md
@@ -6,12 +6,12 @@
 
 KPOps handles long (more than 53 characters) Helm release names differently. Helm will not find your (long) old release names anymore. Therefore, it is recommended to destroy your pipeline once with KPOps v2 to remove old Helm release names. After a clean destroy, re-deploy your pipeline with KPOps v3.
 
-For example if you have a component with the Helm release name `example-component-name-too-long-fake-fakefakefakefakefake`. The new release name will shorten the original name to 52 characters and then replace the last 6 characters of the trimmed name with the first 5 characters of the result of SHA-1(helm_release_name).
+For example, if you have a component with the Helm release name `example-component-name-too-long-fake-fakefakefakefakefake`, the new release name is the original name trimmed to 53 characters, with the last 6 characters of the trimmed name replaced by `-` followed by the first 5 characters of SHA-1(helm_release_name).
 
 ```console
-example-component-name-too-long-fake-fakefakef-0a7fc ----> 52 chars
+example-component-name-too-long-fake-fakefakefa-0a7fc ----> 53 chars
 ---------------------------------------------- -----
 ^Shortened helm_release_name                   ^first 5 characters of SHA1(helm_release_name)
 ```
diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py
index 5f5577842..4b892996f 100644
--- a/kpops/component_handlers/helm_wrapper/utils.py
+++ b/kpops/component_handlers/helm_wrapper/utils.py
@@ -4,16 +4,17 @@
 log = logging.getLogger("HelmUtils")
 ENCODING = "utf-8"
-RELEASE_NAME_MAX_LEN = 52
+RELEASE_NAME_MAX_LEN = 53
 
 
 def create_helm_release_name(name: str, suffix: str = "") -> str:
     """Shortens the long Helm release name.
 
-    Creates a 52 character long release name if the name length exceeds the Helm release character length.
-    It first trims the string and fetches the first RELEASE_NAME_MAX_LEN - len(suffix) characters.
-    Then it replaces the last 6 characters with the SHA-1 encoded string (with "-") to avoid collision
-    and append the suffix if given.
+    Helm has a limit of 53 characters for release names.
+    If the name exceeds the character limit:
+    1. trim the string and fetch the first RELEASE_NAME_MAX_LEN - len(suffix) characters
+    2. replace the last 6 characters with "-" plus the first 5 characters of the SHA-1 hash to avoid collisions
+    3. append the suffix if given
 
     :param name: The Helm release name to be shortened.
:param suffix: The release suffix to preserve diff --git a/tests/component_handlers/helm_wrapper/test_utils.py b/tests/component_handlers/helm_wrapper/test_utils.py index 4a7111d88..a0acc4707 100644 --- a/tests/component_handlers/helm_wrapper/test_utils.py +++ b/tests/component_handlers/helm_wrapper/test_utils.py @@ -8,9 +8,9 @@ def test_helm_release_name_for_long_names(): actual_release_name = create_helm_release_name(long_release_name) - expected_helm_release_name = "example-component-name-too-long-fake-fakefakef-0a7fc" + expected_helm_release_name = "example-component-name-too-long-fake-fakefakefa-0a7fc" assert expected_helm_release_name == actual_release_name - assert len(expected_helm_release_name) == 52 + assert len(expected_helm_release_name) == 53 def test_helm_release_name_for_install_and_clean_must_be_different(): @@ -30,4 +30,4 @@ def test_helm_release_name_for_short_names(): actual_helm_release_name = create_helm_release_name(short_release_name) assert actual_helm_release_name == short_release_name - assert len(actual_helm_release_name) < 53 + assert len(actual_helm_release_name) <= 53 diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 3e01259a2..16d178f02 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -17,7 +17,7 @@ CONNECTOR_NAME = "test-connector-with-long-name-0123456789abcdefghijklmnop" CONNECTOR_FULL_NAME = "${pipeline.name}-" + CONNECTOR_NAME CONNECTOR_CLEAN_FULL_NAME = CONNECTOR_FULL_NAME + "-clean" -CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline.name}-test-connector-with-lon-612f3-clean" +CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline.name}-test-connector-with-long-612f3-clean" CONNECTOR_CLASS = "com.bakdata.connect.TestConnector" RESETTER_NAMESPACE = "test-namespace"
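
The defaults-enrichment fix in PATCH 32/34 hinges on one detail: `extend_with_defaults` dumps nested pydantic models with `model_dump(exclude_unset=True)` before the nested merge, so `defaults.yaml` can still fill in untouched nested fields. A minimal, self-contained sketch of that interaction (the merge helper is a simplified stand-in, not the actual KPOps implementation; `Nested` mirrors the test fixture above):

```python
# Illustrative sketch only -- simplified stand-ins, not the KPOps implementation.
from typing import Any

import pydantic


class Nested(pydantic.BaseModel):
    # Mirrors the test fixture: arbitrary extra fields are allowed.
    model_config = pydantic.ConfigDict(extra="allow")


def update_nested_pair(original: dict[str, Any], other: dict[str, Any]) -> dict[str, Any]:
    """Add fields from `other` that are missing in `original`, recursing into nested dicts."""
    for key, value in other.items():
        if isinstance(value, dict) and isinstance(original.get(key), dict):
            update_nested_pair(original[key], value)
        elif key not in original:
            original[key] = value
    return original


defaults = {"nested": {"foo": "foo"}}  # from defaults.yaml
kwargs: dict[str, Any] = {"nested": Nested(bar=False)}  # user-supplied init kwarg

# Without the dump, kwargs["nested"] is a BaseModel, not a dict, so the nested
# merge cannot recurse into it and the default {"foo": "foo"} would be lost.
if isinstance(kwargs["nested"], pydantic.BaseModel):
    kwargs["nested"] = kwargs["nested"].model_dump(exclude_unset=True)  # {"bar": False}

merged = update_nested_pair(kwargs, defaults)
assert merged == {"nested": {"bar": False, "foo": "foo"}}
```

This is exactly the behavior asserted by `test_merge_defaults` above: the user-set `bar` survives while the `foo` default is merged in.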
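The release-name scheme corrected in PATCH 34/34 can be sketched the same way. The following is an illustrative reimplementation under stated assumptions (suffix handling is inferred from `test_kafka_connector.py`; the real `create_helm_release_name` may treat edge cases differently):

```python
# Illustrative sketch of the trim-and-hash scheme; the exact edge-case and
# suffix semantics of KPOps' create_helm_release_name are assumed, not verified.
import hashlib

RELEASE_NAME_MAX_LEN = 53  # Helm's hard limit on release name length


def create_helm_release_name(name: str, suffix: str = "") -> str:
    if len(name) <= RELEASE_NAME_MAX_LEN:
        return name
    # 1. keep the first RELEASE_NAME_MAX_LEN - len(suffix) characters
    trimmed = name[: RELEASE_NAME_MAX_LEN - len(suffix)]
    # 2. swap the last 6 characters for "-" + the first 5 hex chars of SHA-1(name)
    #    (SHA-1 serves as a cheap fingerprint here, not as a security measure)
    digest = hashlib.sha1(name.encode("utf-8")).hexdigest()
    # 3. re-append the suffix, keeping the total length at RELEASE_NAME_MAX_LEN
    return f"{trimmed[:-6]}-{digest[:5]}{suffix}"


print(create_helm_release_name("example-component-name-too-long-fake-fakefakefakefakefake"))
# -> example-component-name-too-long-fake-fakefakefa-0a7fc  (53 chars)
```

Applied to the migration-guide example, this yields `example-component-name-too-long-fake-fakefakefa-0a7fc`, matching the updated 53-character expectation in `test_helm_release_name_for_long_names`, and with a `-clean` suffix it preserves the suffix as in `CONNECTOR_CLEAN_RELEASE_NAME`.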