From 62bfb0339236a62cf5518fced8823954cddf7479 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 12:29:05 +0100 Subject: [PATCH 01/36] Refactor workaround for setting Helm app `nameOverride` --- docs/docs/schema/pipeline.json | 33 ++++++++++++------- kpops/components/base_components/helm_app.py | 16 ++++++++- kpops/components/base_components/kafka_app.py | 9 ++--- kpops/pipeline.py | 10 ------ tests/components/test_helm_app.py | 32 ++++++++++-------- 5 files changed, 57 insertions(+), 43 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 0882ccfa5..dcc11287c 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -66,10 +66,10 @@ "app": { "allOf": [ { - "$ref": "#/$defs/KubernetesAppConfig" + "$ref": "#/$defs/HelmAppConfig" } ], - "description": "Application-specific settings" + "description": "Helm app values" }, "from": { "anyOf": [ @@ -146,6 +146,26 @@ "title": "HelmApp", "type": "object" }, + "HelmAppConfig": { + "additionalProperties": true, + "description": "", + "properties": { + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "title": "Nameoverride" + } + }, + "title": "HelmAppConfig", + "type": "object" + }, "HelmRepoConfig": { "description": "Helm repository configuration.", "properties": { @@ -420,13 +440,6 @@ "title": "KafkaSourceConnector", "type": "object" }, - "KubernetesAppConfig": { - "additionalProperties": true, - "description": "Settings specific to Kubernetes apps.", - "properties": {}, - "title": "KubernetesAppConfig", - "type": "object" - }, "OutputTopicTypes": { "description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)", "enum": [ @@ -588,7 +601,6 @@ } ], "default": null, - "description": "Override name with this value", "title": "Nameoverride" }, "streams": { @@ -873,7 +885,6 @@ } ], "default": null, - "description": "Override name with this value", "title": "Nameoverride" }, "streams": { diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index 5d70bacfd..1c61b0473 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -16,7 +16,10 @@ HelmTemplateFlags, HelmUpgradeInstallFlags, ) -from kpops.components.base_components.kubernetes_app import KubernetesApp +from kpops.components.base_components.kubernetes_app import ( + KubernetesApp, + KubernetesAppConfig, +) from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import exclude_by_name @@ -24,6 +27,10 @@ log = logging.getLogger("HelmApp") +class HelmAppConfig(KubernetesAppConfig): # TODO: rename HelmAppValues + name_override: str | None = None + + class HelmApp(KubernetesApp): """Kubernetes app managed through Helm with an associated Helm chart. @@ -31,6 +38,7 @@ class HelmApp(KubernetesApp): deploying the component, defaults to None this means that the command "helm repo add" is not called and Helm expects a path to local Helm chart. 
:param version: Helm chart version, defaults to None + :param app: Helm app values """ repo_config: HelmRepoConfig | None = Field( @@ -41,6 +49,10 @@ class HelmApp(KubernetesApp): default=None, description=describe_attr("version", __doc__), ) + app: HelmAppConfig = Field( + default=..., + description=describe_attr("app", __doc__), + ) @cached_property def helm(self) -> Helm: @@ -142,6 +154,8 @@ def to_helm_values(self) -> dict: :returns: Thte values to be used by Helm """ + if self.app.name_override is None: + self.app.name_override = self.full_name return self.app.model_dump( by_alias=True, exclude_none=True, exclude_defaults=True ) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index b62e54bab..f4e766233 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -11,8 +11,7 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import trim_release_name -from kpops.components.base_components.helm_app import HelmApp -from kpops.components.base_components.kubernetes_app import KubernetesAppConfig +from kpops.components.base_components.helm_app import HelmApp, HelmAppConfig from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel @@ -36,19 +35,15 @@ class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): ) -class KafkaAppConfig(KubernetesAppConfig): +class KafkaAppConfig(HelmAppConfig): """Settings specific to Kafka Apps. :param streams: Kafka streams config - :param name_override: Override name with this value, defaults to None """ streams: KafkaStreamsConfig = Field( default=..., description=describe_attr("streams", __doc__) ) - name_override: str | None = Field( - default=None, description=describe_attr("name_override", __doc__) - ) class KafkaApp(HelmApp, ABC): diff --git a/kpops/pipeline.py b/kpops/pipeline.py index ad69521e1..a409f8e35 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -3,7 +3,6 @@ import json import logging from collections import Counter -from contextlib import suppress from dataclasses import dataclass, field from typing import TYPE_CHECKING @@ -53,7 +52,6 @@ def find(self, component_name: str) -> PipelineComponent: raise ValueError(msg) def add(self, component: PipelineComponent) -> None: - self._populate_component_name(component) self.root.append(component) def __bool__(self) -> bool: @@ -78,14 +76,6 @@ def validate_unique_names(self) -> None: msg = f"step names should be unique. 
duplicate step names: {', '.join(duplicates)}" raise ValidationError(msg) - @staticmethod - def _populate_component_name(component: PipelineComponent) -> None: # TODO: remove - with suppress( - AttributeError # Some components like Kafka Connect do not have a name_override attribute - ): - if (app := getattr(component, "app")) and app.name_override is None: - app.name_override = component.full_name - def create_env_components_index( environment_components: list[dict], diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index 0b933b1e9..021617c94 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -12,18 +12,13 @@ HelmUpgradeInstallFlags, RepoAuthFlags, ) -from kpops.components.base_components.helm_app import HelmApp -from kpops.components.base_components.kubernetes_app import KubernetesAppConfig +from kpops.components.base_components.helm_app import HelmApp, HelmAppConfig from kpops.config import KpopsConfig from kpops.utils.colorify import magentaify DEFAULTS_PATH = Path(__file__).parent / "resources" -class HelmTestValue(KubernetesAppConfig): - name_override: str - - class TestHelmApp: @pytest.fixture() def config(self) -> KpopsConfig: @@ -51,8 +46,8 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.helm_app.log.info") @pytest.fixture() - def app_value(self) -> HelmTestValue: - return HelmTestValue(name_override="test-value") + def app_value(self) -> HelmAppConfig: + return HelmAppConfig(**{"foo": "test-value"}) @pytest.fixture() def repo_config(self) -> HelmRepoConfig: @@ -63,7 +58,7 @@ def helm_app( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: HelmTestValue, + app_value: HelmAppConfig, repo_config: HelmRepoConfig, ) -> HelmApp: return HelmApp( @@ -97,7 +92,10 @@ def test_should_lazy_load_helm_wrapper_and_not_repo_add( "test/test-chart", False, "test-namespace", - {"nameOverride": "test-value"}, + { + "nameOverride": "${pipeline_name}-test-helm-app", + "foo": "test-value", + }, HelmUpgradeInstallFlags(), ) @@ -107,7 +105,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( handlers: ComponentHandlers, helm_mock: MagicMock, mocker: MockerFixture, - app_value: HelmTestValue, + app_value: HelmAppConfig, ): repo_config = HelmRepoConfig( repository_name="test-repo", url="https://test.com/charts/" @@ -142,7 +140,10 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( "test/test-chart", False, "test-namespace", - {"nameOverride": "test-value"}, + { + "nameOverride": "${pipeline_name}-test-helm-app", + "foo": "test-value", + }, HelmUpgradeInstallFlags(version="3.4.5"), ), ] @@ -152,7 +153,7 @@ def test_should_deploy_app_with_local_helm_chart( config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, - app_value: HelmTestValue, + app_value: HelmAppConfig, ): class AppWithLocalChart(HelmApp): repo_config: None = None @@ -179,7 +180,10 @@ def helm_chart(self) -> str: "path/to/helm/charts/", False, "test-namespace", - {"nameOverride": "test-value"}, + { + "nameOverride": "${pipeline_name}-test-app-with-local-chart", + "foo": "test-value", + }, HelmUpgradeInstallFlags(), ) From 55089bebfee3765d818622e984f88140f8925660 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 13:30:53 +0100 Subject: [PATCH 02/36] Update snapshot assertions --- tests/components/test_kafka_app.py | 1 + tests/components/test_producer_app.py | 3 ++ tests/components/test_streams_app.py | 5 
++- tests/pipeline/snapshots/snap_test_example.py | 5 --- .../pipeline/snapshots/snap_test_pipeline.py | 37 ------------------- 5 files changed, 8 insertions(+), 43 deletions(-) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index 06af5d4f5..1ec8d3752 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -97,6 +97,7 @@ def test_should_deploy_kafka_app( True, "test-namespace", { + "nameOverride": "${pipeline_name}-example-name", "streams": {"brokers": "fake-broker:9092", "outputTopic": "test"}, }, HelmUpgradeInstallFlags(version="1.2.3"), diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 89ca25bdd..66ab9861f 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -120,6 +120,7 @@ def test_deploy_order_when_dry_run_is_false( False, "test-namespace", { + "nameOverride": "${pipeline_name}-" + self.PRODUCER_APP_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", @@ -184,6 +185,7 @@ def test_should_not_reset_producer_app( True, "test-namespace", { + "nameOverride": "${pipeline_name}-" + self.PRODUCER_APP_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", @@ -229,6 +231,7 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w False, "test-namespace", { + "nameOverride": "${pipeline_name}-" + self.PRODUCER_APP_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 93f6022f2..d4e6fb6d9 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -323,6 +323,7 @@ def test_deploy_order_when_dry_run_is_false( dry_run, "test-namespace", { + "nameOverride": "${pipeline_name}-" + self.STREAMS_APP_NAME, "streams": { "brokers": "fake-broker:9092", "extraOutputTopics": { @@ -331,7 +332,7 @@ def test_deploy_order_when_dry_run_is_false( }, "outputTopic": "${output_topic_name}", "errorTopic": "${error_topic_name}", - } + }, }, HelmUpgradeInstallFlags( create_namespace=False, @@ -384,6 +385,7 @@ def test_reset_when_dry_run_is_false( dry_run, "test-namespace", { + "nameOverride": "${pipeline_name}-" + self.STREAMS_APP_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", @@ -428,6 +430,7 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( dry_run, "test-namespace", { + "nameOverride": "${pipeline_name}-" + self.STREAMS_APP_NAME, "streams": { "brokers": "fake-broker:9092", "outputTopic": "${output_topic_name}", diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index 14d3d650c..a88a7ee4a 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -13,7 +13,6 @@ 'debug': True, 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', 'imageTag': '1.0.0', - 'nameOverride': 'account-producer', 'prometheus': { 'jmx': { 'enabled': False @@ -64,7 +63,6 @@ 'debug': True, 'image': '${DOCKER_REGISTRY}/atm-demo-transactionavroproducer', 'imageTag': '1.0.0', - 'nameOverride': 'transaction-avro-producer', 'prometheus': { 'jmx': { 'enabled': False @@ -120,7 +118,6 @@ 'labels': { 'pipeline': 'bakdata-atm-fraud-detection' }, - 'nameOverride': 'transaction-joiner', 'prometheus': { 'jmx': { 'enabled': False @@ -182,7 +179,6 @@ 
'labels': { 'pipeline': 'bakdata-atm-fraud-detection' }, - 'nameOverride': 'fraud-detector', 'prometheus': { 'jmx': { 'enabled': False @@ -244,7 +240,6 @@ 'labels': { 'pipeline': 'bakdata-atm-fraud-detection' }, - 'nameOverride': 'account-linker', 'prometheus': { 'jmx': { 'enabled': False diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py index c9fee4d4b..0da4f9260 100644 --- a/tests/pipeline/snapshots/snap_test_pipeline.py +++ b/tests/pipeline/snapshots/snap_test_pipeline.py @@ -10,7 +10,6 @@ snapshots['TestPipeline.test_default_config test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-custom-config-app1', 'resources': { 'limits': { 'memory': '2G' @@ -58,7 +57,6 @@ 'labels': { 'pipeline': 'resources-custom-config' }, - 'nameOverride': 'resources-custom-config-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'resources-custom-config-app2-error', @@ -110,7 +108,6 @@ }, 'image': 'example-registry/fake-image', 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-inflate-scheduled-producer', 'schedule': '30 3/8 * * *', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -165,7 +162,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-pipeline-with-inflate-converter', 'resources': { 'limits': { 'memory': '2G' @@ -242,7 +238,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate', 'replicaCount': 4, 'resources': { 'requests': { @@ -345,7 +340,6 @@ }, { 'app': { - 'nameOverride': 'resources-pipeline-with-inflate-should-inflate-inflated-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -397,7 +391,6 @@ { 'app': { 'image': 'fake-image', - 'nameOverride': 'resources-kafka-connect-sink-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -492,7 +485,6 @@ }, 'image': 'example-registry/fake-image', 'imageTag': '0.0.1', - 'nameOverride': 'resources-first-pipeline-scheduled-producer', 'schedule': '30 3/8 * * *', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -547,7 +539,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-first-pipeline-converter', 'resources': { 'limits': { 'memory': '2G' @@ -624,7 +615,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', 'replicaCount': 4, 'resources': { 'requests': { @@ -683,7 +673,6 @@ snapshots['TestPipeline.test_model_serialization test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-pipeline-with-paths-account-producer', 'streams': { 'brokers': 'test', 'extraOutputTopics': { @@ -716,7 +705,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-no-input-topic-pipeline-app1', 'resources': { 'limits': { 'memory': '2G' @@ -779,7 +767,6 @@ }, { 'app': { - 'nameOverride': 'resources-no-input-topic-pipeline-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -839,7 +826,6 @@ { 'app': { 'image': 'fake-image', - 'nameOverride': 'resources-no-user-defined-components-streams-app', 'streams': { 'brokers': 
'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -904,7 +890,6 @@ }, 'image': 'example-registry/fake-image', 'imageTag': '0.0.1', - 'nameOverride': 'resources-pipeline-with-envs-input-producer', 'schedule': '20 3/8 * * *', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -959,7 +944,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-pipeline-with-envs-converter', 'resources': { 'limits': { 'memory': '2G' @@ -1036,7 +1020,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-pipeline-with-envs-filter', 'replicaCount': 4, 'resources': { 'requests': { @@ -1098,7 +1081,6 @@ 'debug': True, 'image': '${DOCKER_REGISTRY}/atm-demo-accountproducer', 'imageTag': '1.0.0', - 'nameOverride': 'from-pipeline-component-account-producer', 'prometheus': { 'jmx': { 'enabled': False @@ -1132,7 +1114,6 @@ snapshots['TestPipeline.test_read_from_component test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-read-from-component-producer1', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { @@ -1167,7 +1148,6 @@ }, { 'app': { - 'nameOverride': 'producer2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { @@ -1217,7 +1197,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'resources-read-from-component-inflate-step', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1314,7 +1293,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-inflated-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1377,7 +1355,6 @@ }, 'image': 'fake-registry/filter', 'imageTag': '2.4.1', - 'nameOverride': 'inflate-step-without-prefix', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1474,7 +1451,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-inflate-step-without-prefix-inflated-streams-app', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1522,7 +1498,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer1', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1579,7 +1554,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1634,7 +1608,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer3', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1689,7 +1662,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer4', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1740,7 +1712,6 @@ }, { 'app': { - 'nameOverride': 'resources-read-from-component-consumer5', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { @@ -1804,7 +1775,6 @@ 'app_schedule': '30 3/8 * * *', 'app_type': 'scheduled-producer' }, - 'nameOverride': 'resources-component-type-substitution-scheduled-producer', 'schedule': '30 3/8 * * *', 'streams': { 'brokers': 
'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', @@ -1859,7 +1829,6 @@ 'commandLine': { 'CONVERT_XML': True }, - 'nameOverride': 'resources-component-type-substitution-converter', 'resources': { 'limits': { 'memory': '2G' @@ -1943,7 +1912,6 @@ 'filter': 'filter-app-filter', 'test_placeholder_in_placeholder': 'filter-app-filter' }, - 'nameOverride': 'resources-component-type-substitution-filter-app', 'replicaCount': 4, 'resources': { 'requests': { @@ -2002,7 +1970,6 @@ snapshots['TestPipeline.test_with_custom_config_with_absolute_defaults_path test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-custom-config-app1', 'resources': { 'limits': { 'memory': '2G' @@ -2050,7 +2017,6 @@ 'labels': { 'pipeline': 'resources-custom-config' }, - 'nameOverride': 'resources-custom-config-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'app2-dead-letter-topic', @@ -2097,7 +2063,6 @@ snapshots['TestPipeline.test_with_custom_config_with_relative_defaults_path test-pipeline'] = [ { 'app': { - 'nameOverride': 'resources-custom-config-app1', 'resources': { 'limits': { 'memory': '2G' @@ -2145,7 +2110,6 @@ 'labels': { 'pipeline': 'resources-custom-config' }, - 'nameOverride': 'resources-custom-config-app2', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'app2-dead-letter-topic', @@ -2193,7 +2157,6 @@ { 'app': { 'image': 'fake-image', - 'nameOverride': 'resources-kafka-connect-sink-streams-app-development', 'streams': { 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'config': { From 8d9bb60124da9400cd9e73a7f7aebd16fc32af86 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 13:36:08 +0100 Subject: [PATCH 03/36] Use consistent naming for app values --- docs/docs/schema/pipeline.json | 76 +++++++++---------- kpops/components/base_components/helm_app.py | 6 +- kpops/components/base_components/kafka_app.py | 6 +- .../base_components/kubernetes_app.py | 4 +- .../streams_bootstrap/producer/model.py | 4 +- .../producer/producer_app.py | 4 +- .../streams_bootstrap/streams/model.py | 4 +- .../streams_bootstrap/streams/streams_app.py | 4 +- tests/components/test_helm_app.py | 12 +-- tests/components/test_kubernetes_app.py | 4 +- 10 files changed, 62 insertions(+), 62 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index dcc11287c..ea5ce7f7e 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -66,7 +66,7 @@ "app": { "allOf": [ { - "$ref": "#/$defs/HelmAppConfig" + "$ref": "#/$defs/HelmAppValues" } ], "description": "Helm app values" @@ -146,7 +146,7 @@ "title": "HelmApp", "type": "object" }, - "HelmAppConfig": { + "HelmAppValues": { "additionalProperties": true, "description": "", "properties": { @@ -163,7 +163,7 @@ "title": "Nameoverride" } }, - "title": "HelmAppConfig", + "title": "HelmAppValues", "type": "object" }, "HelmRepoConfig": { @@ -456,7 +456,7 @@ "app": { "allOf": [ { - "$ref": "#/$defs/ProducerValues" + "$ref": "#/$defs/ProducerAppValues" } ], "description": "Application-specific settings" @@ -536,6 +536,37 @@ "title": "ProducerApp", "type": "object" }, + "ProducerAppValues": { + "additionalProperties": true, + "description": "Settings specific to producers.", + "properties": { + "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "title": "Nameoverride" + }, + "streams": { + "allOf": [ + { + "$ref": 
"#/$defs/ProducerStreamsConfig" + } + ], + "description": "Kafka Streams settings" + } + }, + "required": [ + "streams" + ], + "title": "ProducerAppValues", + "type": "object" + }, "ProducerStreamsConfig": { "additionalProperties": true, "description": "Kafka Streams settings specific to Producer.", @@ -587,37 +618,6 @@ "title": "ProducerStreamsConfig", "type": "object" }, - "ProducerValues": { - "additionalProperties": true, - "description": "Settings specific to producers.", - "properties": { - "nameOverride": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "title": "Nameoverride" - }, - "streams": { - "allOf": [ - { - "$ref": "#/$defs/ProducerStreamsConfig" - } - ], - "description": "Kafka Streams settings" - } - }, - "required": [ - "streams" - ], - "title": "ProducerValues", - "type": "object" - }, "RepoAuthFlags": { "description": "Authorisation-related flags for `helm repo`.", "properties": { @@ -692,7 +692,7 @@ "app": { "allOf": [ { - "$ref": "#/$defs/StreamsAppConfig" + "$ref": "#/$defs/StreamsAppValues" } ], "description": "Application-specific settings" @@ -859,7 +859,7 @@ "title": "StreamsAppAutoScaling", "type": "object" }, - "StreamsAppConfig": { + "StreamsAppValues": { "additionalProperties": true, "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { @@ -899,7 +899,7 @@ "required": [ "streams" ], - "title": "StreamsAppConfig", + "title": "StreamsAppValues", "type": "object" }, "StreamsConfig": { diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index 1c61b0473..4583b7acb 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -18,7 +18,7 @@ ) from kpops.components.base_components.kubernetes_app import ( KubernetesApp, - KubernetesAppConfig, + KubernetesAppValues, ) from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr @@ -27,7 +27,7 @@ log = logging.getLogger("HelmApp") -class HelmAppConfig(KubernetesAppConfig): # TODO: rename HelmAppValues +class HelmAppValues(KubernetesAppValues): name_override: str | None = None @@ -49,7 +49,7 @@ class HelmApp(KubernetesApp): default=None, description=describe_attr("version", __doc__), ) - app: HelmAppConfig = Field( + app: HelmAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index f4e766233..df6f54568 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -11,7 +11,7 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import trim_release_name -from kpops.components.base_components.helm_app import HelmApp, HelmAppConfig +from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel @@ -35,7 +35,7 @@ class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): ) -class KafkaAppConfig(HelmAppConfig): +class KafkaAppValues(HelmAppValues): """Settings specific to Kafka Apps. 
:param streams: Kafka streams config @@ -58,7 +58,7 @@ class KafkaApp(HelmApp, ABC): :param version: Helm chart version, defaults to "2.9.0" """ - app: KafkaAppConfig = Field( + app: KafkaAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index cae474cee..2b4065191 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -18,7 +18,7 @@ ) -class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): +class KubernetesAppValues(CamelCaseConfigModel, DescConfigModel): """Settings specific to Kubernetes apps.""" model_config = ConfigDict( @@ -39,7 +39,7 @@ class KubernetesApp(PipelineComponent, ABC): default=..., description=describe_attr("namespace", __doc__), ) - app: KubernetesAppConfig = Field( + app: KubernetesAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 01bda1dbc..53db5af67 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -1,7 +1,7 @@ from pydantic import ConfigDict, Field from kpops.components.base_components.kafka_app import ( - KafkaAppConfig, + KafkaAppValues, KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr @@ -22,7 +22,7 @@ class ProducerStreamsConfig(KafkaStreamsConfig): ) -class ProducerValues(KafkaAppConfig): +class ProducerAppValues(KafkaAppValues): """Settings specific to producers. :param streams: Kafka Streams settings diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 6091cdd77..e37529bae 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -9,7 +9,7 @@ TopicConfig, ) from kpops.components.streams_bootstrap.app_type import AppType -from kpops.components.streams_bootstrap.producer.model import ProducerValues +from kpops.components.streams_bootstrap.producer.model import ProducerAppValues from kpops.utils.docstring import describe_attr @@ -25,7 +25,7 @@ class ProducerApp(KafkaApp): :param from_: Producer doesn't support FromSection, defaults to None """ - app: ProducerValues = Field( + app: ProducerAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 2c8b952ce..b52bc162c 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -5,7 +5,7 @@ from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( - KafkaAppConfig, + KafkaAppValues, KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr @@ -166,7 +166,7 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): model_config = ConfigDict(extra="allow") -class StreamsAppConfig(KafkaAppConfig): +class StreamsAppValues(KafkaAppValues): """StreamsBoostrap app configurations. The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart. 
diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index a466b4eba..e8a434b70 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -3,7 +3,7 @@ from kpops.components.base_components.kafka_app import KafkaApp from kpops.components.streams_bootstrap.app_type import AppType -from kpops.components.streams_bootstrap.streams.model import StreamsAppConfig +from kpops.components.streams_bootstrap.streams.model import StreamsAppValues from kpops.utils.docstring import describe_attr @@ -13,7 +13,7 @@ class StreamsApp(KafkaApp): :param app: Application-specific settings """ - app: StreamsAppConfig = Field( + app: StreamsAppValues = Field( default=..., description=describe_attr("app", __doc__), ) diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index 021617c94..8afef52d9 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -12,7 +12,7 @@ HelmUpgradeInstallFlags, RepoAuthFlags, ) -from kpops.components.base_components.helm_app import HelmApp, HelmAppConfig +from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.config import KpopsConfig from kpops.utils.colorify import magentaify @@ -46,8 +46,8 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.helm_app.log.info") @pytest.fixture() - def app_value(self) -> HelmAppConfig: - return HelmAppConfig(**{"foo": "test-value"}) + def app_value(self) -> HelmAppValues: + return HelmAppValues(**{"foo": "test-value"}) @pytest.fixture() def repo_config(self) -> HelmRepoConfig: @@ -58,7 +58,7 @@ def helm_app( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: HelmAppConfig, + app_value: HelmAppValues, repo_config: HelmRepoConfig, ) -> HelmApp: return HelmApp( @@ -105,7 +105,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( handlers: ComponentHandlers, helm_mock: MagicMock, mocker: MockerFixture, - app_value: HelmAppConfig, + app_value: HelmAppValues, ): repo_config = HelmRepoConfig( repository_name="test-repo", url="https://test.com/charts/" @@ -153,7 +153,7 @@ def test_should_deploy_app_with_local_helm_chart( config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, - app_value: HelmAppConfig, + app_value: HelmAppValues, ): class AppWithLocalChart(HelmApp): repo_config: None = None diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 95ab11f6c..8c887221d 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -7,14 +7,14 @@ from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.kubernetes_app import ( KubernetesApp, - KubernetesAppConfig, + KubernetesAppValues, ) from kpops.config import KpopsConfig DEFAULTS_PATH = Path(__file__).parent / "resources" -class KubernetesTestValue(KubernetesAppConfig): +class KubernetesTestValue(KubernetesAppValues): foo: str From 934e4ededb7b5dace226304158412809bf82da97 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 13:51:46 +0100 Subject: [PATCH 04/36] Rename fixture --- tests/components/test_helm_app.py | 14 +++++++------- tests/components/test_kubernetes_app.py | 14 +++++++------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/components/test_helm_app.py 
b/tests/components/test_helm_app.py index 8afef52d9..e43c9de41 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -46,7 +46,7 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.helm_app.log.info") @pytest.fixture() - def app_value(self) -> HelmAppValues: + def app_values(self) -> HelmAppValues: return HelmAppValues(**{"foo": "test-value"}) @pytest.fixture() @@ -58,14 +58,14 @@ def helm_app( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: HelmAppValues, + app_values: HelmAppValues, repo_config: HelmRepoConfig, ) -> HelmApp: return HelmApp( name="test-helm-app", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", repo_config=repo_config, ) @@ -105,7 +105,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( handlers: ComponentHandlers, helm_mock: MagicMock, mocker: MockerFixture, - app_value: HelmAppValues, + app_values: HelmAppValues, ): repo_config = HelmRepoConfig( repository_name="test-repo", url="https://test.com/charts/" @@ -114,7 +114,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( name="test-helm-app", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", repo_config=repo_config, version="3.4.5", @@ -153,7 +153,7 @@ def test_should_deploy_app_with_local_helm_chart( config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, - app_value: HelmAppValues, + app_values: HelmAppValues, ): class AppWithLocalChart(HelmApp): repo_config: None = None @@ -167,7 +167,7 @@ def helm_chart(self) -> str: name="test-app-with-local-chart", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 8c887221d..aeec95479 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -36,7 +36,7 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.components.base_components.kubernetes_app.log.info") @pytest.fixture() - def app_value(self) -> KubernetesTestValue: + def app_values(self) -> KubernetesTestValue: return KubernetesTestValue(foo="foo") @pytest.fixture() @@ -44,13 +44,13 @@ def kubernetes_app( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: KubernetesTestValue, + app_values: KubernetesTestValue, ) -> KubernetesApp: return KubernetesApp( name="test-kubernetes-app", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) @@ -58,7 +58,7 @@ def test_should_raise_value_error_when_name_is_not_valid( self, config: KpopsConfig, handlers: ComponentHandlers, - app_value: KubernetesTestValue, + app_values: KubernetesTestValue, ): with pytest.raises( ValueError, match=r"The component name .* is invalid for Kubernetes." 
@@ -67,7 +67,7 @@ def test_should_raise_value_error_when_name_is_not_valid( name="Not-Compatible*", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) @@ -78,7 +78,7 @@ def test_should_raise_value_error_when_name_is_not_valid( name="snake_case*", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) @@ -86,6 +86,6 @@ def test_should_raise_value_error_when_name_is_not_valid( name="valid-name", config=config, handlers=handlers, - app=app_value, + app=app_values, namespace="test-namespace", ) From 1c4e3134b99de5ffb9964a4c9b16d7b70b620e83 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 13:56:36 +0100 Subject: [PATCH 05/36] Refactor --- kpops/component_handlers/kafka_connect/model.py | 11 ++--------- kpops/components/base_components/helm_app.py | 13 ++++++++++--- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index a7ec45af9..2182ff1b6 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -11,6 +11,7 @@ from pydantic.json_schema import SkipJsonSchema from typing_extensions import override +from kpops.components.base_components.helm_app import HelmAppValues from kpops.utils.pydantic import ( CamelCaseConfigModel, DescConfigModel, @@ -99,14 +100,6 @@ class KafkaConnectResetterConfig(CamelCaseConfigModel): offset_topic: str | None = None -class KafkaConnectResetterValues(CamelCaseConfigModel): +class KafkaConnectResetterValues(HelmAppValues): connector_type: Literal["source", "sink"] config: KafkaConnectResetterConfig - name_override: str - - # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer` - # BEWARE! All default values are enforced, hard to replicate without - # access to ``model_dump`` - @override - def model_dump(self, **_) -> dict[str, Any]: - return super().model_dump(by_alias=True, exclude_none=True) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index 4583b7acb..d0aac74d8 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -30,6 +30,15 @@ class HelmAppValues(KubernetesAppValues): name_override: str | None = None + # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer` + # BEWARE! All default values are enforced, hard to replicate without + # access to ``model_dump`` + @override + def model_dump(self, **_) -> dict[str, Any]: + return super().model_dump( + by_alias=True, exclude_none=True, exclude_defaults=True + ) + class HelmApp(KubernetesApp): """Kubernetes app managed through Helm with an associated Helm chart. @@ -156,9 +165,7 @@ def to_helm_values(self) -> dict: """ if self.app.name_override is None: self.app.name_override = self.full_name - return self.app.model_dump( - by_alias=True, exclude_none=True, exclude_defaults=True - ) + return self.app.model_dump() def print_helm_diff(self, stdout: str) -> None: """Print the diff of the last and current release of this component. 
From 2309a7b1e66c0f71c3d3884b5790c6c90de33e51 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 14:04:12 +0100 Subject: [PATCH 06/36] Rename --- kpops/component_handlers/kafka_connect/model.py | 6 +++--- kpops/components/base_components/kafka_connector.py | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 2182ff1b6..59cdba7b9 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -93,13 +93,13 @@ class KafkaConnectConfigErrorResponse(BaseModel): configs: list[KafkaConnectConfigDescription] -class KafkaConnectResetterConfig(CamelCaseConfigModel): +class KafkaConnectorResetterConfig(CamelCaseConfigModel): brokers: str connector: str delete_consumer_group: bool | None = None offset_topic: str | None = None -class KafkaConnectResetterValues(HelmAppValues): +class KafkaConnectorResetterValues(HelmAppValues): connector_type: Literal["source", "sink"] - config: KafkaConnectResetterConfig + config: KafkaConnectorResetterConfig diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 7af2c5ae4..6f19754d2 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -20,9 +20,9 @@ from kpops.component_handlers.helm_wrapper.utils import trim_release_name from kpops.component_handlers.kafka_connect.model import ( KafkaConnectorConfig, + KafkaConnectorResetterConfig, + KafkaConnectorResetterValues, KafkaConnectorType, - KafkaConnectResetterConfig, - KafkaConnectResetterValues, ) from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.models.from_section import FromTopic @@ -176,7 +176,7 @@ def _run_connect_resetter( :param dry_run: If the cleanup should be run in dry run mode or not :param retain_clean_jobs: If the cleanup job should be kept - :param kwargs: Other values for the KafkaConnectResetter + :param kwargs: Other values for the KafkaConnectorResetter """ log.info( magentaify( @@ -237,8 +237,8 @@ def _get_kafka_connect_resetter_values( :return: The Helm chart values of the connector resetter """ return { - **KafkaConnectResetterValues( - config=KafkaConnectResetterConfig( + **KafkaConnectorResetterValues( + config=KafkaConnectorResetterConfig( connector=self.full_name, brokers=self.config.kafka_brokers, **kwargs, From f67a23f7c001794913aea3e419432c46e72c7ceb Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 16:27:40 +0100 Subject: [PATCH 07/36] Refactor streams-bootstrap cleanup --- kpops/components/base_components/helm_app.py | 6 - kpops/components/base_components/kafka_app.py | 124 ++++++++---------- .../producer/producer_app.py | 39 +++--- .../streams_bootstrap/streams/streams_app.py | 46 +++---- tests/components/test_streams_app.py | 2 +- 5 files changed, 107 insertions(+), 110 deletions(-) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index 36e3bd21b..d52976c7a 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -100,12 +100,6 @@ def helm_release_name(self) -> str: """The name for the Helm release. 
Can be overridden.""" return create_helm_release_name(self.full_name) - @property - def clean_release_name(self) -> str: - """The name for the Helm release for cleanup jobs. Can be overridden.""" - suffix = "-clean" - return create_helm_release_name(self.full_name + suffix, suffix) - @property def helm_chart(self) -> str: """Return component's Helm chart.""" diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index c7c983e0d..e4e376fe2 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -7,9 +7,11 @@ from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( + HelmFlags, HelmRepoConfig, HelmUpgradeInstallFlags, ) +from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel @@ -45,6 +47,54 @@ class KafkaAppValues(HelmAppValues): ) +class StreamsBootstrapHelmApp(HelmApp): + repo_config: HelmRepoConfig = Field( + default=HelmRepoConfig( + repository_name="bakdata-streams-bootstrap", + url="https://bakdata.github.io/streams-bootstrap/", + ), + description=describe_attr("repo_config", __doc__), + ) + + +class KafkaAppCleaner(StreamsBootstrapHelmApp): + @property + @override + def helm_chart(self) -> str: + raise NotImplementedError + + @property + @override + def helm_release_name(self) -> str: + suffix = "-clean" + return create_helm_release_name(self.full_name + suffix, suffix) + + @property + @override + def helm_flags(self) -> HelmFlags: + return HelmUpgradeInstallFlags( + create_namespace=self.config.create_namespace, + version=self.version, + wait=True, + wait_for_jobs=True, + ) + + def run(self, dry_run: bool) -> None: + """Clean an app using the respective cleanup job. + + :param dry_run: Dry run command + """ + log.info(f"Uninstall old cleanup job for {self.helm_release_name}") + self.destroy(dry_run=dry_run) + + log.info(f"Init cleanup job for {self.helm_release_name}") + self.deploy(dry_run=dry_run) + + if not self.config.retain_clean_jobs: + log.info(f"Uninstall cleanup job for {self.helm_release_name}") + self.destroy(dry_run=dry_run) + + class KafkaApp(HelmApp, ABC): """Base component for Kafka-based components. @@ -74,9 +124,14 @@ class KafkaApp(HelmApp, ABC): ) @property - def clean_up_helm_chart(self) -> str: - """Helm chart used to destroy and clean this component.""" - raise NotImplementedError + def _cleaner(self) -> KafkaAppCleaner: + return KafkaAppCleaner( + config=self.config, + handlers=self.handlers, + name=self.name, + namespace=self.namespace, + app=self.app, + ) @override def deploy(self, dry_run: bool) -> None: @@ -90,66 +145,3 @@ def deploy(self, dry_run: bool) -> None: to_section=self.to, dry_run=dry_run ) super().deploy(dry_run) - - def _run_clean_up_job( - self, - values: dict, - dry_run: bool, - retain_clean_jobs: bool = False, - ) -> None: - """Clean an app using the respective cleanup job. 
- - :param values: The value YAML for the chart - :param dry_run: Dry run command - :param retain_clean_jobs: Whether to retain the cleanup job, defaults to False - """ - log.info(f"Uninstall old cleanup job for {self.clean_release_name}") - - self.__uninstall_clean_up_job(self.clean_release_name, dry_run) - - log.info(f"Init cleanup job for {self.clean_release_name}") - - stdout = self.__install_clean_up_job(self.clean_release_name, values, dry_run) - - if dry_run: - self.dry_run_handler.print_helm_diff(stdout, self.clean_release_name, log) - - if not retain_clean_jobs: - log.info(f"Uninstall cleanup job for {self.clean_release_name}") - self.__uninstall_clean_up_job(self.clean_release_name, dry_run) - - def __uninstall_clean_up_job(self, release_name: str, dry_run: bool) -> None: - """Uninstall clean up job. - - :param release_name: Name of the Helm release - :param dry_run: Whether to do a dry run of the command - """ - self.helm.uninstall(self.namespace, release_name, dry_run) - - def __install_clean_up_job( - self, - release_name: str, - values: dict, - dry_run: bool, - ) -> str: - """Install clean up job. - - :param release_name: Name of the Helm release - :param suffix: Suffix to add to the release name, e.g. "-clean" - :param values: The Helm values for the chart - :param dry_run: Whether to do a dry run of the command - :return: Return the output of the installation - """ - return self.helm.upgrade_install( - release_name, - self.clean_up_helm_chart, - dry_run, - self.namespace, - values, - HelmUpgradeInstallFlags( - create_namespace=self.config.create_namespace, - version=self.version, - wait=True, - wait_for_jobs=True, - ), - ) diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index e37529bae..b92479e4c 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -1,9 +1,7 @@ -# from __future__ import annotations - from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import KafkaApp +from kpops.components.base_components.kafka_app import KafkaApp, KafkaAppCleaner from kpops.components.base_components.models.to_section import ( OutputTopicTypes, TopicConfig, @@ -13,6 +11,17 @@ from kpops.utils.docstring import describe_attr +class ProducerAppCleaner(KafkaAppCleaner): + app: ProducerAppValues + + @property + @override + def helm_chart(self) -> str: + return ( + f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}" + ) + + class ProducerApp(KafkaApp): """Producer component. 
@@ -36,6 +45,16 @@ class ProducerApp(KafkaApp): description=describe_attr("from_", __doc__), ) + @property + def _cleaner(self) -> ProducerAppCleaner: + return ProducerAppCleaner( + config=self.config, + handlers=self.handlers, + name=self.name, + namespace=self.namespace, + app=self.app, + ) + @override def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: match topic.type: @@ -58,17 +77,7 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None: def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.PRODUCER_APP.value}" - @property - @override - def clean_up_helm_chart(self) -> str: - return ( - f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}" - ) - @override def clean(self, dry_run: bool) -> None: - self._run_clean_up_job( - values=self.to_helm_values(), - dry_run=dry_run, - retain_clean_jobs=self.config.retain_clean_jobs, - ) + self._cleaner.app.streams.delete_output = True # TODO: sensible? + self._cleaner.run(dry_run) diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index e8a434b70..10752f444 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -1,12 +1,21 @@ from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import KafkaApp +from kpops.components.base_components.kafka_app import KafkaApp, KafkaAppCleaner from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.streams.model import StreamsAppValues from kpops.utils.docstring import describe_attr +class StreamsAppCleaner(KafkaAppCleaner): + app: StreamsAppValues + + @property + @override + def helm_chart(self) -> str: + return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" + + class StreamsApp(KafkaApp): """StreamsApp component that configures a streams bootstrap app. @@ -18,6 +27,16 @@ class StreamsApp(KafkaApp): description=describe_attr("app", __doc__), ) + @property + def _cleaner(self) -> StreamsAppCleaner: + return StreamsAppCleaner( + config=self.config, + handlers=self.handlers, + name=self.name, + namespace=self.namespace, + app=self.app, + ) + @override def add_input_topics(self, topics: list[str]) -> None: self.app.streams.add_input_topics(topics) @@ -51,29 +70,12 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None: def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.STREAMS_APP.value}" - @property - @override - def clean_up_helm_chart(self) -> str: - return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" - @override def reset(self, dry_run: bool) -> None: - self.__run_streams_clean_up_job(dry_run, delete_output=False) + self._cleaner.app.streams.delete_output = False + self._cleaner.run(dry_run) @override def clean(self, dry_run: bool) -> None: - self.__run_streams_clean_up_job(dry_run, delete_output=True) - - def __run_streams_clean_up_job(self, dry_run: bool, delete_output: bool) -> None: - """Run clean job for this Streams app. 
- - :param dry_run: Whether to do a dry run of the command - :param delete_output: Whether to delete the output of the app that is being cleaned - """ - values = self.to_helm_values() - values["streams"]["deleteOutput"] = delete_output - self._run_clean_up_job( - values=values, - dry_run=dry_run, - retain_clean_jobs=self.config.retain_clean_jobs, - ) + self._cleaner.app.streams.delete_output = True + self._cleaner.run(dry_run) diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index ebd5cf7d9..429c6490a 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -371,7 +371,7 @@ def test_reset_when_dry_run_is_false( self, streams_app: StreamsApp, mocker: MockerFixture ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install" + streams_app._cleaner.helm, "upgrade_install" ) mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") From 0578cb06e060fa89b3112314545ee040fdd273c4 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 17:48:03 +0100 Subject: [PATCH 08/36] Update tests --- docs/docs/schema/pipeline.json | 13 ++ kpops/components/base_components/kafka_app.py | 10 +- .../producer/producer_app.py | 9 +- .../streams_bootstrap/streams/model.py | 4 + .../streams_bootstrap/streams/streams_app.py | 8 +- tests/components/test_producer_app.py | 140 ++++++++++-------- tests/components/test_streams_app.py | 138 +++++++++-------- 7 files changed, 187 insertions(+), 135 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 77198a215..2a9c565d0 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -920,6 +920,19 @@ "title": "Config", "type": "object" }, + "deleteOutput": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", + "title": "Deleteoutput" + }, "errorTopic": { "anyOf": [ { diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index e4e376fe2..630ab4d40 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -94,6 +94,8 @@ def run(self, dry_run: bool) -> None: log.info(f"Uninstall cleanup job for {self.helm_release_name}") self.destroy(dry_run=dry_run) + # def factory # TODO? + class KafkaApp(HelmApp, ABC): """Base component for Kafka-based components. 
@@ -125,13 +127,7 @@ class KafkaApp(HelmApp, ABC): @property def _cleaner(self) -> KafkaAppCleaner: - return KafkaAppCleaner( - config=self.config, - handlers=self.handlers, - name=self.name, - namespace=self.namespace, - app=self.app, - ) + raise NotImplementedError @override def deploy(self, dry_run: bool) -> None: diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index b92479e4c..31aad8938 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -1,3 +1,5 @@ +from functools import cached_property + from pydantic import Field from typing_extensions import override @@ -45,14 +47,12 @@ class ProducerApp(KafkaApp): description=describe_attr("from_", __doc__), ) - @property + @cached_property def _cleaner(self) -> ProducerAppCleaner: return ProducerAppCleaner( config=self.config, handlers=self.handlers, - name=self.name, - namespace=self.namespace, - app=self.app, + **self.model_dump(), ) @override @@ -79,5 +79,4 @@ def helm_chart(self) -> str: @override def clean(self, dry_run: bool) -> None: - self._cleaner.app.streams.delete_output = True # TODO: sensible? self._cleaner.run(dry_run) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index b52bc162c..a162365fe 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -28,6 +28,7 @@ class StreamsConfig(KafkaStreamsConfig): :param output_topic: Output topic, defaults to None :param error_topic: Error topic, defaults to None :param config: Configuration, defaults to {} + :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to False """ input_topics: list[str] = Field( @@ -54,6 +55,9 @@ class StreamsConfig(KafkaStreamsConfig): config: dict[str, Any] = Field( default={}, description=describe_attr("config", __doc__) ) + delete_output: bool | None = Field( + default=None, description=describe_attr("delete_output", __doc__) + ) def add_input_topics(self, topics: list[str]) -> None: """Add given topics to the list of input topics. 
diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index 10752f444..d0aadd294 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -1,3 +1,5 @@ +from functools import cached_property + from pydantic import Field from typing_extensions import override @@ -27,14 +29,12 @@ class StreamsApp(KafkaApp): description=describe_attr("app", __doc__), ) - @property + @cached_property def _cleaner(self) -> StreamsAppCleaner: return StreamsAppCleaner( config=self.config, handlers=self.handlers, - name=self.name, - namespace=self.namespace, - app=self.app, + **self.model_dump(), ) @override diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 07e78bb6a..b5de6b67b 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -168,11 +168,13 @@ def test_should_not_reset_producer_app( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install" + producer_app._cleaner.helm, "upgrade_install" + ) + mock_helm_uninstall = mocker.patch.object( + producer_app._cleaner.helm, "uninstall" ) - mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock_helm_print_helm_diff = mocker.patch.object( - producer_app.dry_run_handler, "print_helm_diff" + producer_app._cleaner.dry_run_handler, "print_helm_diff" ) mock = mocker.MagicMock() @@ -182,45 +184,55 @@ def test_should_not_reset_producer_app( producer_app.clean(dry_run=True) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - True, - ), - mocker.call.helm_upgrade_install( - PRODUCER_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/producer-app-cleanup-job", - True, - "test-namespace", - { - "nameOverride": PRODUCER_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + True, + ), + ANY, # __bool__ + ANY, # __str__ + mocker.call.helm_upgrade_install( + PRODUCER_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/producer-app-cleanup-job", + True, + "test-namespace", + { + "nameOverride": PRODUCER_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + }, }, - }, - HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True), - ), - mocker.call.print_helm_diff( - ANY, - PRODUCER_APP_CLEAN_RELEASE_NAME, - logging.getLogger("KafkaApp"), - ), - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - True, - ), - ] + HelmUpgradeInstallFlags( + version="2.4.2", wait=True, wait_for_jobs=True + ), + ), + mocker.call.print_helm_diff( + ANY, + PRODUCER_APP_CLEAN_RELEASE_NAME, + logging.getLogger("HelmApp"), + ), + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + True, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( self, mocker: MockerFixture, producer_app: ProducerApp ): mock_helm_upgrade_install = mocker.patch.object( - producer_app.helm, "upgrade_install" + producer_app._cleaner.helm, "upgrade_install" + ) + mock_helm_uninstall = mocker.patch.object( + producer_app._cleaner.helm, "uninstall" ) - 
mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -228,29 +240,37 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w producer_app.clean(dry_run=False) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - False, - ), - mocker.call.helm_upgrade_install( - PRODUCER_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/producer-app-cleanup-job", - False, - "test-namespace", - { - "nameOverride": PRODUCER_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + False, + ), + ANY, # __bool__ + ANY, # __str__ + mocker.call.helm_upgrade_install( + PRODUCER_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/producer-app-cleanup-job", + False, + "test-namespace", + { + "nameOverride": PRODUCER_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + }, }, - }, - HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True), - ), - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - False, - ), - ] + HelmUpgradeInstallFlags( + version="2.4.2", wait=True, wait_for_jobs=True + ), + ), + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + False, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 429c6490a..1bdb8631d 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -1,5 +1,5 @@ from pathlib import Path -from unittest.mock import MagicMock +from unittest.mock import ANY, MagicMock import pytest from pytest_mock import MockerFixture @@ -17,6 +17,7 @@ TopicConfig, ToSection, ) +from kpops.components.streams_bootstrap.streams.streams_app import StreamsAppCleaner from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -370,10 +371,11 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): def test_reset_when_dry_run_is_false( self, streams_app: StreamsApp, mocker: MockerFixture ): - mock_helm_upgrade_install = mocker.patch.object( - streams_app._cleaner.helm, "upgrade_install" - ) - mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") + cleaner = streams_app._cleaner + assert isinstance(cleaner, StreamsAppCleaner) + + mock_helm_upgrade_install = mocker.patch.object(cleaner.helm, "upgrade_install") + mock_helm_uninstall = mocker.patch.object(cleaner.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -382,33 +384,41 @@ def test_reset_when_dry_run_is_false( dry_run = False streams_app.reset(dry_run=dry_run) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - mocker.call.helm_upgrade_install( - STREAMS_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/streams-app-cleanup-job", - dry_run, - "test-namespace", - { - "nameOverride": STREAMS_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - "deleteOutput": False, + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + 
STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ # FIXME: why is this in the call stack? + ANY, # __str__ + mocker.call.helm_upgrade_install( + STREAMS_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/streams-app-cleanup-job", + dry_run, + "test-namespace", + { + "nameOverride": STREAMS_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + "deleteOutput": False, + }, }, - }, - HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True), - ), - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ] + HelmUpgradeInstallFlags( + version="2.9.0", wait=True, wait_for_jobs=True + ), + ), + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( self, @@ -416,9 +426,11 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app.helm, "upgrade_install" + streams_app._cleaner.helm, "upgrade_install" + ) + mock_helm_uninstall = mocker.patch.object( + streams_app._cleaner.helm, "uninstall" ) - mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -427,30 +439,38 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( dry_run = False streams_app.clean(dry_run=dry_run) - assert mock.mock_calls == [ - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - mocker.call.helm_upgrade_install( - STREAMS_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/streams-app-cleanup-job", - dry_run, - "test-namespace", - { - "nameOverride": STREAMS_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - "deleteOutput": True, + mock.assert_has_calls( + [ + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + mocker.call.helm_upgrade_install( + STREAMS_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/streams-app-cleanup-job", + dry_run, + "test-namespace", + { + "nameOverride": STREAMS_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + "deleteOutput": True, + }, }, - }, - HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True), - ), - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ] + HelmUpgradeInstallFlags( + version="2.9.0", wait=True, wait_for_jobs=True + ), + ), + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ANY, # __bool__ + ANY, # __str__ + ] + ) From 44195e74f738dc56571da49d9f167028443d5fc3 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 17:56:58 +0100 Subject: [PATCH 09/36] Inherit from common streams-bootstrap app --- kpops/components/base_components/kafka_app.py | 20 ++----------------- .../producer/producer_app.py | 5 ++++- .../streams_bootstrap/streams/streams_app.py | 5 ++++- 3 files changed, 10 insertions(+), 20 deletions(-) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 630ab4d40..5eb8feada 100644 --- 
a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -47,7 +47,7 @@ class KafkaAppValues(HelmAppValues): ) -class StreamsBootstrapHelmApp(HelmApp): +class StreamsBootstrapHelmApp(HelmApp, ABC): repo_config: HelmRepoConfig = Field( default=HelmRepoConfig( repository_name="bakdata-streams-bootstrap", @@ -94,18 +94,13 @@ def run(self, dry_run: bool) -> None: log.info(f"Uninstall cleanup job for {self.helm_release_name}") self.destroy(dry_run=dry_run) - # def factory # TODO? - -class KafkaApp(HelmApp, ABC): +class KafkaApp(StreamsBootstrapHelmApp, ABC): """Base component for Kafka-based components. Producer or streaming apps should inherit from this class. :param app: Application-specific settings - :param repo_config: Configuration of the Helm chart repo to be used for - deploying the component, - defaults to HelmRepoConfig(repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/") :param version: Helm chart version, defaults to "2.9.0" """ @@ -113,22 +108,11 @@ class KafkaApp(HelmApp, ABC): default=..., description=describe_attr("app", __doc__), ) - repo_config: HelmRepoConfig = Field( - default=HelmRepoConfig( - repository_name="bakdata-streams-bootstrap", - url="https://bakdata.github.io/streams-bootstrap/", - ), - description=describe_attr("repo_config", __doc__), - ) version: str | None = Field( default="2.9.0", description=describe_attr("version", __doc__), ) - @property - def _cleaner(self) -> KafkaAppCleaner: - raise NotImplementedError - @override def deploy(self, dry_run: bool) -> None: if self.to: diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 31aad8938..465e9fa24 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -3,7 +3,10 @@ from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import KafkaApp, KafkaAppCleaner +from kpops.components.base_components.kafka_app import ( + KafkaApp, + KafkaAppCleaner, +) from kpops.components.base_components.models.to_section import ( OutputTopicTypes, TopicConfig, diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index d0aadd294..deedbd137 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -3,7 +3,10 @@ from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import KafkaApp, KafkaAppCleaner +from kpops.components.base_components.kafka_app import ( + KafkaApp, + KafkaAppCleaner, +) from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.streams.model import StreamsAppValues from kpops.utils.docstring import describe_attr From a2a8418d0165ec34f7f9a7a82526e02dd0fed671 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 18:01:34 +0100 Subject: [PATCH 10/36] Remove desc --- docs/docs/schema/pipeline.json | 6 ++---- kpops/components/base_components/kafka_app.py | 3 +-- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 2a9c565d0..1d15880cb 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -500,8 +500,7 @@ }, "repository_name": 
"bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" + } }, "to": { "anyOf": [ @@ -744,8 +743,7 @@ }, "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" + } }, "to": { "anyOf": [ diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 5eb8feada..ca7eb28a3 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -52,8 +52,7 @@ class StreamsBootstrapHelmApp(HelmApp, ABC): default=HelmRepoConfig( repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/", - ), - description=describe_attr("repo_config", __doc__), + ) ) From 3457d1f26c497faff560c938d1837b8e393359a4 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 18:22:42 +0100 Subject: [PATCH 11/36] Fix failing hooks --- docs/docs/schema/pipeline.json | 6 +++-- kpops/components/base_components/kafka_app.py | 24 +++++++++++-------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 1d15880cb..2a9c565d0 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -500,7 +500,8 @@ }, "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" - } + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { "anyOf": [ @@ -743,7 +744,8 @@ }, "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" - } + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { "anyOf": [ diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index ca7eb28a3..584f57959 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -18,6 +18,11 @@ log = logging.getLogger("KafkaApp") +STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig( + repository_name="bakdata-streams-bootstrap", + url="https://bakdata.github.io/streams-bootstrap/", +) + class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): """Kafka Streams config. @@ -47,16 +52,9 @@ class KafkaAppValues(HelmAppValues): ) -class StreamsBootstrapHelmApp(HelmApp, ABC): - repo_config: HelmRepoConfig = Field( - default=HelmRepoConfig( - repository_name="bakdata-streams-bootstrap", - url="https://bakdata.github.io/streams-bootstrap/", - ) - ) - +class KafkaAppCleaner(HelmApp): + repo_config: HelmRepoConfig = Field(default=STREAMS_BOOTSTRAP_HELM_REPO) -class KafkaAppCleaner(StreamsBootstrapHelmApp): @property @override def helm_chart(self) -> str: @@ -94,12 +92,14 @@ def run(self, dry_run: bool) -> None: self.destroy(dry_run=dry_run) -class KafkaApp(StreamsBootstrapHelmApp, ABC): +class KafkaApp(HelmApp, ABC): """Base component for Kafka-based components. Producer or streaming apps should inherit from this class. 
:param app: Application-specific settings + :param repo_config: Configuration of the Helm chart repo to be used for + deploying the component, defaults to streams-bootstrap Helm repo :param version: Helm chart version, defaults to "2.9.0" """ @@ -107,6 +107,10 @@ class KafkaApp(StreamsBootstrapHelmApp, ABC): default=..., description=describe_attr("app", __doc__), ) + repo_config: HelmRepoConfig = Field( + default=STREAMS_BOOTSTRAP_HELM_REPO, + description=describe_attr("repo_config", __doc__), + ) version: str | None = Field( default="2.9.0", description=describe_attr("version", __doc__), From a725f030d5170b2fbcb6b1cde487116b4d9ce976 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 18:33:00 +0100 Subject: [PATCH 12/36] Cosmetic --- kpops/components/base_components/kafka_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 584f57959..9ea6da342 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -53,7 +53,7 @@ class KafkaAppValues(HelmAppValues): class KafkaAppCleaner(HelmApp): - repo_config: HelmRepoConfig = Field(default=STREAMS_BOOTSTRAP_HELM_REPO) + repo_config: HelmRepoConfig = STREAMS_BOOTSTRAP_HELM_REPO @property @override From 26ac7f9297538b6dacd05d0a26e10d285c06acad Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 18:33:52 +0100 Subject: [PATCH 13/36] Cosmetic --- kpops/components/base_components/kafka_app.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 9ea6da342..f4a205080 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -82,14 +82,14 @@ def run(self, dry_run: bool) -> None: :param dry_run: Dry run command """ log.info(f"Uninstall old cleanup job for {self.helm_release_name}") - self.destroy(dry_run=dry_run) + self.destroy(dry_run) log.info(f"Init cleanup job for {self.helm_release_name}") - self.deploy(dry_run=dry_run) + self.deploy(dry_run) if not self.config.retain_clean_jobs: log.info(f"Uninstall cleanup job for {self.helm_release_name}") - self.destroy(dry_run=dry_run) + self.destroy(dry_run) class KafkaApp(HelmApp, ABC): From b193c039e93da317b9c6c75afb77a1aee42231c2 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 2 Jan 2024 18:36:50 +0100 Subject: [PATCH 14/36] Rename run to clean --- kpops/components/base_components/kafka_app.py | 5 +++-- kpops/components/streams_bootstrap/producer/producer_app.py | 2 +- kpops/components/streams_bootstrap/streams/streams_app.py | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index f4a205080..4edddd0ef 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -76,8 +76,9 @@ def helm_flags(self) -> HelmFlags: wait_for_jobs=True, ) - def run(self, dry_run: bool) -> None: - """Clean an app using the respective cleanup job. + @override + def clean(self, dry_run: bool) -> None: + """Clean an app using a cleanup job. 
:param dry_run: Dry run command """ diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 465e9fa24..eca7c4395 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -82,4 +82,4 @@ def helm_chart(self) -> str: @override def clean(self, dry_run: bool) -> None: - self._cleaner.run(dry_run) + self._cleaner.clean(dry_run) diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index deedbd137..41693f083 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -76,9 +76,9 @@ def helm_chart(self) -> str: @override def reset(self, dry_run: bool) -> None: self._cleaner.app.streams.delete_output = False - self._cleaner.run(dry_run) + self._cleaner.clean(dry_run) @override def clean(self, dry_run: bool) -> None: self._cleaner.app.streams.delete_output = True - self._cleaner.run(dry_run) + self._cleaner.clean(dry_run) From 9d27adc1aac71642404a6b7eca577627a151fd30 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 3 Jan 2024 09:59:15 +0100 Subject: [PATCH 15/36] Fix returned object type --- kpops/components/base_components/kafka_app.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 4edddd0ef..a0a8784b5 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -9,7 +9,6 @@ from kpops.component_handlers.helm_wrapper.model import ( HelmFlags, HelmRepoConfig, - HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name from kpops.components.base_components.helm_app import HelmApp, HelmAppValues @@ -69,7 +68,7 @@ def helm_release_name(self) -> str: @property @override def helm_flags(self) -> HelmFlags: - return HelmUpgradeInstallFlags( + return HelmFlags( create_namespace=self.config.create_namespace, version=self.version, wait=True, From c517a3b89301ea886cc57a191d3b5b8f3da62db1 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 3 Jan 2024 10:08:08 +0100 Subject: [PATCH 16/36] Add pydocs and todo --- kpops/components/base_components/kafka_app.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index a0a8784b5..8cc1c5c7e 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -52,7 +52,10 @@ class KafkaAppValues(HelmAppValues): class KafkaAppCleaner(HelmApp): + """Helm app for resetting and cleaning a streams-bootstrap app.""" + repo_config: HelmRepoConfig = STREAMS_BOOTSTRAP_HELM_REPO + # TODO: streams-bootstrap version? 
@property @override From 11dcc1e6d048a95912b33546528623ccfc9510a0 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 3 Jan 2024 16:45:29 +0100 Subject: [PATCH 17/36] Create streams-bootstrap base for all components based on its Helm charts --- ...aults_pipeline_component_dependencies.yaml | 4 +- .../dependencies/kpops_structure.yaml | 21 ++++++--- .../pipeline_component_dependencies.yaml | 11 ++--- .../pipeline-components/kafka-app.yaml | 12 ------ .../pipeline-components/pipeline.yaml | 20 ++------- .../pipeline-components/producer-app.yaml | 4 +- .../pipeline-components/streams-app.yaml | 4 +- .../pipeline-defaults/defaults-kafka-app.yaml | 43 ++++++++++++++++++- .../resources/pipeline-defaults/defaults.yaml | 43 ++++++++++++++++++- docs/docs/schema/pipeline.json | 18 ++++---- kpops/components/__init__.py | 5 ++- kpops/components/base_components/kafka_app.py | 28 +++--------- .../components/streams_bootstrap/__init__.py | 36 +++++++++++++--- .../producer/producer_app.py | 7 +-- .../streams_bootstrap/streams/model.py | 6 +-- .../streams_bootstrap/streams/streams_app.py | 5 ++- tests/cli/test_registry.py | 3 +- ...kafka_app.py => test_streams_bootstrap.py} | 35 +++++++-------- tests/pipeline/test_components/components.py | 3 +- 19 files changed, 189 insertions(+), 119 deletions(-) rename tests/components/{test_kafka_app.py => test_streams_bootstrap.py} (75%) diff --git a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml index 4e12885af..959596df0 100644 --- a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml @@ -2,8 +2,10 @@ helm-app.yaml: - app-helm-app.yaml - repo_config-helm-app.yaml kafka-app.yaml: +- prefix.yaml +- from_.yaml +- to.yaml - app-kafka-app.yaml -- version-kafka-app.yaml kafka-connector.yaml: - prefix.yaml - from_.yaml diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 70dc43870..0d553845b 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -13,10 +13,7 @@ kpops_components_fields: - prefix - from_ - to - - namespace - app - - repo_config - - version kafka-connector: - name - prefix @@ -65,8 +62,8 @@ kpops_components_fields: - prefix - from_ - to - - namespace - app + - namespace - repo_config - version streams-app: @@ -74,17 +71,27 @@ kpops_components_fields: - prefix - from_ - to + - app + - namespace + - repo_config + - version + streams-bootstrap: + - name + - prefix + - from_ + - to - namespace - app - repo_config - version kpops_components_inheritance_ref: helm-app: kubernetes-app - kafka-app: helm-app + kafka-app: pipeline-component kafka-connector: pipeline-component kafka-sink-connector: kafka-connector kafka-source-connector: kafka-connector kubernetes-app: pipeline-component pipeline-component: base-defaults-component - producer-app: kafka-app - streams-app: kafka-app + producer-app: streams-bootstrap + streams-app: streams-bootstrap + streams-bootstrap: helm-app diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml 
b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml index 8504a0135..1127dda94 100644 --- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml @@ -10,10 +10,7 @@ kafka-app.yaml: - prefix.yaml - from_.yaml - to.yaml -- namespace.yaml - app-kafka-app.yaml -- repo_config-helm-app.yaml -- version-kafka-app.yaml kafka-connector.yaml: - prefix.yaml - from_.yaml @@ -52,15 +49,15 @@ producer-app.yaml: - prefix.yaml - from_-producer-app.yaml - to.yaml -- namespace.yaml - app-producer-app.yaml +- namespace.yaml - repo_config-helm-app.yaml -- version-kafka-app.yaml +- version.yaml streams-app.yaml: - prefix.yaml - from_.yaml - to.yaml -- namespace.yaml - app-streams-app.yaml +- namespace.yaml - repo_config-helm-app.yaml -- version-kafka-app.yaml +- version.yaml diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml index cdc49ef28..ff2b5500c 100644 --- a/docs/docs/resources/pipeline-components/kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/kafka-app.yaml @@ -44,7 +44,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required @@ -53,14 +52,3 @@ schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index 1c6350fbc..52245a05b 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -107,7 +107,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required @@ -116,17 +115,6 @@ schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. 
Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version # Kafka sink connector - type: kafka-sink-connector name: kafka-sink-connector # required @@ -322,7 +310,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required @@ -334,6 +321,7 @@ output_role1: output_topic1 output_role2: output_topic2 nameOverride: override-with-this-name # kafka-app-specific + namespace: namespace # required # Helm repository configuration (optional) # If not set the helm repo add will not be called. Useful when using local Helm charts repo_config: @@ -344,7 +332,7 @@ password: pass ca_file: /home/user/path/to/ca-file insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version + version: "1.0.0" # Helm chart version # StreamsApp component that configures a streams bootstrap app. # More documentation on StreamsApp: https://github.com/bakdata/streams-bootstrap - type: streams-app # required @@ -391,7 +379,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # No arbitrary keys are allowed under `app`here # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/streams-app @@ -448,6 +435,7 @@ topics: # List of auto-generated Kafka Streams topics used by the streams app. - topic1 - topic2 + namespace: namespace # required # Helm repository configuration (optional) # If not set the helm repo add will not be called. Useful when using local Helm charts repo_config: @@ -458,4 +446,4 @@ password: pass ca_file: /home/user/path/to/ca-file insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version + version: "1.0.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 5be3551d8..9a698e1b3 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -27,7 +27,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required @@ -39,6 +38,7 @@ output_role1: output_topic1 output_role2: output_topic2 nameOverride: override-with-this-name # kafka-app-specific + namespace: namespace # required # Helm repository configuration (optional) # If not set the helm repo add will not be called. 
Useful when using local Helm charts repo_config: @@ -49,4 +49,4 @@ password: pass ca_file: /home/user/path/to/ca-file insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version + version: "1.0.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml index f77edf80c..c333631ef 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -44,7 +44,6 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model - namespace: namespace # required # No arbitrary keys are allowed under `app`here # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/streams-app @@ -101,6 +100,7 @@ topics: # List of auto-generated Kafka Streams topics used by the streams app. - topic1 - topic2 + namespace: namespace # required # Helm repository configuration (optional) # If not set the helm repo add will not be called. Useful when using local Helm charts repo_config: @@ -111,4 +111,4 @@ password: pass ca_file: /home/user/path/to/ca-file insecure_skip_tls_verify: false - version: "2.12.0" # Helm chart version + version: "1.0.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml index bd6c9e2d9..d37dad1bb 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml @@ -3,6 +3,48 @@ # Parent of: ProducerApp, StreamsApp # Child of: KubernetesApp kafka-app: + # Pipeline prefix that will prefix every component name. If you wish to not + # have any prefix you can specify an empty string. + prefix: ${pipeline_name}- + from: # Must not be null + topics: # read from topic + ${pipeline_name}-input-topic: + type: input # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra + ${pipeline_name}-input-pattern-topic: + type: pattern # Implied to be an input pattern if `role` is undefined + ${pipeline_name}-extra-pattern-topic: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + components: # read from specific component + account-producer: + type: output # Implied when role is NOT specified + other-producer: + role: some-role # Implies `type` to be extra + component-as-input-pattern: + type: pattern # Implied to be an input pattern if `role` is undefined + component-as-extra-pattern: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + # Topic(s) into which the component will write output + to: + topics: + ${pipeline_name}-output-topic: + type: output # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined + ${pipeline_name}-error-topic: + type: error + # Currently KPOps supports Avro and JSON schemas. + key_schema: key-schema # must implement SchemaProvider to use + value_schema: value-schema + partitions_count: 1 + replication_factor: 1 + configs: # https://kafka.apache.org/documentation/#topicconfigs + cleanup.policy: compact + models: # SchemaProvider is initiated with the values given here + model: model # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. 
app: # required @@ -11,4 +53,3 @@ kafka-app: schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index 58b22d3f3..5c71248c2 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -24,6 +24,48 @@ helm-app: # Parent of: ProducerApp, StreamsApp # Child of: KubernetesApp kafka-app: + # Pipeline prefix that will prefix every component name. If you wish to not + # have any prefix you can specify an empty string. + prefix: ${pipeline_name}- + from: # Must not be null + topics: # read from topic + ${pipeline_name}-input-topic: + type: input # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra + ${pipeline_name}-input-pattern-topic: + type: pattern # Implied to be an input pattern if `role` is undefined + ${pipeline_name}-extra-pattern-topic: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + components: # read from specific component + account-producer: + type: output # Implied when role is NOT specified + other-producer: + role: some-role # Implies `type` to be extra + component-as-input-pattern: + type: pattern # Implied to be an input pattern if `role` is undefined + component-as-extra-pattern: + type: pattern # Implied to be an extra pattern if `role` is defined + role: some-role + # Topic(s) into which the component will write output + to: + topics: + ${pipeline_name}-output-topic: + type: output # Implied when role is NOT specified + ${pipeline_name}-extra-topic: + role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined + ${pipeline_name}-error-topic: + type: error + # Currently KPOps supports Avro and JSON schemas. + key_schema: key-schema # must implement SchemaProvider to use + value_schema: value-schema + partitions_count: 1 + replication_factor: 1 + configs: # https://kafka.apache.org/documentation/#topicconfigs + cleanup.policy: compact + models: # SchemaProvider is initiated with the values given here + model: model # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required @@ -32,7 +74,6 @@ kafka-app: schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app - version: "2.12.0" # Helm chart version # Kafka connector # # Parent of: KafkaSinkConnector, KafkaSourceConnector diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 2a9c565d0..9df7d8342 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -452,7 +452,7 @@ }, "ProducerApp": { "additionalProperties": true, - "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. 
Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ @@ -531,8 +531,8 @@ }, "required": [ "name", - "namespace", - "app" + "app", + "namespace" ], "title": "ProducerApp", "type": "object" @@ -689,7 +689,7 @@ }, "StreamsApp": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams bootstrap app.", + "description": "StreamsApp component that configures a streams-bootstrap app.", "properties": { "app": { "allOf": [ @@ -775,8 +775,8 @@ }, "required": [ "name", - "namespace", - "app" + "app", + "namespace" ], "title": "StreamsApp", "type": "object" @@ -863,7 +863,7 @@ }, "StreamsAppValues": { "additionalProperties": true, - "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { "anyOf": [ @@ -875,7 +875,7 @@ } ], "default": null, - "description": "Kubernetes Event-driven Autoscaling config" + "description": "Kubernetes event-driven autoscaling config" }, "nameOverride": { "anyOf": [ @@ -896,7 +896,7 @@ "$ref": "#/$defs/StreamsConfig" } ], - "description": "Streams Bootstrap streams section" + "description": "streams-bootstrap streams section" } }, "required": [ diff --git a/kpops/components/__init__.py b/kpops/components/__init__.py index 98e1d3530..dc5fcee9c 100644 --- a/kpops/components/__init__.py +++ b/kpops/components/__init__.py @@ -7,7 +7,9 @@ PipelineComponent, ) from kpops.components.base_components.kafka_connector import KafkaConnector -from kpops.components.streams_bootstrap import ProducerApp, StreamsApp +from kpops.components.streams_bootstrap import StreamsBootstrap +from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp +from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp __all__ = ( "HelmApp", @@ -16,6 +18,7 @@ "KafkaSinkConnector", "KafkaSourceConnector", "KubernetesApp", + "StreamsBootstrap", "ProducerApp", "StreamsApp", "PipelineComponent", diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 8cc1c5c7e..7ee67b09c 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -8,20 +8,16 @@ from kpops.component_handlers.helm_wrapper.model import ( HelmFlags, - HelmRepoConfig, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.base_components.helm_app import HelmApp, HelmAppValues +from kpops.components.base_components.helm_app import HelmAppValues +from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel log = logging.getLogger("KafkaApp") -STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig( - repository_name="bakdata-streams-bootstrap", - url="https://bakdata.github.io/streams-bootstrap/", -) - class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): """Kafka Streams config. 
@@ -51,12 +47,9 @@ class KafkaAppValues(HelmAppValues): ) -class KafkaAppCleaner(HelmApp): +class KafkaAppCleaner(StreamsBootstrap): """Helm app for resetting and cleaning a streams-bootstrap app.""" - repo_config: HelmRepoConfig = STREAMS_BOOTSTRAP_HELM_REPO - # TODO: streams-bootstrap version? - @property @override def helm_chart(self) -> str: @@ -95,29 +88,18 @@ def clean(self, dry_run: bool) -> None: self.destroy(dry_run) -class KafkaApp(HelmApp, ABC): +class KafkaApp(PipelineComponent, ABC): """Base component for Kafka-based components. Producer or streaming apps should inherit from this class. :param app: Application-specific settings - :param repo_config: Configuration of the Helm chart repo to be used for - deploying the component, defaults to streams-bootstrap Helm repo - :param version: Helm chart version, defaults to "2.9.0" """ app: KafkaAppValues = Field( default=..., description=describe_attr("app", __doc__), ) - repo_config: HelmRepoConfig = Field( - default=STREAMS_BOOTSTRAP_HELM_REPO, - description=describe_attr("repo_config", __doc__), - ) - version: str | None = Field( - default="2.9.0", - description=describe_attr("version", __doc__), - ) @override def deploy(self, dry_run: bool) -> None: diff --git a/kpops/components/streams_bootstrap/__init__.py b/kpops/components/streams_bootstrap/__init__.py index 097d85b13..1b02b091b 100644 --- a/kpops/components/streams_bootstrap/__init__.py +++ b/kpops/components/streams_bootstrap/__init__.py @@ -1,7 +1,31 @@ -from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp -from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp +from abc import ABC -__all__ = [ - "ProducerApp", - "StreamsApp", -] +from pydantic import Field + +from kpops.component_handlers.helm_wrapper.model import HelmRepoConfig +from kpops.components.base_components.helm_app import HelmApp +from kpops.utils.docstring import describe_attr + +STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig( + repository_name="bakdata-streams-bootstrap", + url="https://bakdata.github.io/streams-bootstrap/", +) +STREAMS_BOOTSTRAP_VERSION = "2.9.0" + + +class StreamsBootstrap(HelmApp, ABC): + """Base for components with a streams-bootstrap Helm chart. + + :param repo_config: Configuration of the Helm chart repo to be used for + deploying the component, defaults to streams-bootstrap Helm repo + :param version: Helm chart version, defaults to "2.9.0" + """ + + repo_config: HelmRepoConfig = Field( + default=STREAMS_BOOTSTRAP_HELM_REPO, + description=describe_attr("repo_config", __doc__), + ) + version: str | None = Field( + default=STREAMS_BOOTSTRAP_VERSION, + description=describe_attr("version", __doc__), + ) diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index eca7c4395..355c31cd3 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -11,6 +11,7 @@ OutputTopicTypes, TopicConfig, ) +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.producer.model import ProducerAppValues from kpops.utils.docstring import describe_attr @@ -27,11 +28,11 @@ def helm_chart(self) -> str: ) -class ProducerApp(KafkaApp): +class ProducerApp(StreamsBootstrap, KafkaApp): """Producer component. 
- This producer holds configuration to use as values for the streams bootstrap - producer helm chart. + This producer holds configuration to use as values for the streams-bootstrap + producer Helm chart. Note that the producer does not support error topics. diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index a162365fe..0447cf272 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -171,12 +171,12 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): class StreamsAppValues(KafkaAppValues): - """StreamsBoostrap app configurations. + """streams-bootstrap app configurations. The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart. - :param streams: Streams Bootstrap streams section - :param autoscaling: Kubernetes Event-driven Autoscaling config, defaults to None + :param streams: streams-bootstrap streams section + :param autoscaling: Kubernetes event-driven autoscaling config, defaults to None """ streams: StreamsConfig = Field( diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index 41693f083..beadd574c 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -7,6 +7,7 @@ KafkaApp, KafkaAppCleaner, ) +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.streams.model import StreamsAppValues from kpops.utils.docstring import describe_attr @@ -21,8 +22,8 @@ def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" -class StreamsApp(KafkaApp): - """StreamsApp component that configures a streams bootstrap app. +class StreamsApp(StreamsBootstrap, KafkaApp): + """StreamsApp component that configures a streams-bootstrap app. 
:param app: Application-specific settings """ diff --git a/tests/cli/test_registry.py b/tests/cli/test_registry.py index bc6a7a2f9..473c340c4 100644 --- a/tests/cli/test_registry.py +++ b/tests/cli/test_registry.py @@ -36,7 +36,7 @@ def test_find_builtin_classes(): class_.__name__ for class_ in _find_classes("kpops.components", PipelineComponent) ] - assert len(components) == 9 + assert len(components) == 10 assert components == [ "HelmApp", "KafkaApp", @@ -47,6 +47,7 @@ def test_find_builtin_classes(): "PipelineComponent", "ProducerApp", "StreamsApp", + "StreamsBootstrap", ] diff --git a/tests/components/test_kafka_app.py b/tests/components/test_streams_bootstrap.py similarity index 75% rename from tests/components/test_kafka_app.py rename to tests/components/test_streams_bootstrap.py index d7e8fd5d4..9a53ef319 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_streams_bootstrap.py @@ -11,13 +11,13 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.base_components import KafkaApp +from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.config import KpopsConfig DEFAULTS_PATH = Path(__file__).parent / "resources" -class TestKafkaApp: +class TestStreamsBootstrap: @pytest.fixture() def config(self) -> KpopsConfig: return KpopsConfig( @@ -34,36 +34,29 @@ def handlers(self) -> ComponentHandlers: ) def test_default_configs(self, config: KpopsConfig, handlers: ComponentHandlers): - kafka_app = KafkaApp( + streams_bootstrap_helm_app = StreamsBootstrap( name="example-name", config=config, handlers=handlers, **{ "namespace": "test-namespace", - "app": { - "streams": { - "outputTopic": "test", - "brokers": "fake-broker:9092", - }, - }, + "app": {}, }, ) - assert kafka_app.app.streams.brokers == "fake-broker:9092" - - assert kafka_app.repo_config == HelmRepoConfig( + assert streams_bootstrap_helm_app.repo_config == HelmRepoConfig( repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/", ) - assert kafka_app.version == "2.9.0" - assert kafka_app.namespace == "test-namespace" + assert streams_bootstrap_helm_app.version == "2.9.0" + assert streams_bootstrap_helm_app.namespace == "test-namespace" - def test_should_deploy_kafka_app( + def test_should_deploy_streams_bootstrap_helm_app( self, config: KpopsConfig, handlers: ComponentHandlers, mocker: MockerFixture, ): - kafka_app = KafkaApp( + streams_bootstrap_helm_app = StreamsBootstrap( name="example-name", config=config, handlers=handlers, @@ -78,18 +71,20 @@ def test_should_deploy_kafka_app( "version": "1.2.3", }, ) - helm_upgrade_install = mocker.patch.object(kafka_app.helm, "upgrade_install") + helm_upgrade_install = mocker.patch.object( + streams_bootstrap_helm_app.helm, "upgrade_install" + ) print_helm_diff = mocker.patch.object( - kafka_app.dry_run_handler, "print_helm_diff" + streams_bootstrap_helm_app.dry_run_handler, "print_helm_diff" ) mocker.patch.object( - KafkaApp, + StreamsBootstrap, "helm_chart", return_value="test/test-chart", new_callable=mocker.PropertyMock, ) - kafka_app.deploy(dry_run=True) + streams_bootstrap_helm_app.deploy(dry_run=True) print_helm_diff.assert_called_once() helm_upgrade_install.assert_called_once_with( diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index d45882ea1..585fd8407 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -5,7 +5,7 
@@ Schema, SchemaProvider, ) -from kpops.components import KafkaSinkConnector +from kpops.components import KafkaSinkConnector, ProducerApp, StreamsApp from kpops.components.base_components import PipelineComponent from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.components.base_components.models.to_section import ( @@ -13,7 +13,6 @@ TopicConfig, ToSection, ) -from kpops.components.streams_bootstrap import ProducerApp, StreamsApp class ScheduledProducer(ProducerApp): From d9786dda7e39e57ce004e17fc6a22339e7a673d1 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 3 Jan 2024 16:49:46 +0100 Subject: [PATCH 18/36] Cleanup KPOps components import --- tests/pipeline/test_components/components.py | 8 ++++++-- .../test_components_without_schema_handler/components.py | 9 ++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index 585fd8407..20f781545 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -5,8 +5,12 @@ Schema, SchemaProvider, ) -from kpops.components import KafkaSinkConnector, ProducerApp, StreamsApp -from kpops.components.base_components import PipelineComponent +from kpops.components import ( + KafkaSinkConnector, + PipelineComponent, + ProducerApp, + StreamsApp, +) from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.components.base_components.models.to_section import ( OutputTopicTypes, diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py index d5684178c..686aac26c 100644 --- a/tests/pipeline/test_components_without_schema_handler/components.py +++ b/tests/pipeline/test_components_without_schema_handler/components.py @@ -1,10 +1,13 @@ from typing_extensions import override from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig -from kpops.components import KafkaSinkConnector -from kpops.components.base_components import PipelineComponent +from kpops.components import ( + KafkaSinkConnector, + PipelineComponent, + ProducerApp, + StreamsApp, +) from kpops.components.base_components.models.to_section import OutputTopicTypes -from kpops.components.streams_bootstrap import ProducerApp, StreamsApp class ScheduledProducer(ProducerApp): From a49b551f2380d12ba44d1ddfc9fe07d70c7570fc Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 3 Jan 2024 16:53:09 +0100 Subject: [PATCH 19/36] Fix inheritance order of streams-boostrap apps --- .../dependencies/kpops_structure.yaml | 8 +++--- .../pipeline_component_dependencies.yaml | 10 +++---- .../pipeline-components/pipeline.yaml | 28 +++---------------- .../pipeline-components/producer-app.yaml | 14 ++-------- .../pipeline-components/streams-app.yaml | 14 ++-------- docs/docs/schema/pipeline.json | 8 +++--- .../producer/producer_app.py | 2 +- .../streams_bootstrap/streams/streams_app.py | 2 +- 8 files changed, 22 insertions(+), 64 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 0d553845b..9c186896f 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -62,8 +62,8 @@ kpops_components_fields: - prefix - from_ - to - - 
app - namespace + - app - repo_config - version streams-app: @@ -71,8 +71,8 @@ kpops_components_fields: - prefix - from_ - to - - app - namespace + - app - repo_config - version streams-bootstrap: @@ -92,6 +92,6 @@ kpops_components_inheritance_ref: kafka-source-connector: kafka-connector kubernetes-app: pipeline-component pipeline-component: base-defaults-component - producer-app: streams-bootstrap - streams-app: streams-bootstrap + producer-app: kafka-app + streams-app: kafka-app streams-bootstrap: helm-app diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml index 1127dda94..3b706014c 100644 --- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml @@ -49,15 +49,13 @@ producer-app.yaml: - prefix.yaml - from_-producer-app.yaml - to.yaml -- app-producer-app.yaml - namespace.yaml -- repo_config-helm-app.yaml -- version.yaml +- app-producer-app.yaml +- version-kafka-app.yaml streams-app.yaml: - prefix.yaml - from_.yaml - to.yaml -- app-streams-app.yaml - namespace.yaml -- repo_config-helm-app.yaml -- version.yaml +- app-streams-app.yaml +- version-kafka-app.yaml diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index 52245a05b..483244db1 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -310,6 +310,7 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model + namespace: namespace # required # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required @@ -321,18 +322,7 @@ output_role1: output_topic1 output_role2: output_topic2 nameOverride: override-with-this-name # kafka-app-specific - namespace: namespace # required - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version + version: "2.12.0" # Helm chart version # StreamsApp component that configures a streams bootstrap app. # More documentation on StreamsApp: https://github.com/bakdata/streams-bootstrap - type: streams-app # required @@ -379,6 +369,7 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model + namespace: namespace # required # No arbitrary keys are allowed under `app`here # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/streams-app @@ -435,15 +426,4 @@ topics: # List of auto-generated Kafka Streams topics used by the streams app. - topic1 - topic2 - namespace: namespace # required - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. 
Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version + version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 9a698e1b3..86721cac7 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -27,6 +27,7 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model + namespace: namespace # required # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required @@ -38,15 +39,4 @@ output_role1: output_topic1 output_role2: output_topic2 nameOverride: override-with-this-name # kafka-app-specific - namespace: namespace # required - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version + version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml index c333631ef..e9f303686 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -44,6 +44,7 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model + namespace: namespace # required # No arbitrary keys are allowed under `app`here # Allowed configs: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/streams-app @@ -100,15 +101,4 @@ topics: # List of auto-generated Kafka Streams topics used by the streams app. - topic1 - topic2 - namespace: namespace # required - # Helm repository configuration (optional) - # If not set the helm repo add will not be called. 
Useful when using local Helm charts - repo_config: - repository_name: bakdata-streams-bootstrap # required - url: https://bakdata.github.io/streams-bootstrap/ # required - repo_auth_flags: - username: user - password: pass - ca_file: /home/user/path/to/ca-file - insecure_skip_tls_verify: false - version: "1.0.0" # Helm chart version + version: "2.12.0" # Helm chart version diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 9df7d8342..211b13928 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -531,8 +531,8 @@ }, "required": [ "name", - "app", - "namespace" + "namespace", + "app" ], "title": "ProducerApp", "type": "object" @@ -775,8 +775,8 @@ }, "required": [ "name", - "app", - "namespace" + "namespace", + "app" ], "title": "StreamsApp", "type": "object" diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 355c31cd3..2d6a586b2 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -28,7 +28,7 @@ def helm_chart(self) -> str: ) -class ProducerApp(StreamsBootstrap, KafkaApp): +class ProducerApp(KafkaApp, StreamsBootstrap): """Producer component. This producer holds configuration to use as values for the streams-bootstrap diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index beadd574c..2c632e882 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -22,7 +22,7 @@ def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" -class StreamsApp(StreamsBootstrap, KafkaApp): +class StreamsApp(KafkaApp, StreamsBootstrap): """StreamsApp component that configures a streams-bootstrap app. 
:param app: Application-specific settings From 2e2c7baed8fc6482468a1e28b5fd4f387e78da18 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Wed, 3 Jan 2024 18:08:20 +0100 Subject: [PATCH 20/36] Update components hierarchy diagram --- docs/docs/resources/architecture/components-hierarchy.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/docs/resources/architecture/components-hierarchy.md b/docs/docs/resources/architecture/components-hierarchy.md index 190c44f82..ce24acc46 100644 --- a/docs/docs/resources/architecture/components-hierarchy.md +++ b/docs/docs/resources/architecture/components-hierarchy.md @@ -1,10 +1,13 @@ ```mermaid flowchart BT KubernetesApp --> PipelineComponent + KafkaApp --> PipelineComponent HelmApp --> KubernetesApp - KafkaApp --> HelmApp + StreamsBootstrap --> HelmApp StreamsApp --> KafkaApp + StreamsApp --> StreamsBootstrap ProducerApp --> KafkaApp + ProducerApp --> StreamsBootstrap KafkaConnector --> PipelineComponent KafkaSourceConnector --> KafkaConnector KafkaSinkConnector --> KafkaConnector @@ -12,6 +15,7 @@ flowchart BT click KubernetesApp "/kpops/user/core-concepts/components/kubernetes-app" click HelmApp "/kpops/user/core-concepts/components/helm-app" click KafkaApp "/kpops/user/core-concepts/components/kafka-app" + click StreamsBootstrap "/kpops/user/core-concepts/components/streams-bootstrap" click StreamsApp "/kpops/user/core-concepts/components/streams-app" click ProducerApp "/kpops/user/core-concepts/components/producer-app" click KafkaConnector "/kpops/user/core-concepts/components/kafka-connector" From 02c0ef7120a0c2ef0635b363330d2543d79326e1 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Thu, 4 Jan 2024 13:05:10 +0100 Subject: [PATCH 21/36] Fix docs --- kpops/components/streams_bootstrap/streams/model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 0447cf272..95100b966 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -28,7 +28,7 @@ class StreamsConfig(KafkaStreamsConfig): :param output_topic: Output topic, defaults to None :param error_topic: Error topic, defaults to None :param config: Configuration, defaults to {} - :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to False + :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to None """ input_topics: list[str] = Field( From 83ac3989da4e2d1ef39e2cb57d7b72c1d819174f Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 8 Jan 2024 13:31:18 +0100 Subject: [PATCH 22/36] Update defaults schema --- docs/docs/schema/defaults.json | 161 ++++++++++++++++++++++++--------- 1 file changed, 117 insertions(+), 44 deletions(-) diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json index 137b547e9..aa392179b 100644 --- a/docs/docs/schema/defaults.json +++ b/docs/docs/schema/defaults.json @@ -247,36 +247,12 @@ "title": "Name", "type": "string" }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, "prefix": { "default": "${pipeline_name}-", "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" }, - "repo_config": { - "allOf": [ - { - "$ref": "#/$defs/HelmRepoConfig" - } - ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" - }, "to": { "anyOf": [ { @@ -288,24 +264,10 @@ ], "default": null, "description": "Topic(s) into which the component will write output" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "2.9.0", - "description": "Helm chart version", - "title": "Version" } }, "required": [ "name", - "namespace", "app" ], "title": "KafkaApp", @@ -837,7 +799,7 @@ }, "ProducerApp": { "additionalProperties": true, - "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ @@ -1079,7 +1041,7 @@ }, "StreamsApp": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams bootstrap app.", + "description": "StreamsApp component that configures a streams-bootstrap app.", "properties": { "app": { "allOf": [ @@ -1258,7 +1220,7 @@ }, "StreamsAppValues": { "additionalProperties": true, - "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { "anyOf": [ @@ -1270,7 +1232,7 @@ } ], "default": null, - "description": "Kubernetes Event-driven Autoscaling config" + "description": "Kubernetes event-driven autoscaling config" }, "nameOverride": { "anyOf": [ @@ -1291,7 +1253,7 @@ "$ref": "#/$defs/StreamsConfig" } ], - "description": "Streams Bootstrap streams section" + "description": "streams-bootstrap streams section" } }, "required": [ @@ -1300,6 +1262,100 @@ "title": "StreamsAppValues", "type": "object" }, + "StreamsBootstrap": { + "additionalProperties": true, + "description": "Base for components with a streams-bootstrap Helm chart.", + "properties": { + "app": { + "allOf": [ + { + "$ref": "#/$defs/HelmAppValues" + } + ], + "description": "Helm app values" + }, + "from": { + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) and/or components from which the component will read input", + "title": "From" + }, + "name": { + "description": "Component name", + "title": "Name", + "type": "string" + }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, + "prefix": { + "default": "${pipeline_name}-", + "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", + "title": "Prefix", + "type": "string" + }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, + "to": { + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Topic(s) into which the component will write output" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", + "description": "Helm chart version", + "title": "Version" + } + }, + "required": [ + "name", + "namespace", + "app" + ], + "title": "StreamsBootstrap", + "type": "object" + }, "StreamsConfig": { "additionalProperties": true, "description": "Streams Bootstrap streams section.", @@ -1315,6 +1371,19 @@ "title": "Config", "type": "object" }, + "deleteOutput": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", + "title": "Deleteoutput" + }, "errorTopic": { "anyOf": [ { @@ -1569,6 +1638,9 @@ }, "streams-app": { "$ref": "#/$defs/StreamsApp" + }, + "streams-bootstrap": { + "$ref": "#/$defs/StreamsBootstrap" } }, "required": [ @@ -1580,7 +1652,8 @@ "kubernetes-app", "pipeline-component", "producer-app", - "streams-app" + "streams-app", + "streams-bootstrap" ], "title": "DefaultsSchema", "type": "object" From 45c58d80af780bbfa433cc4f178d058ca55612c0 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 8 Jan 2024 13:39:14 +0100 Subject: [PATCH 23/36] Add docs for streams-bootstrap --- docs/docs/user/core-concepts/components/producer-app.md | 2 +- docs/docs/user/core-concepts/components/streams-app.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/user/core-concepts/components/producer-app.md b/docs/docs/user/core-concepts/components/producer-app.md index 1f55fa6d9..bff598d53 100644 --- a/docs/docs/user/core-concepts/components/producer-app.md +++ b/docs/docs/user/core-concepts/components/producer-app.md @@ -1,6 +1,6 @@ # ProducerApp -Subclass of [_KafkaApp_](kafka-app.md). +Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md). ### Usage diff --git a/docs/docs/user/core-concepts/components/streams-app.md b/docs/docs/user/core-concepts/components/streams-app.md index ac881ade2..d34705062 100644 --- a/docs/docs/user/core-concepts/components/streams-app.md +++ b/docs/docs/user/core-concepts/components/streams-app.md @@ -1,6 +1,6 @@ # StreamsApp -Subclass of [_KafkaApp_](kafka-app.md). +Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md). 
### Usage From 7c924211189e9f942ecbda182c019f79e115d659 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 8 Jan 2024 13:39:14 +0100 Subject: [PATCH 24/36] Add docs for streams-bootstrap --- .../components/streams-bootstrap.md | 25 +++++++++++++++++++ docs/mkdocs.yml | 1 + 2 files changed, 26 insertions(+) create mode 100644 docs/docs/user/core-concepts/components/streams-bootstrap.md diff --git a/docs/docs/user/core-concepts/components/streams-bootstrap.md b/docs/docs/user/core-concepts/components/streams-bootstrap.md new file mode 100644 index 000000000..52bb5fa0e --- /dev/null +++ b/docs/docs/user/core-concepts/components/streams-bootstrap.md @@ -0,0 +1,25 @@ +# StreamsApp + +Subclass of [_HelmApp_](helm-app.md). + +### Usage + +Configures a Helm app with [streams-bootstrap Helm charts](https://github.com/bakdata/streams-bootstrap){target=_blank}. + +### Operations + +#### deploy + +Deploy using Helm. + +#### destroy + +Uninstall Helm release. + +#### reset + +Do nothing. + +#### clean + +Do nothing. diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index c6ef09c16..d436c94a5 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -95,6 +95,7 @@ nav: - Overview: user/core-concepts/components/overview.md - KubernetesApp: user/core-concepts/components/kubernetes-app.md - HelmApp: user/core-concepts/components/helm-app.md + - StreamsBootstrap: user/core-concepts/components/streams-bootstrap.md - KafkaApp: user/core-concepts/components/kafka-app.md - StreamsApp: user/core-concepts/components/streams-app.md - ProducerApp: user/core-concepts/components/producer-app.md From 18cece35204229dc3643fc104ca03e8b5b1d72bc Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 9 Jan 2024 16:23:30 +0200 Subject: [PATCH 25/36] refactor: extract function to utils --- hooks/gen_docs/gen_docs_env_vars.py | 19 +------------------ kpops/utils/pydantic.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 8f5fe5646..353652068 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -25,6 +25,7 @@ from hooks.gen_docs import IterableStrEnum from kpops.cli import main from kpops.config import KpopsConfig +from kpops.utils.pydantic import patched_issubclass_of_basemodel PATH_DOCS_RESOURCES = ROOT / "docs/docs/resources" PATH_DOCS_VARIABLES = PATH_DOCS_RESOURCES / "variables" @@ -284,24 +285,6 @@ def collect_fields(model: type[BaseModel]) -> dict[str, Any]: :param model: settings class :return: ``dict`` of all fields in a settings class """ - - def patched_issubclass_of_basemodel(cls): - """Pydantic breaks issubclass. - - ``issubclass(set[str], set) # True`` - ``issubclass(BaseSettings, BaseModel) # True`` - ``issubclass(set[str], BaseModel) # raises exception`` - - :param cls: class to check - :return: Whether cls is subclass of ``BaseModel`` - """ - try: - return issubclass(cls, BaseModel) - except TypeError as e: - if str(e) == "issubclass() arg 1 must be a class": - return False - raise - seen_fields = {} for field_name, field_value in model.model_fields.items(): if field_value.annotation and patched_issubclass_of_basemodel( diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 3b643af51..d9c089689 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -95,6 +95,24 @@ def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> dict[str, } +def patched_issubclass_of_basemodel(cls): + """Pydantic breaks issubclass. 
+ + ``issubclass(set[str], set) # True`` + ``issubclass(BaseSettings, BaseModel) # True`` + ``issubclass(set[str], BaseModel) # raises exception`` + + :param cls: class to check + :return: Whether cls is subclass of ``BaseModel`` + """ + try: + return issubclass(cls, BaseModel) + except TypeError as e: + if str(e) == "issubclass() arg 1 must be a class": + return False + raise + + class CamelCaseConfigModel(BaseModel): model_config = ConfigDict( alias_generator=to_camel, From d9808f86d94f836204007a328a1c2de5ccca5dc1 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 10 Jan 2024 13:44:27 +0200 Subject: [PATCH 26/36] refactor: enable multiple inheritance for doc gen --- .../dependencies/kpops_structure.yaml | 32 +++++++---- hooks/gen_docs/gen_docs_components.py | 53 +++++++++++++------ 2 files changed, 59 insertions(+), 26 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 9c186896f..1ba79a30a 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -85,13 +85,25 @@ kpops_components_fields: - repo_config - version kpops_components_inheritance_ref: - helm-app: kubernetes-app - kafka-app: pipeline-component - kafka-connector: pipeline-component - kafka-sink-connector: kafka-connector - kafka-source-connector: kafka-connector - kubernetes-app: pipeline-component - pipeline-component: base-defaults-component - producer-app: kafka-app - streams-app: kafka-app - streams-bootstrap: helm-app + helm-app: + - kubernetes-app + kafka-app: + - pipeline-component + kafka-connector: + - pipeline-component + kafka-sink-connector: + - kafka-connector + kafka-source-connector: + - kafka-connector + kubernetes-app: + - pipeline-component + pipeline-component: + - base-defaults-component + producer-app: + - kafka-app + - streams-bootstrap + streams-app: + - kafka-app + - streams-bootstrap + streams-bootstrap: + - helm-app diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index f1acf9973..cfc1375ea 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -11,6 +11,7 @@ from kpops.cli.registry import _find_classes from kpops.components import KafkaConnector, PipelineComponent from kpops.utils.colorify import redify, yellowify +from kpops.utils.pydantic import patched_issubclass_of_basemodel from kpops.utils.yaml import load_yaml_file PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py" @@ -34,10 +35,14 @@ KPOPS_COMPONENTS = tuple(_find_classes("kpops.components", PipelineComponent)) KPOPS_COMPONENTS_INHERITANCE_REF = { - component.type: cast( - type[PipelineComponent], - component.__base__, - ).type + component.type: [ + cast( + type[PipelineComponent], + base, + ).type + for base in component.__bases__ + if patched_issubclass_of_basemodel(base) + ] for component in KPOPS_COMPONENTS } @@ -73,6 +78,25 @@ class KpopsComponent(NamedTuple): specific_attrs: list[str] +def collect_parents(component_name: str) -> list[str]: + """Return a list of a component's parents. + + :param component_name: Component name in kebap-case + :return: List ordered from closest to furthest ancestor, + i.e. ``result[0] == component_name``. 
+ """ + collected_components = [] + queue = [component_name] + while queue: + component = queue.pop(0) + collected_components.append(component) + for parent in KPOPS_COMPONENTS_INHERITANCE_REF.get(component, []): + if parent not in collected_components: + collected_components.append(parent) + queue.append(parent) + return list(dict.fromkeys(collected_components)) + + def filter_sections( component_name: str, sections: list[str], @@ -92,14 +116,11 @@ def filter_sections( if section := filter_section(component_name, sections, target_section): component_sections.append(section) elif include_inherited: - temp_component_name = component_name - while ( - temp_component_name := KPOPS_COMPONENTS_INHERITANCE_REF[ - temp_component_name - ] - ) != PipelineComponent.type: + for component in collect_parents(component_name): + if component == PipelineComponent.type: + break if section := filter_section( - temp_component_name, + component, sections, target_section, ): @@ -123,11 +144,11 @@ def filter_section( section = target_section + "-" + component_name + ".yaml" if section in sections: return section - if KPOPS_COMPONENTS_INHERITANCE_REF[component_name] == PipelineComponent.type: - section = target_section + ".yaml" - if section in sections: - return section - return None + for parent in KPOPS_COMPONENTS_INHERITANCE_REF[component_name]: + if parent == PipelineComponent.type: + section = target_section + ".yaml" + if section in sections: + return section return None From 87c24de2438965e86ec026f2d8401ad644ba55cd Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 10 Jan 2024 13:56:01 +0200 Subject: [PATCH 27/36] style: add TODO --- hooks/gen_docs/gen_docs_components.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index cfc1375ea..1ca1cc619 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -78,6 +78,10 @@ class KpopsComponent(NamedTuple): specific_attrs: list[str] +# TODO(Ivan Yordanov): Evaluate whether it makes sense to instead use `__mro__` +# The problem is that we need an object for that. Note that `__mro__` differs +# from the output of this function sometimes and `__mro__` probably is the more +# accurate way. def collect_parents(component_name: str) -> list[str]: """Return a list of a component's parents. 
From 372a0e5bd79442e54a3ecdf143f140217739ad09 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 10 Jan 2024 16:08:56 +0200 Subject: [PATCH 28/36] refactor: Use mro to collect parents --- .../dependencies/kpops_structure.yaml | 86 ++++++++++++++++--- .../pipeline_component_dependencies.yaml | 2 + .../pipeline-components/pipeline.yaml | 20 +++++ .../pipeline-components/producer-app.yaml | 10 +++ .../pipeline-components/streams-app.yaml | 10 +++ hooks/gen_docs/gen_docs_components.py | 57 ++++++------ 6 files changed, 143 insertions(+), 42 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 1ba79a30a..4a0018fe2 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -86,24 +86,86 @@ kpops_components_fields: - version kpops_components_inheritance_ref: helm-app: - - kubernetes-app + bases: + - kubernetes-app + mro: + - helm-app + - kubernetes-app + - pipeline-component + - base-defaults-component kafka-app: - - pipeline-component + bases: + - pipeline-component + mro: + - kafka-app + - pipeline-component + - base-defaults-component kafka-connector: - - pipeline-component + bases: + - pipeline-component + mro: + - kafka-connector + - pipeline-component + - base-defaults-component kafka-sink-connector: - - kafka-connector + bases: + - kafka-connector + mro: + - kafka-sink-connector + - kafka-connector + - pipeline-component + - base-defaults-component kafka-source-connector: - - kafka-connector + bases: + - kafka-connector + mro: + - kafka-source-connector + - kafka-connector + - pipeline-component + - base-defaults-component kubernetes-app: - - pipeline-component + bases: + - pipeline-component + mro: + - kubernetes-app + - pipeline-component + - base-defaults-component pipeline-component: - - base-defaults-component + bases: + - base-defaults-component + mro: + - pipeline-component + - base-defaults-component producer-app: - - kafka-app - - streams-bootstrap + bases: + - kafka-app + - streams-bootstrap + mro: + - producer-app + - kafka-app + - streams-bootstrap + - helm-app + - kubernetes-app + - pipeline-component + - base-defaults-component streams-app: - - kafka-app - - streams-bootstrap + bases: + - kafka-app + - streams-bootstrap + mro: + - streams-app + - kafka-app + - streams-bootstrap + - helm-app + - kubernetes-app + - pipeline-component + - base-defaults-component streams-bootstrap: - - helm-app + bases: + - helm-app + mro: + - streams-bootstrap + - helm-app + - kubernetes-app + - pipeline-component + - base-defaults-component diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml index 3b706014c..b633db907 100644 --- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml @@ -51,6 +51,7 @@ producer-app.yaml: - to.yaml - namespace.yaml - app-producer-app.yaml +- repo_config-helm-app.yaml - version-kafka-app.yaml streams-app.yaml: - prefix.yaml @@ -58,4 +59,5 @@ streams-app.yaml: - to.yaml - namespace.yaml - app-streams-app.yaml +- repo_config-helm-app.yaml - version-kafka-app.yaml diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml 
b/docs/docs/resources/pipeline-components/pipeline.yaml index 483244db1..0315bfaf4 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -322,6 +322,16 @@ output_role1: output_topic1 output_role2: output_topic2 nameOverride: override-with-this-name # kafka-app-specific + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false version: "2.12.0" # Helm chart version # StreamsApp component that configures a streams bootstrap app. # More documentation on StreamsApp: https://github.com/bakdata/streams-bootstrap @@ -426,4 +436,14 @@ topics: # List of auto-generated Kafka Streams topics used by the streams app. - topic1 - topic2 + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 86721cac7..5be3551d8 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -39,4 +39,14 @@ output_role1: output_topic1 output_role2: output_topic2 nameOverride: override-with-this-name # kafka-app-specific + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml index e9f303686..f77edf80c 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -101,4 +101,14 @@ topics: # List of auto-generated Kafka Streams topics used by the streams app. - topic1 - topic2 + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. 
Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false version: "2.12.0" # Helm chart version diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 1ca1cc619..d56709e35 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -2,6 +2,7 @@ import logging import sys +from contextlib import suppress from pathlib import Path from typing import NamedTuple, cast @@ -34,18 +35,6 @@ ) KPOPS_COMPONENTS = tuple(_find_classes("kpops.components", PipelineComponent)) -KPOPS_COMPONENTS_INHERITANCE_REF = { - component.type: [ - cast( - type[PipelineComponent], - base, - ).type - for base in component.__bases__ - if patched_issubclass_of_basemodel(base) - ] - for component in KPOPS_COMPONENTS -} - KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name @@ -78,27 +67,35 @@ class KpopsComponent(NamedTuple): specific_attrs: list[str] -# TODO(Ivan Yordanov): Evaluate whether it makes sense to instead use `__mro__` -# The problem is that we need an object for that. Note that `__mro__` differs -# from the output of this function sometimes and `__mro__` probably is the more -# accurate way. -def collect_parents(component_name: str) -> list[str]: +def collect_parents_mro(component: type[PipelineComponent]) -> list[str]: """Return a list of a component's parents. - :param component_name: Component name in kebap-case + :param component_name: Component name in kebab-case :return: List ordered from closest to furthest ancestor, i.e. ``result[0] == component_name``. 
""" - collected_components = [] - queue = [component_name] - while queue: - component = queue.pop(0) - collected_components.append(component) - for parent in KPOPS_COMPONENTS_INHERITANCE_REF.get(component, []): - if parent not in collected_components: - collected_components.append(parent) - queue.append(parent) - return list(dict.fromkeys(collected_components)) + comps = [] + for c in component.mro(): + if patched_issubclass_of_basemodel(c): + with suppress(AttributeError): + comps.append(c.type) # pyright: ignore[reportGeneralTypeIssues] + return comps + + +KPOPS_COMPONENTS_INHERITANCE_REF = { + component.type: { + "bases": [ + cast( + type[PipelineComponent], + base, + ).type + for base in component.__bases__ + if patched_issubclass_of_basemodel(base) + ], + "mro": collect_parents_mro(component), + } + for component in KPOPS_COMPONENTS +} def filter_sections( @@ -120,7 +117,7 @@ def filter_sections( if section := filter_section(component_name, sections, target_section): component_sections.append(section) elif include_inherited: - for component in collect_parents(component_name): + for component in KPOPS_COMPONENTS_INHERITANCE_REF[component_name]["mro"]: if component == PipelineComponent.type: break if section := filter_section( @@ -148,7 +145,7 @@ def filter_section( section = target_section + "-" + component_name + ".yaml" if section in sections: return section - for parent in KPOPS_COMPONENTS_INHERITANCE_REF[component_name]: + for parent in KPOPS_COMPONENTS_INHERITANCE_REF[component_name]["bases"]: if parent == PipelineComponent.type: section = target_section + ".yaml" if section in sections: From a83d607c5b08ab48110a1efbbfe9fa7509f03e26 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 10 Jan 2024 16:14:53 +0200 Subject: [PATCH 29/36] style: improve naming --- hooks/gen_docs/gen_docs_components.py | 34 +++++++++++++-------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index d56709e35..ac08a23ea 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -56,30 +56,19 @@ log = logging.getLogger("DocumentationGenerator") -class KpopsComponent(NamedTuple): - """Stores the names of components fields. - - :param attrs: All fields - :param specific_attrs: Fields that are NOT inherited - """ - - attrs: list[str] - specific_attrs: list[str] - - def collect_parents_mro(component: type[PipelineComponent]) -> list[str]: """Return a list of a component's parents. - :param component_name: Component name in kebab-case + :param component: Component name in kebab-case :return: List ordered from closest to furthest ancestor, i.e. ``result[0] == component_name``. """ - comps = [] - for c in component.mro(): - if patched_issubclass_of_basemodel(c): + bases = [] + for base in component.mro(): + if patched_issubclass_of_basemodel(base): with suppress(AttributeError): - comps.append(c.type) # pyright: ignore[reportGeneralTypeIssues] - return comps + bases.append(base.type) # pyright: ignore[reportGeneralTypeIssues] + return bases KPOPS_COMPONENTS_INHERITANCE_REF = { @@ -98,6 +87,17 @@ def collect_parents_mro(component: type[PipelineComponent]) -> list[str]: } +class KpopsComponent(NamedTuple): + """Stores the names of components fields. 
+ + :param attrs: All fields + :param specific_attrs: Fields that are NOT inherited + """ + + attrs: list[str] + specific_attrs: list[str] + + def filter_sections( component_name: str, sections: list[str], From 34584cb158819f080cae155cef1bef609319c393 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 10 Jan 2024 17:34:04 +0200 Subject: [PATCH 30/36] refactor: allow issubclass to take second arg --- hooks/gen_docs/gen_docs_components.py | 17 +++++++++-------- hooks/gen_docs/gen_docs_env_vars.py | 6 ++---- kpops/utils/pydantic.py | 9 +++++---- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index ac08a23ea..744dbd32d 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -12,7 +12,7 @@ from kpops.cli.registry import _find_classes from kpops.components import KafkaConnector, PipelineComponent from kpops.utils.colorify import redify, yellowify -from kpops.utils.pydantic import patched_issubclass_of_basemodel +from kpops.utils.pydantic import issubclass_patched from kpops.utils.yaml import load_yaml_file PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py" @@ -65,7 +65,7 @@ def collect_parents_mro(component: type[PipelineComponent]) -> list[str]: """ bases = [] for base in component.mro(): - if patched_issubclass_of_basemodel(base): + if issubclass_patched(base): with suppress(AttributeError): bases.append(base.type) # pyright: ignore[reportGeneralTypeIssues] return bases @@ -79,7 +79,7 @@ def collect_parents_mro(component: type[PipelineComponent]) -> list[str]: base, ).type for base in component.__bases__ - if patched_issubclass_of_basemodel(base) + if issubclass_patched(base) ], "mro": collect_parents_mro(component), } @@ -145,11 +145,12 @@ def filter_section( section = target_section + "-" + component_name + ".yaml" if section in sections: return section - for parent in KPOPS_COMPONENTS_INHERITANCE_REF[component_name]["bases"]: - if parent == PipelineComponent.type: - section = target_section + ".yaml" - if section in sections: - return section + if KPOPS_COMPONENTS_INHERITANCE_REF[component_name]["bases"] == [ + PipelineComponent.type + ]: + section = target_section + ".yaml" + if section in sections: + return section return None diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 353652068..aea4b6af2 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -25,7 +25,7 @@ from hooks.gen_docs import IterableStrEnum from kpops.cli import main from kpops.config import KpopsConfig -from kpops.utils.pydantic import patched_issubclass_of_basemodel +from kpops.utils.pydantic import issubclass_patched PATH_DOCS_RESOURCES = ROOT / "docs/docs/resources" PATH_DOCS_VARIABLES = PATH_DOCS_RESOURCES / "variables" @@ -287,9 +287,7 @@ def collect_fields(model: type[BaseModel]) -> dict[str, Any]: """ seen_fields = {} for field_name, field_value in model.model_fields.items(): - if field_value.annotation and patched_issubclass_of_basemodel( - field_value.annotation - ): + if field_value.annotation and issubclass_patched(field_value.annotation): seen_fields[field_name] = collect_fields(field_value.annotation) else: seen_fields[field_name] = field_value diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index d9c089689..26fcc5388 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -95,18 +95,19 @@ def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> 
dict[str, } -def patched_issubclass_of_basemodel(cls): - """Pydantic breaks issubclass. +def issubclass_patched(__cls: type, __class_or_tuple: type = BaseModel) -> bool: + """Pydantic breaks ``issubclass``. ``issubclass(set[str], set) # True`` ``issubclass(BaseSettings, BaseModel) # True`` ``issubclass(set[str], BaseModel) # raises exception`` :param cls: class to check - :return: Whether cls is subclass of ``BaseModel`` + :base: class(es) to check against, defaults to ``BaseModel`` + :return: Whether 'cls' is derived from another class or is the same class. """ try: - return issubclass(cls, BaseModel) + return issubclass(__cls, __class_or_tuple) except TypeError as e: if str(e) == "issubclass() arg 1 must be a class": return False From c601fa4d5d41ecf4b9af7b0b70ea8bd2d4545917 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 10 Jan 2024 18:09:08 +0200 Subject: [PATCH 31/36] refactor: move parents logic to PipelineComponent, Don't store BaseDefaultsComponent as parent for doc gen --- .../dependencies/kpops_structure.yaml | 33 +++++-------- hooks/gen_docs/gen_docs_components.py | 49 +++++++------------ .../base_components/pipeline_component.py | 22 +++++++++ 3 files changed, 50 insertions(+), 54 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 4a0018fe2..679bef784 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -88,84 +88,73 @@ kpops_components_inheritance_ref: helm-app: bases: - kubernetes-app - mro: + parents: - helm-app - kubernetes-app - pipeline-component - - base-defaults-component kafka-app: bases: - pipeline-component - mro: + parents: - kafka-app - pipeline-component - - base-defaults-component kafka-connector: bases: - pipeline-component - mro: + parents: - kafka-connector - pipeline-component - - base-defaults-component kafka-sink-connector: bases: - kafka-connector - mro: + parents: - kafka-sink-connector - kafka-connector - pipeline-component - - base-defaults-component kafka-source-connector: bases: - kafka-connector - mro: + parents: - kafka-source-connector - kafka-connector - pipeline-component - - base-defaults-component kubernetes-app: bases: - pipeline-component - mro: + parents: - kubernetes-app - pipeline-component - - base-defaults-component pipeline-component: - bases: - - base-defaults-component - mro: + bases: [] + parents: - pipeline-component - - base-defaults-component producer-app: bases: - kafka-app - streams-bootstrap - mro: + parents: - producer-app - kafka-app - streams-bootstrap - helm-app - kubernetes-app - pipeline-component - - base-defaults-component streams-app: bases: - kafka-app - streams-bootstrap - mro: + parents: - streams-app - kafka-app - streams-bootstrap - helm-app - kubernetes-app - pipeline-component - - base-defaults-component streams-bootstrap: bases: - helm-app - mro: + parents: - streams-bootstrap - helm-app - kubernetes-app - pipeline-component - - base-defaults-component diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 744dbd32d..424de213c 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -2,7 +2,6 @@ import logging import sys -from contextlib import suppress from pathlib import Path from typing import NamedTuple, cast @@ -43,34 +42,6 @@ ] for component in KPOPS_COMPONENTS } -# 
Dependency files should not be changed manually -DANGEROUS_FILES_TO_CHANGE = { - PATH_DOCS_COMPONENTS_DEPENDENCIES, - PATH_DOCS_COMPONENTS_DEPENDENCIES_DEFAULTS, - PATH_DOCS_KPOPS_STRUCTURE, -} -# All args provided to the script -# Pre-commit passes changed files as args -SCRIPT_ARGUMENTS = set(sys.argv) - -log = logging.getLogger("DocumentationGenerator") - - -def collect_parents_mro(component: type[PipelineComponent]) -> list[str]: - """Return a list of a component's parents. - - :param component: Component name in kebab-case - :return: List ordered from closest to furthest ancestor, - i.e. ``result[0] == component_name``. - """ - bases = [] - for base in component.mro(): - if issubclass_patched(base): - with suppress(AttributeError): - bases.append(base.type) # pyright: ignore[reportGeneralTypeIssues] - return bases - - KPOPS_COMPONENTS_INHERITANCE_REF = { component.type: { "bases": [ @@ -79,13 +50,25 @@ def collect_parents_mro(component: type[PipelineComponent]) -> list[str]: base, ).type for base in component.__bases__ - if issubclass_patched(base) + if issubclass_patched(base, PipelineComponent) ], - "mro": collect_parents_mro(component), + "parents": component.get_parents(PipelineComponent), } for component in KPOPS_COMPONENTS } +# Dependency files should not be changed manually +DANGEROUS_FILES_TO_CHANGE = { + PATH_DOCS_COMPONENTS_DEPENDENCIES, + PATH_DOCS_COMPONENTS_DEPENDENCIES_DEFAULTS, + PATH_DOCS_KPOPS_STRUCTURE, +} +# All args provided to the script +# Pre-commit passes changed files as args +SCRIPT_ARGUMENTS = set(sys.argv) + +log = logging.getLogger("DocumentationGenerator") + class KpopsComponent(NamedTuple): """Stores the names of components fields. @@ -117,7 +100,9 @@ def filter_sections( if section := filter_section(component_name, sections, target_section): component_sections.append(section) elif include_inherited: - for component in KPOPS_COMPONENTS_INHERITANCE_REF[component_name]["mro"]: + for component in KPOPS_COMPONENTS_INHERITANCE_REF[component_name][ + "parents" + ]: if component == PipelineComponent.type: break if section := filter_section( diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index b53e03d43..f46b80b4d 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -1,6 +1,8 @@ from __future__ import annotations from abc import ABC +from contextlib import suppress +from typing import Self from pydantic import AliasChoices, ConfigDict, Field @@ -19,6 +21,7 @@ ToSection, ) from kpops.utils.docstring import describe_attr +from kpops.utils.pydantic import issubclass_patched class PipelineComponent(BaseDefaultsComponent, ABC): @@ -64,6 +67,25 @@ def __init__(self, **kwargs) -> None: def full_name(self) -> str: return self.prefix + self.name + @classmethod + def get_parents( + cls: type[Self], __class_or_tuple: type = BaseDefaultsComponent + ) -> list[str]: + """Get kebab-cased superclasses' names. + + Can only return subclasses of ``BaseDefaultsComponent``. 
+ + :param __class_or_tuple: "Furthest" ancestors to look for, + defaults to BaseDefaultsComponent + :return: All ancestors that match the requirements + """ + bases = [] + for base in cls.mro(): + if issubclass_patched(base, __class_or_tuple): + with suppress(AttributeError): + bases.append(base.type) # pyright: ignore[reportGeneralTypeIssues] + return bases + def add_input_topics(self, topics: list[str]) -> None: """Add given topics to the list of input topics. From 358f84350aa3d9e642b69ea922c1ea1674f8ecb2 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 10 Jan 2024 18:16:06 +0200 Subject: [PATCH 32/36] fix: import exception --- kpops/components/base_components/pipeline_component.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index f46b80b4d..cdf5ac305 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -2,7 +2,6 @@ from abc import ABC from contextlib import suppress -from typing import Self from pydantic import AliasChoices, ConfigDict, Field @@ -23,6 +22,11 @@ from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import issubclass_patched +try: + from typing import Self +except ImportError: + from typing_extensions import Self + class PipelineComponent(BaseDefaultsComponent, ABC): """Base class for all components. From 2f2d61b8f801214283e0de3aa60d969f148cb8b0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 11 Jan 2024 14:10:44 +0200 Subject: [PATCH 33/36] refactor: get parents returns list of classes --- hooks/gen_docs/gen_docs_components.py | 8 +++++++- kpops/components/base_components/pipeline_component.py | 6 +----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 424de213c..c103b2936 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -52,7 +52,13 @@ for base in component.__bases__ if issubclass_patched(base, PipelineComponent) ], - "parents": component.get_parents(PipelineComponent), + "parents": [ + cast( + type[PipelineComponent], + parent, + ).type + for parent in component.get_parents(PipelineComponent) + ], } for component in KPOPS_COMPONENTS } diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index cdf5ac305..77946a9e8 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -1,7 +1,6 @@ from __future__ import annotations from abc import ABC -from contextlib import suppress from pydantic import AliasChoices, ConfigDict, Field @@ -77,8 +76,6 @@ def get_parents( ) -> list[str]: """Get kebab-cased superclasses' names. - Can only return subclasses of ``BaseDefaultsComponent``. 
- :param __class_or_tuple: "Furthest" ancestors to look for, defaults to BaseDefaultsComponent :return: All ancestors that match the requirements @@ -86,8 +83,7 @@ def get_parents( bases = [] for base in cls.mro(): if issubclass_patched(base, __class_or_tuple): - with suppress(AttributeError): - bases.append(base.type) # pyright: ignore[reportGeneralTypeIssues] + bases.append(base) return bases def add_input_topics(self, topics: list[str]) -> None: From d54e7d16f26193d8c3923d52d67b72218276eebf Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Thu, 11 Jan 2024 14:56:58 +0100 Subject: [PATCH 34/36] Refactor parents method as classproperty & remove self --- .../dependencies/kpops_structure.yaml | 12 +------ hooks/gen_docs/gen_docs_components.py | 2 +- .../base_components/pipeline_component.py | 31 ++++++++++--------- kpops/utils/pydantic.py | 4 ++- 4 files changed, 21 insertions(+), 28 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 679bef784..52192cb22 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -89,51 +89,43 @@ kpops_components_inheritance_ref: bases: - kubernetes-app parents: - - helm-app - kubernetes-app - pipeline-component kafka-app: bases: - pipeline-component parents: - - kafka-app - pipeline-component kafka-connector: bases: - pipeline-component parents: - - kafka-connector - pipeline-component kafka-sink-connector: bases: - kafka-connector parents: - - kafka-sink-connector - kafka-connector - pipeline-component kafka-source-connector: bases: - kafka-connector parents: - - kafka-source-connector - kafka-connector - pipeline-component kubernetes-app: bases: - pipeline-component parents: - - kubernetes-app - pipeline-component pipeline-component: bases: [] - parents: - - pipeline-component + parents: [] producer-app: bases: - kafka-app - streams-bootstrap parents: - - producer-app - kafka-app - streams-bootstrap - helm-app @@ -144,7 +136,6 @@ kpops_components_inheritance_ref: - kafka-app - streams-bootstrap parents: - - streams-app - kafka-app - streams-bootstrap - helm-app @@ -154,7 +145,6 @@ kpops_components_inheritance_ref: bases: - helm-app parents: - - streams-bootstrap - helm-app - kubernetes-app - pipeline-component diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index c103b2936..58edfcf34 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -57,7 +57,7 @@ type[PipelineComponent], parent, ).type - for parent in component.get_parents(PipelineComponent) + for parent in component.parents ], } for component in KPOPS_COMPONENTS diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index 77946a9e8..edb0706d3 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -18,6 +18,7 @@ TopicConfig, ToSection, ) +from kpops.utils import cached_classproperty from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import issubclass_patched @@ -70,21 +71,21 @@ def __init__(self, **kwargs) -> None: def full_name(self) -> str: return self.prefix + self.name - @classmethod - def get_parents( - cls: type[Self], __class_or_tuple: type = BaseDefaultsComponent - ) -> list[str]: - """Get kebab-cased 
superclasses' names. - - :param __class_or_tuple: "Furthest" ancestors to look for, - defaults to BaseDefaultsComponent - :return: All ancestors that match the requirements - """ - bases = [] - for base in cls.mro(): - if issubclass_patched(base, __class_or_tuple): - bases.append(base) - return bases + @cached_classproperty + def parents(cls: type[Self]) -> tuple[type[PipelineComponent], ...]: # pyright: ignore[reportGeneralTypeIssues] + """Get parent components. + + :return: All ancestor KPOps components + """ + + def gen_parents(): + for base in cls.mro(): + # skip class itself and non-component ancestors + if base is cls or not issubclass_patched(base, PipelineComponent): + continue + yield base + + return tuple(gen_parents()) def add_input_topics(self, topics: list[str]) -> None: """Add given topics to the list of input topics. diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 26fcc5388..10c4b9415 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -95,7 +95,9 @@ def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> dict[str, } -def issubclass_patched(__cls: type, __class_or_tuple: type = BaseModel) -> bool: +def issubclass_patched( + __cls: type, __class_or_tuple: type | tuple[type, ...] = BaseModel +) -> bool: """Pydantic breaks ``issubclass``. ``issubclass(set[str], set) # True`` From e27b6fb66ff6bc83f56b3bc754999ef007e09953 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 11 Jan 2024 18:07:29 +0200 Subject: [PATCH 35/36] copy-paste from backup branch --- .../architecture/components-hierarchy.md | 6 +- ...aults_pipeline_component_dependencies.yaml | 4 +- .../dependencies/kpops_structure.yaml | 12 +- .../pipeline_component_dependencies.yaml | 3 + .../pipeline-components/helm-app.yaml | 16 +- .../pipeline-components/kafka-app.yaml | 32 +++- .../pipeline-components/kafka-connector.yaml | 16 +- .../kafka-sink-connector.yaml | 16 +- .../kafka-source-connector.yaml | 8 +- .../pipeline-components/kubernetes-app.yaml | 16 +- .../pipeline-components/pipeline.yaml | 120 ++++++------ .../pipeline-components/producer-app.yaml | 12 +- .../sections/app-kafka-app.yaml | 4 +- .../sections/app-producer-app.yaml | 4 +- .../sections/app-streams-app.yaml | 4 +- .../pipeline-components/sections/from_.yaml | 8 +- .../pipeline-components/sections/prefix.yaml | 2 +- .../pipeline-components/sections/to.yaml | 6 +- .../pipeline-components/streams-app.yaml | 20 +- .../resources/pipeline-config/config.yaml | 4 +- .../pipeline-defaults/defaults-kafka-app.yaml | 47 +---- .../defaults-kafka-connector.yaml | 16 +- .../defaults-kubernetes-app.yaml | 16 +- .../defaults-producer-app.yaml | 4 +- .../defaults-streams-app.yaml | 4 +- .../resources/pipeline-defaults/defaults.yaml | 87 +++------ .../resources/variables/config_env_vars.env | 4 +- .../resources/variables/config_env_vars.md | 4 +- docs/docs/schema/config.json | 8 +- docs/docs/schema/defaults.json | 179 ++++++------------ docs/docs/schema/pipeline.json | 33 +--- .../core-concepts/components/producer-app.md | 2 +- .../core-concepts/components/streams-app.md | 2 +- .../components/streams-bootstrap.md | 25 --- .../core-concepts/variables/substitution.md | 2 +- docs/docs/user/migration-guide/v2-v3.md | 19 ++ docs/mkdocs.yml | 1 - .../bakdata/atm-fraud-detection/config.yaml | 4 +- .../bakdata/atm-fraud-detection/defaults.yaml | 6 +- .../bakdata/atm-fraud-detection/pipeline.yaml | 2 +- kpops/components/__init__.py | 5 +- kpops/components/base_components/kafka_app.py | 134 ++++++++----- 
.../base_components/pipeline_component.py | 4 +- .../components/streams_bootstrap/__init__.py | 36 +--- .../producer/producer_app.py | 46 ++--- .../streams_bootstrap/streams/model.py | 10 +- .../streams_bootstrap/streams/streams_app.py | 54 +++--- kpops/config.py | 4 +- kpops/pipeline.py | 11 +- .../snapshots/snap_test_schema_generation.py | 10 +- tests/cli/test_kpops_config.py | 4 +- tests/cli/test_registry.py | 3 +- tests/compiler/test_pipeline_name.py | 36 ++-- tests/components/test_helm_app.py | 14 +- ...streams_bootstrap.py => test_kafka_app.py} | 39 ++-- tests/components/test_kafka_connector.py | 4 +- tests/components/test_kubernetes_app.py | 2 +- tests/components/test_producer_app.py | 142 ++++++-------- tests/components/test_streams_app.py | 140 ++++++-------- tests/pipeline/resources/defaults.yaml | 4 +- .../no-topics-defaults/defaults.yaml | 6 +- .../defaults.yaml | 4 +- .../pipeline-with-env-defaults/defaults.yaml | 4 +- .../pipeline-with-short-topics/defaults.yaml | 2 +- .../read-from-component/pipeline.yaml | 2 +- .../temp-trim-release-name/defaults.yaml | 2 +- tests/pipeline/test_components/components.py | 9 +- .../components.py | 9 +- 68 files changed, 654 insertions(+), 864 deletions(-) delete mode 100644 docs/docs/user/core-concepts/components/streams-bootstrap.md rename tests/components/{test_streams_bootstrap.py => test_kafka_app.py} (72%) diff --git a/docs/docs/resources/architecture/components-hierarchy.md b/docs/docs/resources/architecture/components-hierarchy.md index ce24acc46..190c44f82 100644 --- a/docs/docs/resources/architecture/components-hierarchy.md +++ b/docs/docs/resources/architecture/components-hierarchy.md @@ -1,13 +1,10 @@ ```mermaid flowchart BT KubernetesApp --> PipelineComponent - KafkaApp --> PipelineComponent HelmApp --> KubernetesApp - StreamsBootstrap --> HelmApp + KafkaApp --> HelmApp StreamsApp --> KafkaApp - StreamsApp --> StreamsBootstrap ProducerApp --> KafkaApp - ProducerApp --> StreamsBootstrap KafkaConnector --> PipelineComponent KafkaSourceConnector --> KafkaConnector KafkaSinkConnector --> KafkaConnector @@ -15,7 +12,6 @@ flowchart BT click KubernetesApp "/kpops/user/core-concepts/components/kubernetes-app" click HelmApp "/kpops/user/core-concepts/components/helm-app" click KafkaApp "/kpops/user/core-concepts/components/kafka-app" - click StreamsBootstrap "/kpops/user/core-concepts/components/streams-bootstrap" click StreamsApp "/kpops/user/core-concepts/components/streams-app" click ProducerApp "/kpops/user/core-concepts/components/producer-app" click KafkaConnector "/kpops/user/core-concepts/components/kafka-connector" diff --git a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml index 959596df0..4e12885af 100644 --- a/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/defaults_pipeline_component_dependencies.yaml @@ -2,10 +2,8 @@ helm-app.yaml: - app-helm-app.yaml - repo_config-helm-app.yaml kafka-app.yaml: -- prefix.yaml -- from_.yaml -- to.yaml - app-kafka-app.yaml +- version-kafka-app.yaml kafka-connector.yaml: - prefix.yaml - from_.yaml diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 52192cb22..668f05214 100644 --- 
a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -13,7 +13,10 @@ kpops_components_fields: - prefix - from_ - to + - namespace - app + - repo_config + - version kafka-connector: - name - prefix @@ -75,15 +78,6 @@ kpops_components_fields: - app - repo_config - version - streams-bootstrap: - - name - - prefix - - from_ - - to - - namespace - - app - - repo_config - - version kpops_components_inheritance_ref: helm-app: bases: diff --git a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml index b633db907..8504a0135 100644 --- a/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/pipeline_component_dependencies.yaml @@ -10,7 +10,10 @@ kafka-app.yaml: - prefix.yaml - from_.yaml - to.yaml +- namespace.yaml - app-kafka-app.yaml +- repo_config-helm-app.yaml +- version-kafka-app.yaml kafka-connector.yaml: - prefix.yaml - from_.yaml diff --git a/docs/docs/resources/pipeline-components/helm-app.yaml b/docs/docs/resources/pipeline-components/helm-app.yaml index 8f0a59c86..1bd2ce3c0 100644 --- a/docs/docs/resources/pipeline-components/helm-app.yaml +++ b/docs/docs/resources/pipeline-components/helm-app.yaml @@ -3,16 +3,16 @@ name: helm-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml index ff2b5500c..83a67b4cf 100644 --- a/docs/docs/resources/pipeline-components/kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/kafka-app.yaml @@ -4,16 +4,16 @@ name: kafka-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
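The hierarchy and structure changes above boil down to `KafkaApp` now inheriting from `HelmApp` rather than directly from `PipelineComponent`, which is why `namespace`, `repo_config`, and `version` show up under `kafka-app` in `kpops_structure.yaml`. A rough sketch of that shape using illustrative Pydantic models (not the real kpops classes; the `"2.9.0"` default mirrors the generated schema):

```python
from pydantic import BaseModel


class HelmRepoConfig(BaseModel):
    repository_name: str = "bakdata-streams-bootstrap"
    url: str = "https://bakdata.github.io/streams-bootstrap/"


class HelmApp(BaseModel):
    name: str
    namespace: str
    repo_config: HelmRepoConfig | None = None
    version: str | None = None


class KafkaApp(HelmApp):
    # inherits namespace and repo_config from HelmApp and narrows the chart version default,
    # which is why these fields now appear under kafka-app in kpops_structure.yaml
    version: str | None = "2.9.0"


app = KafkaApp(name="my-app", namespace="namespace", repo_config=HelmRepoConfig())
```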
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -29,11 +29,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -44,11 +44,23 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model + namespace: namespace # required # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false + version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-components/kafka-connector.yaml b/docs/docs/resources/pipeline-components/kafka-connector.yaml index d44aa7bce..ca6cfc6eb 100644 --- a/docs/docs/resources/pipeline-components/kafka-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-connector.yaml @@ -2,16 +2,16 @@ name: kafka-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -27,11 +27,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml index 017511e5b..06d14ffe1 100644 --- a/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-sink-connector.yaml @@ -3,16 +3,16 @@ name: kafka-sink-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml index d4cbcb24c..e38497b65 100644 --- a/docs/docs/resources/pipeline-components/kafka-source-connector.yaml +++ b/docs/docs/resources/pipeline-components/kafka-source-connector.yaml @@ -3,17 +3,17 @@ name: kafka-source-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # The source connector has no `from` section # from: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/kubernetes-app.yaml b/docs/docs/resources/pipeline-components/kubernetes-app.yaml index 5170768c2..66ed21bb2 100644 --- a/docs/docs/resources/pipeline-components/kubernetes-app.yaml +++ b/docs/docs/resources/pipeline-components/kubernetes-app.yaml @@ -3,16 +3,16 @@ name: kubernetes-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index 0315bfaf4..cdbd18d96 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -3,16 +3,16 @@ name: helm-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -28,11 +28,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -67,16 +67,16 @@ name: kafka-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -92,11 +92,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -107,29 +107,41 @@ cleanup.policy: compact models: # SchemaProvider is initiated with the values given here model: model + namespace: namespace # required # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app + # Helm repository configuration (optional) + # If not set the helm repo add will not be called. 
Useful when using local Helm charts + repo_config: + repository_name: bakdata-streams-bootstrap # required + url: https://bakdata.github.io/streams-bootstrap/ # required + repo_auth_flags: + username: user + password: pass + ca_file: /home/user/path/to/ca-file + insecure_skip_tls_verify: false + version: "2.12.0" # Helm chart version # Kafka sink connector - type: kafka-sink-connector name: kafka-sink-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -145,11 +157,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -185,17 +197,17 @@ name: kafka-source-connector # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # The source connector has no `from` section # from: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -234,16 +246,16 @@ name: kubernetes-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
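The `repo_config` block added to the kafka-app example above carries exactly the information a `helm repo add` invocation needs; when it is omitted, the repo is not added and a local chart path is expected. A hedged sketch of how such a config could be translated into CLI flags (an illustrative helper only, not the kpops Helm wrapper):

```python
def helm_repo_add_command(
    repository_name: str,
    url: str,
    *,
    username: str | None = None,
    password: str | None = None,
    ca_file: str | None = None,
    insecure_skip_tls_verify: bool = False,
) -> list[str]:
    # build the argument list for `helm repo add` from a repo_config-like structure
    cmd = ["helm", "repo", "add", repository_name, url]
    if username:
        cmd += ["--username", username]
    if password:
        cmd += ["--password", password]
    if ca_file:
        cmd += ["--ca-file", ca_file]
    if insecure_skip_tls_verify:
        cmd.append("--insecure-skip-tls-verify")
    return cmd


print(helm_repo_add_command("bakdata-streams-bootstrap", "https://bakdata.github.io/streams-bootstrap/"))
```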
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -259,11 +271,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -289,17 +301,17 @@ name: producer-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # from: # While the producer-app does inherit from kafka-app, it does not need a # `from` section, hence it does not support it. # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -315,8 +327,8 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 @@ -339,16 +351,16 @@ name: streams-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -364,11 +376,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -386,8 +398,8 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 5be3551d8..784873617 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -6,17 +6,17 @@ name: producer-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- # from: # While the producer-app does inherit from kafka-app, it does not need a # `from` section, hence it does not support it. # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -32,8 +32,8 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml index 73b70c59e..5ae8be6d6 100644 --- a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml @@ -2,7 +2,7 @@ # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml index 0cbe04ded..0fe6680cd 100644 --- a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml @@ -2,8 +2,8 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml index 1c5f0849f..e3577aa5f 100644 --- a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml @@ -4,8 +4,8 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-components/sections/from_.yaml b/docs/docs/resources/pipeline-components/sections/from_.yaml index 3f7f0dd22..777d10d0e 100644 --- a/docs/docs/resources/pipeline-components/sections/from_.yaml +++ b/docs/docs/resources/pipeline-components/sections/from_.yaml @@ -1,12 +1,12 @@ from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component diff --git a/docs/docs/resources/pipeline-components/sections/prefix.yaml b/docs/docs/resources/pipeline-components/sections/prefix.yaml index 91fbda223..b4d03f519 100644 --- a/docs/docs/resources/pipeline-components/sections/prefix.yaml +++ b/docs/docs/resources/pipeline-components/sections/prefix.yaml @@ -1,3 +1,3 @@ # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- diff --git a/docs/docs/resources/pipeline-components/sections/to.yaml b/docs/docs/resources/pipeline-components/sections/to.yaml index dd81be9ef..7ebaf60df 100644 --- a/docs/docs/resources/pipeline-components/sections/to.yaml +++ b/docs/docs/resources/pipeline-components/sections/to.yaml @@ -1,11 +1,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml index f77edf80c..1e79eaf0b 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -4,16 +4,16 @@ name: streams-app # required # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -29,11 +29,11 @@ # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -51,8 +51,8 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-config/config.yaml b/docs/docs/resources/pipeline-config/config.yaml index 3b08c0708..275382d46 100644 --- a/docs/docs/resources/pipeline-config/config.yaml +++ b/docs/docs/resources/pipeline-config/config.yaml @@ -16,9 +16,9 @@ defaults_filename_prefix: defaults # Configures topic names. 
topic_name_config: # Configures the value for the variable ${output_topic_name} - default_output_topic_name: ${pipeline_name}-${component_name} + default_output_topic_name: ${pipeline.name}-${component_name} # Configures the value for the variable ${error_topic_name} - default_error_topic_name: ${pipeline_name}-${component_name}-error + default_error_topic_name: ${pipeline.name}-${component_name}-error # Address of the Schema Registry schema_registry_url: "http://localhost:8081" # Address of the Kafka REST Proxy. diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml index d37dad1bb..7320042af 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml @@ -3,53 +3,12 @@ # Parent of: ProducerApp, StreamsApp # Child of: KubernetesApp kafka-app: - # Pipeline prefix that will prefix every component name. If you wish to not - # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- - from: # Must not be null - topics: # read from topic - ${pipeline_name}-input-topic: - type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: - role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: - type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: - type: pattern # Implied to be an extra pattern if `role` is defined - role: some-role - components: # read from specific component - account-producer: - type: output # Implied when role is NOT specified - other-producer: - role: some-role # Implies `type` to be extra - component-as-input-pattern: - type: pattern # Implied to be an input pattern if `role` is undefined - component-as-extra-pattern: - type: pattern # Implied to be an extra pattern if `role` is defined - role: some-role - # Topic(s) into which the component will write output - to: - topics: - ${pipeline_name}-output-topic: - type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: - role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: - type: error - # Currently KPOps supports Avro and JSON schemas. - key_schema: key-schema # must implement SchemaProvider to use - value_schema: value-schema - partitions_count: 1 - replication_factor: 1 - configs: # https://kafka.apache.org/documentation/#topicconfigs - cleanup.policy: compact - models: # SchemaProvider is initiated with the values given here - model: model # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app + version: "2.12.0" # Helm chart version diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml index 8aa5e8ac2..489bf8bb1 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-connector.yaml @@ -5,16 +5,16 @@ kafka-connector: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -30,11 +30,11 @@ kafka-connector: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml index 5dd85e9ce..0780de384 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kubernetes-app.yaml @@ -5,16 +5,16 @@ kubernetes-app: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -30,11 +30,11 @@ kubernetes-app: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use diff --git a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml index bfa5521c4..a5b4a0f6f 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml @@ -10,8 +10,8 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 diff --git a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml index ae1adab98..4db627950 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml @@ -9,8 +9,8 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index 5c71248c2..3c1550af3 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -24,56 +24,15 @@ helm-app: # Parent of: ProducerApp, StreamsApp # Child of: KubernetesApp kafka-app: - # Pipeline prefix that will prefix every component name. If you wish to not - # have any prefix you can specify an empty string. 
- prefix: ${pipeline_name}- - from: # Must not be null - topics: # read from topic - ${pipeline_name}-input-topic: - type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: - role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: - type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: - type: pattern # Implied to be an extra pattern if `role` is defined - role: some-role - components: # read from specific component - account-producer: - type: output # Implied when role is NOT specified - other-producer: - role: some-role # Implies `type` to be extra - component-as-input-pattern: - type: pattern # Implied to be an input pattern if `role` is undefined - component-as-extra-pattern: - type: pattern # Implied to be an extra pattern if `role` is defined - role: some-role - # Topic(s) into which the component will write output - to: - topics: - ${pipeline_name}-output-topic: - type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: - role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: - type: error - # Currently KPOps supports Avro and JSON schemas. - key_schema: key-schema # must implement SchemaProvider to use - value_schema: value-schema - partitions_count: 1 - replication_factor: 1 - configs: # https://kafka.apache.org/documentation/#topicconfigs - cleanup.policy: compact - models: # SchemaProvider is initiated with the values given here - model: model # `app` can contain application-specific settings, hence the user is free to # add the key-value pairs they need. app: # required streams: # required - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app + version: "2.12.0" # Helm chart version # Kafka connector # # Parent of: KafkaSinkConnector, KafkaSourceConnector @@ -81,16 +40,16 @@ kafka-app: kafka-connector: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -106,11 +65,11 @@ kafka-connector: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. 
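The trimmed-down `kafka-app` defaults above lean on KPOps' cascading defaults: `defaults.yaml` entries are keyed by component type, and a more specific type (e.g. `streams-app`) inherits and overrides whatever its parents (`kafka-app`, `kubernetes-app`; `helm-app` omitted here for brevity) define, so each section only needs its own additions. A hedged sketch of that merge order — the real merging happens inside KPOps and may differ in detail:

```python
def deep_merge(base: dict, override: dict) -> dict:
    """Recursively merge ``override`` into ``base`` without mutating either."""
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged


defaults = {
    "kubernetes-app": {"prefix": "${pipeline.name}-"},
    "kafka-app": {
        "app": {"streams": {"brokers": "${config.kafka_brokers}"}},
        "version": "2.12.0",
    },
    "streams-app": {"app": {"streams": {"inputTopics": ["topic1", "topic2"]}}},
}

# parent-to-child order: later (more specific) component types override earlier ones
resolved: dict = {}
for component_type in ("kubernetes-app", "kafka-app", "streams-app"):
    resolved = deep_merge(resolved, defaults.get(component_type, {}))

print(resolved["prefix"])                     # ${pipeline.name}-
print(resolved["app"]["streams"]["brokers"])  # ${config.kafka_brokers}
```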
key_schema: key-schema # must implement SchemaProvider to use @@ -162,16 +121,16 @@ kafka-source-connector: kubernetes-app: # Pipeline prefix that will prefix every component name. If you wish to not # have any prefix you can specify an empty string. - prefix: ${pipeline_name}- + prefix: ${pipeline.name}- from: # Must not be null topics: # read from topic - ${pipeline_name}-input-topic: + ${pipeline.name}-input-topic: type: input # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra - ${pipeline_name}-input-pattern-topic: + ${pipeline.name}-input-pattern-topic: type: pattern # Implied to be an input pattern if `role` is undefined - ${pipeline_name}-extra-pattern-topic: + ${pipeline.name}-extra-pattern-topic: type: pattern # Implied to be an extra pattern if `role` is defined role: some-role components: # read from specific component @@ -187,11 +146,11 @@ kubernetes-app: # Topic(s) into which the component will write output to: topics: - ${pipeline_name}-output-topic: + ${pipeline.name}-output-topic: type: output # Implied when role is NOT specified - ${pipeline_name}-extra-topic: + ${pipeline.name}-extra-topic: role: topic-role # Implies `type` to be extra; Will throw an error if `type` is defined - ${pipeline_name}-error-topic: + ${pipeline.name}-error-topic: type: error # Currently KPOps supports Avro and JSON schemas. key_schema: key-schema # must implement SchemaProvider to use @@ -221,8 +180,8 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} outputTopic: output_topic extraOutputTopics: output_role1: output_topic1 @@ -239,8 +198,8 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${kafka_brokers} # required - schemaRegistryUrl: ${schema_registry_url} + brokers: ${config.kafka_brokers} # required + schemaRegistryUrl: ${config.schema_registry.url} inputTopics: - topic1 - topic2 diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index cc1f68943..42d2dead8 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -25,10 +25,10 @@ KPOPS_KAFKA_BROKERS # No default value, required KPOPS_DEFAULTS_FILENAME_PREFIX=defaults # topic_name_config.default_output_topic_name # Configures the value for the variable ${output_topic_name} -KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline_name}-${component.name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline.name}-${component.name} # topic_name_config.default_error_topic_name # Configures the value for the variable ${error_topic_name} -KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline_name}-${component.name}-error +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline.name}-${component.name}-error # schema_registry.enabled # Whether the Schema Registry handler should be initialized. 
KPOPS_SCHEMA_REGISTRY__ENABLED=False diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index fd635278a..ef0a7726f 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -7,8 +7,8 @@ These variables are a lower priority alternative to the settings in `config.yaml |KPOPS_PIPELINE_BASE_DIR |. |False |Base directory to the pipelines (default is current working directory) |pipeline_base_dir | |KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | |KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix | -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component.name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| -|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component.name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline.name}-${component.name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline.name}-${component.name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | |KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. |schema_registry.enabled | |KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url | |KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index c4ed0b1d4..98056fca0 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -119,13 +119,13 @@ "description": "Configure the topic name variables you can use in the pipeline definition.", "properties": { "default_error_topic_name": { - "default": "${pipeline_name}-${component.name}-error", + "default": "${pipeline.name}-${component.name}-error", "description": "Configures the value for the variable ${error_topic_name}", "title": "Default Error Topic Name", "type": "string" }, "default_output_topic_name": { - "default": "${pipeline_name}-${component.name}", + "default": "${pipeline.name}-${component.name}", "description": "Configures the value for the variable ${output_topic_name}", "title": "Default Output Topic Name", "type": "string" @@ -266,8 +266,8 @@ } ], "default": { - "default_error_topic_name": "${pipeline_name}-${component.name}-error", - "default_output_topic_name": "${pipeline_name}-${component.name}" + "default_error_topic_name": "${pipeline.name}-${component.name}-error", + "default_output_topic_name": "${pipeline.name}-${component.name}" }, "description": "Configure the topic name variables you can use in the pipeline definition." 
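The environment variables listed above follow the usual pydantic-settings convention of a `KPOPS_` prefix plus `__` as the nested-field delimiter, so `KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME` maps to `topic_name_config.default_output_topic_name`. A minimal sketch of such a settings model — illustrative only, the actual config model lives in `kpops/config.py`:

```python
from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class TopicNameConfig(BaseModel):
    default_output_topic_name: str = "${pipeline.name}-${component.name}"
    default_error_topic_name: str = "${pipeline.name}-${component.name}-error"


class KpopsConfig(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__")

    kafka_brokers: str  # required, matching KPOPS_KAFKA_BROKERS having no default
    topic_name_config: TopicNameConfig = TopicNameConfig()


# With KPOPS_KAFKA_BROKERS and KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME set in the
# environment, KpopsConfig() picks up both the top-level and the nested field.
```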
} diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json index aa392179b..d81314997 100644 --- a/docs/docs/schema/defaults.json +++ b/docs/docs/schema/defaults.json @@ -95,7 +95,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -247,12 +247,36 @@ "title": "Name", "type": "string" }, + "namespace": { + "description": "Namespace in which the component shall be deployed", + "title": "Namespace", + "type": "string" + }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" }, + "repo_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + } + ], + "default": { + "repo_auth_flags": { + "ca_file": null, + "cert_file": null, + "insecure_skip_tls_verify": false, + "password": null, + "username": null + }, + "repository_name": "bakdata-streams-bootstrap", + "url": "https://bakdata.github.io/streams-bootstrap/" + }, + "description": "Configuration of the Helm chart repo to be used for deploying the component" + }, "to": { "anyOf": [ { @@ -264,10 +288,24 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "2.9.0", + "description": "Helm chart version", + "title": "Version" } }, "required": [ "name", + "namespace", "app" ], "title": "KafkaApp", @@ -341,7 +379,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -458,7 +496,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -575,7 +613,7 @@ "title": "Offset Topic" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -708,7 +746,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -773,7 +811,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -799,7 +837,7 @@ }, "ProducerApp": { "additionalProperties": true, - "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. 
Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ @@ -826,7 +864,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -1041,7 +1079,7 @@ }, "StreamsApp": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams-bootstrap app.", + "description": "StreamsApp component that configures a streams bootstrap app.", "properties": { "app": { "allOf": [ @@ -1075,7 +1113,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -1220,7 +1258,7 @@ }, "StreamsAppValues": { "additionalProperties": true, - "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { "anyOf": [ @@ -1232,7 +1270,7 @@ } ], "default": null, - "description": "Kubernetes event-driven autoscaling config" + "description": "Kubernetes Event-driven Autoscaling config" }, "nameOverride": { "anyOf": [ @@ -1253,7 +1291,7 @@ "$ref": "#/$defs/StreamsConfig" } ], - "description": "streams-bootstrap streams section" + "description": "Streams Bootstrap streams section" } }, "required": [ @@ -1262,100 +1300,6 @@ "title": "StreamsAppValues", "type": "object" }, - "StreamsBootstrap": { - "additionalProperties": true, - "description": "Base for components with a streams-bootstrap Helm chart.", - "properties": { - "app": { - "allOf": [ - { - "$ref": "#/$defs/HelmAppValues" - } - ], - "description": "Helm app values" - }, - "from": { - "anyOf": [ - { - "$ref": "#/$defs/FromSection" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Topic(s) and/or components from which the component will read input", - "title": "From" - }, - "name": { - "description": "Component name", - "title": "Name", - "type": "string" - }, - "namespace": { - "description": "Namespace in which the component shall be deployed", - "title": "Namespace", - "type": "string" - }, - "prefix": { - "default": "${pipeline_name}-", - "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", - "title": "Prefix", - "type": "string" - }, - "repo_config": { - "allOf": [ - { - "$ref": "#/$defs/HelmRepoConfig" - } - ], - "default": { - "repo_auth_flags": { - "ca_file": null, - "cert_file": null, - "insecure_skip_tls_verify": false, - "password": null, - "username": null - }, - "repository_name": "bakdata-streams-bootstrap", - "url": "https://bakdata.github.io/streams-bootstrap/" - }, - "description": "Configuration of the Helm chart repo to be used for deploying the component" - }, - "to": { - "anyOf": [ - { - "$ref": "#/$defs/ToSection" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Topic(s) into which the component will write output" - }, - "version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "2.9.0", - "description": "Helm chart version", - "title": "Version" - } - }, - "required": [ - "name", - "namespace", - "app" - ], - "title": "StreamsBootstrap", - "type": "object" - }, "StreamsConfig": { "additionalProperties": true, "description": "Streams Bootstrap streams section.", @@ -1371,19 +1315,6 @@ "title": "Config", "type": "object" }, - "deleteOutput": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", - "title": "Deleteoutput" - }, "errorTopic": { "anyOf": [ { @@ -1638,9 +1569,6 @@ }, "streams-app": { "$ref": "#/$defs/StreamsApp" - }, - "streams-bootstrap": { - "$ref": "#/$defs/StreamsBootstrap" } }, "required": [ @@ -1652,8 +1580,7 @@ "kubernetes-app", "pipeline-component", "producer-app", - "streams-app", - "streams-bootstrap" + "streams-app" ], "title": "DefaultsSchema", "type": "object" diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 45b187e89..f6bd2eeff 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -95,7 +95,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -271,7 +271,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -388,7 +388,7 @@ "title": "Offset Topic" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -467,7 +467,7 @@ }, "ProducerApp": { "additionalProperties": true, - "description": "Producer component.\nThis producer holds configuration to use as values for the streams-bootstrap producer Helm chart. Note that the producer does not support error topics.", + "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ @@ -494,7 +494,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -709,7 +709,7 @@ }, "StreamsApp": { "additionalProperties": true, - "description": "StreamsApp component that configures a streams-bootstrap app.", + "description": "StreamsApp component that configures a streams bootstrap app.", "properties": { "app": { "allOf": [ @@ -743,7 +743,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -888,7 +888,7 @@ }, "StreamsAppValues": { "additionalProperties": true, - "description": "streams-bootstrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", + "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { "anyOf": [ @@ -900,7 +900,7 @@ } ], "default": null, - "description": "Kubernetes event-driven autoscaling config" + "description": "Kubernetes Event-driven Autoscaling config" }, "nameOverride": { "anyOf": [ @@ -921,7 +921,7 @@ "$ref": "#/$defs/StreamsConfig" } ], - "description": "streams-bootstrap streams section" + "description": "Streams Bootstrap streams section" } }, "required": [ @@ -945,19 +945,6 @@ "title": "Config", "type": "object" }, - "deleteOutput": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup", - "title": "Deleteoutput" - }, "errorTopic": { "anyOf": [ { diff --git a/docs/docs/user/core-concepts/components/producer-app.md b/docs/docs/user/core-concepts/components/producer-app.md index bff598d53..1f55fa6d9 100644 --- a/docs/docs/user/core-concepts/components/producer-app.md +++ b/docs/docs/user/core-concepts/components/producer-app.md @@ -1,6 +1,6 @@ # ProducerApp -Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md). +Subclass of [_KafkaApp_](kafka-app.md). ### Usage diff --git a/docs/docs/user/core-concepts/components/streams-app.md b/docs/docs/user/core-concepts/components/streams-app.md index d34705062..ac881ade2 100644 --- a/docs/docs/user/core-concepts/components/streams-app.md +++ b/docs/docs/user/core-concepts/components/streams-app.md @@ -1,6 +1,6 @@ # StreamsApp -Subclass of [_KafkaApp_](kafka-app.md) and [_StreamsBootstrap_](streams-bootstrap.md). +Subclass of [_KafkaApp_](kafka-app.md). ### Usage diff --git a/docs/docs/user/core-concepts/components/streams-bootstrap.md b/docs/docs/user/core-concepts/components/streams-bootstrap.md deleted file mode 100644 index 52bb5fa0e..000000000 --- a/docs/docs/user/core-concepts/components/streams-bootstrap.md +++ /dev/null @@ -1,25 +0,0 @@ -# StreamsApp - -Subclass of [_HelmApp_](helm-app.md). - -### Usage - -Configures a Helm app with [streams-bootstrap Helm charts](https://github.com/bakdata/streams-bootstrap){target=_blank}. 
- -### Operations - -#### deploy - -Deploy using Helm. - -#### destroy - -Uninstall Helm release. - -#### reset - -Do nothing. - -#### clean - -Do nothing. diff --git a/docs/docs/user/core-concepts/variables/substitution.md b/docs/docs/user/core-concepts/variables/substitution.md index 71782180d..b1bfa97e3 100644 --- a/docs/docs/user/core-concepts/variables/substitution.md +++ b/docs/docs/user/core-concepts/variables/substitution.md @@ -41,7 +41,7 @@ Environment variables such as `$PATH` can be used in the pipeline definition and These are special variables that refer to the name and path of a pipeline. -- `${pipeline_name}`: Concatenated path of the parent directory where pipeline.yaml is defined in. +- `${pipeline.name}`: Concatenated path of the parent directory where pipeline.yaml is defined in. For instance, `./data/pipelines/v1/pipeline.yaml`, here the value for the variable would be `data-pipelines-v1`. - `${pipeline_name_}`: Similar to the previous variable, each `` contains a part of the path to the `pipeline.yaml` file. diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index 9d94af628..c4b42c3fa 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -188,3 +188,22 @@ If you're using this functionality in your custom components, it needs to be upd """Render final component resources, e.g. Kubernetes manifests.""" return [] # list of manifests ``` + +## [Namespace substitution vars](https://github.com/bakdata/kpops/pull/408) + +The global configuration variables are now namespaced under the config key, such as `${config.kafka_brokers}`, `${config.schema_registry.url}`. Same with pipeline variables, e.g. `${pipeline_name} → ${pipeline.name}`. +This would make it more uniform with the existing `${component.}` variables. 
+ +### pipeline.yaml + +```diff + name: kafka-app +- prefix: ${pipeline_name}- ++ prefix: ${pipeline.name}- + app: + streams: +- brokers: ${kafka_brokers} +- schemaRegistryUrl: ${schema_registry.url} ++ brokers: ${config.kafka_brokers} ++ schemaRegistryUrl: ${config.schema_registry.url} +``` diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index d436c94a5..c6ef09c16 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -95,7 +95,6 @@ nav: - Overview: user/core-concepts/components/overview.md - KubernetesApp: user/core-concepts/components/kubernetes-app.md - HelmApp: user/core-concepts/components/helm-app.md - - StreamsBootstrap: user/core-concepts/components/streams-bootstrap.md - KafkaApp: user/core-concepts/components/kafka-app.md - StreamsApp: user/core-concepts/components/streams-app.md - ProducerApp: user/core-concepts/components/producer-app.md diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index c3195147b..c20493eb7 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -1,6 +1,6 @@ topic_name_config: - default_error_topic_name: "${pipeline_name}-${component.name}-dead-letter-topic" - default_output_topic_name: "${pipeline_name}-${component.name}-topic" + default_error_topic_name: "${pipeline.name}-${component.name}-dead-letter-topic" + default_output_topic_name: "${pipeline.name}-${component.name}-topic" kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" diff --git a/examples/bakdata/atm-fraud-detection/defaults.yaml b/examples/bakdata/atm-fraud-detection/defaults.yaml index 2e9079f4f..a5a060378 100644 --- a/examples/bakdata/atm-fraud-detection/defaults.yaml +++ b/examples/bakdata/atm-fraud-detection/defaults.yaml @@ -10,8 +10,8 @@ kafka-connector: kafka-app: app: streams: - brokers: ${kafka_brokers} - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} + schemaRegistryUrl: ${config.schema_registry.url} optimizeLeaveGroupBehavior: false producer-app: @@ -23,7 +23,7 @@ producer-app: streams-app: app: labels: - pipeline: ${pipeline_name} + pipeline: ${pipeline.name} streams: optimizeLeaveGroupBehavior: false to: diff --git a/examples/bakdata/atm-fraud-detection/pipeline.yaml b/examples/bakdata/atm-fraud-detection/pipeline.yaml index 9982aa0a7..d166a21f4 100644 --- a/examples/bakdata/atm-fraud-detection/pipeline.yaml +++ b/examples/bakdata/atm-fraud-detection/pipeline.yaml @@ -83,7 +83,7 @@ app: connector.class: io.confluent.connect.jdbc.JdbcSinkConnector tasks.max: 1 - topics: ${pipeline_name}-account-linker-topic + topics: ${pipeline.name}-account-linker-topic connection.url: jdbc:postgresql://postgresql-dev.${NAMESPACE}.svc.cluster.local:5432/app_db connection.user: app1 connection.password: AppPassword diff --git a/kpops/components/__init__.py b/kpops/components/__init__.py index dc5fcee9c..98e1d3530 100644 --- a/kpops/components/__init__.py +++ b/kpops/components/__init__.py @@ -7,9 +7,7 @@ PipelineComponent, ) from kpops.components.base_components.kafka_connector import KafkaConnector -from kpops.components.streams_bootstrap import StreamsBootstrap -from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp -from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp +from kpops.components.streams_bootstrap import ProducerApp, StreamsApp __all__ = ( "HelmApp", @@ -18,7 +16,6 @@ "KafkaSinkConnector", "KafkaSourceConnector", "KubernetesApp", - "StreamsBootstrap", 
"ProducerApp", "StreamsApp", "PipelineComponent", diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 7ee67b09c..c7c983e0d 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -7,12 +7,10 @@ from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( - HelmFlags, + HelmRepoConfig, + HelmUpgradeInstallFlags, ) -from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.base_components.helm_app import HelmAppValues -from kpops.components.base_components.pipeline_component import PipelineComponent -from kpops.components.streams_bootstrap import StreamsBootstrap +from kpops.components.base_components.helm_app import HelmApp, HelmAppValues from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel @@ -47,59 +45,38 @@ class KafkaAppValues(HelmAppValues): ) -class KafkaAppCleaner(StreamsBootstrap): - """Helm app for resetting and cleaning a streams-bootstrap app.""" - - @property - @override - def helm_chart(self) -> str: - raise NotImplementedError - - @property - @override - def helm_release_name(self) -> str: - suffix = "-clean" - return create_helm_release_name(self.full_name + suffix, suffix) - - @property - @override - def helm_flags(self) -> HelmFlags: - return HelmFlags( - create_namespace=self.config.create_namespace, - version=self.version, - wait=True, - wait_for_jobs=True, - ) - - @override - def clean(self, dry_run: bool) -> None: - """Clean an app using a cleanup job. - - :param dry_run: Dry run command - """ - log.info(f"Uninstall old cleanup job for {self.helm_release_name}") - self.destroy(dry_run) - - log.info(f"Init cleanup job for {self.helm_release_name}") - self.deploy(dry_run) - - if not self.config.retain_clean_jobs: - log.info(f"Uninstall cleanup job for {self.helm_release_name}") - self.destroy(dry_run) - - -class KafkaApp(PipelineComponent, ABC): +class KafkaApp(HelmApp, ABC): """Base component for Kafka-based components. Producer or streaming apps should inherit from this class. :param app: Application-specific settings + :param repo_config: Configuration of the Helm chart repo to be used for + deploying the component, + defaults to HelmRepoConfig(repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/") + :param version: Helm chart version, defaults to "2.9.0" """ app: KafkaAppValues = Field( default=..., description=describe_attr("app", __doc__), ) + repo_config: HelmRepoConfig = Field( + default=HelmRepoConfig( + repository_name="bakdata-streams-bootstrap", + url="https://bakdata.github.io/streams-bootstrap/", + ), + description=describe_attr("repo_config", __doc__), + ) + version: str | None = Field( + default="2.9.0", + description=describe_attr("version", __doc__), + ) + + @property + def clean_up_helm_chart(self) -> str: + """Helm chart used to destroy and clean this component.""" + raise NotImplementedError @override def deploy(self, dry_run: bool) -> None: @@ -113,3 +90,66 @@ def deploy(self, dry_run: bool) -> None: to_section=self.to, dry_run=dry_run ) super().deploy(dry_run) + + def _run_clean_up_job( + self, + values: dict, + dry_run: bool, + retain_clean_jobs: bool = False, + ) -> None: + """Clean an app using the respective cleanup job. 
+ + :param values: The value YAML for the chart + :param dry_run: Dry run command + :param retain_clean_jobs: Whether to retain the cleanup job, defaults to False + """ + log.info(f"Uninstall old cleanup job for {self.clean_release_name}") + + self.__uninstall_clean_up_job(self.clean_release_name, dry_run) + + log.info(f"Init cleanup job for {self.clean_release_name}") + + stdout = self.__install_clean_up_job(self.clean_release_name, values, dry_run) + + if dry_run: + self.dry_run_handler.print_helm_diff(stdout, self.clean_release_name, log) + + if not retain_clean_jobs: + log.info(f"Uninstall cleanup job for {self.clean_release_name}") + self.__uninstall_clean_up_job(self.clean_release_name, dry_run) + + def __uninstall_clean_up_job(self, release_name: str, dry_run: bool) -> None: + """Uninstall clean up job. + + :param release_name: Name of the Helm release + :param dry_run: Whether to do a dry run of the command + """ + self.helm.uninstall(self.namespace, release_name, dry_run) + + def __install_clean_up_job( + self, + release_name: str, + values: dict, + dry_run: bool, + ) -> str: + """Install clean up job. + + :param release_name: Name of the Helm release + :param suffix: Suffix to add to the release name, e.g. "-clean" + :param values: The Helm values for the chart + :param dry_run: Whether to do a dry run of the command + :return: Return the output of the installation + """ + return self.helm.upgrade_install( + release_name, + self.clean_up_helm_chart, + dry_run, + self.namespace, + values, + HelmUpgradeInstallFlags( + create_namespace=self.config.create_namespace, + version=self.version, + wait=True, + wait_for_jobs=True, + ), + ) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index edb0706d3..4b09b35de 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -34,7 +34,7 @@ class PipelineComponent(BaseDefaultsComponent, ABC): :param name: Component name :param prefix: Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string., - defaults to "${pipeline_name}-" + defaults to "${pipeline.name}-" :param from_: Topic(s) and/or components from which the component will read input, defaults to None :param to: Topic(s) into which the component will write output, @@ -43,7 +43,7 @@ class PipelineComponent(BaseDefaultsComponent, ABC): name: str = Field(default=..., description=describe_attr("name", __doc__)) prefix: str = Field( - default="${pipeline_name}-", + default="${pipeline.name}-", description=describe_attr("prefix", __doc__), ) from_: FromSection | None = Field( diff --git a/kpops/components/streams_bootstrap/__init__.py b/kpops/components/streams_bootstrap/__init__.py index 1b02b091b..097d85b13 100644 --- a/kpops/components/streams_bootstrap/__init__.py +++ b/kpops/components/streams_bootstrap/__init__.py @@ -1,31 +1,7 @@ -from abc import ABC +from kpops.components.streams_bootstrap.producer.producer_app import ProducerApp +from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp -from pydantic import Field - -from kpops.component_handlers.helm_wrapper.model import HelmRepoConfig -from kpops.components.base_components.helm_app import HelmApp -from kpops.utils.docstring import describe_attr - -STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig( - repository_name="bakdata-streams-bootstrap", - url="https://bakdata.github.io/streams-bootstrap/", -) -STREAMS_BOOTSTRAP_VERSION = "2.9.0" - - -class StreamsBootstrap(HelmApp, ABC): - """Base for components with a streams-bootstrap Helm chart. - - :param repo_config: Configuration of the Helm chart repo to be used for - deploying the component, defaults to streams-bootstrap Helm repo - :param version: Helm chart version, defaults to "2.9.0" - """ - - repo_config: HelmRepoConfig = Field( - default=STREAMS_BOOTSTRAP_HELM_REPO, - description=describe_attr("repo_config", __doc__), - ) - version: str | None = Field( - default=STREAMS_BOOTSTRAP_VERSION, - description=describe_attr("version", __doc__), - ) +__all__ = [ + "ProducerApp", + "StreamsApp", +] diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 2d6a586b2..e37529bae 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -1,38 +1,23 @@ -from functools import cached_property +# from __future__ import annotations from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import ( - KafkaApp, - KafkaAppCleaner, -) +from kpops.components.base_components.kafka_app import KafkaApp from kpops.components.base_components.models.to_section import ( OutputTopicTypes, TopicConfig, ) -from kpops.components.streams_bootstrap import StreamsBootstrap from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.producer.model import ProducerAppValues from kpops.utils.docstring import describe_attr -class ProducerAppCleaner(KafkaAppCleaner): - app: ProducerAppValues - - @property - @override - def helm_chart(self) -> str: - return ( - f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}" - ) - - -class ProducerApp(KafkaApp, StreamsBootstrap): +class ProducerApp(KafkaApp): """Producer component. - This producer holds configuration to use as values for the streams-bootstrap - producer Helm chart. 
+ This producer holds configuration to use as values for the streams bootstrap + producer helm chart. Note that the producer does not support error topics. @@ -51,14 +36,6 @@ class ProducerApp(KafkaApp, StreamsBootstrap): description=describe_attr("from_", __doc__), ) - @cached_property - def _cleaner(self) -> ProducerAppCleaner: - return ProducerAppCleaner( - config=self.config, - handlers=self.handlers, - **self.model_dump(), - ) - @override def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: match topic.type: @@ -81,6 +58,17 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None: def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.PRODUCER_APP.value}" + @property + @override + def clean_up_helm_chart(self) -> str: + return ( + f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}" + ) + @override def clean(self, dry_run: bool) -> None: - self._cleaner.clean(dry_run) + self._run_clean_up_job( + values=self.to_helm_values(), + dry_run=dry_run, + retain_clean_jobs=self.config.retain_clean_jobs, + ) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 95100b966..b52bc162c 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -28,7 +28,6 @@ class StreamsConfig(KafkaStreamsConfig): :param output_topic: Output topic, defaults to None :param error_topic: Error topic, defaults to None :param config: Configuration, defaults to {} - :param delete_output: Whether the output topics with their associated schemas and the consumer group should be deleted during the cleanup, defaults to None """ input_topics: list[str] = Field( @@ -55,9 +54,6 @@ class StreamsConfig(KafkaStreamsConfig): config: dict[str, Any] = Field( default={}, description=describe_attr("config", __doc__) ) - delete_output: bool | None = Field( - default=None, description=describe_attr("delete_output", __doc__) - ) def add_input_topics(self, topics: list[str]) -> None: """Add given topics to the list of input topics. @@ -171,12 +167,12 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): class StreamsAppValues(KafkaAppValues): - """streams-bootstrap app configurations. + """StreamsBoostrap app configurations. The attributes correspond to keys and values that are used as values for the streams bootstrap helm chart. 
- :param streams: streams-bootstrap streams section - :param autoscaling: Kubernetes event-driven autoscaling config, defaults to None + :param streams: Streams Bootstrap streams section + :param autoscaling: Kubernetes Event-driven Autoscaling config, defaults to None """ streams: StreamsConfig = Field( diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index 2c632e882..e8a434b70 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -1,29 +1,14 @@ -from functools import cached_property - from pydantic import Field from typing_extensions import override -from kpops.components.base_components.kafka_app import ( - KafkaApp, - KafkaAppCleaner, -) -from kpops.components.streams_bootstrap import StreamsBootstrap +from kpops.components.base_components.kafka_app import KafkaApp from kpops.components.streams_bootstrap.app_type import AppType from kpops.components.streams_bootstrap.streams.model import StreamsAppValues from kpops.utils.docstring import describe_attr -class StreamsAppCleaner(KafkaAppCleaner): - app: StreamsAppValues - - @property - @override - def helm_chart(self) -> str: - return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" - - -class StreamsApp(KafkaApp, StreamsBootstrap): - """StreamsApp component that configures a streams-bootstrap app. +class StreamsApp(KafkaApp): + """StreamsApp component that configures a streams bootstrap app. :param app: Application-specific settings """ @@ -33,14 +18,6 @@ class StreamsApp(KafkaApp, StreamsBootstrap): description=describe_attr("app", __doc__), ) - @cached_property - def _cleaner(self) -> StreamsAppCleaner: - return StreamsAppCleaner( - config=self.config, - handlers=self.handlers, - **self.model_dump(), - ) - @override def add_input_topics(self, topics: list[str]) -> None: self.app.streams.add_input_topics(topics) @@ -74,12 +51,29 @@ def add_extra_output_topic(self, topic_name: str, role: str) -> None: def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.STREAMS_APP.value}" + @property + @override + def clean_up_helm_chart(self) -> str: + return f"{self.repo_config.repository_name}/{AppType.CLEANUP_STREAMS_APP.value}" + @override def reset(self, dry_run: bool) -> None: - self._cleaner.app.streams.delete_output = False - self._cleaner.clean(dry_run) + self.__run_streams_clean_up_job(dry_run, delete_output=False) @override def clean(self, dry_run: bool) -> None: - self._cleaner.app.streams.delete_output = True - self._cleaner.clean(dry_run) + self.__run_streams_clean_up_job(dry_run, delete_output=True) + + def __run_streams_clean_up_job(self, dry_run: bool, delete_output: bool) -> None: + """Run clean job for this Streams app. 
+ + :param dry_run: Whether to do a dry run of the command + :param delete_output: Whether to delete the output of the app that is being cleaned + """ + values = self.to_helm_values() + values["streams"]["deleteOutput"] = delete_output + self._run_clean_up_job( + values=values, + dry_run=dry_run, + retain_clean_jobs=self.config.retain_clean_jobs, + ) diff --git a/kpops/config.py b/kpops/config.py index f71444a43..9bb57e104 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -21,11 +21,11 @@ class TopicNameConfig(BaseSettings): """Configure the topic name variables you can use in the pipeline definition.""" default_output_topic_name: str = Field( - default="${pipeline_name}-${component.name}", + default="${pipeline.name}-${component.name}", description="Configures the value for the variable ${output_topic_name}", ) default_error_topic_name: str = Field( - default="${pipeline_name}-${component.name}-error", + default="${pipeline.name}-${component.name}-error", description="Configures the value for the variable ${error_topic_name}", ) diff --git a/kpops/pipeline.py b/kpops/pipeline.py index 45a39c232..26629686a 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -266,6 +266,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: ) substitution = generate_substitution( config.model_dump(mode="json"), + "config", existing_substitution=component_substitution, separator=".", ) @@ -295,9 +296,9 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: For example, for a given path ./data/v1/dev/pipeline.yaml the pipeline_name would be set to data-v1-dev. Then the sub environment variables are set: - pipeline_name_0 = data - pipeline_name_1 = v1 - pipeline_name_2 = dev + pipeline.name_0 = data + pipeline.name_1 = v1 + pipeline.name_2 = dev :param base_dir: Base directory to the pipeline files :param path: Path to pipeline.yaml file @@ -307,9 +308,9 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: msg = "The pipeline-base-dir should not equal the pipeline-path" raise ValueError(msg) pipeline_name = "-".join(path_without_file) - ENV["pipeline_name"] = pipeline_name + ENV["pipeline.name"] = pipeline_name for level, parent in enumerate(path_without_file): - ENV[f"pipeline_name_{level}"] = parent + ENV[f"pipeline.name_{level}"] = parent @staticmethod def set_environment_name(environment: str | None) -> None: diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index 4875c610b..f23e77422 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -32,7 +32,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -161,7 +161,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -213,7 +213,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. 
If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -270,7 +270,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" @@ -323,7 +323,7 @@ "type": "string" }, "prefix": { - "default": "${pipeline_name}-", + "default": "${pipeline.name}-", "description": "Pipeline prefix that will prefix every component name. If you wish to not have any prefix you can specify an empty string.", "title": "Prefix", "type": "string" diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index e52b2345a..5c9655ca3 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -18,11 +18,11 @@ def test_kpops_config_with_default_values(): assert default_config.defaults_filename_prefix == "defaults" assert ( default_config.topic_name_config.default_output_topic_name - == "${pipeline_name}-${component.name}" + == "${pipeline.name}-${component.name}" ) assert ( default_config.topic_name_config.default_error_topic_name - == "${pipeline_name}-${component.name}-error" + == "${pipeline.name}-${component.name}-error" ) assert default_config.schema_registry.enabled is False assert default_config.schema_registry.url == AnyHttpUrl("http://localhost:8081") diff --git a/tests/cli/test_registry.py b/tests/cli/test_registry.py index 473c340c4..bc6a7a2f9 100644 --- a/tests/cli/test_registry.py +++ b/tests/cli/test_registry.py @@ -36,7 +36,7 @@ def test_find_builtin_classes(): class_.__name__ for class_ in _find_classes("kpops.components", PipelineComponent) ] - assert len(components) == 10 + assert len(components) == 9 assert components == [ "HelmApp", "KafkaApp", @@ -47,7 +47,6 @@ def test_find_builtin_classes(): "PipelineComponent", "ProducerApp", "StreamsApp", - "StreamsBootstrap", ] diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index cca9fe88c..99a228cfe 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -13,12 +13,12 @@ def test_should_set_pipeline_name_with_default_base_dir(): PipelineGenerator.set_pipeline_name_env_vars(DEFAULT_BASE_DIR, PIPELINE_PATH) - assert ENV["pipeline_name"] == "some-random-path-for-testing" - assert ENV["pipeline_name_0"] == "some" - assert ENV["pipeline_name_1"] == "random" - assert ENV["pipeline_name_2"] == "path" - assert ENV["pipeline_name_3"] == "for" - assert ENV["pipeline_name_4"] == "testing" + assert ENV["pipeline.name"] == "some-random-path-for-testing" + assert ENV["pipeline.name_0"] == "some" + assert ENV["pipeline.name_1"] == "random" + assert ENV["pipeline.name_2"] == "path" + assert ENV["pipeline.name_3"] == "for" + assert ENV["pipeline.name_4"] == "testing" def test_should_set_pipeline_name_with_specific_relative_base_dir(): @@ -26,9 +26,9 @@ def test_should_set_pipeline_name_with_specific_relative_base_dir(): Path("./some/random/path"), PIPELINE_PATH ) - assert ENV["pipeline_name"] == "for-testing" - assert ENV["pipeline_name_0"] == "for" - assert ENV["pipeline_name_1"] == "testing" + assert ENV["pipeline.name"] == "for-testing" + assert ENV["pipeline.name_0"] == "for" + assert ENV["pipeline.name_1"] == "testing" def test_should_set_pipeline_name_with_specific_absolute_base_dir(): @@ -36,20 +36,20 @@ def test_should_set_pipeline_name_with_specific_absolute_base_dir(): 
Path("some/random/path"), PIPELINE_PATH ) - assert ENV["pipeline_name"] == "for-testing" - assert ENV["pipeline_name_0"] == "for" - assert ENV["pipeline_name_1"] == "testing" + assert ENV["pipeline.name"] == "for-testing" + assert ENV["pipeline.name_0"] == "for" + assert ENV["pipeline.name_1"] == "testing" def test_should_set_pipeline_name_with_absolute_base_dir(): PipelineGenerator.set_pipeline_name_env_vars(Path.cwd(), PIPELINE_PATH) - assert ENV["pipeline_name"] == "some-random-path-for-testing" - assert ENV["pipeline_name_0"] == "some" - assert ENV["pipeline_name_1"] == "random" - assert ENV["pipeline_name_2"] == "path" - assert ENV["pipeline_name_3"] == "for" - assert ENV["pipeline_name_4"] == "testing" + assert ENV["pipeline.name"] == "some-random-path-for-testing" + assert ENV["pipeline.name_0"] == "some" + assert ENV["pipeline.name_1"] == "random" + assert ENV["pipeline.name_2"] == "path" + assert ENV["pipeline.name_3"] == "for" + assert ENV["pipeline.name_4"] == "testing" def test_should_not_set_pipeline_name_with_the_same_base_dir(): diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index e43c9de41..f01f30d10 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -88,12 +88,12 @@ def test_should_lazy_load_helm_wrapper_and_not_repo_add( helm_app.deploy(False) helm_mock.upgrade_install.assert_called_once_with( - "${pipeline_name}-test-helm-app", + "${pipeline.name}-test-helm-app", "test/test-chart", False, "test-namespace", { - "nameOverride": "${pipeline_name}-test-helm-app", + "nameOverride": "${pipeline.name}-test-helm-app", "foo": "test-value", }, HelmUpgradeInstallFlags(), @@ -136,12 +136,12 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( RepoAuthFlags(), ), mocker.call.upgrade_install( - "${pipeline_name}-test-helm-app", + "${pipeline.name}-test-helm-app", "test/test-chart", False, "test-namespace", { - "nameOverride": "${pipeline_name}-test-helm-app", + "nameOverride": "${pipeline.name}-test-helm-app", "foo": "test-value", }, HelmUpgradeInstallFlags(version="3.4.5"), @@ -176,12 +176,12 @@ def helm_chart(self) -> str: helm_mock.add_repo.assert_not_called() helm_mock.upgrade_install.assert_called_once_with( - "${pipeline_name}-test-app-with-local-chart", + "${pipeline.name}-test-app-with-local-chart", "path/to/helm/charts/", False, "test-namespace", { - "nameOverride": "${pipeline_name}-test-app-with-local-chart", + "nameOverride": "${pipeline.name}-test-app-with-local-chart", "foo": "test-value", }, HelmUpgradeInstallFlags(), @@ -212,7 +212,7 @@ def test_should_call_helm_uninstall_when_destroying_helm_app( helm_app.destroy(True) helm_mock.uninstall.assert_called_once_with( - "test-namespace", "${pipeline_name}-test-helm-app", True + "test-namespace", "${pipeline.name}-test-helm-app", True ) log_info_mock.assert_called_once_with(magentaify(stdout)) diff --git a/tests/components/test_streams_bootstrap.py b/tests/components/test_kafka_app.py similarity index 72% rename from tests/components/test_streams_bootstrap.py rename to tests/components/test_kafka_app.py index 9a53ef319..21c9072f8 100644 --- a/tests/components/test_streams_bootstrap.py +++ b/tests/components/test_kafka_app.py @@ -11,13 +11,13 @@ HelmUpgradeInstallFlags, ) from kpops.component_handlers.helm_wrapper.utils import create_helm_release_name -from kpops.components.streams_bootstrap import StreamsBootstrap +from kpops.components.base_components import KafkaApp from kpops.config import KpopsConfig DEFAULTS_PATH = 
Path(__file__).parent / "resources" -class TestStreamsBootstrap: +class TestKafkaApp: @pytest.fixture() def config(self) -> KpopsConfig: return KpopsConfig( @@ -34,29 +34,36 @@ def handlers(self) -> ComponentHandlers: ) def test_default_configs(self, config: KpopsConfig, handlers: ComponentHandlers): - streams_bootstrap_helm_app = StreamsBootstrap( + kafka_app = KafkaApp( name="example-name", config=config, handlers=handlers, **{ "namespace": "test-namespace", - "app": {}, + "app": { + "streams": { + "outputTopic": "test", + "brokers": "fake-broker:9092", + }, + }, }, ) - assert streams_bootstrap_helm_app.repo_config == HelmRepoConfig( + assert kafka_app.app.streams.brokers == "fake-broker:9092" + + assert kafka_app.repo_config == HelmRepoConfig( repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/", ) - assert streams_bootstrap_helm_app.version == "2.9.0" - assert streams_bootstrap_helm_app.namespace == "test-namespace" + assert kafka_app.version == "2.9.0" + assert kafka_app.namespace == "test-namespace" - def test_should_deploy_streams_bootstrap_helm_app( + def test_should_deploy_kafka_app( self, config: KpopsConfig, handlers: ComponentHandlers, mocker: MockerFixture, ): - streams_bootstrap_helm_app = StreamsBootstrap( + kafka_app = KafkaApp( name="example-name", config=config, handlers=handlers, @@ -71,29 +78,27 @@ def test_should_deploy_streams_bootstrap_helm_app( "version": "1.2.3", }, ) - helm_upgrade_install = mocker.patch.object( - streams_bootstrap_helm_app.helm, "upgrade_install" - ) + helm_upgrade_install = mocker.patch.object(kafka_app.helm, "upgrade_install") print_helm_diff = mocker.patch.object( - streams_bootstrap_helm_app.dry_run_handler, "print_helm_diff" + kafka_app.dry_run_handler, "print_helm_diff" ) mocker.patch.object( - StreamsBootstrap, + KafkaApp, "helm_chart", return_value="test/test-chart", new_callable=mocker.PropertyMock, ) - streams_bootstrap_helm_app.deploy(dry_run=True) + kafka_app.deploy(dry_run=True) print_helm_diff.assert_called_once() helm_upgrade_install.assert_called_once_with( - create_helm_release_name("${pipeline_name}-example-name"), + create_helm_release_name("${pipeline.name}-example-name"), "test/test-chart", True, "test-namespace", { - "nameOverride": "${pipeline_name}-example-name", + "nameOverride": "${pipeline.name}-example-name", "streams": {"brokers": "fake-broker:9092", "outputTopic": "test"}, }, HelmUpgradeInstallFlags(version="1.2.3"), diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 6c0e0dcc3..d352a6d8a 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -13,9 +13,9 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" CONNECTOR_NAME = "test-connector-with-long-name-0123456789abcdefghijklmnop" -CONNECTOR_FULL_NAME = "${pipeline_name}-" + CONNECTOR_NAME +CONNECTOR_FULL_NAME = "${pipeline.name}-" + CONNECTOR_NAME CONNECTOR_CLEAN_FULL_NAME = CONNECTOR_FULL_NAME + "-clean" -CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline_name}-test-connector-with-lon-449ec-clean" +CONNECTOR_CLEAN_RELEASE_NAME = "${pipeline.name}-test-connector-with-lon-612f3-clean" CONNECTOR_CLASS = "com.bakdata.connect.TestConnector" diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index ebc2701fa..c949f9832 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -15,7 +15,7 @@ ) from kpops.config import KpopsConfig -HELM_RELEASE_NAME = 
create_helm_release_name("${pipeline_name}-test-kubernetes-app") +HELM_RELEASE_NAME = create_helm_release_name("${pipeline.name}-test-kubernetes-app") DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index b5de6b67b..2038c6909 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -18,7 +18,7 @@ DEFAULTS_PATH = Path(__file__).parent / "resources" PRODUCER_APP_NAME = "test-producer-app-with-long-name-0123456789abcdefghijklmnop" -PRODUCER_APP_FULL_NAME = "${pipeline_name}-" + PRODUCER_APP_NAME +PRODUCER_APP_FULL_NAME = "${pipeline.name}-" + PRODUCER_APP_NAME PRODUCER_APP_RELEASE_NAME = create_helm_release_name(PRODUCER_APP_FULL_NAME) PRODUCER_APP_CLEAN_FULL_NAME = PRODUCER_APP_FULL_NAME + "-clean" PRODUCER_APP_CLEAN_RELEASE_NAME = create_helm_release_name( @@ -168,13 +168,11 @@ def test_should_not_reset_producer_app( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - producer_app._cleaner.helm, "upgrade_install" - ) - mock_helm_uninstall = mocker.patch.object( - producer_app._cleaner.helm, "uninstall" + producer_app.helm, "upgrade_install" ) + mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") mock_helm_print_helm_diff = mocker.patch.object( - producer_app._cleaner.dry_run_handler, "print_helm_diff" + producer_app.dry_run_handler, "print_helm_diff" ) mock = mocker.MagicMock() @@ -184,55 +182,45 @@ def test_should_not_reset_producer_app( producer_app.clean(dry_run=True) - mock.assert_has_calls( - [ - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - True, - ), - ANY, # __bool__ - ANY, # __str__ - mocker.call.helm_upgrade_install( - PRODUCER_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/producer-app-cleanup-job", - True, - "test-namespace", - { - "nameOverride": PRODUCER_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - }, + assert mock.mock_calls == [ + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + True, + ), + mocker.call.helm_upgrade_install( + PRODUCER_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/producer-app-cleanup-job", + True, + "test-namespace", + { + "nameOverride": PRODUCER_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", }, - HelmUpgradeInstallFlags( - version="2.4.2", wait=True, wait_for_jobs=True - ), - ), - mocker.call.print_helm_diff( - ANY, - PRODUCER_APP_CLEAN_RELEASE_NAME, - logging.getLogger("HelmApp"), - ), - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - True, - ), - ANY, # __bool__ - ANY, # __str__ - ] - ) + }, + HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True), + ), + mocker.call.print_helm_diff( + ANY, + PRODUCER_APP_CLEAN_RELEASE_NAME, + logging.getLogger("KafkaApp"), + ), + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + True, + ), + ] def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( self, mocker: MockerFixture, producer_app: ProducerApp ): mock_helm_upgrade_install = mocker.patch.object( - producer_app._cleaner.helm, "upgrade_install" - ) - mock_helm_uninstall = mocker.patch.object( - producer_app._cleaner.helm, "uninstall" + producer_app.helm, "upgrade_install" ) + mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") 
mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -240,37 +228,29 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w producer_app.clean(dry_run=False) - mock.assert_has_calls( - [ - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - False, - ), - ANY, # __bool__ - ANY, # __str__ - mocker.call.helm_upgrade_install( - PRODUCER_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/producer-app-cleanup-job", - False, - "test-namespace", - { - "nameOverride": PRODUCER_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - }, + assert mock.mock_calls == [ + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + False, + ), + mocker.call.helm_upgrade_install( + PRODUCER_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/producer-app-cleanup-job", + False, + "test-namespace", + { + "nameOverride": PRODUCER_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", }, - HelmUpgradeInstallFlags( - version="2.4.2", wait=True, wait_for_jobs=True - ), - ), - mocker.call.helm_uninstall( - "test-namespace", - PRODUCER_APP_CLEAN_RELEASE_NAME, - False, - ), - ANY, # __bool__ - ANY, # __str__ - ] - ) + }, + HelmUpgradeInstallFlags(version="2.4.2", wait=True, wait_for_jobs=True), + ), + mocker.call.helm_uninstall( + "test-namespace", + PRODUCER_APP_CLEAN_RELEASE_NAME, + False, + ), + ] diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 1bdb8631d..2de276643 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -1,5 +1,5 @@ from pathlib import Path -from unittest.mock import ANY, MagicMock +from unittest.mock import MagicMock import pytest from pytest_mock import MockerFixture @@ -17,13 +17,12 @@ TopicConfig, ToSection, ) -from kpops.components.streams_bootstrap.streams.streams_app import StreamsAppCleaner from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" -STREAMS_APP_FULL_NAME = "${pipeline_name}-" + STREAMS_APP_NAME +STREAMS_APP_FULL_NAME = "${pipeline.name}-" + STREAMS_APP_NAME STREAMS_APP_RELEASE_NAME = create_helm_release_name(STREAMS_APP_FULL_NAME) STREAMS_APP_CLEAN_FULL_NAME = STREAMS_APP_FULL_NAME + "-clean" STREAMS_APP_CLEAN_RELEASE_NAME = create_helm_release_name( @@ -371,11 +370,10 @@ def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): def test_reset_when_dry_run_is_false( self, streams_app: StreamsApp, mocker: MockerFixture ): - cleaner = streams_app._cleaner - assert isinstance(cleaner, StreamsAppCleaner) - - mock_helm_upgrade_install = mocker.patch.object(cleaner.helm, "upgrade_install") - mock_helm_uninstall = mocker.patch.object(cleaner.helm, "uninstall") + mock_helm_upgrade_install = mocker.patch.object( + streams_app.helm, "upgrade_install" + ) + mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -384,41 +382,33 @@ def test_reset_when_dry_run_is_false( dry_run = False streams_app.reset(dry_run=dry_run) - mock.assert_has_calls( - [ - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ANY, # __bool__ # FIXME: why is this in the call stack? 
- ANY, # __str__ - mocker.call.helm_upgrade_install( - STREAMS_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/streams-app-cleanup-job", - dry_run, - "test-namespace", - { - "nameOverride": STREAMS_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - "deleteOutput": False, - }, + assert mock.mock_calls == [ + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + mocker.call.helm_upgrade_install( + STREAMS_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/streams-app-cleanup-job", + dry_run, + "test-namespace", + { + "nameOverride": STREAMS_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + "deleteOutput": False, }, - HelmUpgradeInstallFlags( - version="2.9.0", wait=True, wait_for_jobs=True - ), - ), - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ANY, # __bool__ - ANY, # __str__ - ] - ) + }, + HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True), + ), + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ] def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( self, @@ -426,11 +416,9 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( mocker: MockerFixture, ): mock_helm_upgrade_install = mocker.patch.object( - streams_app._cleaner.helm, "upgrade_install" - ) - mock_helm_uninstall = mocker.patch.object( - streams_app._cleaner.helm, "uninstall" + streams_app.helm, "upgrade_install" ) + mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") mock = mocker.MagicMock() mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") @@ -439,38 +427,30 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( dry_run = False streams_app.clean(dry_run=dry_run) - mock.assert_has_calls( - [ - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ANY, # __bool__ - ANY, # __str__ - mocker.call.helm_upgrade_install( - STREAMS_APP_CLEAN_RELEASE_NAME, - "bakdata-streams-bootstrap/streams-app-cleanup-job", - dry_run, - "test-namespace", - { - "nameOverride": STREAMS_APP_FULL_NAME, - "streams": { - "brokers": "fake-broker:9092", - "outputTopic": "${output_topic_name}", - "deleteOutput": True, - }, + assert mock.mock_calls == [ + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + mocker.call.helm_upgrade_install( + STREAMS_APP_CLEAN_RELEASE_NAME, + "bakdata-streams-bootstrap/streams-app-cleanup-job", + dry_run, + "test-namespace", + { + "nameOverride": STREAMS_APP_FULL_NAME, + "streams": { + "brokers": "fake-broker:9092", + "outputTopic": "${output_topic_name}", + "deleteOutput": True, }, - HelmUpgradeInstallFlags( - version="2.9.0", wait=True, wait_for_jobs=True - ), - ), - mocker.call.helm_uninstall( - "test-namespace", - STREAMS_APP_CLEAN_RELEASE_NAME, - dry_run, - ), - ANY, # __bool__ - ANY, # __str__ - ] - ) + }, + HelmUpgradeInstallFlags(version="2.9.0", wait=True, wait_for_jobs=True), + ), + mocker.call.helm_uninstall( + "test-namespace", + STREAMS_APP_CLEAN_RELEASE_NAME, + dry_run, + ), + ] diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml index b78293627..101e3e175 100644 --- a/tests/pipeline/resources/defaults.yaml +++ b/tests/pipeline/resources/defaults.yaml @@ -5,8 +5,8 @@ 
kubernetes-app: kafka-app: app: streams: - brokers: "${kafka_brokers}" - schema_registry_url: "${schema_registry.url}" + brokers: "${config.kafka_brokers}" + schema_registry_url: "${config.schema_registry.url}" version: "2.4.2" producer-app: {} # inherits from kafka-app diff --git a/tests/pipeline/resources/no-topics-defaults/defaults.yaml b/tests/pipeline/resources/no-topics-defaults/defaults.yaml index ea3dd7d9e..7820898a3 100644 --- a/tests/pipeline/resources/no-topics-defaults/defaults.yaml +++ b/tests/pipeline/resources/no-topics-defaults/defaults.yaml @@ -1,8 +1,8 @@ kafka-app: app: streams: - brokers: "${kafka_brokers}" - schemaRegistryUrl: "${schema_registry.url}" + brokers: "${config.kafka_brokers}" + schemaRegistryUrl: "${config.schema_registry.url}" producer-app: to: @@ -14,7 +14,7 @@ producer-app: streams-app: app: labels: - pipeline: ${pipeline_name} + pipeline: ${pipeline.name} to: topics: ${error_topic_name}: diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml index b5954da19..ff053e990 100644 --- a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml +++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml @@ -7,5 +7,5 @@ kubernetes-app: kafka-app: app: streams: - brokers: ${kafka_brokers} - schemaRegistryUrl: ${schema_registry.url} + brokers: ${config.kafka_brokers} + schemaRegistryUrl: ${config.schema_registry.url} diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml index f9505c0ab..b8aeb6137 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml @@ -4,8 +4,8 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${kafka_brokers}" - schemaRegistryUrl: "${schema_registry.url}" + brokers: "${config.kafka_brokers}" + schemaRegistryUrl: "${config.schema_registry.url}" producer-app: {} # inherits from kafka-app diff --git a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml index 3b9e93eb7..cf3b4831b 100644 --- a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml @@ -5,7 +5,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${kafka_brokers}" + brokers: "${config.kafka_brokers}" schema_registry_url: "${schema_registry_url}" version: "2.4.2" diff --git a/tests/pipeline/resources/read-from-component/pipeline.yaml b/tests/pipeline/resources/read-from-component/pipeline.yaml index 902e8edd9..cc6bf72c7 100644 --- a/tests/pipeline/resources/read-from-component/pipeline.yaml +++ b/tests/pipeline/resources/read-from-component/pipeline.yaml @@ -44,7 +44,7 @@ name: consumer3 from: topics: - ${pipeline_name}-producer1: + ${pipeline.name}-producer1: type: input components: producer2: diff --git a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml index 55754eba1..c895105b7 100644 --- a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml +++ b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml @@ -4,7 +4,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${kafka_brokers}" + brokers: "${config.kafka_brokers}" schema_registry_url: "${schema_registry_url}" version: 
"2.4.2" diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index 20f781545..d45882ea1 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ -5,18 +5,15 @@ Schema, SchemaProvider, ) -from kpops.components import ( - KafkaSinkConnector, - PipelineComponent, - ProducerApp, - StreamsApp, -) +from kpops.components import KafkaSinkConnector +from kpops.components.base_components import PipelineComponent from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.components.base_components.models.to_section import ( OutputTopicTypes, TopicConfig, ToSection, ) +from kpops.components.streams_bootstrap import ProducerApp, StreamsApp class ScheduledProducer(ProducerApp): diff --git a/tests/pipeline/test_components_without_schema_handler/components.py b/tests/pipeline/test_components_without_schema_handler/components.py index 686aac26c..d5684178c 100644 --- a/tests/pipeline/test_components_without_schema_handler/components.py +++ b/tests/pipeline/test_components_without_schema_handler/components.py @@ -1,13 +1,10 @@ from typing_extensions import override from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig -from kpops.components import ( - KafkaSinkConnector, - PipelineComponent, - ProducerApp, - StreamsApp, -) +from kpops.components import KafkaSinkConnector +from kpops.components.base_components import PipelineComponent from kpops.components.base_components.models.to_section import OutputTopicTypes +from kpops.components.streams_bootstrap import ProducerApp, StreamsApp class ScheduledProducer(ProducerApp): From ef9f3fdcf3576caf790da79c04855c2afd196ea0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 11 Jan 2024 18:09:01 +0200 Subject: [PATCH 36/36] run pre-commit --- .../dependencies/kpops_structure.yaml | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 668f05214..784d9ccc4 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -87,8 +87,10 @@ kpops_components_inheritance_ref: - pipeline-component kafka-app: bases: - - pipeline-component + - helm-app parents: + - helm-app + - kubernetes-app - pipeline-component kafka-connector: bases: @@ -118,27 +120,16 @@ kpops_components_inheritance_ref: producer-app: bases: - kafka-app - - streams-bootstrap parents: - kafka-app - - streams-bootstrap - helm-app - kubernetes-app - pipeline-component streams-app: bases: - kafka-app - - streams-bootstrap parents: - kafka-app - - streams-bootstrap - - helm-app - - kubernetes-app - - pipeline-component - streams-bootstrap: - bases: - - helm-app - parents: - helm-app - kubernetes-app - pipeline-component