From 224ec2c47064f8931815d9b295a99f08ddb13efc Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 6 Sep 2023 13:46:57 +0200 Subject: [PATCH 01/20] Make Kafka rest proxy & connect hosts default and improve schema registry config --- config.yaml | 3 -- .../resources/variables/config_env_vars.env | 7 +-- .../resources/variables/config_env_vars.md | 17 +++--- docs/docs/schema/config.json | 51 ++++++++++++++---- .../bakdata/atm-fraud-detection/config.yaml | 4 +- kpops/cli/pipeline_config.py | 30 +++++++---- .../kafka_connect/connect_wrapper.py | 8 +-- .../schema_handler/schema_handler.py | 13 +++-- .../component_handlers/topic/proxy_wrapper.py | 5 -- tests/cli/test_handlers.py | 8 ++- .../kafka_connect/test_connect_wrapper.py | 52 +++++++------------ .../schema_handler/test_schema_handler.py | 21 ++++---- .../topic/test_proxy_wrapper.py | 10 ---- .../resources/custom-config/config.yaml | 4 +- 14 files changed, 117 insertions(+), 116 deletions(-) diff --git a/config.yaml b/config.yaml index 46d0cf8b3..ba8b885cb 100644 --- a/config.yaml +++ b/config.yaml @@ -1,5 +1,2 @@ environment: development brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" -kafka_connect_host: "http://localhost:8083" -kafka_rest_host: "http://localhost:8082" -schema_registry_url: "http://localhost:8081" diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 308fb6334..aed411bb8 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -12,15 +12,12 @@ KPOPS_ENVIRONMENT # No default value, required # brokers # The comma separated Kafka brokers address. KPOPS_KAFKA_BROKERS # No default value, required -# schema_registry_url -# Address of the Schema Registry. -KPOPS_SCHEMA_REGISTRY_URL # No default value, not required # kafka_rest_host # Address of the Kafka REST Proxy. 
-KPOPS_REST_PROXY_HOST # No default value, not required +KPOPS_REST_PROXY_HOST=http://localhost:8082 # kafka_connect_host # Address of Kafka Connect. -KPOPS_CONNECT_HOST # No default value, not required +KPOPS_CONNECT_HOST=http://localhost:8083 # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 889985257..5e1d5e06e 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,12 +1,11 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name |Default Value|Required| Description | Setting name | -|-------------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------| -|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers | -|KPOPS_SCHEMA_REGISTRY_URL| |False |Address of the Schema Registry. |schema_registry_url| -|KPOPS_REST_PROXY_HOST | |False |Address of the Kafka REST Proxy. |kafka_rest_host | -|KPOPS_CONNECT_HOST | |False |Address of Kafka Connect. |kafka_connect_host | -|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | -|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | +| Name | Default Value |Required| Description | Setting name | +|-----------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------| +|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | +|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers | +|KPOPS_REST_PROXY_HOST |http://localhost:8082|False |Address of the Kafka REST Proxy. |kafka_rest_host | +|KPOPS_CONNECT_HOST |http://localhost:8083|False |Address of Kafka Connect. |kafka_connect_host| +|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | +|KPOPS_RETAIN_CLEAN_JOBS|False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index b77b4e850..1e9a13a3b 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -127,22 +127,22 @@ "title": "Helm Diff Config" }, "kafka_connect_host": { + "default": "http://localhost:8083", "description": "Address of Kafka Connect.", "env": "KPOPS_CONNECT_HOST", "env_names": [ "kpops_connect_host" ], - "example": "http://localhost:8083", "title": "Kafka Connect Host", "type": "string" }, "kafka_rest_host": { + "default": "http://localhost:8082", "description": "Address of the Kafka REST Proxy.", "env": "KPOPS_REST_PROXY_HOST", "env_names": [ "kpops_rest_proxy_host" ], - "example": "http://localhost:8082", "title": "Kafka Rest Host", "type": "string" }, @@ -156,15 +156,21 @@ "title": "Retain Clean Jobs", "type": "boolean" }, - "schema_registry_url": { - "description": "Address of the Schema Registry.", - "env": "KPOPS_SCHEMA_REGISTRY_URL", + "schema_registry": { + "allOf": [ + { + "$ref": "#/definitions/SchemaRegistryConfig" + } + ], + "default": { + "enabled": false, + "url": "http://localhost:8081" + }, + "description": "Configure the Schema Registry.", "env_names": [ - "kpops_schema_registry_url" + "schema_registry" ], - "example": "http://localhost:8081", - "title": "Schema Registry Url", - "type": "string" + "title": "Schema Registry" }, "timeout": { "default": 300, @@ -200,6 +206,33 @@ "title": "PipelineConfig", "type": "object" }, + "SchemaRegistryConfig": { + "additionalProperties": false, + "description": "Base class for settings, allowing values to be overridden by environment variables.\n\nThis is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose),\nHeroku and any 12 factor app design.", + "properties": { + "enabled": { + "default": false, + "description": "If the Schema Registry handler should be initialized.", + "env_names": [ + "enabled" + ], + "title": "Enabled", + "type": 
"boolean" + }, + "url": { + "default": "http://localhost:8081", + "description": "Address of the Schema Registry.", + "env": "KPOPS_SCHEMA_REGISTRY_URL", + "env_names": [ + "kpops_schema_registry_url" + ], + "title": "Url", + "type": "string" + } + }, + "title": "SchemaRegistryConfig", + "type": "object" + }, "TopicNameConfig": { "additionalProperties": false, "description": "Configures topic names.", diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index e3742ded9..7297da066 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -6,7 +6,9 @@ topic_name_config: brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" -schema_registry_url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081" +schema_registry: + enabled: true + url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081" kafka_rest_host: "http://localhost:8082" diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index f03f419aa..b1216033a 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -22,6 +22,18 @@ class TopicNameConfig(BaseSettings): ) +class SchemaRegistryConfig(BaseSettings): + enabled: bool = Field( + default=False, + description="If the Schema Registry handler should be initialized.", + ) + url: str = Field( + default="http://localhost:8081", + env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", + description="Address of the Schema Registry.", + ) + + class PipelineConfig(BaseSettings): """Pipeline configuration unrelated to the components.""" @@ -52,22 +64,18 @@ class PipelineConfig(BaseSettings): default=TopicNameConfig(), description="Configure the topic name variables you can use in the pipeline definition.", ) - schema_registry_url: str | None = Field( - default=None, - example="http://localhost:8081", - env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", - description="Address of the Schema 
Registry.", + schema_registry: SchemaRegistryConfig = Field( + default=SchemaRegistryConfig(), + description="Configure the Schema Registry.", ) - kafka_rest_host: str | None = Field( - default=None, + kafka_rest_host: str = Field( + default="http://localhost:8082", env=f"{ENV_PREFIX}REST_PROXY_HOST", - example="http://localhost:8082", description="Address of the Kafka REST Proxy.", ) - kafka_connect_host: str | None = Field( - default=None, + kafka_connect_host: str = Field( + default="http://localhost:8083", env=f"{ENV_PREFIX}CONNECT_HOST", - example="http://localhost:8083", description="Address of Kafka Connect.", ) timeout: int = Field( diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 9a3dd307e..3c5605d62 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -24,13 +24,7 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, host: str | None): - if not host: - error_message = ( - "The Kafka Connect host is not set. Please set the host in the config." 
- ) - log.error(error_message) - raise RuntimeError(error_message) + def __init__(self, host: str): self._host: str = host @property diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index a053ccc62..9ee2d5fe0 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -44,13 +44,12 @@ def schema_provider(self) -> SchemaProvider: def load_schema_handler( cls, components_module: str | None, config: PipelineConfig ) -> SchemaHandler | None: - if not config.schema_registry_url: - return None - - return cls( - url=config.schema_registry_url, - components_module=components_module, - ) + if config.schema_registry.enabled: + return cls( + url=config.schema_registry.url, + components_module=components_module, + ) + return None def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: for topic_name, config in to_section.topics.items(): diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index af7914379..6464b094c 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -26,11 +26,6 @@ class ProxyWrapper: """ def __init__(self, pipeline_config: PipelineConfig) -> None: - if not pipeline_config.kafka_rest_host: - raise ValueError( - "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." 
- ) - self._host = pipeline_config.kafka_rest_host @cached_property diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index 509c5e0cc..b320d7875 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -3,7 +3,7 @@ from pytest_mock import MockerFixture from kpops.cli.main import setup_handlers -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.pipeline_config import PipelineConfig, SchemaRegistryConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( KafkaConnectHandler, @@ -19,8 +19,7 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): config = PipelineConfig( defaults_path=Path("fake"), environment="development", - kafka_rest_host="https://testhost:8082", - schema_registry_url=None, + schema_registry=SchemaRegistryConfig(), ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") connector_handler = KafkaConnectHandler.from_pipeline_config(pipeline_config=config) @@ -54,8 +53,7 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): config = PipelineConfig( defaults_path=Path("fake"), environment="development", - kafka_rest_host="https://testhost:8082", - schema_registry_url="https://testhost:8081", + schema_registry=SchemaRegistryConfig(enabled=True), ) schema_handler_mock = mocker.patch("kpops.cli.main.SchemaHandler") schema_handler = SchemaHandler.load_schema_handler(MODULE, config) diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 3db9c090f..21b2cc2cf 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -20,7 +20,7 @@ HEADERS = {"Accept": "application/json", "Content-Type": "application/json"} -HOST = "http://localhost:8083" +DEFAULT_HOST = "http://localhost:8083" 
DEFAULTS_PATH = Path(__file__).parent / "resources" @@ -30,7 +30,6 @@ def setup(self): config = PipelineConfig( defaults_path=DEFAULTS_PATH, environment="development", - kafka_connect_host=HOST, ) self.connect_wrapper = ConnectWrapper(host=config.kafka_connect_host) @@ -43,19 +42,6 @@ def connector_config(self) -> KafkaConnectorConfig: } ) - def test_should_through_exception_when_host_is_not_set(self): - config = PipelineConfig( - defaults_path=DEFAULTS_PATH, - environment="development", - kafka_connect_host=None, - ) - with pytest.raises(RuntimeError) as run_time_error: - ConnectWrapper(host=config.kafka_connect_host) - assert ( - str(run_time_error.value) - == "The Kafka Connect host is not set. Please set the host in the config." - ) - @patch("httpx.post") def test_should_create_post_requests_for_given_connector_configuration( self, mock_post: MagicMock @@ -75,7 +61,7 @@ def test_should_create_post_requests_for_given_connector_configuration( self.connect_wrapper.create_connector(KafkaConnectorConfig(**configs)) mock_post.assert_called_with( - url=f"{HOST}/connectors", + url=f"{DEFAULT_HOST}/connectors", headers=HEADERS, json={ "name": "test-connector", @@ -107,7 +93,7 @@ def test_should_return_correct_response_when_connector_created( } httpx_mock.add_response( method="POST", - url=f"{HOST}/connectors", + url=f"{DEFAULT_HOST}/connectors", headers=HEADERS, json=actual_response, status_code=201, @@ -124,7 +110,7 @@ def test_should_raise_connector_exists_exception_when_connector_exists( ): httpx_mock.add_response( method="POST", - url=f"{HOST}/connectors", + url=f"{DEFAULT_HOST}/connectors", json={}, status_code=409, ) @@ -145,7 +131,7 @@ def test_should_create_correct_get_connector_request(self, mock_get: MagicMock): self.connect_wrapper.get_connector(connector_name) mock_get.assert_called_with( - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers={"Accept": "application/json", "Content-Type": 
"application/json"}, ) @@ -176,7 +162,7 @@ def test_should_return_correct_response_when_getting_connector( } httpx_mock.add_response( method="GET", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json=actual_response, status_code=200, @@ -193,7 +179,7 @@ def test_should_raise_connector_not_found_when_getting_connector( httpx_mock.add_response( method="GET", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=404, @@ -213,7 +199,7 @@ def test_should_raise_rebalance_in_progress_when_getting_connector( httpx_mock.add_response( method="GET", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=409, @@ -247,7 +233,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc ) mock_put.assert_called_with( - url=f"{HOST}/connectors/{connector_name}/config", + url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers={"Accept": "application/json", "Content-Type": "application/json"}, json=KafkaConnectorConfig(**configs).dict(), ) @@ -281,7 +267,7 @@ def test_should_return_correct_response_when_update_connector( } httpx_mock.add_response( method="PUT", - url=f"{HOST}/connectors/{connector_name}/config", + url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers=HEADERS, json=actual_response, status_code=200, @@ -323,7 +309,7 @@ def test_should_return_correct_response_when_update_connector_created( } httpx_mock.add_response( method="PUT", - url=f"{HOST}/connectors/{connector_name}/config", + url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers=HEADERS, json=actual_response, status_code=201, @@ -345,7 +331,7 @@ def test_should_raise_connector_exists_exception_when_update_connector( httpx_mock.add_response( method="PUT", - url=f"{HOST}/connectors/{connector_name}/config", + 
url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers=HEADERS, json={}, status_code=409, @@ -369,7 +355,7 @@ def test_should_create_correct_delete_connector_request( self.connect_wrapper.delete_connector(connector_name) mock_delete.assert_called_with( - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, ) @@ -399,7 +385,7 @@ def test_should_return_correct_response_when_deleting_connector( } httpx_mock.add_response( method="DELETE", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json=actual_response, status_code=204, @@ -416,7 +402,7 @@ def test_should_raise_connector_not_found_when_deleting_connector( httpx_mock.add_response( method="DELETE", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=404, @@ -436,7 +422,7 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector( httpx_mock.add_response( method="DELETE", - url=f"{HOST}/connectors/{connector_name}", + url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, json={}, status_code=409, @@ -467,7 +453,7 @@ def test_should_create_correct_validate_connector_config_request( self.connect_wrapper.validate_connector_config(connector_config) mock_put.assert_called_with( - url=f"{HOST}/connector-plugins/FileStreamSinkConnector/config/validate", + url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, json=connector_config.dict(), ) @@ -489,7 +475,7 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( ) mock_put.assert_called_with( - url=f"{HOST}/connector-plugins/{connector_name}/config/validate", + url=f"{DEFAULT_HOST}/connector-plugins/{connector_name}/config/validate", headers={"Accept": "application/json", "Content-Type": 
"application/json"}, json=KafkaConnectorConfig(**{"name": connector_name, **configs}).dict(), ) @@ -501,7 +487,7 @@ def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): actual_response = json.load(f) httpx_mock.add_response( method="PUT", - url=f"{HOST}/connector-plugins/FileStreamSinkConnector/config/validate", + url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", headers=HEADERS, json=actual_response, status_code=200, diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index ccea021c6..5c88e2850 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -9,7 +9,7 @@ from schema_registry.client.schema import AvroSchema from schema_registry.client.utils import SchemaVersion -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.pipeline_config import PipelineConfig, SchemaRegistryConfig from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.schema_handler.schema_provider import SchemaProvider from kpops.components.base_components.models import TopicName @@ -73,27 +73,28 @@ def test_load_schema_handler(): config_enable = PipelineConfig( defaults_path=Path("fake"), environment="development", - schema_registry_url="http://localhost:8081", + schema_registry=SchemaRegistryConfig(enabled=True), + ) + + assert isinstance( + SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_enable), + SchemaHandler, ) config_disable = config_enable.copy() - config_disable.schema_registry_url = None + config_disable.schema_registry = SchemaRegistryConfig(enabled=False) + assert ( SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_disable) is None ) - assert isinstance( - SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_enable), - 
SchemaHandler, - ) - def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): config_enable = PipelineConfig( defaults_path=Path("fake"), environment="development", - schema_registry_url="http://localhost:8081", + schema_registry=SchemaRegistryConfig(enabled=True), ) schema_handler = SchemaHandler.load_schema_handler( TEST_SCHEMA_PROVIDER_MODULE, config_enable @@ -133,7 +134,7 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ config_enable = PipelineConfig( defaults_path=Path("fake"), environment="development", - schema_registry_url="http://localhost:8081", + schema_registry=SchemaRegistryConfig(enabled=True), ) with pytest.raises(ValueError): diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index 7b587ecb3..00630dbd5 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -50,16 +50,6 @@ def setup(self, httpx_mock: HTTPXMock): assert self.proxy_wrapper.host == HOST assert self.proxy_wrapper.cluster_id == "cluster-1" - def test_should_raise_exception_when_host_is_not_set(self): - config = PipelineConfig(defaults_path=DEFAULTS_PATH, environment="development") - config.kafka_rest_host = None - with pytest.raises(ValueError) as exception: - ProxyWrapper(pipeline_config=config) - assert ( - str(exception.value) - == "The Kafka REST Proxy host is not set. Please set the host in the config.yaml using the kafka_rest_host property or set the environemt variable KPOPS_REST_PROXY_HOST." 
- ) - @patch("httpx.post") def test_should_create_topic_with_all_topic_configuration( self, mock_post: MagicMock diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index 2707ee0fa..30b369de8 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ b/tests/pipeline/resources/custom-config/config.yaml @@ -6,6 +6,8 @@ topic_name_config: brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" kafka_connect_host: "http://localhost:8083" kafka_rest_host: "http://localhost:8082" -schema_registry_url: "http://localhost:8081" +schema_registry: + enabled: true + url: "http://localhost:8081" helm_config: api_version: "2.1.1" From 182d981233d33ada76d9b7d6c4a9dbcf30f6f2f2 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 6 Sep 2023 13:47:18 +0200 Subject: [PATCH 02/20] add tests --- tests/cli/test_pipeline_config.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 tests/cli/test_pipeline_config.py diff --git a/tests/cli/test_pipeline_config.py b/tests/cli/test_pipeline_config.py new file mode 100644 index 000000000..9b3b206f4 --- /dev/null +++ b/tests/cli/test_pipeline_config.py @@ -0,0 +1,31 @@ +from pathlib import Path + +from kpops.cli.pipeline_config import PipelineConfig + + +def test_pipeline_config_with_default_values(): + default_config = PipelineConfig( + environment="development", brokers="http://broker:9092" + ) + + assert default_config.defaults_path == Path(".") + assert default_config.defaults_filename_prefix == "defaults" + assert ( + default_config.topic_name_config.default_output_topic_name + == "${pipeline_name}-${component_name}" + ) + assert ( + default_config.topic_name_config.default_error_topic_name + == "${pipeline_name}-${component_name}-error" + ) + assert default_config.schema_registry.enabled is False + assert default_config.schema_registry.url == "http://localhost:8081" + assert default_config.kafka_rest_host == 
"http://localhost:8082" + assert default_config.kafka_connect_host == "http://localhost:8083" + assert default_config.timeout == 300 + assert default_config.create_namespace is False + assert default_config.helm_config.context is None + assert default_config.helm_config.debug is False + assert default_config.helm_config.api_version is None + assert default_config.helm_diff_config.ignore == set() + assert default_config.retain_clean_jobs is False From 38b11395eedb2fb80bb4d84ac5e356537544bed0 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 6 Sep 2023 13:57:14 +0200 Subject: [PATCH 03/20] add migration guide --- docs/docs/user/migration-guide/v1-v2.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/docs/user/migration-guide/v1-v2.md b/docs/docs/user/migration-guide/v1-v2.md index c5936cbe5..07dfb338b 100644 --- a/docs/docs/user/migration-guide/v1-v2.md +++ b/docs/docs/user/migration-guide/v1-v2.md @@ -1,5 +1,20 @@ # Migrate from V1 to V2 +## [Make Kafka rest proxy & connect hosts default and improve schema registry config](https://github.com/bakdata/kpops/pull/354) + +The `schema_registry_url` is replaced with `schema_registry.enabled` (default `false`) and `schema_registry.url` (default `http://localhost:8081`). Your `config.yaml` will change to: + +```diff + environment: development + brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +- kafka_connect_host: "http://localhost:8083" +- kafka_rest_host: "http://localhost:8082" +- schema_registry_url: "http://my.custom.sr.url:8081" ++ schema_registry: ++ enabled: true ++ url: "http://my.custom.sr.url:8081" +``` + ## [Derive component type automatically from class name](https://github.com/bakdata/kpops/pull/309) KPOps automatically infers the component `type` from the class name. Therefore, the `type` and `schema_type` attributes can be removed from your custom components. By convention the `type` would be the lower, and kebab cased name of the class. 
From 59b4140147756e47a01d28ad899d5e81edb298cc Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 6 Sep 2023 14:51:01 +0200 Subject: [PATCH 04/20] add url validation --- docs/docs/schema/config.json | 9 ++++++ kpops/cli/pipeline_config.py | 16 ++++++---- .../kafka_connect/connect_wrapper.py | 3 +- tests/cli/test_pipeline_config.py | 31 +++++++++++++++++- .../topic/test_proxy_wrapper.py | 32 +++++++++---------- .../kafka-connect-sink-config/config.yaml | 4 +-- 6 files changed, 67 insertions(+), 28 deletions(-) diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 1e9a13a3b..e65456b23 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -133,6 +133,9 @@ "env_names": [ "kpops_connect_host" ], + "format": "uri", + "maxLength": 65536, + "minLength": 1, "title": "Kafka Connect Host", "type": "string" }, @@ -143,6 +146,9 @@ "env_names": [ "kpops_rest_proxy_host" ], + "format": "uri", + "maxLength": 65536, + "minLength": 1, "title": "Kafka Rest Host", "type": "string" }, @@ -226,6 +232,9 @@ "env_names": [ "kpops_schema_registry_url" ], + "format": "uri", + "maxLength": 65536, + "minLength": 1, "title": "Url", "type": "string" } diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index b1216033a..b5df9759b 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -1,6 +1,6 @@ from pathlib import Path -from pydantic import BaseConfig, BaseSettings, Field +from pydantic import AnyHttpUrl, BaseConfig, BaseSettings, Field, parse_obj_as from pydantic.env_settings import SettingsSourceCallable from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig @@ -27,8 +27,8 @@ class SchemaRegistryConfig(BaseSettings): default=False, description="If the Schema Registry handler should be initialized.", ) - url: str = Field( - default="http://localhost:8081", + url: AnyHttpUrl = Field( + default=parse_obj_as(AnyHttpUrl, "http://localhost:8081"), 
env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", description="Address of the Schema Registry.", ) @@ -68,13 +68,15 @@ class PipelineConfig(BaseSettings): default=SchemaRegistryConfig(), description="Configure the Schema Registry.", ) - kafka_rest_host: str = Field( - default="http://localhost:8082", + kafka_rest_host: AnyHttpUrl = Field( + # For validating URLs use parse_obj_as + # https://github.com/pydantic/pydantic/issues/1106 + default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), env=f"{ENV_PREFIX}REST_PROXY_HOST", description="Address of the Kafka REST Proxy.", ) - kafka_connect_host: str = Field( - default="http://localhost:8083", + kafka_connect_host: AnyHttpUrl = Field( + default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), env=f"{ENV_PREFIX}CONNECT_HOST", description="Address of Kafka Connect.", ) diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 3c5605d62..1a9b1e3c7 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -3,6 +3,7 @@ from time import sleep import httpx +from pydantic import AnyHttpUrl from kpops.component_handlers.kafka_connect.exception import ( ConnectorNotFoundException, @@ -24,7 +25,7 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, host: str): + def __init__(self, host: AnyHttpUrl): self._host: str = host @property diff --git a/tests/cli/test_pipeline_config.py b/tests/cli/test_pipeline_config.py index 9b3b206f4..cd976a103 100644 --- a/tests/cli/test_pipeline_config.py +++ b/tests/cli/test_pipeline_config.py @@ -1,6 +1,9 @@ from pathlib import Path -from kpops.cli.pipeline_config import PipelineConfig +import pytest +from pydantic import AnyHttpUrl, ValidationError, parse_obj_as + +from kpops.cli.pipeline_config import PipelineConfig, SchemaRegistryConfig def test_pipeline_config_with_default_values(): @@ -29,3 +32,29 @@ def 
test_pipeline_config_with_default_values(): assert default_config.helm_config.api_version is None assert default_config.helm_diff_config.ignore == set() assert default_config.retain_clean_jobs is False + + +def test_pipeline_config_with_different_invalid_urls(): + with pytest.raises(ValidationError): + PipelineConfig( + environment="development", + brokers="http://broker:9092", + kafka_connect_host=parse_obj_as(AnyHttpUrl, "in-valid-host"), + ) + + with pytest.raises(ValidationError): + PipelineConfig( + environment="development", + brokers="http://broker:9092", + kafka_rest_host=parse_obj_as(AnyHttpUrl, "in-valid-host"), + ) + + with pytest.raises(ValidationError): + PipelineConfig( + environment="development", + brokers="http://broker:9092", + schema_registry=SchemaRegistryConfig( + enabled=True, + url=parse_obj_as(AnyHttpUrl, "in-valid-host"), + ), + ) diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index 00630dbd5..fb04ca4dd 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -16,7 +16,7 @@ from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper HEADERS = {"Content-Type": "application/json"} -HOST = "http://localhost:8082" +DEFAULT_HOST = "http://localhost:8082" DEFAULTS_PATH = Path(__file__).parent.parent / "resources" @@ -31,9 +31,7 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def setup(self, httpx_mock: HTTPXMock): - config = PipelineConfig( - defaults_path=DEFAULTS_PATH, environment="development", kafka_rest_host=HOST - ) + config = PipelineConfig(defaults_path=DEFAULTS_PATH, environment="development") self.proxy_wrapper = ProxyWrapper(pipeline_config=config) with open( @@ -43,11 +41,11 @@ def setup(self, httpx_mock: HTTPXMock): httpx_mock.add_response( method="GET", - url=f"{HOST}/v3/clusters", + url=f"{DEFAULT_HOST}/v3/clusters", 
json=cluster_response, status_code=200, ) - assert self.proxy_wrapper.host == HOST + assert self.proxy_wrapper.host == DEFAULT_HOST assert self.proxy_wrapper.cluster_id == "cluster-1" @patch("httpx.post") @@ -68,7 +66,7 @@ def test_should_create_topic_with_all_topic_configuration( self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) mock_post.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", headers=HEADERS, json=topic_spec, ) @@ -81,7 +79,7 @@ def test_should_create_topic_with_no_configuration(self, mock_post: MagicMock): self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) mock_post.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", headers=HEADERS, json=topic_spec, ) @@ -94,7 +92,7 @@ def test_should_call_get_topic(self, mock_get: MagicMock): self.proxy_wrapper.get_topic(topic_name=topic_name) mock_get.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", headers=HEADERS, ) @@ -112,7 +110,7 @@ def test_should_call_batch_alter_topic_config(self, mock_put: MagicMock): ) mock_put.assert_called_with( - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", headers=HEADERS, json={ "data": [ @@ -130,7 +128,7 @@ def test_should_call_delete_topic(self, mock_delete: MagicMock): self.proxy_wrapper.delete_topic(topic_name=topic_name) mock_delete.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", headers=HEADERS, ) @@ -140,7 +138,7 @@ def 
test_should_call_get_broker_config(self, mock_get: MagicMock): self.proxy_wrapper.get_broker_config() mock_get.assert_called_with( - url=f"{HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/brokers/-/configs", + url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/brokers/-/configs", headers=HEADERS, ) @@ -159,7 +157,7 @@ def test_should_log_topic_creation( httpx_mock.add_response( method="POST", - url=f"{HOST}/v3/clusters/cluster-1/topics", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics", json=topic_spec, headers=HEADERS, status_code=201, @@ -174,7 +172,7 @@ def test_should_log_topic_deletion( httpx_mock.add_response( method="DELETE", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}", headers=HEADERS, status_code=204, ) @@ -203,7 +201,7 @@ def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock httpx_mock.add_response( method="GET", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}", headers=HEADERS, status_code=200, json=res, @@ -221,7 +219,7 @@ def test_should_rais_topic_not_found_exception_get_topic( httpx_mock.add_response( method="GET", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}", headers=HEADERS, status_code=404, json={ @@ -241,7 +239,7 @@ def test_should_log_reset_default_topic_config_when_deleted( httpx_mock.add_response( method="POST", - url=f"{HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", + url=f"{DEFAULT_HOST}/v3/clusters/cluster-1/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": [{"name": config_name, "operation": "DELETE"}]}, status_code=204, diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml index 6b7c754ab..9167e8aac 100644 --- 
a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml +++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml @@ -6,5 +6,5 @@ topic_name_config: default_output_topic_name: ${component_type}-output-topic helm_diff_config: enable: false -kafka_connect_host: "kafka_connect_host:8083" -kafka_rest_host: "kafka_rest_host:8082" +kafka_connect_host: "http://kafka_connect_host:8083" +kafka_rest_host: "http://kafka_rest_host:8082" From fbf11b2c413593f41ee1f971eb3d5a65c6cfb19e Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 6 Sep 2023 14:52:33 +0200 Subject: [PATCH 05/20] Update files --- docs/docs/user/migration-guide/v1-v2.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/user/migration-guide/v1-v2.md b/docs/docs/user/migration-guide/v1-v2.md index 07dfb338b..86a687b27 100644 --- a/docs/docs/user/migration-guide/v1-v2.md +++ b/docs/docs/user/migration-guide/v1-v2.md @@ -9,10 +9,10 @@ The `schema_registry_url` is replaced with `schema_registry.enabled` (default `f brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" - kafka_connect_host: "http://localhost:8083" - kafka_rest_host: "http://localhost:8082" -- schema_registry_url: "http://my.custom.sr.url:8081" +- schema_registry_url: "http://my-custom-sr.url:8081" + schema_registry: + enabled: true -+ url: "http://my.custom.sr.url:8081" ++ url: "http://my-custom-sr.url:8081" ``` ## [Derive component type automatically from class name](https://github.com/bakdata/kpops/pull/309) From eaef84bce9cab8200cc77df561c445ecd3392137 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 6 Sep 2023 16:50:38 +0200 Subject: [PATCH 06/20] add reviews --- docs/docs/schema/config.json | 2 +- kpops/cli/pipeline_config.py | 2 + .../kafka_connect/connect_wrapper.py | 2 +- .../schema_handler/schema_handler.py | 12 +++-- .../schema_handler/test_schema_handler.py | 51 ++++++++++++++----- 5 files changed, 51 insertions(+), 18 deletions(-) diff --git 
a/docs/docs/schema/config.json b/docs/docs/schema/config.json index e65456b23..372020c15 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -214,7 +214,7 @@ }, "SchemaRegistryConfig": { "additionalProperties": false, - "description": "Base class for settings, allowing values to be overridden by environment variables.\n\nThis is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose),\nHeroku and any 12 factor app design.", + "description": "Configures schema registry.", "properties": { "enabled": { "default": false, diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index b5df9759b..e4b1ef66b 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -23,6 +23,8 @@ class TopicNameConfig(BaseSettings): class SchemaRegistryConfig(BaseSettings): + """Configures schema registry.""" + enabled: bool = Field( default=False, description="If the Schema Registry handler should be initialized.", diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 1a9b1e3c7..1a0555f20 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -25,7 +25,7 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, host: AnyHttpUrl): + def __init__(self, host: AnyHttpUrl) -> None: self._host: str = host @property diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index 9ee2d5fe0..18253e104 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -8,7 +8,7 @@ from schema_registry.client.schema import AvroSchema from kpops.cli.exception import ClassNotFoundError -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.pipeline_config 
import PipelineConfig, SchemaRegistryConfig from kpops.cli.registry import find_class from kpops.component_handlers.schema_handler.schema_provider import ( Schema, @@ -21,8 +21,12 @@ class SchemaHandler: - def __init__(self, url: str, components_module: str | None): - self.schema_registry_client = SchemaRegistryClient(url) + def __init__( + self, + schema_registry_config: SchemaRegistryConfig, + components_module: str | None, + ): + self.schema_registry_client = SchemaRegistryClient(schema_registry_config.url) self.components_module = components_module @cached_property @@ -46,7 +50,7 @@ def load_schema_handler( ) -> SchemaHandler | None: if config.schema_registry.enabled: return cls( - url=config.schema_registry.url, + schema_registry_config=config.schema_registry, components_module=components_module, ) return None diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 5c88e2850..4d48460ea 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock import pytest -from pydantic import BaseModel +from pydantic import AnyHttpUrl, BaseModel, parse_obj_as from pytest_mock import MockerFixture from schema_registry.client.schema import AvroSchema from schema_registry.client.utils import SchemaVersion @@ -69,6 +69,13 @@ def to_section(topic_config: TopicConfig) -> ToSection: return ToSection(topics={TopicName("topic-X"): topic_config}) +@pytest.fixture() +def schema_registry_config() -> SchemaRegistryConfig: + return SchemaRegistryConfig( + enabled=True, url=parse_obj_as(AnyHttpUrl, "http://mock:8081") + ) + + def test_load_schema_handler(): config_enable = PipelineConfig( defaults_path=Path("fake"), @@ -112,9 +119,12 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): 
find_class_mock.assert_called_once_with(TEST_SCHEMA_PROVIDER_MODULE, SchemaProvider) -def test_should_raise_value_error_if_schema_provider_class_not_found(): +def test_should_raise_value_error_if_schema_provider_class_not_found( + schema_registry_config: SchemaRegistryConfig, +): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=NON_EXISTING_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=NON_EXISTING_PROVIDER_MODULE, ) with pytest.raises(ValueError) as value_error: @@ -158,10 +168,14 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( - to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock + to_section: ToSection, + log_info_mock: MagicMock, + schema_registry_mock: MagicMock, + schema_registry_config: SchemaRegistryConfig, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -179,9 +193,11 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, + schema_registry_config: SchemaRegistryConfig, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [1, 2, 3] @@ -200,10 +216,12 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl topic_config: TopicConfig, to_section: ToSection, schema_registry_mock: MagicMock, + schema_registry_config: SchemaRegistryConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - 
url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -240,10 +258,12 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve log_info_mock: MagicMock, log_debug_mock: MagicMock, schema_registry_mock: MagicMock, + schema_registry_config: SchemaRegistryConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) @@ -274,12 +294,14 @@ def test_should_submit_non_existing_schema_when_not_dry( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, + schema_registry_config: SchemaRegistryConfig, ): schema_provider = TestSchemaProvider() schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -301,9 +323,11 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, + schema_registry_config: SchemaRegistryConfig, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] @@ -318,10 +342,13 @@ def 
test_should_log_correct_message_when_delete_schemas_and_in_dry_run( def test_should_delete_schemas_when_not_in_dry_run( - to_section: ToSection, schema_registry_mock: MagicMock + to_section: ToSection, + schema_registry_mock: MagicMock, + schema_registry_config: SchemaRegistryConfig, ): schema_handler = SchemaHandler( - url="http://mock:8081", components_module=TEST_SCHEMA_PROVIDER_MODULE + schema_registry_config=schema_registry_config, + components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_registry_mock.get_versions.return_value = [] From 281e9dad1f136eaa2e15d2addc20772ef1fbfee7 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 6 Sep 2023 17:20:10 +0200 Subject: [PATCH 07/20] update docs --- .../resources/variables/config_env_vars.env | 4 +- .../resources/variables/config_env_vars.md | 16 +++---- docs/docs/schema/config.json | 10 ++--- docs/docs/user/migration-guide/v1-v2.md | 15 +++++-- .../bakdata/atm-fraud-detection/config.yaml | 4 +- kpops/cli/pipeline_config.py | 6 +-- .../kafka_connect/connect_wrapper.py | 18 ++++---- .../kafka_connect/kafka_connect_handler.py | 8 ++-- .../schema_handler/schema_handler.py | 12 +++--- kpops/component_handlers/topic/handler.py | 4 +- .../component_handlers/topic/proxy_wrapper.py | 21 +++++----- tests/cli/test_pipeline_config.py | 8 ++-- .../kafka_connect/test_connect_wrapper.py | 2 +- .../schema_handler/test_schema_handler.py | 42 ++++++++++--------- .../topic/test_proxy_wrapper.py | 2 +- .../resources/custom-config/config.yaml | 4 +- .../kafka-connect-sink-config/config.yaml | 4 +- 17 files changed, 98 insertions(+), 82 deletions(-) diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index aed411bb8..32f6f54e9 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -12,10 +12,10 @@ KPOPS_ENVIRONMENT # No default value, required # brokers # The comma separated Kafka brokers address. 
KPOPS_KAFKA_BROKERS # No default value, required -# kafka_rest_host +# kafka_rest_url # Address of the Kafka REST Proxy. KPOPS_REST_PROXY_HOST=http://localhost:8082 -# kafka_connect_host +# kafka_connect_url # Address of Kafka Connect. KPOPS_CONNECT_HOST=http://localhost:8083 # timeout diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 5e1d5e06e..4a1a5a5a9 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,11 +1,11 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name | Default Value |Required| Description | Setting name | -|-----------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------| -|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers | -|KPOPS_REST_PROXY_HOST |http://localhost:8082|False |Address of the Kafka REST Proxy. |kafka_rest_host | -|KPOPS_CONNECT_HOST |http://localhost:8083|False |Address of Kafka Connect. |kafka_connect_host| -|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | -|KPOPS_RETAIN_CLEAN_JOBS|False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | +| Name | Default Value |Required| Description | Setting name | +|-----------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| +|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | +|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers | +|KPOPS_REST_PROXY_HOST |http://localhost:8082|False |Address of the Kafka REST Proxy. |kafka_rest_url | +|KPOPS_CONNECT_HOST |http://localhost:8083|False |Address of Kafka Connect. |kafka_connect_url| +|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | +|KPOPS_RETAIN_CLEAN_JOBS|False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs| diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 372020c15..f105bec62 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -126,7 +126,7 @@ ], "title": "Helm Diff Config" }, - "kafka_connect_host": { + "kafka_connect_url": { "default": "http://localhost:8083", "description": "Address of Kafka Connect.", "env": "KPOPS_CONNECT_HOST", @@ -136,10 +136,10 @@ "format": "uri", "maxLength": 65536, "minLength": 1, - "title": "Kafka Connect Host", + "title": "Kafka Connect Url", "type": "string" }, - "kafka_rest_host": { + "kafka_rest_url": { "default": "http://localhost:8082", "description": "Address of the Kafka REST Proxy.", "env": "KPOPS_REST_PROXY_HOST", @@ -149,7 +149,7 @@ "format": "uri", "maxLength": 65536, "minLength": 1, - "title": "Kafka Rest Host", + "title": "Kafka Rest Url", "type": "string" }, "retain_clean_jobs": { @@ -214,7 +214,7 @@ }, "SchemaRegistryConfig": { "additionalProperties": false, - "description": "Configures schema registry.", + "description": "Configuration for Schema Registry.", "properties": { "enabled": { "default": false, diff --git a/docs/docs/user/migration-guide/v1-v2.md b/docs/docs/user/migration-guide/v1-v2.md index 86a687b27..511e5b21a 100644 --- a/docs/docs/user/migration-guide/v1-v2.md +++ b/docs/docs/user/migration-guide/v1-v2.md @@ -2,14 +2,23 @@ ## [Make Kafka rest proxy & connect hosts default and improve schema registry config](https://github.com/bakdata/kpops/pull/354) -The `schema_registry_url` is replaced with `schema_registry.enabled` (default `false`) and `schema_registry.url` (default `http://localhost:8081`). Your `config.yaml` will change to: +The breaking changes target the `config.yaml` file: + +- The `schema_registry_url` is replaced with `schema_registry.enabled` (default `false`) and `schema_registry.url` (default `http://localhost:8081`). + +- `kafka_connect_host` is renamed to `kafka_connect_url` (default `http://localhost:8083`). 
+- `kafka_rest_host` is renamed to `kafka_rest_url` (default `http://localhost:8082`). + +Your `config.yaml` will change to: ```diff environment: development brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" - kafka_rest_host: "http://my-custom-rest.url:8082" - kafka_connect_host: "http://my-custom-connect.url:8083" - schema_registry_url: "http://my-custom-sr.url:8081" ++ kafka_rest_url: "http://my-custom-rest.url:8082" ++ kafka_connect_url: "http://my-custom-connect.url:8083" + schema_registry: + enabled: true + url: "http://my-custom-sr.url:8081" diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index 7297da066..3f72dfb2a 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -10,8 +10,8 @@ schema_registry: enabled: true url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081" -kafka_rest_host: "http://localhost:8082" +kafka_rest_url: "http://localhost:8082" -kafka_connect_host: "http://localhost:8083" +kafka_connect_url: "http://localhost:8083" defaults_path: . 
diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index e4b1ef66b..aa2bee624 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -23,7 +23,7 @@ class TopicNameConfig(BaseSettings): class SchemaRegistryConfig(BaseSettings): - """Configures schema registry.""" + """Configuration for Schema Registry.""" enabled: bool = Field( default=False, @@ -70,14 +70,14 @@ class PipelineConfig(BaseSettings): default=SchemaRegistryConfig(), description="Configure the Schema Registry.", ) - kafka_rest_host: AnyHttpUrl = Field( + kafka_rest_url: AnyHttpUrl = Field( # For validating URLs use parse_obj_as # https://github.com/pydantic/pydantic/issues/1106 default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), env=f"{ENV_PREFIX}REST_PROXY_HOST", description="Address of the Kafka REST Proxy.", ) - kafka_connect_host: AnyHttpUrl = Field( + kafka_connect_url: AnyHttpUrl = Field( default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), env=f"{ENV_PREFIX}CONNECT_HOST", description="Address of Kafka Connect.", diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 1a0555f20..07928c7c8 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -25,12 +25,12 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, host: AnyHttpUrl) -> None: - self._host: str = host + def __init__(self, url: AnyHttpUrl) -> None: + self._url: AnyHttpUrl = url @property - def host(self) -> str: - return self._host + def url(self) -> AnyHttpUrl: + return self._url def create_connector( self, connector_config: KafkaConnectorConfig @@ -44,7 +44,7 @@ def create_connector( config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self._host}/connectors", headers=HEADERS, json=connect_data + 
url=f"{self._url}/connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -66,7 +66,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: :return: Information about the connector """ response = httpx.get( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS + url=f"{self._url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -94,7 +94,7 @@ def update_connector_config( connector_name = connector_config.name config_json = connector_config.dict() response = httpx.put( - url=f"{self._host}/connectors/{connector_name}/config", + url=f"{self._url}/connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) @@ -124,7 +124,7 @@ def validate_connector_config( :return: """ response = httpx.put( - url=f"{self._host}/connector-plugins/{connector_config.class_name}/config/validate", + url=f"{self._url}/connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, json=connector_config.dict(), ) @@ -151,7 +151,7 @@ def delete_connector(self, connector_name: str) -> None: API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)- """ response = httpx.delete( - url=f"{self._host}/connectors/{connector_name}", headers=HEADERS + url=f"{self._url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 14f5af076..944ccc090 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -100,14 +100,14 @@ def __dry_run_connector_creation( 
log.debug(connector_config.dict()) log.debug(f"PUT /connectors/{connector_name}/config HTTP/1.1") - log.debug(f"HOST: {self._connect_wrapper.host}") + log.debug(f"HOST: {self._connect_wrapper.url}") except ConnectorNotFoundException: diff = render_diff({}, connector_config.dict()) log.info( f"Connector Creation: connector {connector_name} does not exist. Creating connector with config:\n{diff}" ) log.debug("POST /connectors HTTP/1.1") - log.debug(f"HOST: {self._connect_wrapper.host}") + log.debug(f"HOST: {self._connect_wrapper.url}") errors = self._connect_wrapper.validate_connector_config(connector_config) if len(errors) > 0: @@ -129,7 +129,7 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: ) ) log.debug(f"DELETE /connectors/{connector_name} HTTP/1.1") - log.debug(f"HOST: {self._connect_wrapper.host}") + log.debug(f"HOST: {self._connect_wrapper.url}") except ConnectorNotFoundException: log.warning( f"Connector Destruction: connector {connector_name} does not exist and cannot be deleted. Skipping." 
@@ -138,6 +138,6 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: @classmethod def from_pipeline_config(cls, pipeline_config: PipelineConfig) -> Self: return cls( - connect_wrapper=ConnectWrapper(host=pipeline_config.kafka_connect_host), + connect_wrapper=ConnectWrapper(url=pipeline_config.kafka_connect_url), timeout=pipeline_config.timeout, ) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index 18253e104..bdc084717 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -8,7 +8,7 @@ from schema_registry.client.schema import AvroSchema from kpops.cli.exception import ClassNotFoundError -from kpops.cli.pipeline_config import PipelineConfig, SchemaRegistryConfig +from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import find_class from kpops.component_handlers.schema_handler.schema_provider import ( Schema, @@ -23,10 +23,12 @@ class SchemaHandler: def __init__( self, - schema_registry_config: SchemaRegistryConfig, + pipeline_config: PipelineConfig, components_module: str | None, - ): - self.schema_registry_client = SchemaRegistryClient(schema_registry_config.url) + ) -> None: + self.schema_registry_client = SchemaRegistryClient( + pipeline_config.schema_registry.url + ) self.components_module = components_module @cached_property @@ -50,7 +52,7 @@ def load_schema_handler( ) -> SchemaHandler | None: if config.schema_registry.enabled: return cls( - schema_registry_config=config.schema_registry, + pipeline_config=config, components_module=components_module, ) return None diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 1df0d106a..cef544ab9 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -129,7 +129,7 @@ def __dry_run_topic_creation( ) ) 
log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1") - log.debug(f"Host: {self.proxy_wrapper.host}") + log.debug(f"Host: {self.proxy_wrapper.url}") log.debug(HEADERS) log.debug(topic_spec.dict()) @@ -187,7 +187,7 @@ def __dry_run_topic_deletion(self, topic_name: str) -> None: log.warning( f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping." ) - log.debug(f"Host: {self.proxy_wrapper.host}") + log.debug(f"Host: {self.proxy_wrapper.url}") log.debug(HEADERS) log.debug("HTTP/1.1 404 Not Found") log.debug(HEADERS) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 6464b094c..4b71d3f2b 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -2,6 +2,7 @@ from functools import cached_property import httpx +from pydantic import AnyHttpUrl from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers.topic.exception import ( @@ -26,7 +27,7 @@ class ProxyWrapper: """ def __init__(self, pipeline_config: PipelineConfig) -> None: - self._host = pipeline_config.kafka_rest_host + self._url: AnyHttpUrl = pipeline_config.kafka_rest_url @cached_property def cluster_id(self) -> str: @@ -39,7 +40,7 @@ def cluster_id(self) -> str: bootstrap.servers configuration. Therefore, only one Kafka cluster will be returned. :return: The Kafka cluster ID. 
""" - response = httpx.get(url=f"{self._host}/v3/clusters") + response = httpx.get(url=f"{self._url}/v3/clusters") if response.status_code == httpx.codes.OK: cluster_information = response.json() return cluster_information["data"][0]["cluster_id"] @@ -47,8 +48,8 @@ def cluster_id(self) -> str: raise KafkaRestProxyError(response) @property - def host(self) -> str: - return self._host + def url(self) -> AnyHttpUrl: + return self._url def create_topic(self, topic_spec: TopicSpec) -> None: """ @@ -57,7 +58,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :param topic_spec: The topic specification. """ response = httpx.post( - url=f"{self._host}/v3/clusters/{self.cluster_id}/topics", + url=f"{self._url}/v3/clusters/{self.cluster_id}/topics", headers=HEADERS, json=topic_spec.dict(exclude_none=True), ) @@ -75,7 +76,7 @@ def delete_topic(self, topic_name: str) -> None: :param topic_name: Name of the topic """ response = httpx.delete( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.NO_CONTENT: @@ -92,7 +93,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: :return: Response of the get topic API """ response = httpx.get( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.OK: @@ -118,7 +119,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: :return: The topic configuration. """ response = httpx.get( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", headers=HEADERS, ) @@ -145,7 +146,7 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No :param config_name: The configuration parameter name. 
""" response = httpx.post( - url=f"{self.host}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": json_body}, ) @@ -162,7 +163,7 @@ def get_broker_config(self) -> BrokerConfigResponse: :return: The broker configuration. """ response = httpx.get( - url=f"{self.host}/v3/clusters/{self.cluster_id}/brokers/-/configs", + url=f"{self.url}/v3/clusters/{self.cluster_id}/brokers/-/configs", headers=HEADERS, ) diff --git a/tests/cli/test_pipeline_config.py b/tests/cli/test_pipeline_config.py index cd976a103..ade83e34d 100644 --- a/tests/cli/test_pipeline_config.py +++ b/tests/cli/test_pipeline_config.py @@ -23,8 +23,8 @@ def test_pipeline_config_with_default_values(): ) assert default_config.schema_registry.enabled is False assert default_config.schema_registry.url == "http://localhost:8081" - assert default_config.kafka_rest_host == "http://localhost:8082" - assert default_config.kafka_connect_host == "http://localhost:8083" + assert default_config.kafka_rest_url == "http://localhost:8082" + assert default_config.kafka_connect_url == "http://localhost:8083" assert default_config.timeout == 300 assert default_config.create_namespace is False assert default_config.helm_config.context is None @@ -39,14 +39,14 @@ def test_pipeline_config_with_different_invalid_urls(): PipelineConfig( environment="development", brokers="http://broker:9092", - kafka_connect_host=parse_obj_as(AnyHttpUrl, "in-valid-host"), + kafka_connect_url=parse_obj_as(AnyHttpUrl, "in-valid-host"), ) with pytest.raises(ValidationError): PipelineConfig( environment="development", brokers="http://broker:9092", - kafka_rest_host=parse_obj_as(AnyHttpUrl, "in-valid-host"), + kafka_rest_url=parse_obj_as(AnyHttpUrl, "in-valid-host"), ) with pytest.raises(ValidationError): diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py 
b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 21b2cc2cf..3054c50ed 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -31,7 +31,7 @@ def setup(self): defaults_path=DEFAULTS_PATH, environment="development", ) - self.connect_wrapper = ConnectWrapper(host=config.kafka_connect_host) + self.connect_wrapper = ConnectWrapper(url=config.kafka_connect_url) @pytest.fixture def connector_config(self) -> KafkaConnectorConfig: diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 4d48460ea..8b2dda5cf 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -70,9 +70,13 @@ def to_section(topic_config: TopicConfig) -> ToSection: @pytest.fixture() -def schema_registry_config() -> SchemaRegistryConfig: - return SchemaRegistryConfig( - enabled=True, url=parse_obj_as(AnyHttpUrl, "http://mock:8081") +def pipeline_config_with_sr_enabled() -> PipelineConfig: + return PipelineConfig( + environment="development", + brokers="broker:9092", + schema_registry=SchemaRegistryConfig( + enabled=True, url=parse_obj_as(AnyHttpUrl, "http://mock:8081") + ), ) @@ -120,10 +124,10 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): def test_should_raise_value_error_if_schema_provider_class_not_found( - schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=NON_EXISTING_PROVIDER_MODULE, ) @@ -171,10 +175,10 @@ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - 
schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -193,10 +197,10 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -216,11 +220,11 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl topic_config: TopicConfig, to_section: ToSection, schema_registry_mock: MagicMock, - schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -258,11 +262,11 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve log_info_mock: MagicMock, log_debug_mock: MagicMock, schema_registry_mock: MagicMock, - schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -294,13 +298,13 @@ def test_should_submit_non_existing_schema_when_not_dry( to_section: ToSection, log_info_mock: 
MagicMock, schema_registry_mock: MagicMock, - schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_provider = TestSchemaProvider() schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -323,10 +327,10 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -344,10 +348,10 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( def test_should_delete_schemas_when_not_in_dry_run( to_section: ToSection, schema_registry_mock: MagicMock, - schema_registry_config: SchemaRegistryConfig, + pipeline_config_with_sr_enabled: PipelineConfig, ): schema_handler = SchemaHandler( - schema_registry_config=schema_registry_config, + pipeline_config=pipeline_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index fb04ca4dd..d69de8e12 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -45,7 +45,7 @@ def setup(self, httpx_mock: HTTPXMock): json=cluster_response, status_code=200, ) - assert self.proxy_wrapper.host == DEFAULT_HOST + assert self.proxy_wrapper.url == DEFAULT_HOST assert self.proxy_wrapper.cluster_id == "cluster-1" @patch("httpx.post") diff --git 
a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index 30b369de8..8188a32c9 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ b/tests/pipeline/resources/custom-config/config.yaml @@ -4,8 +4,8 @@ topic_name_config: default_error_topic_name: "${component_name}-dead-letter-topic" default_output_topic_name: "${component_name}-test-topic" brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" -kafka_connect_host: "http://localhost:8083" -kafka_rest_host: "http://localhost:8082" +kafka_connect_url: "http://localhost:8083" +kafka_rest_url: "http://localhost:8082" schema_registry: enabled: true url: "http://localhost:8081" diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml index 9167e8aac..630b046f6 100644 --- a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml +++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml @@ -6,5 +6,5 @@ topic_name_config: default_output_topic_name: ${component_type}-output-topic helm_diff_config: enable: false -kafka_connect_host: "http://kafka_connect_host:8083" -kafka_rest_host: "http://kafka_rest_host:8082" +kafka_connect_url: "http://kafka_connect_url:8083" +kafka_rest_url: "http://kafka_rest_url:8082" From 9f6fdfa1cf62503d6b30dcfb08bc94c0544054fd Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Thu, 7 Sep 2023 10:33:24 +0200 Subject: [PATCH 08/20] add prefix --- .../resources/variables/config_env_vars.env | 4 +-- .../resources/variables/config_env_vars.md | 4 +-- docs/docs/schema/config.json | 22 +++++++-------- docs/docs/user/migration-guide/v1-v2.md | 24 ----------------- docs/docs/user/migration-guide/v2-v3.md | 27 +++++++++++++++++++ kpops/cli/pipeline_config.py | 5 ++-- 6 files changed, 45 insertions(+), 41 deletions(-) create mode 100644 docs/docs/user/migration-guide/v2-v3.md diff --git 
a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 32f6f54e9..e431c3af7 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -14,10 +14,10 @@ KPOPS_ENVIRONMENT # No default value, required KPOPS_KAFKA_BROKERS # No default value, required # kafka_rest_url # Address of the Kafka REST Proxy. -KPOPS_REST_PROXY_HOST=http://localhost:8082 +KPOPS_REST_PROXY_URL=http://localhost:8082 # kafka_connect_url # Address of Kafka Connect. -KPOPS_CONNECT_HOST=http://localhost:8083 +KPOPS_CONNECT_URL=http://localhost:8083 # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 4a1a5a5a9..32c923282 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -5,7 +5,7 @@ These variables are a lower priority alternative to the settings in `config.yaml |-----------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| |KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | |KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers | -|KPOPS_REST_PROXY_HOST |http://localhost:8082|False |Address of the Kafka REST Proxy. |kafka_rest_url | -|KPOPS_CONNECT_HOST |http://localhost:8083|False |Address of Kafka Connect. |kafka_connect_url| +|KPOPS_REST_PROXY_URL |http://localhost:8082|False |Address of the Kafka REST Proxy. 
|kafka_rest_url | +|KPOPS_CONNECT_URL |http://localhost:8083|False |Address of Kafka Connect. |kafka_connect_url| |KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | |KPOPS_RETAIN_CLEAN_JOBS|False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. |retain_clean_jobs| diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index f105bec62..bff6248be 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -59,7 +59,7 @@ "default": false, "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", "env_names": [ - "create_namespace" + "kpops_create_namespace" ], "title": "Create Namespace", "type": "boolean" @@ -68,7 +68,7 @@ "default": "defaults", "description": "The name of the defaults file and the prefix of the defaults environment file.", "env_names": [ - "defaults_filename_prefix" + "kpops_defaults_filename_prefix" ], "title": "Defaults Filename Prefix", "type": "string" @@ -77,7 +77,7 @@ "default": ".", "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. 
Paths can either be absolute or relative to `config.yaml`", "env_names": [ - "defaults_path" + "kpops_defaults_path" ], "example": "defaults", "format": "path", @@ -107,7 +107,7 @@ }, "description": "Global flags for Helm.", "env_names": [ - "helm_config" + "kpops_helm_config" ], "title": "Helm Config" }, @@ -122,16 +122,16 @@ }, "description": "Configure Helm Diff.", "env_names": [ - "helm_diff_config" + "kpops_helm_diff_config" ], "title": "Helm Diff Config" }, "kafka_connect_url": { "default": "http://localhost:8083", "description": "Address of Kafka Connect.", - "env": "KPOPS_CONNECT_HOST", + "env": "KPOPS_CONNECT_URL", "env_names": [ - "kpops_connect_host" + "kpops_connect_url" ], "format": "uri", "maxLength": 65536, @@ -142,9 +142,9 @@ "kafka_rest_url": { "default": "http://localhost:8082", "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS_REST_PROXY_HOST", + "env": "KPOPS_REST_PROXY_URL", "env_names": [ - "kpops_rest_proxy_host" + "kpops_rest_proxy_url" ], "format": "uri", "maxLength": 65536, @@ -174,7 +174,7 @@ }, "description": "Configure the Schema Registry.", "env_names": [ - "schema_registry" + "kpops_schema_registry" ], "title": "Schema Registry" }, @@ -200,7 +200,7 @@ }, "description": "Configure the topic name variables you can use in the pipeline definition.", "env_names": [ - "topic_name_config" + "kpops_topic_name_config" ], "title": "Topic Name Config" } diff --git a/docs/docs/user/migration-guide/v1-v2.md b/docs/docs/user/migration-guide/v1-v2.md index 511e5b21a..c5936cbe5 100644 --- a/docs/docs/user/migration-guide/v1-v2.md +++ b/docs/docs/user/migration-guide/v1-v2.md @@ -1,29 +1,5 @@ # Migrate from V1 to V2 -## [Make Kafka rest proxy & connect hosts default and improve schema registry config](https://github.com/bakdata/kpops/pull/354) - -The breaking changes target the `config.yaml` file: - -- The `schema_registry_url` is replaced with `schema_registry.enabled` (default `false`) and `schema_registry.url` (default 
`http://localhost:8081`). - -- `kafka_connect_host` is renamed to `kafka_connect_url` (default `http://localhost:8083`). -- `kafka_rest_host` is renamed to `kafka_rest_url` (default `http://localhost:8082`). - -Your `config.yaml` will change to: - -```diff - environment: development - brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" -- kafka_rest_host: "http://my-custom-rest.url:8082" -- kafka_connect_host: "http://my-custom-connect.url:8083" -- schema_registry_url: "http://my-custom-sr.url:8081" -+ kafka_rest_url: "http://my-custom-rest.url:8082" -+ Kafka_connect_url: "http://my-custom-connect.url:8083" -+ schema_registry: -+ enabled: true -+ url: "http://my-custom-sr.url:8081" -``` - ## [Derive component type automatically from class name](https://github.com/bakdata/kpops/pull/309) KPOps automatically infers the component `type` from the class name. Therefore, the `type` and `schema_type` attributes can be removed from your custom components. By convention the `type` would be the lower, and kebab cased name of the class. diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md new file mode 100644 index 000000000..14173825c --- /dev/null +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -0,0 +1,27 @@ +# Migrate from V2 to V3 + +## [Make Kafka rest proxy & connect hosts default and improve schema registry config](https://github.com/bakdata/kpops/pull/354) + +The breaking changes target the `config.yaml` file: + +- The `schema_registry_url` is replaced with `schema_registry.enabled` (default `false`) and `schema_registry.url` (default `http://localhost:8081`). + +- `kafka_connect_host` is renamed to `kafka_connect_url` (default `http://localhost:8083`). +- `kafka_rest_host` is renamed to `kafka_rest_url` (default `http://localhost:8082`). + +The environment variable name of these config fields changed respectively. 
Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md). + +Your `config.yaml` will change to: + +```diff + environment: development + brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +- kafka_rest_host: "http://my-custom-rest.url:8082" +- kafka_connect_host: "http://my-custom-connect.url:8083" +- schema_registry_url: "http://my-custom-sr.url:8081" ++ kafka_rest_url: "http://my-custom-rest.url:8082" ++ Kafka_connect_url: "http://my-custom-connect.url:8083" ++ schema_registry: ++ enabled: true ++ url: "http://my-custom-sr.url:8081" +``` diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index 568a8f88c..f3b55e865 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -81,12 +81,12 @@ class PipelineConfig(BaseSettings): # For validating URLs use parse_obj_as # https://github.com/pydantic/pydantic/issues/1106 default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), - env=f"{ENV_PREFIX}REST_PROXY_HOST", + env=f"{ENV_PREFIX}REST_PROXY_URL", description="Address of the Kafka REST Proxy.", ) kafka_connect_url: AnyHttpUrl = Field( default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), - env=f"{ENV_PREFIX}CONNECT_HOST", + env=f"{ENV_PREFIX}CONNECT_URL", description="Address of Kafka Connect.", ) timeout: int = Field( @@ -116,6 +116,7 @@ class Config(BaseConfig): config_path = Path("config.yaml") env_file = ".env" env_file_encoding = "utf-8" + env_prefix = f"{ENV_PREFIX}" @classmethod def customise_sources( From 28d20d4b318f1cc088e60cd735fa6cd1eb92c8f6 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Thu, 7 Sep 2023 12:13:29 +0200 Subject: [PATCH 09/20] Add kafka rest and connect changes --- .../resources/variables/config_env_vars.env | 8 +- .../resources/variables/config_env_vars.md | 14 ++- docs/docs/schema/config.json | 98 ++++++++++++++----- docs/docs/user/migration-guide/v2-v3.md | 13 ++- .../bakdata/atm-fraud-detection/config.yaml | 
6 +- hooks/gen_docs/gen_docs_env_vars.py | 6 +- kpops/cli/{pipeline_config.py => config.py} | 48 ++++++--- kpops/cli/main.py | 14 +-- .../kafka_connect/kafka_connect_handler.py | 8 +- .../schema_handler/schema_handler.py | 6 +- .../component_handlers/topic/proxy_wrapper.py | 6 +- .../base_defaults_component.py | 8 +- kpops/pipeline_generator/pipeline.py | 8 +- kpops/utils/gen_schema.py | 4 +- tests/cli/test_handlers.py | 10 +- ...ipeline_config.py => test_kpops_config.py} | 25 +++-- tests/compiler/test_pipeline_name.py | 4 +- .../kafka_connect/test_connect_wrapper.py | 6 +- .../schema_handler/test_schema_handler.py | 28 +++--- .../topic/test_proxy_wrapper.py | 6 +- .../test_base_defaults_component.py | 16 ++- tests/components/test_kafka_app.py | 10 +- tests/components/test_kafka_connector.py | 8 +- tests/components/test_kafka_sink_connector.py | 16 +-- .../components/test_kafka_source_connector.py | 10 +- tests/components/test_kubernetes_app.py | 14 +-- tests/components/test_producer_app.py | 10 +- tests/components/test_streams_app.py | 20 ++-- .../resources/custom-config/config.yaml | 6 +- .../kafka-connect-sink-config/config.yaml | 6 +- tests/pipeline/test_pipeline.py | 3 +- 31 files changed, 257 insertions(+), 188 deletions(-) rename kpops/cli/{pipeline_config.py => config.py} (84%) rename tests/cli/{test_pipeline_config.py => test_kpops_config.py} (75%) diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index e431c3af7..bdb17fffe 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -11,13 +11,7 @@ KPOPS_ENVIRONMENT # No default value, required # brokers # The comma separated Kafka brokers address. -KPOPS_KAFKA_BROKERS # No default value, required -# kafka_rest_url -# Address of the Kafka REST Proxy. -KPOPS_REST_PROXY_URL=http://localhost:8082 -# kafka_connect_url -# Address of Kafka Connect. 
-KPOPS_CONNECT_URL=http://localhost:8083 +KPOPS_BROKERS # No default value, required # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 32c923282..c9d1e1343 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,11 +1,9 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name | Default Value |Required| Description | Setting name | -|-----------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| -|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers | -|KPOPS_REST_PROXY_URL |http://localhost:8082|False |Address of the Kafka REST Proxy. |kafka_rest_url | -|KPOPS_CONNECT_URL |http://localhost:8083|False |Address of Kafka Connect. |kafka_connect_url| -|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | -|KPOPS_RETAIN_CLEAN_JOBS|False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs| +| Name |Default Value|Required| Description | Setting name | +|-----------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| +|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | +|KPOPS_BROKERS | |True |The comma separated Kafka brokers address. |brokers | +|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | +|KPOPS_RETAIN_CLEAN_JOBS|False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. |retain_clean_jobs| diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index bff6248be..038a65d1d 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -1,5 +1,5 @@ { - "$ref": "#/definitions/PipelineConfig", + "$ref": "#/definitions/KpopsConfig", "definitions": { "HelmConfig": { "description": "Global Helm configuration", @@ -41,15 +41,57 @@ "title": "HelmDiffConfig", "type": "object" }, - "PipelineConfig": { + "KafkaConnectConfig": { + "additionalProperties": false, + "description": "Configuration for Kafka Connect.", + "properties": { + "url": { + "default": "http://localhost:8083", + "description": "Address of Kafka Connect.", + "env": "KPOPS__KAFKA_CONNECT_URL", + "env_names": [ + "kpops__kafka_connect_url" + ], + "format": "uri", + "maxLength": 65536, + "minLength": 1, + "title": "Url", + "type": "string" + } + }, + "title": "KafkaConnectConfig", + "type": "object" + }, + "KafkaRestConfig": { + "additionalProperties": false, + "description": "Configuration for Kafka REST Proxy.", + "properties": { + "url": { + "default": "http://localhost:8082", + "description": 
"Address of the Kafka REST Proxy.", + "env": "KPOPS__KAFKA_REST_URL", + "env_names": [ + "kpops__kafka_rest_url" + ], + "format": "uri", + "maxLength": 65536, + "minLength": 1, + "title": "Url", + "type": "string" + } + }, + "title": "KafkaRestConfig", + "type": "object" + }, + "KpopsConfig": { "additionalProperties": false, "description": "Pipeline configuration unrelated to the components.", "properties": { "brokers": { "description": "The comma separated Kafka brokers address.", - "env": "KPOPS_KAFKA_BROKERS", + "env": "KPOPS_BROKERS", "env_names": [ - "kpops_kafka_brokers" + "kpops_brokers" ], "example": "broker1:9092,broker2:9092,broker3:9092", "title": "Brokers", @@ -126,31 +168,35 @@ ], "title": "Helm Diff Config" }, - "kafka_connect_url": { - "default": "http://localhost:8083", - "description": "Address of Kafka Connect.", - "env": "KPOPS_CONNECT_URL", + "kafka_connect": { + "allOf": [ + { + "$ref": "#/definitions/KafkaConnectConfig" + } + ], + "default": { + "url": "http://localhost:8083" + }, + "description": "Configuration for Kafka Connect.", "env_names": [ - "kpops_connect_url" + "kpops_kafka_connect" ], - "format": "uri", - "maxLength": 65536, - "minLength": 1, - "title": "Kafka Connect Url", - "type": "string" + "title": "Kafka Connect" }, - "kafka_rest_url": { - "default": "http://localhost:8082", - "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS_REST_PROXY_URL", + "kafka_rest": { + "allOf": [ + { + "$ref": "#/definitions/KafkaRestConfig" + } + ], + "default": { + "url": "http://localhost:8082" + }, + "description": "Configuration for Kafka REST Proxy.", "env_names": [ - "kpops_rest_proxy_url" + "kpops_kafka_rest" ], - "format": "uri", - "maxLength": 65536, - "minLength": 1, - "title": "Kafka Rest Url", - "type": "string" + "title": "Kafka Rest" }, "retain_clean_jobs": { "default": false, @@ -172,7 +218,7 @@ "enabled": false, "url": "http://localhost:8081" }, - "description": "Configure the Schema Registry.", + "description": 
"Configuration for Schema Registry.", "env_names": [ "kpops_schema_registry" ], @@ -209,7 +255,7 @@ "environment", "brokers" ], - "title": "PipelineConfig", + "title": "KpopsConfig", "type": "object" }, "SchemaRegistryConfig": { diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index 14173825c..e67e8484a 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -6,10 +6,11 @@ The breaking changes target the `config.yaml` file: - The `schema_registry_url` is replaced with `schema_registry.enabled` (default `false`) and `schema_registry.url` (default `http://localhost:8081`). -- `kafka_connect_host` is renamed to `kafka_connect_url` (default `http://localhost:8083`). -- `kafka_rest_host` is renamed to `kafka_rest_url` (default `http://localhost:8082`). +- `kafka_rest_host` is renamed to `kafka_rest.url` (default `http://localhost:8082`). -The environment variable name of these config fields changed respectively. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md). +- `kafka_connect_host` is replaced with `kafka_connect.url` (default `http://localhost:8083`). + +The environment variable name of these config fields changed respectively. The environment variable `KPOPS_KAFKA_BROKERS` changed to `KPOPS_BROKERS`. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md) to see the newest changes. 
Your `config.yaml` will change to: @@ -19,8 +20,10 @@ Your `config.yaml` will change to: - kafka_rest_host: "http://my-custom-rest.url:8082" - kafka_connect_host: "http://my-custom-connect.url:8083" - schema_registry_url: "http://my-custom-sr.url:8081" -+ kafka_rest_url: "http://my-custom-rest.url:8082" -+ Kafka_connect_url: "http://my-custom-connect.url:8083" ++ kafka_rest: ++ url: "http://my-custom-rest.url:8082" ++ kafka_connect: ++ url: "http://my-custom-connect.url:8083" + schema_registry: + enabled: true + url: "http://my-custom-sr.url:8081" diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index 3f72dfb2a..a31ab2d0a 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -10,8 +10,10 @@ schema_registry: enabled: true url: "http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081" -kafka_rest_url: "http://localhost:8082" +kafka_rest: + url: "http://localhost:8082" -kafka_connect_url: "http://localhost:8083" +kafka_connect: + url: "http://localhost:8083" defaults_path: . diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 436ba19de..ab034d8bf 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -19,7 +19,7 @@ from hooks import PATH_ROOT from hooks.gen_docs import SuperEnum from kpops.cli import main -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig PATH_DOCS_RESOURCES = PATH_ROOT / "docs/docs/resources" PATH_DOCS_VARIABLES = PATH_DOCS_RESOURCES / "variables" @@ -250,9 +250,9 @@ def __fill_csv_pipeline_config(target: Path) -> None: """ # NOTE: This does not see nested fields, hence if there are env vars in a class like # TopicConfig(), they wil not be listed. Possible fix with recursion. 
- config_fields = PipelineConfig.__fields__ + config_fields = KpopsConfig.__fields__ for config_field in config_fields.values(): - config_field_info = PipelineConfig.Config.get_field_info(config_field.name) + config_field_info = KpopsConfig.Config.get_field_info(config_field.name) config_field_description: str = ( config_field.field_info.description or "No description available, please refer to the pipeline config documentation." diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/config.py similarity index 84% rename from kpops/cli/pipeline_config.py rename to kpops/cli/config.py index f3b55e865..88b90684e 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/config.py @@ -43,7 +43,29 @@ class SchemaRegistryConfig(BaseSettings): ) -class PipelineConfig(BaseSettings): +class KafkaRestConfig(BaseSettings): + """Configuration for Kafka REST Proxy.""" + + url: AnyHttpUrl = Field( + # For validating URLs use parse_obj_as + # https://github.com/pydantic/pydantic/issues/1106 + default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), + env=f"{ENV_PREFIX}_KAFKA_REST_URL", + description="Address of the Kafka REST Proxy.", + ) + + +class KafkaConnectConfig(BaseSettings): + """Configuration for Kafka Connect.""" + + url: AnyHttpUrl = Field( + default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), + env=f"{ENV_PREFIX}_KAFKA_CONNECT_URL", + description="Address of Kafka Connect.", + ) + + +class KpopsConfig(BaseSettings): """Pipeline configuration unrelated to the components.""" defaults_path: Path = Field( @@ -61,7 +83,7 @@ class PipelineConfig(BaseSettings): ) brokers: str = Field( default=..., - env=f"{ENV_PREFIX}KAFKA_BROKERS", + env=f"{ENV_PREFIX}BROKERS", description="The comma separated Kafka brokers address.", example="broker1:9092,broker2:9092,broker3:9092", ) @@ -75,19 +97,15 @@ class PipelineConfig(BaseSettings): ) schema_registry: SchemaRegistryConfig = Field( default=SchemaRegistryConfig(), - description="Configure the Schema Registry.", + 
description="Configuration for Schema Registry.", ) - kafka_rest_url: AnyHttpUrl = Field( - # For validating URLs use parse_obj_as - # https://github.com/pydantic/pydantic/issues/1106 - default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), - env=f"{ENV_PREFIX}REST_PROXY_URL", - description="Address of the Kafka REST Proxy.", + kafka_rest: KafkaRestConfig = Field( + default=KafkaRestConfig(), + description="Configuration for Kafka REST Proxy.", ) - kafka_connect_url: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), - env=f"{ENV_PREFIX}CONNECT_URL", - description="Address of Kafka Connect.", + kafka_connect: KafkaConnectConfig = Field( + default=KafkaConnectConfig(), + description="Configuration for Kafka Connect.", ) timeout: int = Field( default=300, @@ -125,7 +143,7 @@ def customise_sources( env_settings: SettingsSourceCallable, file_secret_settings: SettingsSourceCallable, ) -> tuple[ - SettingsSourceCallable | Callable[[PipelineConfig], dict[str, Any]], ... + SettingsSourceCallable | Callable[[KpopsConfig], dict[str, Any]], ... 
]: return ( env_settings, @@ -135,7 +153,7 @@ def customise_sources( ) -def yaml_config_settings_source(settings: PipelineConfig) -> dict[str, Any]: +def yaml_config_settings_source(settings: KpopsConfig) -> dict[str, Any]: path_to_config = settings.Config.config_path if path_to_config.exists(): if isinstance(source := load_yaml_file(path_to_config), dict): diff --git a/kpops/cli/main.py b/kpops/cli/main.py index f58808cd2..a37947612 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -9,8 +9,8 @@ import typer from kpops import __version__ +from kpops.cli.config import ENV_PREFIX, KpopsConfig from kpops.cli.custom_formatter import CustomFormatter -from kpops.cli.pipeline_config import ENV_PREFIX, PipelineConfig from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( @@ -111,7 +111,7 @@ def setup_pipeline( pipeline_base_dir: Path, pipeline_path: Path, components_module: str | None, - pipeline_config: PipelineConfig, + pipeline_config: KpopsConfig, ) -> Pipeline: registry = Registry() if components_module: @@ -125,7 +125,7 @@ def setup_pipeline( def setup_handlers( - components_module: str | None, config: PipelineConfig + components_module: str | None, config: KpopsConfig ) -> ComponentHandlers: schema_handler = SchemaHandler.load_schema_handler(components_module, config) connector_handler = KafkaConnectHandler.from_pipeline_config(config) @@ -193,13 +193,13 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_pipeline_config( config: Path, defaults: Optional[Path], verbose: bool -) -> PipelineConfig: +) -> KpopsConfig: setup_logging_level(verbose) - PipelineConfig.Config.config_path = config + KpopsConfig.Config.config_path = config if defaults: - pipeline_config = PipelineConfig(defaults_path=defaults) + pipeline_config = KpopsConfig(defaults_path=defaults) else: - pipeline_config = PipelineConfig() + pipeline_config = 
KpopsConfig() pipeline_config.defaults_path = config.parent / pipeline_config.defaults_path return pipeline_config diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 944ccc090..910fd7eeb 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -19,7 +19,7 @@ from typing_extensions import Self if TYPE_CHECKING: - from kpops.cli.pipeline_config import PipelineConfig + from kpops.cli.config import KpopsConfig log = logging.getLogger("KafkaConnectHandler") @@ -136,8 +136,8 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: ) @classmethod - def from_pipeline_config(cls, pipeline_config: PipelineConfig) -> Self: + def from_pipeline_config(cls, config: KpopsConfig) -> Self: return cls( - connect_wrapper=ConnectWrapper(url=pipeline_config.kafka_connect_url), - timeout=pipeline_config.timeout, + connect_wrapper=ConnectWrapper(url=config.kafka_connect.url), + timeout=config.timeout, ) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index bdc084717..e46fb1eff 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -7,8 +7,8 @@ from schema_registry.client import SchemaRegistryClient from schema_registry.client.schema import AvroSchema +from kpops.cli.config import KpopsConfig from kpops.cli.exception import ClassNotFoundError -from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import find_class from kpops.component_handlers.schema_handler.schema_provider import ( Schema, @@ -23,7 +23,7 @@ class SchemaHandler: def __init__( self, - pipeline_config: PipelineConfig, + pipeline_config: KpopsConfig, components_module: str | None, ) -> None: self.schema_registry_client = 
SchemaRegistryClient( @@ -48,7 +48,7 @@ def schema_provider(self) -> SchemaProvider: @classmethod def load_schema_handler( - cls, components_module: str | None, config: PipelineConfig + cls, components_module: str | None, config: KpopsConfig ) -> SchemaHandler | None: if config.schema_registry.enabled: return cls( diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 4b71d3f2b..e4862d640 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -4,7 +4,7 @@ import httpx from pydantic import AnyHttpUrl -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers.topic.exception import ( KafkaRestProxyError, TopicNotFoundException, @@ -26,8 +26,8 @@ class ProxyWrapper: Wraps Kafka REST Proxy APIs """ - def __init__(self, pipeline_config: PipelineConfig) -> None: - self._url: AnyHttpUrl = pipeline_config.kafka_rest_url + def __init__(self, config: KpopsConfig) -> None: + self._url: AnyHttpUrl = config.kafka_rest.url @cached_property def cluster_id(self) -> str: diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 99dec42f2..9dff5c6df 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -9,7 +9,7 @@ import typer from pydantic import BaseModel, Field -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.utils import cached_classproperty from kpops.utils.dict_ops import update_nested @@ -45,7 +45,7 @@ class BaseDefaultsComponent(BaseModel): exclude=True, hidden_from_schema=True, ) - config: PipelineConfig = Field( + config: KpopsConfig = Field( default=..., description=describe_attr("config", __doc__), 
exclude=True, @@ -90,7 +90,7 @@ def extend_with_defaults(self, **kwargs) -> dict: :param kwargs: The init kwargs for pydantic :returns: Enriched kwargs with inheritted defaults """ - config: PipelineConfig = kwargs["config"] + config: KpopsConfig = kwargs["config"] log.debug( typer.style( "Enriching component of type ", fg=typer.colors.GREEN, bold=False @@ -177,7 +177,7 @@ def defaults_from_yaml(path: Path, key: str) -> dict: return value -def get_defaults_file_paths(config: PipelineConfig) -> tuple[Path, Path]: +def get_defaults_file_paths(config: KpopsConfig) -> tuple[Path, Path]: """Return the paths to the main and the environment defaults-files The files need not exist, this function will only check if the dir set in diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 093a452ea..5c3f5ef8c 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -12,7 +12,7 @@ from rich.console import Console from rich.syntax import Syntax -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.pipeline_component import PipelineComponent @@ -100,7 +100,7 @@ def __init__( component_list: list[dict], environment_components: list[dict], registry: Registry, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, ) -> None: self.components: PipelineComponents = PipelineComponents() @@ -117,7 +117,7 @@ def load_from_yaml( base_dir: Path, path: Path, registry: Registry, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, ) -> Pipeline: """Load pipeline definition from yaml @@ -310,7 +310,7 @@ def validate(self) -> None: self.components.validate_unique_names() @staticmethod - def pipeline_filename_environment(path: Path, config: PipelineConfig) -> Path: + def 
pipeline_filename_environment(path: Path, config: KpopsConfig) -> Path: """Add the environment name from the PipelineConfig to the pipeline.yaml path :param path: Path to pipeline.yaml file diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 470a1412d..b9ecf55d4 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -8,7 +8,7 @@ from pydantic.fields import FieldInfo, ModelField from pydantic.schema import SkipField -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.cli.registry import _find_classes from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.docstring import describe_object @@ -139,6 +139,6 @@ def gen_pipeline_schema( def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config""" schema = schema_json_of( - PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True + KpopsConfig, title="KPOps config schema", indent=4, sort_keys=True ) print(schema) diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index b320d7875..808c5c9c2 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -2,8 +2,8 @@ from pytest_mock import MockerFixture +from kpops.cli.config import KpopsConfig, SchemaRegistryConfig from kpops.cli.main import setup_handlers -from kpops.cli.pipeline_config import PipelineConfig, SchemaRegistryConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( KafkaConnectHandler, @@ -16,13 +16,13 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): - config = PipelineConfig( + config = KpopsConfig( defaults_path=Path("fake"), environment="development", schema_registry=SchemaRegistryConfig(), ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") - connector_handler = 
KafkaConnectHandler.from_pipeline_config(pipeline_config=config) + connector_handler = KafkaConnectHandler.from_pipeline_config(config=config) connector_handler_mock.from_pipeline_config.return_value = connector_handler topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler") @@ -50,7 +50,7 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): - config = PipelineConfig( + config = KpopsConfig( defaults_path=Path("fake"), environment="development", schema_registry=SchemaRegistryConfig(enabled=True), @@ -60,7 +60,7 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): schema_handler_mock.load_schema_handler.return_value = schema_handler connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") - connector_handler = KafkaConnectHandler.from_pipeline_config(pipeline_config=config) + connector_handler = KafkaConnectHandler.from_pipeline_config(config=config) connector_handler_mock.from_pipeline_config.return_value = connector_handler topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler") diff --git a/tests/cli/test_pipeline_config.py b/tests/cli/test_kpops_config.py similarity index 75% rename from tests/cli/test_pipeline_config.py rename to tests/cli/test_kpops_config.py index ade83e34d..3988cc17b 100644 --- a/tests/cli/test_pipeline_config.py +++ b/tests/cli/test_kpops_config.py @@ -3,11 +3,16 @@ import pytest from pydantic import AnyHttpUrl, ValidationError, parse_obj_as -from kpops.cli.pipeline_config import PipelineConfig, SchemaRegistryConfig +from kpops.cli.config import ( + KafkaConnectConfig, + KafkaRestConfig, + KpopsConfig, + SchemaRegistryConfig, +) def test_pipeline_config_with_default_values(): - default_config = PipelineConfig( + default_config = KpopsConfig( environment="development", brokers="http://broker:9092" ) @@ -23,8 +28,8 @@ def test_pipeline_config_with_default_values(): ) assert 
default_config.schema_registry.enabled is False assert default_config.schema_registry.url == "http://localhost:8081" - assert default_config.kafka_rest_url == "http://localhost:8082" - assert default_config.kafka_connect_url == "http://localhost:8083" + assert default_config.kafka_rest.url == "http://localhost:8082" + assert default_config.kafka_connect.url == "http://localhost:8083" assert default_config.timeout == 300 assert default_config.create_namespace is False assert default_config.helm_config.context is None @@ -36,21 +41,23 @@ def test_pipeline_config_with_default_values(): def test_pipeline_config_with_different_invalid_urls(): with pytest.raises(ValidationError): - PipelineConfig( + KpopsConfig( environment="development", brokers="http://broker:9092", - kafka_connect_url=parse_obj_as(AnyHttpUrl, "in-valid-host"), + kafka_connect=KafkaConnectConfig( + url=parse_obj_as(AnyHttpUrl, "in-valid-host") + ), ) with pytest.raises(ValidationError): - PipelineConfig( + KpopsConfig( environment="development", brokers="http://broker:9092", - kafka_rest_url=parse_obj_as(AnyHttpUrl, "in-valid-host"), + kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "in-valid-host")), ) with pytest.raises(ValidationError): - PipelineConfig( + KpopsConfig( environment="development", brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 7a07c1a12..87b47565d 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -2,7 +2,7 @@ import pytest -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.environment import ENV @@ -55,7 +55,7 @@ def test_should_not_set_pipeline_name_with_the_same_base_dir(): def test_pipeline_file_name_environment(): - config = PipelineConfig( + config = KpopsConfig( defaults_path=DEFAULTS_PATH, 
environment="some_environment", ) diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 3054c50ed..f7f9668f9 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -6,7 +6,7 @@ import pytest from pytest_httpx import HTTPXMock -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers.kafka_connect.connect_wrapper import ConnectWrapper from kpops.component_handlers.kafka_connect.exception import ( ConnectorNotFoundException, @@ -27,11 +27,11 @@ class TestConnectorApiWrapper: @pytest.fixture(autouse=True) def setup(self): - config = PipelineConfig( + config = KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", ) - self.connect_wrapper = ConnectWrapper(url=config.kafka_connect_url) + self.connect_wrapper = ConnectWrapper(url=config.kafka_connect.url) @pytest.fixture def connector_config(self) -> KafkaConnectorConfig: diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 8b2dda5cf..b00ecdaea 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -9,7 +9,7 @@ from schema_registry.client.schema import AvroSchema from schema_registry.client.utils import SchemaVersion -from kpops.cli.pipeline_config import PipelineConfig, SchemaRegistryConfig +from kpops.cli.config import KpopsConfig, SchemaRegistryConfig from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.schema_handler.schema_provider import SchemaProvider from kpops.components.base_components.models import TopicName @@ -70,8 +70,8 @@ def to_section(topic_config: TopicConfig) -> ToSection: @pytest.fixture() -def 
pipeline_config_with_sr_enabled() -> PipelineConfig: - return PipelineConfig( +def pipeline_config_with_sr_enabled() -> KpopsConfig: + return KpopsConfig( environment="development", brokers="broker:9092", schema_registry=SchemaRegistryConfig( @@ -81,7 +81,7 @@ def pipeline_config_with_sr_enabled() -> PipelineConfig: def test_load_schema_handler(): - config_enable = PipelineConfig( + config_enable = KpopsConfig( defaults_path=Path("fake"), environment="development", schema_registry=SchemaRegistryConfig(enabled=True), @@ -102,7 +102,7 @@ def test_load_schema_handler(): def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): - config_enable = PipelineConfig( + config_enable = KpopsConfig( defaults_path=Path("fake"), environment="development", schema_registry=SchemaRegistryConfig(enabled=True), @@ -124,7 +124,7 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): def test_should_raise_value_error_if_schema_provider_class_not_found( - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( pipeline_config=pipeline_config_with_sr_enabled, @@ -145,7 +145,7 @@ def test_should_raise_value_error_if_schema_provider_class_not_found( def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty(): - config_enable = PipelineConfig( + config_enable = KpopsConfig( defaults_path=Path("fake"), environment="development", schema_registry=SchemaRegistryConfig(enabled=True), @@ -175,7 +175,7 @@ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( pipeline_config=pipeline_config_with_sr_enabled, @@ -197,7 +197,7 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( to_section: 
ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( pipeline_config=pipeline_config_with_sr_enabled, @@ -220,7 +220,7 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl topic_config: TopicConfig, to_section: ToSection, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( @@ -262,7 +262,7 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve log_info_mock: MagicMock, log_debug_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( @@ -298,7 +298,7 @@ def test_should_submit_non_existing_schema_when_not_dry( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -327,7 +327,7 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( pipeline_config=pipeline_config_with_sr_enabled, @@ -348,7 +348,7 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( def test_should_delete_schemas_when_not_in_dry_run( to_section: ToSection, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: PipelineConfig, + pipeline_config_with_sr_enabled: KpopsConfig, ): 
schema_handler = SchemaHandler( pipeline_config=pipeline_config_with_sr_enabled, diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index d69de8e12..a569a53a7 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -7,7 +7,7 @@ from pytest_httpx import HTTPXMock from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers.topic.exception import ( KafkaRestProxyError, TopicNotFoundException, @@ -31,8 +31,8 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def setup(self, httpx_mock: HTTPXMock): - config = PipelineConfig(defaults_path=DEFAULTS_PATH, environment="development") - self.proxy_wrapper = ProxyWrapper(pipeline_config=config) + config = KpopsConfig(defaults_path=DEFAULTS_PATH, environment="development") + self.proxy_wrapper = ProxyWrapper(config=config) with open( DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json" diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 7b25e5f74..25cffc58e 100644 --- a/tests/components/test_base_defaults_component.py +++ b/tests/components/test_base_defaults_component.py @@ -3,7 +3,7 @@ import pytest -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, @@ -38,8 +38,8 @@ class EnvVarTest(BaseDefaultsComponent): @pytest.fixture -def config() -> PipelineConfig: - return PipelineConfig( +def config() -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", ) @@ -116,9 +116,7 @@ def test_load_defaults_with_environment( == defaults ) - def 
test_inherit_defaults( - self, config: PipelineConfig, handlers: ComponentHandlers - ): + def test_inherit_defaults(self, config: KpopsConfig, handlers: ComponentHandlers): component = Child(config=config, handlers=handlers) assert ( @@ -137,7 +135,7 @@ def test_inherit_defaults( component.hard_coded == "hard_coded_value" ), "Defaults in code should be kept for parents" - def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_inherit(self, config: KpopsConfig, handlers: ComponentHandlers): component = Child( config=config, handlers=handlers, @@ -161,7 +159,7 @@ def test_inherit(self, config: PipelineConfig, handlers: ComponentHandlers): ), "Defaults in code should be kept for parents" def test_multiple_generations( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): component = GrandChild(config=config, handlers=handlers) @@ -183,7 +181,7 @@ def test_multiple_generations( assert component.grand_child == "grand-child-value" def test_env_var_substitution( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): ENV["pipeline_name"] = str(DEFAULTS_PATH) component = EnvVarTest(config=config, handlers=handlers) diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index c6527c00c..b9b746d3b 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -4,7 +4,7 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -18,8 +18,8 @@ class TestKafkaApp: @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, 
environment="development", helm_diff_config=HelmDiffConfig(), @@ -33,7 +33,7 @@ def handlers(self) -> ComponentHandlers: topic_handler=MagicMock(), ) - def test_default_configs(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_default_configs(self, config: KpopsConfig, handlers: ComponentHandlers): kafka_app = KafkaApp( name="example-name", config=config, @@ -59,7 +59,7 @@ def test_default_configs(self, config: PipelineConfig, handlers: ComponentHandle def test_should_deploy_kafka_app( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, mocker: MockerFixture, ): diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 4e8424e5c..a533010d7 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -4,7 +4,7 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig +from kpops.cli.config import KpopsConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig @@ -21,8 +21,8 @@ class TestKafkaConnector: @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", topic_name_config=TopicNameConfig( @@ -64,7 +64,7 @@ def connector_config(self) -> KafkaConnectorConfig: def test_connector_config_name_override( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 5de354739..cd0d5d2cc 100644 --- a/tests/components/test_kafka_sink_connector.py +++ 
b/tests/components/test_kafka_sink_connector.py @@ -3,7 +3,7 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmUpgradeInstallFlags, @@ -41,7 +41,7 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture def connector( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ) -> KafkaSinkConnector: @@ -62,7 +62,7 @@ def connector( def test_connector_config_parsing( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -92,7 +92,7 @@ def test_connector_config_parsing( def test_from_section_parsing_input_topic( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -119,7 +119,7 @@ def test_from_section_parsing_input_topic( def test_from_section_parsing_input_pattern( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -253,7 +253,7 @@ def test_clean_when_dry_run_is_true( def test_clean_when_dry_run_is_false( self, connector: KafkaSinkConnector, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, log_info_mock: MagicMock, @@ -347,7 +347,7 @@ def test_clean_when_dry_run_is_false( def test_clean_without_to_when_dry_run_is_true( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, dry_run_handler: MagicMock, connector_config: KafkaConnectorConfig, @@ -366,7 +366,7 @@ def test_clean_without_to_when_dry_run_is_true( def test_clean_without_to_when_dry_run_is_false( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: 
MagicMock, dry_run_handler: MagicMock, diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index 72c487e74..2bb1215fa 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -3,7 +3,7 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmUpgradeInstallFlags, @@ -34,7 +34,7 @@ class TestKafkaSourceConnector(TestKafkaConnector): @pytest.fixture def connector( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ) -> KafkaSourceConnector: @@ -56,7 +56,7 @@ def connector( def test_from_section_raises_exception( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, connector_config: KafkaConnectorConfig, ): @@ -263,7 +263,7 @@ def test_clean_when_dry_run_is_false( def test_clean_without_to_when_dry_run_is_false( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, dry_run_handler: MagicMock, @@ -339,7 +339,7 @@ def test_clean_without_to_when_dry_run_is_false( def test_clean_without_to_when_dry_run_is_true( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, dry_run_handler: MagicMock, connector_config: KafkaConnectorConfig, diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 46eb9795d..458e01e22 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -5,7 +5,7 @@ from pytest_mock import MockerFixture from typing_extensions import override -from kpops.cli.pipeline_config import PipelineConfig +from kpops.cli.config import KpopsConfig from kpops.component_handlers 
import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -28,8 +28,8 @@ class KubernetesTestValue(KubernetesAppConfig): class TestKubernetesApp: @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", helm_diff_config=HelmDiffConfig(), @@ -64,7 +64,7 @@ def repo_config(self) -> HelmRepoConfig: @pytest.fixture def kubernetes_app( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, app_value: KubernetesTestValue, repo_config: HelmRepoConfig, @@ -106,7 +106,7 @@ def test_should_lazy_load_helm_wrapper_and_not_repo_add( def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, mocker: MockerFixture, @@ -152,7 +152,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( def test_should_deploy_app_with_local_helm_chart( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, helm_mock: MagicMock, app_value: KubernetesTestValue, @@ -218,7 +218,7 @@ def test_should_call_helm_uninstall_when_destroying_kubernetes_app( def test_should_raise_value_error_when_name_is_not_valid( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, app_value: KubernetesTestValue, repo_config: HelmRepoConfig, diff --git a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index 56d52a68b..b8927c313 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -5,7 +5,7 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig +from kpops.cli.config import KpopsConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from 
kpops.component_handlers.helm_wrapper.model import HelmUpgradeInstallFlags from kpops.components import ProducerApp @@ -30,8 +30,8 @@ def handlers(self) -> ComponentHandlers: ) @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", topic_name_config=TopicNameConfig( @@ -42,7 +42,7 @@ def config(self) -> PipelineConfig: @pytest.fixture def producer_app( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ) -> ProducerApp: return ProducerApp( name=self.PRODUCER_APP_NAME, @@ -65,7 +65,7 @@ def producer_app( }, ) - def test_output_topics(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_output_topics(self, config: KpopsConfig, handlers: ComponentHandlers): producer_app = ProducerApp( name=self.PRODUCER_APP_NAME, config=config, diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index dce2c7e96..14ecdda9a 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -4,7 +4,7 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig +from kpops.cli.config import KpopsConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -34,8 +34,8 @@ def handlers(self) -> ComponentHandlers: ) @pytest.fixture - def config(self) -> PipelineConfig: - return PipelineConfig( + def config(self) -> KpopsConfig: + return KpopsConfig( defaults_path=DEFAULTS_PATH, environment="development", topic_name_config=TopicNameConfig( @@ -47,7 +47,7 @@ def config(self) -> PipelineConfig: @pytest.fixture def streams_app( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ) -> 
StreamsApp: return StreamsApp( name=self.STREAMS_APP_NAME, @@ -68,7 +68,7 @@ def streams_app( }, ) - def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_set_topics(self, config: KpopsConfig, handlers: ComponentHandlers): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, config=config, @@ -113,7 +113,7 @@ def test_set_topics(self, config: PipelineConfig, handlers: ComponentHandlers): assert "extraInputPatterns" in streams_config def test_no_empty_input_topic( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -143,7 +143,7 @@ def test_no_empty_input_topic( assert "inputPattern" in streams_config assert "extraInputPatterns" not in streams_config - def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandlers): + def test_should_validate(self, config: KpopsConfig, handlers: ComponentHandlers): # An exception should be raised when both role and type are defined and type is input with pytest.raises(ValueError): StreamsApp( @@ -189,7 +189,7 @@ def test_should_validate(self, config: PipelineConfig, handlers: ComponentHandle ) def test_set_streams_output_from_to( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -228,7 +228,7 @@ def test_set_streams_output_from_to( assert streams_app.app.streams.error_topic == "${error_topic_name}" def test_weave_inputs_from_prev_component( - self, config: PipelineConfig, handlers: ComponentHandlers + self, config: KpopsConfig, handlers: ComponentHandlers ): streams_app = StreamsApp( name=self.STREAMS_APP_NAME, @@ -265,7 +265,7 @@ def test_weave_inputs_from_prev_component( def test_deploy_order_when_dry_run_is_false( self, - config: PipelineConfig, + config: KpopsConfig, handlers: ComponentHandlers, mocker: MockerFixture, ): 
diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index 8188a32c9..ebf474db5 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ b/tests/pipeline/resources/custom-config/config.yaml @@ -4,8 +4,10 @@ topic_name_config: default_error_topic_name: "${component_name}-dead-letter-topic" default_output_topic_name: "${component_name}-test-topic" brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" -kafka_connect_url: "http://localhost:8083" -kafka_rest_url: "http://localhost:8082" +kafka_connect: + url: "http://localhost:8083" +kafka_rest: + url: "http://localhost:8082" schema_registry: enabled: true url: "http://localhost:8081" diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml index 630b046f6..451cb95d4 100644 --- a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml +++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml @@ -6,5 +6,7 @@ topic_name_config: default_output_topic_name: ${component_type}-output-topic helm_diff_config: enable: false -kafka_connect_url: "http://kafka_connect_url:8083" -kafka_rest_url: "http://kafka_rest_url:8082" +kafka_connect: + url: "http://kafka_connect_url:8083" +kafka_rest: + url: "http://kafka_rest_url:8082" diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index af9cde479..079a2c2e4 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -462,9 +462,8 @@ def test_default_config(self, snapshot: SnapshotTest): def test_env_vars_precedence_over_config( self, monkeypatch: MonkeyPatch, - snapshot: SnapshotTest, ): - monkeypatch.setenv(name="KPOPS_KAFKA_BROKERS", value="env_broker") + monkeypatch.setenv(name="KPOPS_BROKERS", value="env_broker") result = runner.invoke( app, From 77ec605f69690f7fcb92cf0dae0fd8e203ceb3a2 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: 
Thu, 7 Sep 2023 14:16:39 +0200 Subject: [PATCH 10/20] add reviews --- docs/docs/schema/config.json | 8 ++++---- docs/docs/user/migration-guide/v2-v3.md | 2 +- kpops/cli/config.py | 4 ++-- .../kafka_connect/connect_wrapper.py | 10 ++++++++-- .../kafka_connect/kafka_connect_handler.py | 2 +- .../kafka_connect/test_connect_wrapper.py | 2 +- 6 files changed, 17 insertions(+), 11 deletions(-) diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 038a65d1d..d1199ade0 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -48,9 +48,9 @@ "url": { "default": "http://localhost:8083", "description": "Address of Kafka Connect.", - "env": "KPOPS__KAFKA_CONNECT_URL", + "env": "KPOPS_KAFKA_CONNECT_URL", "env_names": [ - "kpops__kafka_connect_url" + "kpops_kafka_connect_url" ], "format": "uri", "maxLength": 65536, @@ -69,9 +69,9 @@ "url": { "default": "http://localhost:8082", "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS__KAFKA_REST_URL", + "env": "KPOPS_KAFKA_REST_URL", "env_names": [ - "kpops__kafka_rest_url" + "kpops_kafka_rest_url" ], "format": "uri", "maxLength": 65536, diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index e67e8484a..f99eae86b 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -22,7 +22,7 @@ Your `config.yaml` will change to: - schema_registry_url: "http://my-custom-sr.url:8081" + kafka_rest: + url: "http://my-custom-rest.url:8082" -+ Kafka_connect: ++ kafka_connect: + url: "http://my-custom-connect.url:8083" + schema_registry: + enabled: true diff --git a/kpops/cli/config.py b/kpops/cli/config.py index 88b90684e..d48477959 100644 --- a/kpops/cli/config.py +++ b/kpops/cli/config.py @@ -50,7 +50,7 @@ class KafkaRestConfig(BaseSettings): # For validating URLs use parse_obj_as # https://github.com/pydantic/pydantic/issues/1106 default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), - 
env=f"{ENV_PREFIX}_KAFKA_REST_URL", + env=f"{ENV_PREFIX}KAFKA_REST_URL", description="Address of the Kafka REST Proxy.", ) @@ -60,7 +60,7 @@ class KafkaConnectConfig(BaseSettings): url: AnyHttpUrl = Field( default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), - env=f"{ENV_PREFIX}_KAFKA_CONNECT_URL", + env=f"{ENV_PREFIX}KAFKA_CONNECT_URL", description="Address of Kafka Connect.", ) diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 07928c7c8..6ec62bf6e 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import logging import time from time import sleep +from typing import TYPE_CHECKING import httpx from pydantic import AnyHttpUrl @@ -15,6 +18,9 @@ KafkaConnectResponse, ) +if TYPE_CHECKING: + from kpops.cli.config import KpopsConfig + HEADERS = {"Accept": "application/json", "Content-Type": "application/json"} log = logging.getLogger("KafkaConnectAPI") @@ -25,8 +31,8 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, url: AnyHttpUrl) -> None: - self._url: AnyHttpUrl = url + def __init__(self, config: KpopsConfig) -> None: + self._url: AnyHttpUrl = config.kafka_connect.url @property def url(self) -> AnyHttpUrl: diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 910fd7eeb..4bb58b5c2 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -138,6 +138,6 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: @classmethod def from_pipeline_config(cls, config: KpopsConfig) -> Self: return cls( - connect_wrapper=ConnectWrapper(url=config.kafka_connect.url), + connect_wrapper=ConnectWrapper(config=config), 
timeout=config.timeout, ) diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index f7f9668f9..9b0198392 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -31,7 +31,7 @@ def setup(self): defaults_path=DEFAULTS_PATH, environment="development", ) - self.connect_wrapper = ConnectWrapper(url=config.kafka_connect.url) + self.connect_wrapper = ConnectWrapper(config=config) @pytest.fixture def connector_config(self) -> KafkaConnectorConfig: From 6c3851613946beedfe898a43cd6545fe8311fc29 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Fri, 8 Sep 2023 09:32:15 +0200 Subject: [PATCH 11/20] change config --- kpops/cli/main.py | 2 +- .../kafka_connect/connect_wrapper.py | 6 +++--- .../kafka_connect/kafka_connect_handler.py | 2 +- kpops/component_handlers/topic/proxy_wrapper.py | 11 ++++++++--- .../kafka_connect/test_connect_wrapper.py | 2 +- tests/component_handlers/topic/test_proxy_wrapper.py | 2 +- 6 files changed, 15 insertions(+), 10 deletions(-) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index a37947612..d252c12a4 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -129,7 +129,7 @@ def setup_handlers( ) -> ComponentHandlers: schema_handler = SchemaHandler.load_schema_handler(components_module, config) connector_handler = KafkaConnectHandler.from_pipeline_config(config) - proxy_wrapper = ProxyWrapper(config) + proxy_wrapper = ProxyWrapper(config.kafka_rest) topic_handler = TopicHandler(proxy_wrapper) return ComponentHandlers(schema_handler, connector_handler, topic_handler) diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 6ec62bf6e..b5a392862 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -19,7 
+19,7 @@ ) if TYPE_CHECKING: - from kpops.cli.config import KpopsConfig + from kpops.cli.config import KafkaConnectConfig HEADERS = {"Accept": "application/json", "Content-Type": "application/json"} @@ -31,8 +31,8 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, config: KpopsConfig) -> None: - self._url: AnyHttpUrl = config.kafka_connect.url + def __init__(self, kafka_connect_config: KafkaConnectConfig) -> None: + self._url: AnyHttpUrl = kafka_connect_config.url @property def url(self) -> AnyHttpUrl: diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 4bb58b5c2..6eb27c891 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -138,6 +138,6 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: @classmethod def from_pipeline_config(cls, config: KpopsConfig) -> Self: return cls( - connect_wrapper=ConnectWrapper(config=config), + connect_wrapper=ConnectWrapper(kafka_connect_config=config.kafka_connect), timeout=config.timeout, ) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index e4862d640..b7684c765 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -1,10 +1,12 @@ +from __future__ import annotations + import logging from functools import cached_property +from typing import TYPE_CHECKING import httpx from pydantic import AnyHttpUrl -from kpops.cli.config import KpopsConfig from kpops.component_handlers.topic.exception import ( KafkaRestProxyError, TopicNotFoundException, @@ -16,6 +18,9 @@ TopicSpec, ) +if TYPE_CHECKING: + from kpops.cli.config import KafkaRestConfig + log = logging.getLogger("KafkaRestProxy") HEADERS = {"Content-Type": "application/json"} @@ -26,8 +31,8 @@ class ProxyWrapper: Wraps Kafka 
REST Proxy APIs """ - def __init__(self, config: KpopsConfig) -> None: - self._url: AnyHttpUrl = config.kafka_rest.url + def __init__(self, kafka_rest_config: KafkaRestConfig) -> None: + self._url: AnyHttpUrl = kafka_rest_config.url @cached_property def cluster_id(self) -> str: diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 9b0198392..0a92bf3f3 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -31,7 +31,7 @@ def setup(self): defaults_path=DEFAULTS_PATH, environment="development", ) - self.connect_wrapper = ConnectWrapper(config=config) + self.connect_wrapper = ConnectWrapper(kafka_connect_config=config.kafka_connect) @pytest.fixture def connector_config(self) -> KafkaConnectorConfig: diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index a569a53a7..3f034d8c2 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -32,7 +32,7 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def setup(self, httpx_mock: HTTPXMock): config = KpopsConfig(defaults_path=DEFAULTS_PATH, environment="development") - self.proxy_wrapper = ProxyWrapper(config=config) + self.proxy_wrapper = ProxyWrapper(kafka_rest_config=config.kafka_rest) with open( DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json" From cd3019807bc14267f9fcef5cfa0fd42be622dae6 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Mon, 11 Sep 2023 10:01:36 +0200 Subject: [PATCH 12/20] change wrapper fields --- .../kafka_connect/connect_wrapper.py | 14 +++++++------- kpops/component_handlers/topic/proxy_wrapper.py | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git 
a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index b5a392862..4120bf9e9 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -32,11 +32,11 @@ class ConnectWrapper: """ def __init__(self, kafka_connect_config: KafkaConnectConfig) -> None: - self._url: AnyHttpUrl = kafka_connect_config.url + self._config: KafkaConnectConfig = kafka_connect_config @property def url(self) -> AnyHttpUrl: - return self._url + return self._config.url def create_connector( self, connector_config: KafkaConnectorConfig @@ -50,7 +50,7 @@ def create_connector( config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self._url}/connectors", headers=HEADERS, json=connect_data + url=f"{self._config.url}/connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -72,7 +72,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: :return: Information about the connector """ response = httpx.get( - url=f"{self._url}/connectors/{connector_name}", headers=HEADERS + url=f"{self._config.url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -100,7 +100,7 @@ def update_connector_config( connector_name = connector_config.name config_json = connector_config.dict() response = httpx.put( - url=f"{self._url}/connectors/{connector_name}/config", + url=f"{self._config.url}/connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) @@ -130,7 +130,7 @@ def validate_connector_config( :return: """ response = httpx.put( - url=f"{self._url}/connector-plugins/{connector_config.class_name}/config/validate", + 
url=f"{self._config.url}/connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, json=connector_config.dict(), ) @@ -157,7 +157,7 @@ def delete_connector(self, connector_name: str) -> None: API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)- """ response = httpx.delete( - url=f"{self._url}/connectors/{connector_name}", headers=HEADERS + url=f"{self._config.url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index b7684c765..0d1d852c7 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -32,7 +32,7 @@ class ProxyWrapper: """ def __init__(self, kafka_rest_config: KafkaRestConfig) -> None: - self._url: AnyHttpUrl = kafka_rest_config.url + self._config: KafkaRestConfig = kafka_rest_config @cached_property def cluster_id(self) -> str: @@ -45,7 +45,7 @@ def cluster_id(self) -> str: bootstrap.servers configuration. Therefore, only one Kafka cluster will be returned. :return: The Kafka cluster ID. """ - response = httpx.get(url=f"{self._url}/v3/clusters") + response = httpx.get(url=f"{self._config.url}/v3/clusters") if response.status_code == httpx.codes.OK: cluster_information = response.json() return cluster_information["data"][0]["cluster_id"] @@ -54,7 +54,7 @@ def cluster_id(self) -> str: @property def url(self) -> AnyHttpUrl: - return self._url + return self._config.url def create_topic(self, topic_spec: TopicSpec) -> None: """ @@ -63,7 +63,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :param topic_spec: The topic specification. 
""" response = httpx.post( - url=f"{self._url}/v3/clusters/{self.cluster_id}/topics", + url=f"{self._config.url}/v3/clusters/{self.cluster_id}/topics", headers=HEADERS, json=topic_spec.dict(exclude_none=True), ) From 160b58e2b58e5519e0466d7b77eecd4c304f8416 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Mon, 11 Sep 2023 10:26:50 +0200 Subject: [PATCH 13/20] change brokers to kafka_brokers --- config.yaml | 2 +- .../pipeline-components/kafka-app.yaml | 2 +- .../pipeline-components/pipeline.yaml | 6 ++--- .../pipeline-components/producer-app.yaml | 2 +- .../sections/app-kafka-app.yaml | 2 +- .../sections/app-producer-app.yaml | 2 +- .../sections/app-streams-app.yaml | 2 +- .../pipeline-components/streams-app.yaml | 2 +- .../pipeline-defaults/defaults-kafka-app.yaml | 2 +- .../defaults-producer-app.yaml | 2 +- .../defaults-streams-app.yaml | 2 +- .../resources/pipeline-defaults/defaults.yaml | 6 ++--- .../resources/variables/config_env_vars.env | 4 +-- .../resources/variables/config_env_vars.md | 2 +- docs/docs/schema/config.json | 22 ++++++++-------- docs/docs/user/migration-guide/v2-v3.md | 25 ++++++++++++++++--- .../bakdata/atm-fraud-detection/config.yaml | 2 +- .../bakdata/atm-fraud-detection/defaults.yaml | 2 +- kpops/cli/config.py | 4 +-- .../base_components/kafka_connector.py | 2 +- tests/cli/test_handlers.py | 2 ++ tests/cli/test_kpops_config.py | 8 +++--- .../schema_handler/test_schema_handler.py | 2 +- tests/components/test_kafka_connector.py | 2 +- .../resources/custom-config/config.yaml | 2 +- tests/pipeline/resources/defaults.yaml | 2 +- .../kafka-connect-sink-config/config.yaml | 2 +- .../no-topics-defaults/defaults.yaml | 2 +- .../defaults.yaml | 2 +- .../pipeline-with-env-defaults/defaults.yaml | 2 +- .../pipeline-with-short-topics/defaults.yaml | 2 +- tests/pipeline/test_pipeline.py | 2 +- 32 files changed, 73 insertions(+), 52 deletions(-) diff --git a/config.yaml b/config.yaml index ba8b885cb..8fe8bb213 100644 --- a/config.yaml +++ b/config.yaml 
@@ -1,2 +1,2 @@ environment: development -brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" diff --git a/docs/docs/resources/pipeline-components/kafka-app.yaml b/docs/docs/resources/pipeline-components/kafka-app.yaml index 6d8045ad5..cdc49ef28 100644 --- a/docs/docs/resources/pipeline-components/kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/kafka-app.yaml @@ -49,7 +49,7 @@ # add the key-value pairs they need. app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-components/pipeline.yaml b/docs/docs/resources/pipeline-components/pipeline.yaml index 27c5d45c1..eb7930376 100644 --- a/docs/docs/resources/pipeline-components/pipeline.yaml +++ b/docs/docs/resources/pipeline-components/pipeline.yaml @@ -49,7 +49,7 @@ # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app @@ -275,7 +275,7 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: @@ -346,7 +346,7 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/pipeline-components/producer-app.yaml b/docs/docs/resources/pipeline-components/producer-app.yaml index 7a01ad24b..5be3551d8 100644 --- a/docs/docs/resources/pipeline-components/producer-app.yaml +++ b/docs/docs/resources/pipeline-components/producer-app.yaml @@ -32,7 +32,7 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: diff --git a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml index 991e862e0..73b70c59e 100644 --- a/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-kafka-app.yaml @@ -2,7 +2,7 @@ # add the key-value pairs they need. 
app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml index 5cd9b000b..0cbe04ded 100644 --- a/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-producer-app.yaml @@ -2,7 +2,7 @@ # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: diff --git a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml index 44f6604aa..1c5f0849f 100644 --- a/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml +++ b/docs/docs/resources/pipeline-components/sections/app-streams-app.yaml @@ -4,7 +4,7 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/pipeline-components/streams-app.yaml b/docs/docs/resources/pipeline-components/streams-app.yaml index 0dde5be5c..f77edf80c 100644 --- a/docs/docs/resources/pipeline-components/streams-app.yaml +++ b/docs/docs/resources/pipeline-components/streams-app.yaml @@ -51,7 +51,7 @@ app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: 
${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml index e0af3b7a7..bd6c9e2d9 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-kafka-app.yaml @@ -7,7 +7,7 @@ kafka-app: # add the key-value pairs they need. app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app diff --git a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml index 1d81f5ced..bfa5521c4 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-producer-app.yaml @@ -10,7 +10,7 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: diff --git a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml index 83ff13f14..ae1adab98 100644 --- a/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults-streams-app.yaml @@ -9,7 +9,7 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git 
a/docs/docs/resources/pipeline-defaults/defaults.yaml b/docs/docs/resources/pipeline-defaults/defaults.yaml index e74272bdc..3a43d81e7 100644 --- a/docs/docs/resources/pipeline-defaults/defaults.yaml +++ b/docs/docs/resources/pipeline-defaults/defaults.yaml @@ -7,7 +7,7 @@ kafka-app: # add the key-value pairs they need. app: # required streams: # required - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} nameOverride: override-with-this-name # kafka-app-specific imageTag: "1.0.0" # Example values that are shared between streams-app and producer-app @@ -170,7 +170,7 @@ producer-app: # https://github.com/bakdata/streams-bootstrap/tree/master/charts/producer-app app: # required streams: # required, producer-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} outputTopic: output_topic extraOutputTopics: @@ -188,7 +188,7 @@ streams-app: app: # required # Streams Bootstrap streams section streams: # required, streams-app-specific - brokers: ${brokers} # required + brokers: ${kafka_brokers} # required schemaRegistryUrl: ${schema_registry_url} inputTopics: - topic1 diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index bdb17fffe..77b2cbf73 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -9,9 +9,9 @@ # Suffix your environment files with this value (e.g. # defaults_development.yaml for environment=development). KPOPS_ENVIRONMENT # No default value, required -# brokers +# kafka_brokers # The comma separated Kafka brokers address. -KPOPS_BROKERS # No default value, required +KPOPS_KAFKA_BROKERS # No default value, required # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. 
diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index c9d1e1343..b8473d7d7 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -4,6 +4,6 @@ These variables are a lower priority alternative to the settings in `config.yaml | Name |Default Value|Required| Description | Setting name | |-----------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| |KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_BROKERS | |True |The comma separated Kafka brokers address. |brokers | +|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | |KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | |KPOPS_RETAIN_CLEAN_JOBS|False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. |retain_clean_jobs| diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index d1199ade0..dce427c6e 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -87,16 +87,6 @@ "additionalProperties": false, "description": "Pipeline configuration unrelated to the components.", "properties": { - "brokers": { - "description": "The comma separated Kafka brokers address.", - "env": "KPOPS_BROKERS", - "env_names": [ - "kpops_brokers" - ], - "example": "broker1:9092,broker2:9092,broker3:9092", - "title": "Brokers", - "type": "string" - }, "create_namespace": { "default": false, "description": "Flag for `helm upgrade --install`. 
Create the release namespace if not present.", @@ -168,6 +158,16 @@ ], "title": "Helm Diff Config" }, + "kafka_brokers": { + "description": "The comma separated Kafka brokers address.", + "env": "KPOPS_KAFKA_BROKERS", + "env_names": [ + "kpops_kafka_brokers" + ], + "example": "broker1:9092,broker2:9092,broker3:9092", + "title": "Kafka Brokers", + "type": "string" + }, "kafka_connect": { "allOf": [ { @@ -253,7 +253,7 @@ }, "required": [ "environment", - "brokers" + "kafka_brokers" ], "title": "KpopsConfig", "type": "object" diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index f99eae86b..e83610346 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -10,16 +10,19 @@ The breaking changes target the `config.yaml` file: - `kafka_connect_host` is replaced with `kafka_connect.url` (default `http://localhost:8083`). -The environment variable name of these config fields changed respectively. The environment variable `KPOPS_KAFKA_BROKERS` changed to `KPOPS_BROKERS`. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md) to see the newest changes. +- `brokers` is renamed to `kafka_brokers`. -Your `config.yaml` will change to: +The environment variable name of these config fields changed respectively. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md) to see the newest changes. 
+ +#### config.yaml ```diff environment: development - brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +- brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" - kafka_rest_host: "http://my-custom-rest.url:8082" - kafka_connect_host: "http://my-custom-connect.url:8083" - schema_registry_url: "http://my-custom-sr.url:8081" ++ kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" + kafka_rest: + url: "http://my-custom-rest.url:8082" + kafka_connect: @@ -28,3 +31,19 @@ Your `config.yaml` will change to: + enabled: true + url: "http://my-custom-sr.url:8081" ``` + +#### pipeline.yaml and default.yaml + +The variable is now called `kafka_brokers`. + +```diff +... + app: + streams: +- brokers: ${brokers} ++ brokers: ${kafka_brokers} + schemaRegistryUrl: ${schema_registry_url} + nameOverride: override-with-this-name + imageTag: "1.0.0" +... +``` diff --git a/examples/bakdata/atm-fraud-detection/config.yaml b/examples/bakdata/atm-fraud-detection/config.yaml index a31ab2d0a..d03a12c64 100644 --- a/examples/bakdata/atm-fraud-detection/config.yaml +++ b/examples/bakdata/atm-fraud-detection/config.yaml @@ -4,7 +4,7 @@ topic_name_config: default_error_topic_name: "${pipeline_name}-${component_name}-dead-letter-topic" default_output_topic_name: "${pipeline_name}-${component_name}-topic" -brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" schema_registry: enabled: true diff --git a/examples/bakdata/atm-fraud-detection/defaults.yaml b/examples/bakdata/atm-fraud-detection/defaults.yaml index 609933f13..e3ba49c67 100644 --- a/examples/bakdata/atm-fraud-detection/defaults.yaml +++ b/examples/bakdata/atm-fraud-detection/defaults.yaml @@ -10,7 +10,7 @@ kafka-connector: kafka-app: app: streams: - brokers: ${brokers} + brokers: ${kafka_brokers} schemaRegistryUrl: ${schema_registry_url} optimizeLeaveGroupBehavior: false 
diff --git a/kpops/cli/config.py b/kpops/cli/config.py index d48477959..1a9a42091 100644 --- a/kpops/cli/config.py +++ b/kpops/cli/config.py @@ -81,9 +81,9 @@ class KpopsConfig(BaseSettings): description="The environment you want to generate and deploy the pipeline to. " "Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", ) - brokers: str = Field( + kafka_brokers: str = Field( default=..., - env=f"{ENV_PREFIX}BROKERS", + env=f"{ENV_PREFIX}KAFKA_BROKERS", description="The comma separated Kafka brokers address.", example="broker1:9092,broker2:9092,broker3:9092", ) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index c97cc987f..e5cab891f 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -266,7 +266,7 @@ def _get_kafka_connect_resetter_values( **KafkaConnectResetterValues( config=KafkaConnectResetterConfig( connector=connector_name, - brokers=self.config.brokers, + brokers=self.config.kafka_brokers, **kwargs, ), connector_type=connector_type.value, diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index 808c5c9c2..9e747d65c 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -20,6 +20,7 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): defaults_path=Path("fake"), environment="development", schema_registry=SchemaRegistryConfig(), + kafka_brokers="broker:9092", ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") connector_handler = KafkaConnectHandler.from_pipeline_config(config=config) @@ -54,6 +55,7 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): defaults_path=Path("fake"), environment="development", schema_registry=SchemaRegistryConfig(enabled=True), + kafka_brokers="broker:9092", ) schema_handler_mock = mocker.patch("kpops.cli.main.SchemaHandler") 
schema_handler = SchemaHandler.load_schema_handler(MODULE, config) diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index 3988cc17b..a93e50fcd 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -13,7 +13,7 @@ def test_pipeline_config_with_default_values(): default_config = KpopsConfig( - environment="development", brokers="http://broker:9092" + environment="development", kafka_brokers="http://broker:9092" ) assert default_config.defaults_path == Path(".") @@ -43,7 +43,7 @@ def test_pipeline_config_with_different_invalid_urls(): with pytest.raises(ValidationError): KpopsConfig( environment="development", - brokers="http://broker:9092", + kafka_brokers="http://broker:9092", kafka_connect=KafkaConnectConfig( url=parse_obj_as(AnyHttpUrl, "in-valid-host") ), @@ -52,14 +52,14 @@ def test_pipeline_config_with_different_invalid_urls(): with pytest.raises(ValidationError): KpopsConfig( environment="development", - brokers="http://broker:9092", + kafka_brokers="http://broker:9092", kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "in-valid-host")), ) with pytest.raises(ValidationError): KpopsConfig( environment="development", - brokers="http://broker:9092", + kafka_brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, url=parse_obj_as(AnyHttpUrl, "in-valid-host"), diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index b00ecdaea..1bc7f9e3c 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -73,7 +73,7 @@ def to_section(topic_config: TopicConfig) -> ToSection: def pipeline_config_with_sr_enabled() -> KpopsConfig: return KpopsConfig( environment="development", - brokers="broker:9092", + kafka_brokers="broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, url=parse_obj_as(AnyHttpUrl, 
"http://mock:8081") ), diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index a533010d7..c2e3541eb 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -29,7 +29,7 @@ def config(self) -> KpopsConfig: default_error_topic_name="${component_type}-error-topic", default_output_topic_name="${component_type}-output-topic", ), - brokers="broker:9092", + kafka_brokers="broker:9092", helm_diff_config=HelmDiffConfig(), ) diff --git a/tests/pipeline/resources/custom-config/config.yaml b/tests/pipeline/resources/custom-config/config.yaml index ebf474db5..8a9ca81c3 100644 --- a/tests/pipeline/resources/custom-config/config.yaml +++ b/tests/pipeline/resources/custom-config/config.yaml @@ -3,7 +3,7 @@ defaults_path: ../no-topics-defaults topic_name_config: default_error_topic_name: "${component_name}-dead-letter-topic" default_output_topic_name: "${component_name}-test-topic" -brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" kafka_connect: url: "http://localhost:8083" kafka_rest: diff --git a/tests/pipeline/resources/defaults.yaml b/tests/pipeline/resources/defaults.yaml index c4e2aa259..e1223203b 100644 --- a/tests/pipeline/resources/defaults.yaml +++ b/tests/pipeline/resources/defaults.yaml @@ -5,7 +5,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${brokers}" + brokers: "${kafka_brokers}" schema_registry_url: "${schema_registry_url}" version: "2.4.2" diff --git a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml index 451cb95d4..14c488c5f 100644 --- a/tests/pipeline/resources/kafka-connect-sink-config/config.yaml +++ b/tests/pipeline/resources/kafka-connect-sink-config/config.yaml @@ -1,6 +1,6 @@ environment: development defaults_path: .. 
-brokers: "broker:9092" +kafka_brokers: "broker:9092" topic_name_config: default_error_topic_name: ${component_type}-error-topic default_output_topic_name: ${component_type}-output-topic diff --git a/tests/pipeline/resources/no-topics-defaults/defaults.yaml b/tests/pipeline/resources/no-topics-defaults/defaults.yaml index 47de626e6..87d21d47d 100644 --- a/tests/pipeline/resources/no-topics-defaults/defaults.yaml +++ b/tests/pipeline/resources/no-topics-defaults/defaults.yaml @@ -1,7 +1,7 @@ kafka-app: app: streams: - brokers: "${brokers}" + brokers: "${kafka_brokers}" schemaRegistryUrl: "${schema_registry_url}" producer-app: diff --git a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml index dfbe23db9..c67f869d9 100644 --- a/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml +++ b/tests/pipeline/resources/pipeline-component-should-have-prefix/defaults.yaml @@ -7,5 +7,5 @@ kubernetes-app: kafka-app: app: streams: - brokers: ${brokers} + brokers: ${kafka_brokers} schemaRegistryUrl: ${schema_registry_url} diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml index 2564e0012..77d666b1e 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml @@ -4,7 +4,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${brokers}" + brokers: "${kafka_brokers}" schemaRegistryUrl: "${schema_registry_url}" producer-app: {} # inherits from kafka-app diff --git a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml index 00b3b2673..3b9e93eb7 100644 --- a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml 
@@ -5,7 +5,7 @@ kubernetes-app: kafka-app: app: streams: - brokers: "${broker}" + brokers: "${kafka_brokers}" schema_registry_url: "${schema_registry_url}" version: "2.4.2" diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 079a2c2e4..42992bbb7 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -463,7 +463,7 @@ def test_env_vars_precedence_over_config( self, monkeypatch: MonkeyPatch, ): - monkeypatch.setenv(name="KPOPS_BROKERS", value="env_broker") + monkeypatch.setenv(name="KPOPS_KAFKA_BROKERS", value="env_broker") result = runner.invoke( app, From 236639a945c0f28ffcf9bbc602d9a8596ebf57f5 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Mon, 11 Sep 2023 10:31:25 +0200 Subject: [PATCH 14/20] clean ups --- .../kafka_connect/connect_wrapper.py | 10 +++++----- kpops/component_handlers/topic/proxy_wrapper.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 4120bf9e9..9f7e3941c 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -50,7 +50,7 @@ def create_connector( config_json = connector_config.dict() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self._config.url}/connectors", headers=HEADERS, json=connect_data + url=f"{self.url}/connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -72,7 +72,7 @@ def get_connector(self, connector_name: str) -> KafkaConnectResponse: :return: Information about the connector """ response = httpx.get( - url=f"{self._config.url}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: 
log.info(f"Connector {connector_name} exists.") @@ -100,7 +100,7 @@ def update_connector_config( connector_name = connector_config.name config_json = connector_config.dict() response = httpx.put( - url=f"{self._config.url}/connectors/{connector_name}/config", + url=f"{self.url}/connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) @@ -130,7 +130,7 @@ def validate_connector_config( :return: """ response = httpx.put( - url=f"{self._config.url}/connector-plugins/{connector_config.class_name}/config/validate", + url=f"{self.url}/connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, json=connector_config.dict(), ) @@ -157,7 +157,7 @@ def delete_connector(self, connector_name: str) -> None: API Reference:https://docs.confluent.io/platform/current/connect/references/restapi.html#delete--connectors-(string-name)- """ response = httpx.delete( - url=f"{self._config.url}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}/connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 0d1d852c7..ceba48a85 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -63,7 +63,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :param topic_spec: The topic specification. 
""" response = httpx.post( - url=f"{self._config.url}/v3/clusters/{self.cluster_id}/topics", + url=f"{self.url}/v3/clusters/{self.cluster_id}/topics", headers=HEADERS, json=topic_spec.dict(exclude_none=True), ) From bcdbfed83b5285267d31402c556e3f7d29207f29 Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Mon, 11 Sep 2023 10:38:09 +0200 Subject: [PATCH 15/20] fix path to defaults --- docs/docs/resources/examples/defaults.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/resources/examples/defaults.md b/docs/docs/resources/examples/defaults.md index a8740485e..abab9cc8f 100644 --- a/docs/docs/resources/examples/defaults.md +++ b/docs/docs/resources/examples/defaults.md @@ -20,7 +20,7 @@ ??? example "defaults.yaml" ```yaml --8<-- - https://raw.githubusercontent.com/bakdata/kpops-examples/main/word-count/deployment/kpops/defaults/defaults.yaml + https://raw.githubusercontent.com/bakdata/kpops-examples/main/word-count/deployment/kpops/defaults.yaml --8<-- ``` From 37b3cb4b65771634d46f3b938da7d9f35116595c Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Mon, 11 Sep 2023 16:07:15 +0200 Subject: [PATCH 16/20] add reviews --- docs/docs/developer/auto-generation.md | 2 +- hooks/gen_docs/gen_docs_env_vars.py | 4 +- kpops/cli/main.py | 38 +++++++++---------- .../kafka_connect/connect_wrapper.py | 4 +- .../kafka_connect/kafka_connect_handler.py | 4 +- .../schema_handler/schema_handler.py | 9 ++--- .../component_handlers/topic/proxy_wrapper.py | 4 +- tests/cli/test_handlers.py | 12 +++--- tests/cli/test_kpops_config.py | 4 +- .../kafka_connect/test_connect_wrapper.py | 2 +- .../schema_handler/test_schema_handler.py | 34 ++++++++--------- .../topic/test_proxy_wrapper.py | 2 +- 12 files changed, 58 insertions(+), 61 deletions(-) diff --git a/docs/docs/developer/auto-generation.md b/docs/docs/developer/auto-generation.md index 249f52b77..b87cbcad0 100644 --- a/docs/docs/developer/auto-generation.md +++ b/docs/docs/developer/auto-generation.md @@ -10,7 
+10,7 @@ Auto generation happens mostly with [`pre-commit`](https://pre-commit.com/) hook - `cli_env_vars.env` -- All CLI environment variables in a `dotenv` file. - `cli_env_vars.md` -- All CLI environment variables in a table. -- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`PipelineConfig`](https://github.com/bakdata/kpops/blob/main/kpops/cli/pipeline_config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it. +- `config_env_vars.env` -- Almost all pipeline config environment variables in a `dotenv` file. The script checks for each field in [`PipelineConfig`](https://github.com/bakdata/kpops/blob/main/kpops/cli/kpops_config.py) whether it has an `env` attribute defined. The script is currently unable to visit the classes of fields like `topic_name_config`, hence any environment variables defined there would remain unknown to it. - `config_env_vars.env` -- Almost all pipeline config environment variables in a table. - `variable_substitution.yaml` -- A copy of `./tests/pipeline/resources/component-type-substitution/pipeline.yaml` used as an example of substitution. diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index ab034d8bf..5e4464c39 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -239,7 +239,7 @@ def write_csv_to_md_file( writer.dump(output=f) -def __fill_csv_pipeline_config(target: Path) -> None: +def __fill_csv_kpops_config(target: Path) -> None: """Append all ``PipelineConfig``-related env vars to a ``.csv`` file. 
Finds all ``PipelineConfig``-related env vars and appends them to @@ -373,7 +373,7 @@ def gen_vars( + DESCRIPTION_CONFIG_ENV_VARS, columns=list(EnvVarAttrs.values()), description_md_file=DESCRIPTION_CONFIG_ENV_VARS, - variable_extraction_function=__fill_csv_pipeline_config, + variable_extraction_function=__fill_csv_kpops_config, ) # Find all cli-related env variables, write them into a file gen_vars( diff --git a/kpops/cli/main.py b/kpops/cli/main.py index d252c12a4..d643e60b9 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -111,16 +111,16 @@ def setup_pipeline( pipeline_base_dir: Path, pipeline_path: Path, components_module: str | None, - pipeline_config: KpopsConfig, + kpops_config: KpopsConfig, ) -> Pipeline: registry = Registry() if components_module: registry.find_components(components_module) registry.find_components("kpops.components") - handlers = setup_handlers(components_module, pipeline_config) + handlers = setup_handlers(components_module, kpops_config) return Pipeline.load_from_yaml( - pipeline_base_dir, pipeline_path, registry, pipeline_config, handlers + pipeline_base_dir, pipeline_path, registry, kpops_config, handlers ) @@ -128,7 +128,7 @@ def setup_handlers( components_module: str | None, config: KpopsConfig ) -> ComponentHandlers: schema_handler = SchemaHandler.load_schema_handler(components_module, config) - connector_handler = KafkaConnectHandler.from_pipeline_config(config) + connector_handler = KafkaConnectHandler.from_kpops_config(config) proxy_wrapper = ProxyWrapper(config.kafka_rest) topic_handler = TopicHandler(proxy_wrapper) @@ -191,17 +191,17 @@ def log_action(action: str, pipeline_component: PipelineComponent): log.info("\n") -def create_pipeline_config( +def create_kpops_config( config: Path, defaults: Optional[Path], verbose: bool ) -> KpopsConfig: setup_logging_level(verbose) KpopsConfig.Config.config_path = config if defaults: - pipeline_config = KpopsConfig(defaults_path=defaults) + kpops_config = 
KpopsConfig(defaults_path=defaults) else: - pipeline_config = KpopsConfig() - pipeline_config.defaults_path = config.parent / pipeline_config.defaults_path - return pipeline_config + kpops_config = KpopsConfig() + kpops_config.defaults_path = config.parent / kpops_config.defaults_path + return kpops_config @app.command( # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 @@ -248,9 +248,9 @@ def generate( filter_type: FilterType = FILTER_TYPE, verbose: bool = VERBOSE_OPTION, ) -> Pipeline: - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) if not template: @@ -283,9 +283,9 @@ def deploy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) @@ -308,9 +308,9 @@ def destroy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -332,9 +332,9 @@ def reset( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = 
create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: @@ -357,9 +357,9 @@ def clean( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - pipeline_config = create_pipeline_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose) pipeline = setup_pipeline( - pipeline_base_dir, pipeline_path, components_module, pipeline_config + pipeline_base_dir, pipeline_path, components_module, kpops_config ) pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) for component in pipeline_steps: diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 9f7e3941c..eb76e986d 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -31,8 +31,8 @@ class ConnectWrapper: Wraps Kafka Connect APIs """ - def __init__(self, kafka_connect_config: KafkaConnectConfig) -> None: - self._config: KafkaConnectConfig = kafka_connect_config + def __init__(self, config: KafkaConnectConfig) -> None: + self._config: KafkaConnectConfig = config @property def url(self) -> AnyHttpUrl: diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 6eb27c891..5025bd21b 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -136,8 +136,8 @@ def __dry_run_connector_deletion(self, connector_name: str) -> None: ) @classmethod - def from_pipeline_config(cls, config: KpopsConfig) -> Self: + def from_kpops_config(cls, config: KpopsConfig) -> Self: return 
cls( - connect_wrapper=ConnectWrapper(kafka_connect_config=config.kafka_connect), + connect_wrapper=ConnectWrapper(config.kafka_connect), timeout=config.timeout, ) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index e46fb1eff..ef1b34b0f 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -23,11 +23,11 @@ class SchemaHandler: def __init__( self, - pipeline_config: KpopsConfig, + kpops_config: KpopsConfig, components_module: str | None, ) -> None: self.schema_registry_client = SchemaRegistryClient( - pipeline_config.schema_registry.url + kpops_config.schema_registry.url ) self.components_module = components_module @@ -51,10 +51,7 @@ def load_schema_handler( cls, components_module: str | None, config: KpopsConfig ) -> SchemaHandler | None: if config.schema_registry.enabled: - return cls( - pipeline_config=config, - components_module=components_module, - ) + return cls(config, components_module) return None def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index ceba48a85..421c15b56 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -31,8 +31,8 @@ class ProxyWrapper: Wraps Kafka REST Proxy APIs """ - def __init__(self, kafka_rest_config: KafkaRestConfig) -> None: - self._config: KafkaRestConfig = kafka_rest_config + def __init__(self, config: KafkaRestConfig) -> None: + self._config: KafkaRestConfig = config @cached_property def cluster_id(self) -> str: diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index 9e747d65c..da1aa736c 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -23,8 +23,8 @@ def test_set_up_handlers_with_no_schema_handler(mocker: 
MockerFixture): kafka_brokers="broker:9092", ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") - connector_handler = KafkaConnectHandler.from_pipeline_config(config=config) - connector_handler_mock.from_pipeline_config.return_value = connector_handler + connector_handler = KafkaConnectHandler.from_kpops_config(config) + connector_handler_mock.from_kpops_config.return_value = connector_handler topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler") wrapper = mocker.patch("kpops.cli.main.ProxyWrapper") @@ -39,7 +39,7 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): actual_handlers = setup_handlers(MODULE, config) - connector_handler_mock.from_pipeline_config.assert_called_once_with(config) + connector_handler_mock.from_kpops_config.assert_called_once_with(config) assert actual_handlers.schema_handler == expected.schema_handler assert actual_handlers.connector_handler == expected.connector_handler @@ -62,8 +62,8 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): schema_handler_mock.load_schema_handler.return_value = schema_handler connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") - connector_handler = KafkaConnectHandler.from_pipeline_config(config=config) - connector_handler_mock.from_pipeline_config.return_value = connector_handler + connector_handler = KafkaConnectHandler.from_kpops_config(config) + connector_handler_mock.from_kpops_config.return_value = connector_handler topic_handler_mock = mocker.patch("kpops.cli.main.TopicHandler") wrapper = mocker.patch("kpops.cli.main.ProxyWrapper") @@ -80,7 +80,7 @@ def test_set_up_handlers_with_schema_handler(mocker: MockerFixture): schema_handler_mock.load_schema_handler.assert_called_once_with(MODULE, config) - connector_handler_mock.from_pipeline_config.assert_called_once_with(config) + connector_handler_mock.from_kpops_config.assert_called_once_with(config) assert actual_handlers.schema_handler == 
expected.schema_handler assert actual_handlers.connector_handler == expected.connector_handler diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index a93e50fcd..4ee51da15 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -11,7 +11,7 @@ ) -def test_pipeline_config_with_default_values(): +def test_kpops_config_with_default_values(): default_config = KpopsConfig( environment="development", kafka_brokers="http://broker:9092" ) @@ -39,7 +39,7 @@ def test_pipeline_config_with_default_values(): assert default_config.retain_clean_jobs is False -def test_pipeline_config_with_different_invalid_urls(): +def test_kpops_config_with_different_invalid_urls(): with pytest.raises(ValidationError): KpopsConfig( environment="development", diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 0a92bf3f3..3a93397f8 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -31,7 +31,7 @@ def setup(self): defaults_path=DEFAULTS_PATH, environment="development", ) - self.connect_wrapper = ConnectWrapper(kafka_connect_config=config.kafka_connect) + self.connect_wrapper = ConnectWrapper(config.kafka_connect) @pytest.fixture def connector_config(self) -> KafkaConnectorConfig: diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 1bc7f9e3c..4172f15d8 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -70,7 +70,7 @@ def to_section(topic_config: TopicConfig) -> ToSection: @pytest.fixture() -def pipeline_config_with_sr_enabled() -> KpopsConfig: +def kpops_config_with_sr_enabled() -> KpopsConfig: return KpopsConfig( environment="development", kafka_brokers="broker:9092", 
@@ -124,10 +124,10 @@ def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): def test_should_raise_value_error_if_schema_provider_class_not_found( - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, components_module=NON_EXISTING_PROVIDER_MODULE, ) @@ -175,10 +175,10 @@ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -197,10 +197,10 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -220,11 +220,11 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl topic_config: TopicConfig, to_section: ToSection, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -262,11 +262,11 @@ def 
test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve log_info_mock: MagicMock, log_debug_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" @@ -298,13 +298,13 @@ def test_should_submit_non_existing_schema_when_not_dry( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_provider = TestSchemaProvider() schema_class = "com.bakdata.kpops.test.SchemaHandlerTest" schema = schema_provider.provide_schema(schema_class, {}) schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -327,10 +327,10 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( to_section: ToSection, log_info_mock: MagicMock, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, components_module=TEST_SCHEMA_PROVIDER_MODULE, ) @@ -348,10 +348,10 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( def test_should_delete_schemas_when_not_in_dry_run( to_section: ToSection, schema_registry_mock: MagicMock, - pipeline_config_with_sr_enabled: KpopsConfig, + kpops_config_with_sr_enabled: KpopsConfig, ): schema_handler = SchemaHandler( - pipeline_config=pipeline_config_with_sr_enabled, + kpops_config=kpops_config_with_sr_enabled, 
components_module=TEST_SCHEMA_PROVIDER_MODULE, ) diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index 3f034d8c2..553f7a4a8 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -32,7 +32,7 @@ def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) def setup(self, httpx_mock: HTTPXMock): config = KpopsConfig(defaults_path=DEFAULTS_PATH, environment="development") - self.proxy_wrapper = ProxyWrapper(kafka_rest_config=config.kafka_rest) + self.proxy_wrapper = ProxyWrapper(config.kafka_rest) with open( DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json" From 1707ed04c9316575d7821edc1e9236a99ad0220e Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 27 Sep 2023 14:49:41 +0200 Subject: [PATCH 17/20] Update files --- kpops/cli/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/cli/config.py b/kpops/cli/config.py index 1a9a42091..0097ef5e0 100644 --- a/kpops/cli/config.py +++ b/kpops/cli/config.py @@ -134,7 +134,7 @@ class Config(BaseConfig): config_path = Path("config.yaml") env_file = ".env" env_file_encoding = "utf-8" - env_prefix = f"{ENV_PREFIX}" + env_prefix = ENV_PREFIX @classmethod def customise_sources( From 7a00f03fbb79c2e6ce9bfca9dabdd5a510cfc34c Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 27 Sep 2023 14:56:00 +0200 Subject: [PATCH 18/20] use config fixture --- .../schema_handler/test_schema_handler.py | 44 ++++++++----------- 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 4172f15d8..13dce3c63 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -1,5 +1,4 @@ import json -from 
pathlib import Path from unittest import mock from unittest.mock import MagicMock @@ -80,19 +79,15 @@ def kpops_config_with_sr_enabled() -> KpopsConfig: ) -def test_load_schema_handler(): - config_enable = KpopsConfig( - defaults_path=Path("fake"), - environment="development", - schema_registry=SchemaRegistryConfig(enabled=True), - ) - +def test_load_schema_handler(kpops_config_with_sr_enabled: KpopsConfig): assert isinstance( - SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_enable), + SchemaHandler.load_schema_handler( + TEST_SCHEMA_PROVIDER_MODULE, kpops_config_with_sr_enabled + ), SchemaHandler, ) - config_disable = config_enable.copy() + config_disable = kpops_config_with_sr_enabled.copy() config_disable.schema_registry = SchemaRegistryConfig(enabled=False) assert ( @@ -101,14 +96,11 @@ def test_load_schema_handler(): ) -def test_should_lazy_load_schema_provider(find_class_mock: MagicMock): - config_enable = KpopsConfig( - defaults_path=Path("fake"), - environment="development", - schema_registry=SchemaRegistryConfig(enabled=True), - ) +def test_should_lazy_load_schema_provider( + find_class_mock: MagicMock, kpops_config_with_sr_enabled: KpopsConfig +): schema_handler = SchemaHandler.load_schema_handler( - TEST_SCHEMA_PROVIDER_MODULE, config_enable + TEST_SCHEMA_PROVIDER_MODULE, kpops_config_with_sr_enabled ) assert schema_handler is not None @@ -144,22 +136,22 @@ def test_should_raise_value_error_if_schema_provider_class_not_found( ) -def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty(): - config_enable = KpopsConfig( - defaults_path=Path("fake"), - environment="development", - schema_registry=SchemaRegistryConfig(enabled=True), - ) - +def test_should_raise_value_error_when_schema_provider_is_called_and_components_module_is_empty( + kpops_config_with_sr_enabled: KpopsConfig, +): with pytest.raises(ValueError): - schema_handler = SchemaHandler.load_schema_handler(None, config_enable) + 
schema_handler = SchemaHandler.load_schema_handler( + None, kpops_config_with_sr_enabled + ) assert schema_handler is not None schema_handler.schema_provider.provide_schema( "com.bakdata.kpops.test.SchemaHandlerTest", {} ) with pytest.raises(ValueError) as value_error: - schema_handler = SchemaHandler.load_schema_handler("", config_enable) + schema_handler = SchemaHandler.load_schema_handler( + "", kpops_config_with_sr_enabled + ) assert schema_handler is not None schema_handler.schema_provider.provide_schema( "com.bakdata.kpops.test.SchemaHandlerTest", {} From 1a6907f578d6ec39ca82009bb10505171caf261d Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Wed, 27 Sep 2023 14:59:06 +0200 Subject: [PATCH 19/20] Update files --- docs/docs/user/migration-guide/v2-v3.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index 724e0b43c..c45637c05 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -46,6 +46,7 @@ The variable is now called `kafka_brokers`. nameOverride: override-with-this-name imageTag: "1.0.0" ... 
+``` ## [Move GitHub action to repsitory root](https://github.com/bakdata/kpops/pull/356) From 636a85d0cfd19a10fd68c0931ca3711d4a8a250b Mon Sep 17 00:00:00 2001 From: Ramin Gharib Date: Fri, 29 Sep 2023 11:14:31 +0200 Subject: [PATCH 20/20] Add reviews --- docs/docs/schema/config.json | 4 ++-- docs/docs/user/migration-guide/v2-v3.md | 8 ++++---- hooks/gen_docs/gen_docs_env_vars.py | 2 +- kpops/cli/main.py | 2 +- .../kafka_connect/connect_wrapper.py | 2 +- .../kafka_connect/kafka_connect_handler.py | 2 +- .../schema_handler/schema_handler.py | 2 +- kpops/component_handlers/topic/proxy_wrapper.py | 2 +- .../base_components/base_defaults_component.py | 2 +- kpops/{cli => }/config.py | 17 +++++++++-------- kpops/pipeline_generator/pipeline.py | 2 +- kpops/utils/gen_schema.py | 2 +- tests/cli/test_handlers.py | 3 +-- tests/cli/test_kpops_config.py | 8 ++++---- tests/compiler/test_pipeline_name.py | 2 +- .../kafka_connect/test_connect_wrapper.py | 2 +- .../schema_handler/test_schema_handler.py | 2 +- .../topic/test_proxy_wrapper.py | 2 +- .../components/test_base_defaults_component.py | 2 +- tests/components/test_kafka_app.py | 2 +- tests/components/test_kafka_connector.py | 2 +- tests/components/test_kafka_sink_connector.py | 2 +- tests/components/test_kafka_source_connector.py | 2 +- tests/components/test_kubernetes_app.py | 2 +- tests/components/test_producer_app.py | 2 +- tests/components/test_streams_app.py | 2 +- 26 files changed, 41 insertions(+), 41 deletions(-) rename kpops/{cli => }/config.py (91%) diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index dce427c6e..a0841dae3 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -264,7 +264,7 @@ "properties": { "enabled": { "default": false, - "description": "If the Schema Registry handler should be initialized.", + "description": "Whether the Schema Registry handler should be initialized.", "env_names": [ "enabled" ], @@ -290,7 +290,7 @@ }, "TopicNameConfig": { 
"additionalProperties": false, - "description": "Configures topic names.", + "description": "Configure the topic name variables you can use in the pipeline definition.", "properties": { "default_error_topic_name": { "default": "${pipeline_name}-${component_name}-error", diff --git a/docs/docs/user/migration-guide/v2-v3.md b/docs/docs/user/migration-guide/v2-v3.md index c45637c05..def10c0f0 100644 --- a/docs/docs/user/migration-guide/v2-v3.md +++ b/docs/docs/user/migration-guide/v2-v3.md @@ -1,10 +1,10 @@ # Migrate from V2 to V3 -## [Make Kafka rest proxy & connect hosts default and improve schema registry config](https://github.com/bakdata/kpops/pull/354) +## [Make Kafka REST Proxy & Kafka Connect hosts default and improve Schema Registry config](https://github.com/bakdata/kpops/pull/354) The breaking changes target the `config.yaml` file: -- The `schema_registry_url` is replaced with `schema_registry.enabled` (default `false`) and `schema_registry.url` (default `http://localhost:8081`). +- The `schema_registry_url` is replaced with `schema_registry.url` (default `http://localhost:8081`) and `schema_registry.enabled` (default `false`). - `kafka_rest_host` is renamed to `kafka_rest.url` (default `http://localhost:8082`). @@ -12,7 +12,7 @@ The breaking changes target the `config.yaml` file: - `brokers` is renamed to `kafka_brokers`. -The environment variable name of these config fields changed respectively. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md) to see the newest changes. +The environment variable names of these config fields changed respectively. Please refer to the [environment variables documentation page](../core-concepts/variables/environment_variables.md) to see the newest changes. #### config.yaml @@ -50,7 +50,7 @@ The variable is now called `kafka_brokers`. 
## [Move GitHub action to repsitory root](https://github.com/bakdata/kpops/pull/356) -The location of the GitHub action has changed and it's now available directly as `bakdata/kpops`. +The location of the GitHub action has changed, and it's now available directly as `bakdata/kpops`. You'll need to change it in your GitHub CI workflows. diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 7aed52963..24106e18f 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -21,7 +21,7 @@ from hooks import PATH_ROOT from hooks.gen_docs import IterableStrEnum from kpops.cli import main -from kpops.cli.config import KpopsConfig +from kpops.config import KpopsConfig PATH_DOCS_RESOURCES = PATH_ROOT / "docs/docs/resources" PATH_DOCS_VARIABLES = PATH_DOCS_RESOURCES / "variables" diff --git a/kpops/cli/main.py b/kpops/cli/main.py index d643e60b9..f689231af 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -9,7 +9,6 @@ import typer from kpops import __version__ -from kpops.cli.config import ENV_PREFIX, KpopsConfig from kpops.cli.custom_formatter import CustomFormatter from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers @@ -19,6 +18,7 @@ from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper +from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index eb76e986d..aa1918a43 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -19,7 +19,7 @@ ) if 
TYPE_CHECKING: - from kpops.cli.config import KafkaConnectConfig + from kpops.config import KafkaConnectConfig HEADERS = {"Accept": "application/json", "Content-Type": "application/json"} diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 5025bd21b..7e3d798fe 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -19,7 +19,7 @@ from typing_extensions import Self if TYPE_CHECKING: - from kpops.cli.config import KpopsConfig + from kpops.config import KpopsConfig log = logging.getLogger("KafkaConnectHandler") diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index ef1b34b0f..4b21083de 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -7,7 +7,6 @@ from schema_registry.client import SchemaRegistryClient from schema_registry.client.schema import AvroSchema -from kpops.cli.config import KpopsConfig from kpops.cli.exception import ClassNotFoundError from kpops.cli.registry import find_class from kpops.component_handlers.schema_handler.schema_provider import ( @@ -15,6 +14,7 @@ SchemaProvider, ) from kpops.components.base_components.models.to_section import ToSection +from kpops.config import KpopsConfig from kpops.utils.colorify import greenify, magentaify log = logging.getLogger("SchemaHandler") diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 421c15b56..407dcfcd8 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -19,7 +19,7 @@ ) if TYPE_CHECKING: - from kpops.cli.config import KafkaRestConfig + from kpops.config import KafkaRestConfig log = logging.getLogger("KafkaRestProxy") diff 
--git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 9dff5c6df..545813f53 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -9,8 +9,8 @@ import typer from pydantic import BaseModel, Field -from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers +from kpops.config import KpopsConfig from kpops.utils import cached_classproperty from kpops.utils.dict_ops import update_nested from kpops.utils.docstring import describe_attr diff --git a/kpops/cli/config.py b/kpops/config.py similarity index 91% rename from kpops/cli/config.py rename to kpops/config.py index 0097ef5e0..95193bd53 100644 --- a/kpops/cli/config.py +++ b/kpops/config.py @@ -6,6 +6,7 @@ from pydantic import AnyHttpUrl, BaseConfig, BaseSettings, Field, parse_obj_as from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig +from kpops.utils.docstring import describe_object from kpops.utils.yaml_loading import load_yaml_file if TYPE_CHECKING: @@ -17,7 +18,7 @@ class TopicNameConfig(BaseSettings): - """Configures topic names.""" + """Configure the topic name variables you can use in the pipeline definition.""" default_output_topic_name: str = Field( default="${pipeline_name}-${component_name}", @@ -34,9 +35,11 @@ class SchemaRegistryConfig(BaseSettings): enabled: bool = Field( default=False, - description="If the Schema Registry handler should be initialized.", + description="Whether the Schema Registry handler should be initialized.", ) url: AnyHttpUrl = Field( + # For validating URLs use parse_obj_as + # https://github.com/pydantic/pydantic/issues/1106 default=parse_obj_as(AnyHttpUrl, "http://localhost:8081"), env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", description="Address of the Schema Registry.", @@ -47,8 +50,6 @@ class KafkaRestConfig(BaseSettings): """Configuration for Kafka 
REST Proxy.""" url: AnyHttpUrl = Field( - # For validating URLs use parse_obj_as - # https://github.com/pydantic/pydantic/issues/1106 default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), env=f"{ENV_PREFIX}KAFKA_REST_URL", description="Address of the Kafka REST Proxy.", @@ -93,19 +94,19 @@ class KpopsConfig(BaseSettings): ) topic_name_config: TopicNameConfig = Field( default=TopicNameConfig(), - description="Configure the topic name variables you can use in the pipeline definition.", + description=describe_object(TopicNameConfig.__doc__), ) schema_registry: SchemaRegistryConfig = Field( default=SchemaRegistryConfig(), - description="Configuration for Schema Registry.", + description=describe_object(SchemaRegistryConfig.__doc__), ) kafka_rest: KafkaRestConfig = Field( default=KafkaRestConfig(), - description="Configuration for Kafka REST Proxy.", + description=describe_object(KafkaRestConfig.__doc__), ) kafka_connect: KafkaConnectConfig = Field( default=KafkaConnectConfig(), - description="Configuration for Kafka Connect.", + description=describe_object(KafkaConnectConfig.__doc__), ) timeout: int = Field( default=300, diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 5c3f5ef8c..ce5b698cc 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -12,10 +12,10 @@ from rich.console import Console from rich.syntax import Syntax -from kpops.cli.config import KpopsConfig from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.config import KpopsConfig from kpops.utils.dict_ops import generate_substitution, update_nested_pair from kpops.utils.environment import ENV from kpops.utils.yaml_loading import load_yaml_file, substitute, substitute_nested diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index b9ecf55d4..571a82a7d 100644 --- 
a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -8,9 +8,9 @@ from pydantic.fields import FieldInfo, ModelField from pydantic.schema import SkipField -from kpops.cli.config import KpopsConfig from kpops.cli.registry import _find_classes from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.config import KpopsConfig from kpops.utils.docstring import describe_object diff --git a/tests/cli/test_handlers.py b/tests/cli/test_handlers.py index da1aa736c..40c496497 100644 --- a/tests/cli/test_handlers.py +++ b/tests/cli/test_handlers.py @@ -2,7 +2,6 @@ from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig, SchemaRegistryConfig from kpops.cli.main import setup_handlers from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( @@ -10,6 +9,7 @@ ) from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler +from kpops.config import KpopsConfig, SchemaRegistryConfig from tests.cli.resources.module import CustomSchemaProvider MODULE = CustomSchemaProvider.__module__ @@ -19,7 +19,6 @@ def test_set_up_handlers_with_no_schema_handler(mocker: MockerFixture): config = KpopsConfig( defaults_path=Path("fake"), environment="development", - schema_registry=SchemaRegistryConfig(), kafka_brokers="broker:9092", ) connector_handler_mock = mocker.patch("kpops.cli.main.KafkaConnectHandler") diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index 4ee51da15..254a2d73a 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -3,7 +3,7 @@ import pytest from pydantic import AnyHttpUrl, ValidationError, parse_obj_as -from kpops.cli.config import ( +from kpops.config import ( KafkaConnectConfig, KafkaRestConfig, KpopsConfig, @@ -45,7 +45,7 @@ def test_kpops_config_with_different_invalid_urls(): 
environment="development", kafka_brokers="http://broker:9092", kafka_connect=KafkaConnectConfig( - url=parse_obj_as(AnyHttpUrl, "in-valid-host") + url=parse_obj_as(AnyHttpUrl, "invalid-host") ), ) @@ -53,7 +53,7 @@ def test_kpops_config_with_different_invalid_urls(): KpopsConfig( environment="development", kafka_brokers="http://broker:9092", - kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "in-valid-host")), + kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "invalid-host")), ) with pytest.raises(ValidationError): @@ -62,6 +62,6 @@ def test_kpops_config_with_different_invalid_urls(): kafka_brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, - url=parse_obj_as(AnyHttpUrl, "in-valid-host"), + url=parse_obj_as(AnyHttpUrl, "invalid-host"), ), ) diff --git a/tests/compiler/test_pipeline_name.py b/tests/compiler/test_pipeline_name.py index 87b47565d..9a44412dd 100644 --- a/tests/compiler/test_pipeline_name.py +++ b/tests/compiler/test_pipeline_name.py @@ -2,7 +2,7 @@ import pytest -from kpops.cli.config import KpopsConfig +from kpops.config import KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.environment import ENV diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 3a93397f8..ca9d53313 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -6,7 +6,6 @@ import pytest from pytest_httpx import HTTPXMock -from kpops.cli.config import KpopsConfig from kpops.component_handlers.kafka_connect.connect_wrapper import ConnectWrapper from kpops.component_handlers.kafka_connect.exception import ( ConnectorNotFoundException, @@ -17,6 +16,7 @@ KafkaConnectResponse, ) from kpops.component_handlers.kafka_connect.timeout import timeout +from kpops.config import KpopsConfig HEADERS = {"Accept": "application/json", 
"Content-Type": "application/json"} diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 13dce3c63..1ead99781 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -8,7 +8,6 @@ from schema_registry.client.schema import AvroSchema from schema_registry.client.utils import SchemaVersion -from kpops.cli.config import KpopsConfig, SchemaRegistryConfig from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.schema_handler.schema_provider import SchemaProvider from kpops.components.base_components.models import TopicName @@ -17,6 +16,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig, SchemaRegistryConfig from kpops.utils.colorify import greenify, magentaify from tests.pipeline.test_components import TestSchemaProvider diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index 553f7a4a8..e1ff9ae40 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -7,13 +7,13 @@ from pytest_httpx import HTTPXMock from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig from kpops.component_handlers.topic.exception import ( KafkaRestProxyError, TopicNotFoundException, ) from kpops.component_handlers.topic.model import TopicResponse, TopicSpec from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper +from kpops.config import KpopsConfig HEADERS = {"Content-Type": "application/json"} DEFAULT_HOST = "http://localhost:8082" diff --git a/tests/components/test_base_defaults_component.py b/tests/components/test_base_defaults_component.py index 25cffc58e..dd593f826 100644 --- a/tests/components/test_base_defaults_component.py +++ 
b/tests/components/test_base_defaults_component.py @@ -3,12 +3,12 @@ import pytest -from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, load_defaults, ) +from kpops.config import KpopsConfig from kpops.utils.environment import ENV DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git a/tests/components/test_kafka_app.py b/tests/components/test_kafka_app.py index b9b746d3b..66d9daa31 100644 --- a/tests/components/test_kafka_app.py +++ b/tests/components/test_kafka_app.py @@ -4,7 +4,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -12,6 +11,7 @@ HelmUpgradeInstallFlags, ) from kpops.components.base_components import KafkaApp +from kpops.config import KpopsConfig DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index a952e7285..46616cd17 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -4,11 +4,11 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig from kpops.components.base_components.kafka_connector import KafkaConnector +from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" CONNECTOR_NAME = "test-connector-with-long-name-0123456789abcdefghijklmnop" diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 
88ee9bdc9..6861817bd 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -3,7 +3,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmUpgradeInstallFlags, @@ -25,6 +24,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig from kpops.utils.colorify import magentaify from tests.components.test_kafka_connector import ( CONNECTOR_CLEAN_FULL_NAME, diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index 4bbbde8a6..82b042d0c 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -3,7 +3,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmUpgradeInstallFlags, @@ -22,6 +21,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig from kpops.utils.environment import ENV from tests.components.test_kafka_connector import ( CONNECTOR_CLEAN_FULL_NAME, diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index 458e01e22..d89db64bd 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -5,7 +5,6 @@ from pytest_mock import MockerFixture from typing_extensions import override -from kpops.cli.config import KpopsConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -17,6 +16,7 @@ KubernetesApp, KubernetesAppConfig, ) +from kpops.config import KpopsConfig from kpops.utils.colorify import magentaify DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git 
a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index b8927c313..2c7853fb3 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -5,7 +5,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmUpgradeInstallFlags from kpops.components import ProducerApp @@ -13,6 +12,7 @@ OutputTopicTypes, TopicConfig, ) +from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index 14ecdda9a..50ab2c332 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -4,7 +4,6 @@ import pytest from pytest_mock import MockerFixture -from kpops.cli.config import KpopsConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import ( HelmDiffConfig, @@ -17,6 +16,7 @@ TopicConfig, ToSection, ) +from kpops.config import KpopsConfig, TopicNameConfig DEFAULTS_PATH = Path(__file__).parent / "resources"