From 86f8db90bf39881eb81fcba7ee99e829784034af Mon Sep 17 00:00:00 2001 From: Alejandro Jaramillo Date: Mon, 29 Jan 2024 08:16:47 +0100 Subject: [PATCH] Add support for pipeline steps parallelization (#312) closes #177 --------- Co-authored-by: Ivan Yordanov --- docs/docs/user/references/cli-commands.md | 4 + kpops/cli/main.py | 180 +++++++++++++----- kpops/component_handlers/helm_wrapper/helm.py | 23 ++- .../kafka_connect/connect_wrapper.py | 45 +++-- .../kafka_connect/kafka_connect_handler.py | 40 ++-- .../kafka_connect/timeout.py | 20 +- .../schema_handler/schema_handler.py | 50 ++--- kpops/component_handlers/topic/handler.py | 34 ++-- .../component_handlers/topic/proxy_wrapper.py | 37 ++-- kpops/components/base_components/helm_app.py | 8 +- kpops/components/base_components/kafka_app.py | 17 +- .../base_components/kafka_connector.py | 58 +++--- .../base_components/pipeline_component.py | 48 ++++- .../producer/producer_app.py | 14 +- .../streams_bootstrap/streams/streams_app.py | 28 ++- kpops/pipeline.py | 126 ++++++++++-- poetry.lock | 84 +++++++- pyproject.toml | 3 + tests/cli/test_pipeline_steps.py | 36 ++-- .../helm_wrapper/test_helm_wrapper.py | 61 +++--- .../kafka_connect/test_connect_handler.py | 95 ++++----- .../kafka_connect/test_connect_wrapper.py | 135 +++++++------ .../schema_handler/test_schema_handler.py | 60 +++--- .../topic/test_proxy_wrapper.py | 81 ++++---- .../topic/test_topic_handler.py | 101 ++++++---- tests/components/test_helm_app.py | 36 ++-- tests/components/test_kafka_connector.py | 12 +- tests/components/test_kafka_sink_connector.py | 45 +++-- .../components/test_kafka_source_connector.py | 42 ++-- tests/components/test_kubernetes_app.py | 8 +- tests/components/test_producer_app.py | 66 +++++-- tests/components/test_streams_app.py | 84 ++++++-- tests/components/test_streams_bootstrap.py | 5 +- .../resources/parallel-pipeline/config.yaml | 15 ++ .../resources/parallel-pipeline/defaults.yaml | 27 +++ .../resources/parallel-pipeline/pipeline.yaml | 64 +++++++ .../pipeline-with-loop/defaults.yaml | 19 ++ .../pipeline-with-loop/pipeline.yaml | 34 ++++ .../pipeline-with-short-topics/defaults.yaml | 11 +- .../same-topic-and-component-name/config.yaml | 12 ++ .../defaults.yaml | 27 +++ .../pipeline.yaml | 8 + .../simple-pipeline/config.yaml | 12 ++ .../simple-pipeline/defaults.yaml | 28 +++ .../simple-pipeline/pipeline.yaml | 6 + tests/pipeline/test_generate.py | 166 +++++++++++++++- 46 files changed, 1564 insertions(+), 551 deletions(-) create mode 100644 tests/pipeline/resources/parallel-pipeline/config.yaml create mode 100644 tests/pipeline/resources/parallel-pipeline/defaults.yaml create mode 100644 tests/pipeline/resources/parallel-pipeline/pipeline.yaml create mode 100644 tests/pipeline/resources/pipeline-with-loop/defaults.yaml create mode 100644 tests/pipeline/resources/pipeline-with-loop/pipeline.yaml create mode 100644 tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/config.yaml create mode 100644 tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml create mode 100644 tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml create mode 100644 tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/config.yaml create mode 100644 tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml create mode 100644 tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml diff --git a/docs/docs/user/references/cli-commands.md 
b/docs/docs/user/references/cli-commands.md index 0a7617224..de8a7febe 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -47,6 +47,7 @@ $ kpops clean [OPTIONS] PIPELINE_PATH * `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] * `--help`: Show this message and exit. ## `kpops deploy` @@ -73,6 +74,7 @@ $ kpops deploy [OPTIONS] PIPELINE_PATH * `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] * `--help`: Show this message and exit. ## `kpops destroy` @@ -99,6 +101,7 @@ $ kpops destroy [OPTIONS] PIPELINE_PATH * `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] * `--help`: Show this message and exit. ## `kpops generate` @@ -175,6 +178,7 @@ $ kpops reset [OPTIONS] PIPELINE_PATH * `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] * `--help`: Show this message and exit. 
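For example, assuming a pipeline whose steps form independent branches, `kpops reset ./pipeline.yaml --parallel --execute` resets those branches concurrently instead of one step at a time; the same flag applies to `deploy`, `destroy`, and `clean`.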
## `kpops schema` diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 4c342cdac..3a3936226 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -1,7 +1,7 @@ from __future__ import annotations +import asyncio import logging -from collections.abc import Iterator from enum import Enum from pathlib import Path from typing import TYPE_CHECKING, Optional @@ -32,6 +32,8 @@ from kpops.utils.yaml import print_yaml if TYPE_CHECKING: + from collections.abc import Awaitable, Callable, Coroutine, Iterator + from kpops.components.base_components import PipelineComponent @@ -92,6 +94,13 @@ help="Whether to dry run the command or execute it", ) +PARALLEL: bool = typer.Option( + False, + "--parallel/--no-parallel", + rich_help_panel="EXPERIMENTAL: features in preview, not production-ready", + help="Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially.", +) + class FilterType(str, Enum): INCLUDE = "include" @@ -183,6 +192,26 @@ def is_in_steps(component: PipelineComponent) -> bool: return filtered_steps +def get_reverse_concurrently_tasks_to_execute( + pipeline: Pipeline, + steps: str | None, + filter_type: FilterType, + runner: Callable[[PipelineComponent], Coroutine], +) -> Awaitable: + steps_to_apply = reverse_pipeline_steps(pipeline, steps, filter_type) + return pipeline.build_execution_graph_from(list(steps_to_apply), True, runner) + + +def get_concurrently_tasks_to_execute( + pipeline: Pipeline, + steps: str | None, + filter_type: FilterType, + runner: Callable[[PipelineComponent], Coroutine], +) -> Awaitable: + steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) + return pipeline.build_execution_graph_from(steps_to_apply, False, runner) + + def get_steps_to_apply( pipeline: Pipeline, steps: str | None, filter_type: FilterType ) -> list[PipelineComponent]: @@ -283,6 +312,7 @@ def generate( environment, verbose, ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) if output: print_yaml(pipeline.to_yaml()) @@ -335,20 +365,33 @@ def deploy( environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, + parallel: bool = PARALLEL, ): - kpops_config = create_kpops_config( - config, - defaults, - dotenv, - environment, - verbose, - ) - pipeline = setup_pipeline(pipeline_path, kpops_config, environment) - - steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) - for component in steps_to_apply: + async def deploy_runner(component: PipelineComponent): log_action("Deploy", component) - component.deploy(dry_run) + await component.deploy(dry_run) + + async def async_deploy(): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, + ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) + + if parallel: + pipeline_tasks = get_concurrently_tasks_to_execute( + pipeline, steps, filter_type, deploy_runner + ) + await pipeline_tasks + else: + steps_to_apply = get_steps_to_apply(pipeline, steps, filter_type) + for component in steps_to_apply: + await deploy_runner(component) + + asyncio.run(async_deploy()) @app.command(help="Destroy pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 @@ -362,19 +405,34 @@ def destroy( environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, + parallel: bool = PARALLEL, ): - kpops_config = create_kpops_config( - config, - defaults, - dotenv, - 
environment, - verbose, - ) - pipeline = setup_pipeline(pipeline_path, kpops_config, environment) - pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) - for component in pipeline_steps: + async def destroy_runner(component: PipelineComponent): log_action("Destroy", component) - component.destroy(dry_run) + await component.destroy(dry_run) + + async def async_destroy(): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, + ) + + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) + + if parallel: + pipeline_tasks = get_reverse_concurrently_tasks_to_execute( + pipeline, steps, filter_type, destroy_runner + ) + await pipeline_tasks + else: + pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) + for component in pipeline_steps: + await destroy_runner(component) + + asyncio.run(async_destroy()) @app.command(help="Reset pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 @@ -388,20 +446,33 @@ def reset( environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, + parallel: bool = PARALLEL, ): - kpops_config = create_kpops_config( - config, - defaults, - dotenv, - environment, - verbose, - ) - pipeline = setup_pipeline(pipeline_path, kpops_config, environment) - pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) - for component in pipeline_steps: + async def reset_runner(component: PipelineComponent): log_action("Reset", component) - component.destroy(dry_run) - component.reset(dry_run) + await component.destroy(dry_run) + await component.reset(dry_run) + + async def async_reset(): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, + ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) + if parallel: + pipeline_tasks = get_reverse_concurrently_tasks_to_execute( + pipeline, steps, filter_type, reset_runner + ) + await pipeline_tasks + else: + pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) + for component in pipeline_steps: + await reset_runner(component) + + asyncio.run(async_reset()) @app.command(help="Clean pipeline steps") # pyright: ignore[reportGeneralTypeIssues] https://github.com/rec/dtyper/issues/8 @@ -415,20 +486,33 @@ def clean( environment: Optional[str] = ENVIRONMENT, dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, + parallel: bool = PARALLEL, ): - kpops_config = create_kpops_config( - config, - defaults, - dotenv, - environment, - verbose, - ) - pipeline = setup_pipeline(pipeline_path, kpops_config, environment) - pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) - for component in pipeline_steps: + async def clean_runner(component: PipelineComponent): log_action("Clean", component) - component.destroy(dry_run) - component.clean(dry_run) + await component.destroy(dry_run) + await component.clean(dry_run) + + async def async_clean(): + kpops_config = create_kpops_config( + config, + defaults, + dotenv, + environment, + verbose, + ) + pipeline = setup_pipeline(pipeline_path, kpops_config, environment) + if parallel: + pipeline_steps = get_reverse_concurrently_tasks_to_execute( + pipeline, steps, filter_type, clean_runner + ) + await pipeline_steps + else: + pipeline_steps = reverse_pipeline_steps(pipeline, steps, filter_type) + for component in pipeline_steps: + await clean_runner(component) + + asyncio.run(async_clean()) def version_callback(show_version: bool) -> None: 
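The four commands above hand ordering over to the execution graph built in kpops/pipeline.py further down in this patch. As a rough, self-contained sketch of that scheduling idea (not part of the patch; the component names and the fake_deploy runner are hypothetical stand-ins for runners like deploy_runner): attach an artificial root to all source nodes, take nx.bfs_layers from that root, and await each layer's coroutines together. In the real graph, topic nodes also sit between components and are skipped when tasks are built.

import asyncio

import networkx as nx


async def fake_deploy(component: str) -> None:
    # Hypothetical stand-in for an async runner such as deploy_runner above.
    print(f"deploying {component}")
    await asyncio.sleep(0.1)  # simulate work


async def run_in_layers(graph: nx.DiGraph) -> None:
    root = "root_node_bfs"
    graph.add_node(root)
    # Attach the artificial root to every source node (no predecessors),
    # mirroring Pipeline.build_execution_graph_from in kpops/pipeline.py.
    for node in list(graph.nodes):
        if node != root and not list(graph.predecessors(node)):
            graph.add_edge(root, node)
    # Await each BFS layer together; in this graph, nodes sharing a layer
    # are independent of each other, while layers run in order.
    for layer in list(nx.bfs_layers(graph, root))[1:]:
        await asyncio.gather(*(fake_deploy(node) for node in layer))


graph = nx.DiGraph()
graph.add_edges_from(
    [
        ("producer", "streams-app-a"),
        ("producer", "streams-app-b"),
        ("streams-app-a", "sink"),
    ]
)
asyncio.run(run_in_layers(graph))

Running this prints the producer first, then both streams apps concurrently, then the sink, which is the behavior --parallel enables for independent pipeline steps.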
diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 8499504ba..2f2c5dcf9 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -1,5 +1,6 @@ from __future__ import annotations +import asyncio import logging import re import subprocess @@ -74,7 +75,7 @@ def add_repo( else: self.__execute(["helm", "repo", "update"]) - def upgrade_install( + async def upgrade_install( self, release_name: str, chart: str, @@ -103,9 +104,9 @@ def upgrade_install( command.extend(flags.to_command()) if dry_run: command.append("--dry-run") - return self.__execute(command) + return await self.__async_execute(command) - def uninstall( + async def uninstall( self, namespace: str, release_name: str, @@ -122,7 +123,7 @@ def uninstall( if dry_run: command.append("--dry-run") try: - return self.__execute(command) + return await self.__async_execute(command) except ReleaseNotFoundException: log.warning( f"Release with name {release_name} not found. Could not uninstall app." @@ -229,6 +230,20 @@ def __execute(self, command: list[str]) -> str: log.debug(process.stdout) return process.stdout + async def __async_execute(self, command: list[str]): + command = self.__set_global_flags(command) + log.debug(f"Executing {' '.join(command)}") + proc = await asyncio.create_subprocess_exec( + *command, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + stdout, stderr = await proc.communicate() + Helm.parse_helm_command_stderr_output(stderr.decode()) + log.debug(stdout) + return stdout.decode() + def __set_global_flags(self, command: list[str]) -> list[str]: if self._context: log.debug(f"Changing the Kubernetes context to {self._context}") diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 4d92bad03..687075c2e 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -1,8 +1,7 @@ from __future__ import annotations +import asyncio import logging -import time -from time import sleep from typing import TYPE_CHECKING import httpx @@ -32,12 +31,13 @@ class ConnectWrapper: def __init__(self, config: KafkaConnectConfig) -> None: self._config: KafkaConnectConfig = config + self._client = httpx.AsyncClient() @property def url(self) -> AnyHttpUrl: return self._config.url - def create_connector( + async def create_connector( self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: """Create a new connector. @@ -48,7 +48,7 @@ def create_connector( """ config_json = connector_config.model_dump() connect_data = {"name": connector_config.name, "config": config_json} - response = httpx.post( + response = await self._client.post( url=f"{self.url}connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: @@ -59,11 +59,13 @@ def create_connector( log.warning( "Rebalancing in progress while creating a connector... Retrying..." ) - time.sleep(1) - self.create_connector(connector_config) + + await asyncio.sleep(1) + await self.create_connector(connector_config) + raise KafkaConnectError(response) - def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: + async def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: """Get information about the connector. 
API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) @@ -73,7 +75,7 @@ def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: if connector_name is None: msg = "Connector name not set" raise Exception(msg) - response = httpx.get( + response = await self._client.get( url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: @@ -87,28 +89,29 @@ def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: log.warning( "Rebalancing in progress while getting a connector... Retrying..." ) - sleep(1) - self.get_connector(connector_name) + await asyncio.sleep(1) + await self.get_connector(connector_name) raise KafkaConnectError(response) - def update_connector_config( + async def update_connector_config( self, connector_config: KafkaConnectorConfig ) -> KafkaConnectResponse: """Create or update a connector. Create a new connector using the given configuration,or update the configuration for an existing connector. - :param connector_config: Configuration parameters for the connector. :return: Information about the connector after the change has been made. """ connector_name = connector_config.name + config_json = connector_config.model_dump() - response = httpx.put( + response = await self._client.put( url=f"{self.url}connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) + data: dict = response.json() if response.status_code == httpx.codes.OK: log.info(f"Config for connector {connector_name} updated.") @@ -122,11 +125,11 @@ def update_connector_config( log.warning( "Rebalancing in progress while updating a connector... Retrying..." ) - sleep(1) - self.update_connector_config(connector_config) + await asyncio.sleep(1) + await self.update_connector_config(connector_config) raise KafkaConnectError(response) - def validate_connector_config( + async def validate_connector_config( self, connector_config: KafkaConnectorConfig ) -> list[str]: """Validate connector config using the given configuration. @@ -135,7 +138,7 @@ def validate_connector_config( :raises KafkaConnectError: Kafka Konnect error :return: List of all found errors """ - response = httpx.put( + response = await self._client.put( url=f"{self.url}connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, json=connector_config.model_dump(), @@ -157,7 +160,7 @@ def validate_connector_config( return errors raise KafkaConnectError(response) - def delete_connector(self, connector_name: str) -> None: + async def delete_connector(self, connector_name: str) -> None: """Delete a connector, halting all tasks and deleting its configuration. API Reference: @@ -165,7 +168,7 @@ def delete_connector(self, connector_name: str) -> None: :param connector_name: Configuration parameters for the connector. :raises ConnectorNotFoundException: Connector not found """ - response = httpx.delete( + response = await self._client.delete( url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: @@ -178,6 +181,6 @@ def delete_connector(self, connector_name: str) -> None: log.warning( "Rebalancing in progress while deleting a connector... Retrying..." 
) - sleep(1) - self.delete_connector(connector_name) + await asyncio.sleep(1) + await self.delete_connector(connector_name) raise KafkaConnectError(response) diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index fb644dd7a..46ec61a68 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -33,7 +33,7 @@ def __init__( self._connect_wrapper = connect_wrapper self._timeout = timeout - def create_connector( + async def create_connector( self, connector_config: KafkaConnectorConfig, *, dry_run: bool ) -> None: """Create a connector. @@ -44,44 +44,42 @@ def create_connector( :param dry_run: If the connector creation should be run in dry run mode. """ if dry_run: - self.__dry_run_connector_creation(connector_config) + await self.__dry_run_connector_creation(connector_config) else: try: - timeout( - lambda: self._connect_wrapper.get_connector(connector_config.name), + await timeout( + self._connect_wrapper.get_connector(connector_config.name), secs=self._timeout, ) - timeout( - lambda: self._connect_wrapper.update_connector_config( - connector_config - ), + await timeout( + self._connect_wrapper.update_connector_config(connector_config), secs=self._timeout, ) except ConnectorNotFoundException: - timeout( - lambda: self._connect_wrapper.create_connector(connector_config), + await timeout( + self._connect_wrapper.create_connector(connector_config), secs=self._timeout, ) - def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: + async def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: """Delete a connector resource from the cluster. :param connector_name: The connector name. :param dry_run: If the connector deletion should be run in dry run mode. """ if dry_run: - self.__dry_run_connector_deletion(connector_name) + await self.__dry_run_connector_deletion(connector_name) else: try: - timeout( - lambda: self._connect_wrapper.get_connector(connector_name), + await timeout( + self._connect_wrapper.get_connector(connector_name), secs=self._timeout, ) - timeout( - lambda: self._connect_wrapper.delete_connector(connector_name), + await timeout( + self._connect_wrapper.delete_connector(connector_name), secs=self._timeout, ) except ConnectorNotFoundException: @@ -89,12 +87,12 @@ def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None: f"Connector Destruction: the connector {connector_name} does not exist. Skipping." 
) - def __dry_run_connector_creation( + async def __dry_run_connector_creation( self, connector_config: KafkaConnectorConfig ) -> None: connector_name = connector_config.name try: - connector = self._connect_wrapper.get_connector(connector_name) + connector = await self._connect_wrapper.get_connector(connector_name) log.info(f"Connector Creation: connector {connector_name} already exists.") if diff := render_diff(connector.config, connector_config.model_dump()): @@ -111,7 +109,7 @@ log.debug("POST /connectors HTTP/1.1") log.debug(f"HOST: {self._connect_wrapper.url}") - errors = self._connect_wrapper.validate_connector_config(connector_config) + errors = await self._connect_wrapper.validate_connector_config(connector_config) if len(errors) > 0: formatted_errors = "\n".join(errors) msg = f"Connector Creation: validating the connector config for connector {connector_name} resulted in the following errors: {formatted_errors}" @@ -121,9 +119,9 @@ f"Connector Creation: connector config for {connector_name} is valid!" ) - def __dry_run_connector_deletion(self, connector_name: str) -> None: + async def __dry_run_connector_deletion(self, connector_name: str) -> None: try: - self._connect_wrapper.get_connector(connector_name) + await self._connect_wrapper.get_connector(connector_name) log.info( magentaify( f"Connector Destruction: connector {connector_name} already exists. Deleting connector." diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index e75ac7361..bb036ed29 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -1,33 +1,27 @@ import asyncio import logging from asyncio import TimeoutError -from collections.abc import Callable -from typing import TypeVar +from collections.abc import Coroutine +from typing import Any, TypeVar log = logging.getLogger("Timeout") T = TypeVar("T") -def timeout(func: Callable[..., T], *, secs: int = 0) -> T | None: +async def timeout(coro: Coroutine[Any, Any, T], *, secs: int = 0) -> T | None: - """Set a timeout for a given lambda function. + """Set a timeout for a given coroutine. - :param func: The callable function + :param coro: The coroutine to await :param secs: The timeout in seconds. """ - - async def main_supervisor(func: Callable[..., T], secs: int) -> T: - runner = asyncio.to_thread(func) - task = asyncio.create_task(runner) + try: + task = asyncio.create_task(coro) if secs == 0: return await task else: return await asyncio.wait_for(task, timeout=secs) - - loop = asyncio.get_event_loop() - try: - return loop.run_until_complete(main_supervisor(func, secs)) except TimeoutError: log.exception( - f"Kafka Connect operation {func.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml." + f"Kafka Connect operation {coro.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml."
) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index 0545e9727..e99ca3403 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import TYPE_CHECKING -from schema_registry.client import SchemaRegistryClient +from schema_registry.client import AsyncSchemaRegistryClient from schema_registry.client.schema import AvroSchema from kpops.cli.exception import ClassNotFoundError @@ -25,7 +25,7 @@ class SchemaHandler: def __init__(self, kpops_config: KpopsConfig) -> None: - self.schema_registry_client = SchemaRegistryClient( + self.schema_registry_client = AsyncSchemaRegistryClient( str(kpops_config.schema_registry.url) ) self.components_module = kpops_config.components_module @@ -55,7 +55,7 @@ def load_schema_handler(cls, config: KpopsConfig) -> SchemaHandler | None: ) return None - def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: + async def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: for topic_name, config in to_section.topics.items(): value_schema_class = config.value_schema key_schema_class = config.key_schema @@ -63,23 +63,25 @@ def submit_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: schema = self.schema_provider.provide_schema( value_schema_class, to_section.models ) - self.__submit_value_schema( + await self.__submit_value_schema( schema, value_schema_class, dry_run, topic_name ) if key_schema_class is not None: schema = self.schema_provider.provide_schema( key_schema_class, to_section.models ) - self.__submit_key_schema(schema, key_schema_class, dry_run, topic_name) + await self.__submit_key_schema( + schema, key_schema_class, dry_run, topic_name + ) - def delete_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: + async def delete_schemas(self, to_section: ToSection, dry_run: bool = True) -> None: for topic_name, config in to_section.topics.items(): if config.value_schema is not None: - self.__delete_subject(f"{topic_name}-value", dry_run) + await self.__delete_subject(f"{topic_name}-value", dry_run) if config.key_schema is not None: - self.__delete_subject(f"{topic_name}-key", dry_run) + await self.__delete_subject(f"{topic_name}-key", dry_run) - def __submit_key_schema( + async def __submit_key_schema( self, schema: Schema, schema_class: str, @@ -87,14 +89,14 @@ def __submit_key_schema( topic_name: str, ) -> None: subject = f"{topic_name}-key" - self.__submit_schema( + await self.__submit_schema( subject=subject, schema=schema, schema_class=schema_class, dry_run=dry_run, ) - def __submit_value_schema( + async def __submit_value_schema( self, schema: Schema, schema_class: str, @@ -102,14 +104,14 @@ def __submit_value_schema( topic_name: str, ) -> None: subject = f"{topic_name}-value" - self.__submit_schema( + await self.__submit_schema( subject=subject, schema=schema, schema_class=schema_class, dry_run=dry_run, ) - def __submit_schema( + async def __submit_schema( self, subject: str, schema: Schema, @@ -117,8 +119,8 @@ def __submit_schema( dry_run: bool, ): if dry_run: - if self.__subject_exists(subject): - self.__check_compatibility(schema, schema_class, subject) + if await self.__subject_exists(subject): + await self.__check_compatibility(schema, schema_class, subject) else: log.info( greenify( @@ -126,20 +128,22 @@ def __submit_schema( ) ) else: - 
self.schema_registry_client.register(subject=subject, schema=schema) + await self.schema_registry_client.register(subject=subject, schema=schema) log.info( f"Schema Submission: schema submitted for {subject} with model {schema_class}." ) - def __subject_exists(self, subject: str) -> bool: - return len(self.schema_registry_client.get_versions(subject)) > 0 + async def __subject_exists(self, subject: str) -> bool: + return len(await self.schema_registry_client.get_versions(subject)) > 0 - def __check_compatibility( + async def __check_compatibility( self, schema: Schema, schema_class: str, subject: str ) -> None: - registered_version = self.schema_registry_client.check_version(subject, schema) + registered_version = await self.schema_registry_client.check_version( + subject, schema + ) if registered_version is None: - if not self.schema_registry_client.test_compatibility( + if not await self.schema_registry_client.test_compatibility( subject=subject, schema=schema ): schema_str = ( @@ -158,11 +162,11 @@ def __check_compatibility( f"Schema Submission: compatible schema for {subject} with model {schema_class}." ) - def __delete_subject(self, subject: str, dry_run: bool) -> None: + async def __delete_subject(self, subject: str, dry_run: bool) -> None: if dry_run: log.info(magentaify(f"Schema Deletion: will delete subject {subject}.")) else: - version_list = self.schema_registry_client.delete_subject(subject) + version_list = await self.schema_registry_client.delete_subject(subject) log.info( f"Schema Deletion: deleted {len(version_list)} versions for subject {subject}." ) diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 9a08e5512..00ff6e2b8 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -26,15 +26,17 @@ class TopicHandler: def __init__(self, proxy_wrapper: ProxyWrapper): self.proxy_wrapper = proxy_wrapper - def create_topics(self, to_section: ToSection, dry_run: bool) -> None: + async def create_topics(self, to_section: ToSection, dry_run: bool) -> None: for topic_name, topic_config in to_section.topics.items(): topic_spec = self.__prepare_body(topic_name, topic_config) if dry_run: - self.__dry_run_topic_creation(topic_name, topic_spec, topic_config) + await self.__dry_run_topic_creation( + topic_name, topic_spec, topic_config + ) else: try: - self.proxy_wrapper.get_topic(topic_name=topic_name) - topic_config_in_cluster = self.proxy_wrapper.get_topic_config( + await self.proxy_wrapper.get_topic(topic_name=topic_name) + topic_config_in_cluster = await self.proxy_wrapper.get_topic_config( topic_name=topic_name ) differences = self.__get_topic_config_diff( @@ -52,7 +54,7 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: json_body.append( {"name": difference.key, "value": config_value} ) - self.proxy_wrapper.batch_alter_topic_config( + await self.proxy_wrapper.batch_alter_topic_config( topic_name=topic_name, json_body=json_body, ) @@ -62,16 +64,16 @@ def create_topics(self, to_section: ToSection, dry_run: bool) -> None: f"Topic Creation: config of topic {topic_name} didn't change. Skipping update." 
) except TopicNotFoundException: - self.proxy_wrapper.create_topic(topic_spec=topic_spec) + await self.proxy_wrapper.create_topic(topic_spec=topic_spec) - def delete_topics(self, to_section: ToSection, dry_run: bool) -> None: + async def delete_topics(self, to_section: ToSection, dry_run: bool) -> None: for topic_name in to_section.topics: if dry_run: - self.__dry_run_topic_deletion(topic_name=topic_name) + await self.__dry_run_topic_deletion(topic_name=topic_name) else: try: - self.proxy_wrapper.get_topic(topic_name=topic_name) - self.proxy_wrapper.delete_topic(topic_name=topic_name) + await self.proxy_wrapper.get_topic(topic_name=topic_name) + await self.proxy_wrapper.delete_topic(topic_name=topic_name) except TopicNotFoundException: log.warning( f"Topic Deletion: topic {topic_name} does not exist in the cluster and cannot be deleted. Skipping." @@ -86,17 +88,17 @@ def __get_topic_config_diff( ) return list(Diff.from_dicts(comparable_in_cluster_config_dict, current_config)) - def __dry_run_topic_creation( + async def __dry_run_topic_creation( self, topic_name: str, topic_spec: TopicSpec, topic_config: TopicConfig | None = None, ) -> None: try: - topic_in_cluster = self.proxy_wrapper.get_topic(topic_name=topic_name) + topic_in_cluster = await self.proxy_wrapper.get_topic(topic_name=topic_name) topic_name = topic_in_cluster.topic_name if topic_config: - topic_config_in_cluster = self.proxy_wrapper.get_topic_config( + topic_config_in_cluster = await self.proxy_wrapper.get_topic_config( topic_name=topic_name ) in_cluster_config, new_config = parse_and_compare_topic_configs( @@ -115,7 +117,7 @@ def __dry_run_topic_creation( } log.debug(error_message) - broker_config = self.proxy_wrapper.get_broker_config() + broker_config = await self.proxy_wrapper.get_broker_config() effective_config = get_effective_config(broker_config) self.__check_partition_count(topic_in_cluster, topic_spec, effective_config) @@ -170,9 +172,9 @@ def __check_replication_factor( msg = f"Topic Creation: replication factor of topic {topic_name} changed! Replication factor of topic {topic_name} is {replication_factor}. The given replication count {topic_spec.replication_factor}." raise TopicTransactionError(msg) - def __dry_run_topic_deletion(self, topic_name: str) -> None: + async def __dry_run_topic_deletion(self, topic_name: str) -> None: try: - topic_in_cluster = self.proxy_wrapper.get_topic(topic_name=topic_name) + topic_in_cluster = await self.proxy_wrapper.get_topic(topic_name=topic_name) log.info( magentaify( f"Topic Deletion: topic {topic_in_cluster.topic_name} exists in the cluster. Deleting topic." diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index aa1db6283..7bfc4a276 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -32,6 +32,8 @@ class ProxyWrapper: def __init__(self, config: KafkaRestConfig) -> None: self._config: KafkaRestConfig = config + self._client = httpx.AsyncClient() + self._sync_client = httpx.Client() @cached_property def cluster_id(self) -> str: @@ -46,7 +48,8 @@ def cluster_id(self) -> str: :raises KafkaRestProxyError: Kafka REST proxy error :return: The Kafka cluster ID. 
""" - response = httpx.get(url=f"{self._config.url!s}v3/clusters") + response = self._sync_client.get(url=f"{self._config.url!s}v3/clusters") + if response.status_code == httpx.codes.OK: cluster_information = response.json() return cluster_information["data"][0]["cluster_id"] @@ -57,7 +60,7 @@ def cluster_id(self) -> str: def url(self) -> AnyHttpUrl: return self._config.url - def create_topic(self, topic_spec: TopicSpec) -> None: + async def create_topic(self, topic_spec: TopicSpec) -> None: """Create a topic. API Reference: @@ -66,11 +69,12 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :param topic_spec: The topic specification. :raises KafkaRestProxyError: Kafka REST proxy error """ - response = httpx.post( + response = await self._client.post( url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics", headers=HEADERS, json=topic_spec.model_dump(exclude_none=True), ) + if response.status_code == httpx.codes.CREATED: log.info(f"Topic {topic_spec.topic_name} created.") log.debug(response.json()) @@ -78,7 +82,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: raise KafkaRestProxyError(response) - def delete_topic(self, topic_name: str) -> None: + async def delete_topic(self, topic_name: str) -> None: """Delete a topic. API Reference: @@ -87,31 +91,32 @@ def delete_topic(self, topic_name: str) -> None: :param topic_name: Name of the topic. :raises KafkaRestProxyError: Kafka REST proxy error """ - response = httpx.delete( + response = await self._client.delete( url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) + if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Topic {topic_name} deleted.") return raise KafkaRestProxyError(response) - def get_topic(self, topic_name: str) -> TopicResponse: + async def get_topic(self, topic_name: str) -> TopicResponse: """Return the topic with the given topic_name. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#get--clusters-cluster_id-topics-topic_name - :param topic_name: The topic name. :raises TopicNotFoundException: Topic not found :raises KafkaRestProxyError: Kafka REST proxy error :return: Response of the get topic API. """ - response = httpx.get( + response = await self._client.get( url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) + if response.status_code == httpx.codes.OK: log.debug(f"Topic {topic_name} found.") log.debug(response.json()) @@ -127,18 +132,17 @@ def get_topic(self, topic_name: str) -> TopicResponse: raise KafkaRestProxyError(response) - def get_topic_config(self, topic_name: str) -> TopicConfigResponse: + async def get_topic_config(self, topic_name: str) -> TopicConfigResponse: """Return the config with the given topic_name. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#acl-v3 - :param topic_name: The topic name. :raises TopicNotFoundException: Topic not found :raises KafkaRestProxyError: Kafka REST proxy error :return: The topic configuration. 
""" - response = httpx.get( + response = await self._client.get( url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", headers=HEADERS, ) @@ -158,7 +162,9 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: raise KafkaRestProxyError(response) - def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> None: + async def batch_alter_topic_config( + self, topic_name: str, json_body: list[dict] + ) -> None: """Reset config of given config_name param to the default value on the kafka server. API Reference: @@ -168,18 +174,19 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No :param config_name: The configuration parameter name. :raises KafkaRestProxyError: Kafka REST proxy error """ - response = httpx.post( + response = await self._client.post( url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": json_body}, ) + if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Config of topic {topic_name} was altered.") return raise KafkaRestProxyError(response) - def get_broker_config(self) -> BrokerConfigResponse: + async def get_broker_config(self) -> BrokerConfigResponse: """Return the list of configuration parameters for all the brokers in the given Kafka cluster. API Reference: @@ -188,7 +195,7 @@ def get_broker_config(self) -> BrokerConfigResponse: :raises KafkaRestProxyError: Kafka REST proxy error :return: The broker configuration. """ - response = httpx.get( + response = await self._client.get( url=f"{self.url!s}v3/clusters/{self.cluster_id}/brokers/-/configs", headers=HEADERS, ) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index b8978c5af..3bc7ec1b7 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -145,8 +145,8 @@ def deploy_flags(self) -> HelmUpgradeInstallFlags: return HelmUpgradeInstallFlags(**self.helm_flags.model_dump()) @override - def deploy(self, dry_run: bool) -> None: - stdout = self.helm.upgrade_install( + async def deploy(self, dry_run: bool) -> None: + stdout = await self.helm.upgrade_install( self.helm_release_name, self.helm_chart, dry_run, @@ -158,8 +158,8 @@ def deploy(self, dry_run: bool) -> None: self.dry_run_handler.print_helm_diff(stdout, self.helm_release_name, log) @override - def destroy(self, dry_run: bool) -> None: - stdout = self.helm.uninstall( + async def destroy(self, dry_run: bool) -> None: + stdout = await self.helm.uninstall( self.namespace, self.helm_release_name, dry_run, diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 7ee67b09c..71d5758d2 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -72,20 +72,20 @@ def helm_flags(self) -> HelmFlags: ) @override - def clean(self, dry_run: bool) -> None: + async def clean(self, dry_run: bool) -> None: """Clean an app using a cleanup job. 
:param dry_run: Dry run command """ log.info(f"Uninstall old cleanup job for {self.helm_release_name}") - self.destroy(dry_run) + await self.destroy(dry_run) log.info(f"Init cleanup job for {self.helm_release_name}") - self.deploy(dry_run) + await self.deploy(dry_run) if not self.config.retain_clean_jobs: log.info(f"Uninstall cleanup job for {self.helm_release_name}") - self.destroy(dry_run) + await self.destroy(dry_run) class KafkaApp(PipelineComponent, ABC): @@ -102,14 +102,15 @@ class KafkaApp(PipelineComponent, ABC): ) @override - def deploy(self, dry_run: bool) -> None: + async def deploy(self, dry_run: bool) -> None: if self.to: - self.handlers.topic_handler.create_topics( + await self.handlers.topic_handler.create_topics( to_section=self.to, dry_run=dry_run ) if self.handlers.schema_handler: - self.handlers.schema_handler.submit_schemas( + await self.handlers.schema_handler.submit_schemas( to_section=self.to, dry_run=dry_run ) - super().deploy(dry_run) + + await super().deploy(dry_run) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 38f490458..fc966d6ca 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -75,7 +75,7 @@ def helm_flags(self) -> HelmFlags: ) @override - def reset(self, dry_run: bool) -> None: + async def reset(self, dry_run: bool) -> None: """Reset connector. At first, it deletes the previous cleanup job (connector resetter) @@ -89,22 +89,22 @@ def reset(self, dry_run: bool) -> None: f"Connector Cleanup: uninstalling cleanup job Helm release from previous runs for {self.app.config.connector}" ) ) - self.destroy(dry_run) + await self.destroy(dry_run) log.info( magentaify( f"Connector Cleanup: deploy Connect {self.app.connector_type} resetter for {self.app.config.connector}" ) ) - self.deploy(dry_run) + await self.deploy(dry_run) if not self.config.retain_clean_jobs: log.info(magentaify("Connector Cleanup: uninstall Kafka Resetter.")) - self.destroy(dry_run) + await self.destroy(dry_run) @override - def clean(self, dry_run: bool) -> None: - self.reset(dry_run) + async def clean(self, dry_run: bool) -> None: + await self.reset(dry_run) class KafkaConnector(PipelineComponent, ABC): @@ -172,33 +172,35 @@ def _resetter(self) -> KafkaConnectorResetter: ) @override - def deploy(self, dry_run: bool) -> None: + async def deploy(self, dry_run: bool) -> None: if self.to: - self.handlers.topic_handler.create_topics( + await self.handlers.topic_handler.create_topics( to_section=self.to, dry_run=dry_run ) if self.handlers.schema_handler: - self.handlers.schema_handler.submit_schemas( + await self.handlers.schema_handler.submit_schemas( to_section=self.to, dry_run=dry_run ) - self.handlers.connector_handler.create_connector(self.app, dry_run=dry_run) + await self.handlers.connector_handler.create_connector( + self.app, dry_run=dry_run + ) @override - def destroy(self, dry_run: bool) -> None: - self.handlers.connector_handler.destroy_connector( + async def destroy(self, dry_run: bool) -> None: + await self.handlers.connector_handler.destroy_connector( self.full_name, dry_run=dry_run ) @override - def clean(self, dry_run: bool) -> None: + async def clean(self, dry_run: bool) -> None: if self.to: if self.handlers.schema_handler: - self.handlers.schema_handler.delete_schemas( + await self.handlers.schema_handler.delete_schemas( to_section=self.to, dry_run=dry_run ) - self.handlers.topic_handler.delete_topics(self.to, dry_run=dry_run) + 
await self.handlers.topic_handler.delete_topics(self.to, dry_run=dry_run) class KafkaSourceConnector(KafkaConnector): @@ -222,15 +224,15 @@ def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: raise NotImplementedError(msg) @override - def reset(self, dry_run: bool) -> None: + async def reset(self, dry_run: bool) -> None: self._resetter.app.config.offset_topic = self.offset_topic - self._resetter.reset(dry_run) + await self._resetter.reset(dry_run) @override - def clean(self, dry_run: bool) -> None: - super().clean(dry_run) + async def clean(self, dry_run: bool) -> None: + await super().clean(dry_run) self._resetter.app.config.offset_topic = self.offset_topic - self._resetter.clean(dry_run) + await self._resetter.clean(dry_run) class KafkaSinkConnector(KafkaConnector): @@ -238,6 +240,12 @@ class KafkaSinkConnector(KafkaConnector): _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SINK) + @property + @override + def input_topics(self) -> list[str]: + topics = getattr(self.app, "topics", None) + return topics.split(",") if topics is not None else [] + @override def add_input_topics(self, topics: list[str]) -> None: existing_topics: str | None = getattr(self.app, "topics", None) @@ -254,12 +262,12 @@ def set_error_topic(self, topic_name: str) -> None: setattr(self.app, "errors.deadletterqueue.topic.name", topic_name) @override - def reset(self, dry_run: bool) -> None: + async def reset(self, dry_run: bool) -> None: self._resetter.app.config.delete_consumer_group = False - self._resetter.reset(dry_run) + await self._resetter.reset(dry_run) @override - def clean(self, dry_run: bool) -> None: - super().clean(dry_run) + async def clean(self, dry_run: bool) -> None: + await super().clean(dry_run) self._resetter.app.config.delete_consumer_group = True - self._resetter.clean(dry_run) + await self._resetter.clean(dry_run) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index e37e9dcc5..07f66571c 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -1,6 +1,7 @@ from __future__ import annotations from abc import ABC +from typing import TYPE_CHECKING from pydantic import AliasChoices, ConfigDict, Field @@ -20,6 +21,9 @@ ) from kpops.utils.docstring import describe_attr +if TYPE_CHECKING: + from collections.abc import Iterator + class PipelineComponent(BaseDefaultsComponent, ABC): """Base class for all components. 
@@ -60,6 +64,30 @@ def __init__(self, **kwargs) -> None: self.set_input_topics() self.set_output_topics() + @property + def input_topics(self) -> list[str]: + """Get all the input topics from config.""" + return [] + + @property + def extra_input_topics(self) -> dict[str, list[str]]: + """Get extra input topics list from config.""" + return {} + + @property + def output_topic(self) -> str | None: + """Get output topic from config.""" + return None + + @property + def extra_output_topics(self) -> dict[str, str]: + """Get extra output topics list from config.""" + return {} + + @property + def id(self) -> str: + return f"component-{self.full_name}" + @property def full_name(self) -> str: return self.prefix + self.name @@ -118,6 +146,18 @@ def set_input_topics(self) -> None: for name, topic in self.from_.topics.items(): self.apply_from_inputs(name, topic) + @property + def inputs(self) -> Iterator[str]: + yield from self.input_topics + for role_topics in self.extra_input_topics.values(): + yield from role_topics + + @property + def outputs(self) -> Iterator[str]: + if output_topic := self.output_topic: + yield output_topic + yield from self.extra_output_topics.values() + def apply_from_inputs(self, name: str, topic: FromTopic) -> None: """Add a `from` section input to the component config. @@ -192,25 +232,25 @@ def manifest(self) -> Resource: """Render final component resources, e.g. Kubernetes manifests.""" return [] - def deploy(self, dry_run: bool) -> None: + async def deploy(self, dry_run: bool) -> None: """Deploy component, e.g. to Kubernetes cluster. :param dry_run: Whether to do a dry run of the command """ - def destroy(self, dry_run: bool) -> None: + async def destroy(self, dry_run: bool) -> None: """Uninstall component, e.g. from Kubernetes cluster. :param dry_run: Whether to do a dry run of the command """ - def reset(self, dry_run: bool) -> None: + async def reset(self, dry_run: bool) -> None: """Reset component state. :param dry_run: Whether to do a dry run of the command """ - def clean(self, dry_run: bool) -> None: + async def clean(self, dry_run: bool) -> None: """Destroy component including related states. 
:param dry_run: Whether to do a dry run of the command diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index 2d6a586b2..43e32534d 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -68,6 +68,16 @@ def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: case _: super().apply_to_outputs(name, topic) + @property + @override + def output_topic(self) -> str | None: + return self.app.streams.output_topic + + @property + @override + def extra_output_topics(self) -> dict[str, str]: + return self.app.streams.extra_output_topics + @override def set_output_topic(self, topic_name: str) -> None: self.app.streams.output_topic = topic_name @@ -82,5 +92,5 @@ def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.PRODUCER_APP.value}" @override - def clean(self, dry_run: bool) -> None: - self._cleaner.clean(dry_run) + async def clean(self, dry_run: bool) -> None: + await self._cleaner.clean(dry_run) diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index 2c632e882..4c855a688 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -41,6 +41,26 @@ def _cleaner(self) -> StreamsAppCleaner: **self.model_dump(), ) + @property + @override + def input_topics(self) -> list[str]: + return self.app.streams.input_topics + + @property + @override + def extra_input_topics(self) -> dict[str, list[str]]: + return self.app.streams.extra_input_topics + + @property + @override + def output_topic(self) -> str | None: + return self.app.streams.output_topic + + @property + @override + def extra_output_topics(self) -> dict[str, str]: + return self.app.streams.extra_output_topics + @override def add_input_topics(self, topics: list[str]) -> None: self.app.streams.add_input_topics(topics) @@ -75,11 +95,11 @@ def helm_chart(self) -> str: return f"{self.repo_config.repository_name}/{AppType.STREAMS_APP.value}" @override - def reset(self, dry_run: bool) -> None: + async def reset(self, dry_run: bool) -> None: self._cleaner.app.streams.delete_output = False - self._cleaner.clean(dry_run) + await self._cleaner.clean(dry_run) @override - def clean(self, dry_run: bool) -> None: + async def clean(self, dry_run: bool) -> None: self._cleaner.app.streams.delete_output = True - self._cleaner.clean(dry_run) + await self._cleaner.clean(dry_run) diff --git a/kpops/pipeline.py b/kpops/pipeline.py index aff9ca475..d997814b3 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -1,13 +1,15 @@ from __future__ import annotations +import asyncio import json import logging from collections import Counter from dataclasses import dataclass, field from typing import TYPE_CHECKING +import networkx as nx import yaml -from pydantic import Field, RootModel, SerializeAsAny +from pydantic import BaseModel, Field, SerializeAsAny from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.dict_ops import generate_substitution, update_nested_pair @@ -16,7 +18,7 @@ from kpops.utils.yaml import load_yaml_file, substitute_nested if TYPE_CHECKING: - from collections.abc import Iterator + from collections.abc import Awaitable, Callable, Coroutine, Iterator from pathlib import Path from kpops.cli.registry import Registry @@ -34,44 +36,144 @@ class 
ValidationError(Exception): pass -class Pipeline(RootModel): +class Pipeline(BaseModel): """Pipeline representation.""" - root: list[SerializeAsAny[PipelineComponent]] = Field( + components: list[SerializeAsAny[PipelineComponent]] = Field( default=[], title="Components" ) + graph: nx.DiGraph = Field(default_factory=nx.DiGraph, exclude=True) + _component_index: dict[str, PipelineComponent | None] = {} + + class Config: + arbitrary_types_allowed = True @property def last(self) -> PipelineComponent: - return self.root[-1] + return self.components[-1] def find(self, component_name: str) -> PipelineComponent: - for component in self.root: + for component in self.components: if component_name == component.name: return component msg = f"Component {component_name} not found" raise ValueError(msg) + def __add_to_graph(self, component: PipelineComponent): + self._component_index[component.id] = component + self.graph.add_node(component.id) + + for input_topic in component.inputs: + self.__add_input(input_topic, component.id) + + for output_topic in component.outputs: + self.__add_output(output_topic, component.id) + def add(self, component: PipelineComponent) -> None: - self.root.append(component) + self.components.append(component) + self.__add_to_graph(component) def __bool__(self) -> bool: - return bool(self.root) + return bool(self.components) def __iter__(self) -> Iterator[PipelineComponent]: - return iter(self.root) + return iter(self.components) def __len__(self) -> int: - return len(self.root) + return len(self.components) def to_yaml(self) -> str: - return yaml.dump(self.model_dump(mode="json", by_alias=True, exclude_none=True)) + return yaml.dump( + self.model_dump(mode="json", by_alias=True, exclude_none=True)["components"] + ) + + def build_execution_graph_from( + self, + components: list[PipelineComponent], + reverse: bool, + runner: Callable[[PipelineComponent], Coroutine], + ) -> Awaitable: + sub_graph_nodes = self.__get_graph_nodes(components) + + async def run_parallel_tasks(coroutines: list[Coroutine]) -> None: + tasks = [] + for coro in coroutines: + tasks.append(asyncio.create_task(coro)) + await asyncio.gather(*tasks) + + async def run_graph_tasks(pending_tasks: list[Awaitable]): + for pending_task in pending_tasks: + await pending_task + + sub_graph = self.graph.subgraph(sub_graph_nodes) + transformed_graph = sub_graph.copy() + + root_node = "root_node_bfs" + # Add an artificial root node and connect it to every source node + # (nodes without predecessors) so a single BFS from this root reaches the whole graph + transformed_graph.add_node(root_node) + + for node in sub_graph: + predecessors = list(sub_graph.predecessors(node)) + if not predecessors: + transformed_graph.add_edge(root_node, node) + + layers_graph: list[list[str]] = list( + nx.bfs_layers(transformed_graph, root_node) + ) + + sorted_tasks = [] + for layer in layers_graph[1:]: + parallel_tasks = self.__get_parallel_tasks_from(layer, runner) + + if parallel_tasks: + sorted_tasks.append(run_parallel_tasks(parallel_tasks)) + + if reverse: + sorted_tasks.reverse() + + return run_graph_tasks(sorted_tasks) + + @staticmethod + def __get_graph_nodes(components: list[PipelineComponent]) -> Iterator[str]: + for component in components: + yield component.id + yield from component.inputs + yield from component.outputs + + def __get_parallel_tasks_from( + self, layer: list[str], runner: Callable[[PipelineComponent], Coroutine] + ) -> list[Coroutine]: + parallel_tasks = [] + + for node_in_layer in layer: + component =
self._component_index[node_in_layer] + if component is not None: + parallel_tasks.append(runner(component)) + + return parallel_tasks + + def __validate_graph(self) -> None: + if not nx.is_directed_acyclic_graph(self.graph): + msg = "Pipeline is not a valid DAG." + raise ValueError(msg) def validate(self) -> None: self.validate_unique_names() + self.__validate_graph() + + def __add_output(self, output_topic: str, source: str) -> None: + self._component_index[output_topic] = None + self.graph.add_node(output_topic) + self.graph.add_edge(source, output_topic) + + def __add_input(self, input_topic: str, target: str) -> None: + self._component_index[input_topic] = None + self.graph.add_node(input_topic) + self.graph.add_edge(input_topic, target) def validate_unique_names(self) -> None: - step_names = [component.full_name for component in self.root] + step_names = [component.full_name for component in self.components] duplicates = [name for name, count in Counter(step_names).items() if count > 1] if duplicates: msg = f"step names should be unique. duplicate step names: {', '.join(duplicates)}" diff --git a/poetry.lock b/poetry.lock index 7774806f1..7778dcf88 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiofiles" -version = "22.1.0" +version = "23.1.0" description = "File support for asyncio." optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "aiofiles-22.1.0-py3-none-any.whl", hash = "sha256:1142fa8e80dbae46bb6339573ad4c8c0841358f79c6eb50a493dceca14621bad"}, - {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, + {file = "aiofiles-23.1.0-py3-none-any.whl", hash = "sha256:9312414ae06472eb6f1d163f555e466a23aed1c8f60c30cccf7121dba2e53eb2"}, + {file = "aiofiles-23.1.0.tar.gz", hash = "sha256:edd247df9a19e0db16534d4baaf536d6609a43e1de5401d7a4c1c148753a1635"}, ] [[package]] @@ -263,6 +263,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "faker" +version = "22.0.0" +description = "Faker is a Python package that generates fake data for you." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Faker-22.0.0-py3-none-any.whl", hash = "sha256:9c22c0a734ca01c6e4f2259eab5dab9081905a9d67b27272aea5c9feeb5a3789"}, + {file = "Faker-22.0.0.tar.gz", hash = "sha256:1d5dc0a75da7bc40741ee4c84d99dc087b97bd086d4222ad06ac4dd2219bcf3f"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" + [[package]] name = "fastavro" version = "1.7.0" @@ -701,6 +715,24 @@ files = [ {file = "mkdocs_material_extensions-1.3.tar.gz", hash = "sha256:f0446091503acb110a7cab9349cbc90eeac51b58d1caa92a704a81ca1e24ddbd"}, ] +[[package]] +name = "networkx" +version = "3.1" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.8" +files = [ + {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, + {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, +] + +[package.extras] +default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] + [[package]] name = "nodeenv" version = "1.7.0" @@ -806,6 +838,30 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "polyfactory" +version = "2.13.0" +description = "Mock data generation factories" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "polyfactory-2.13.0-py3-none-any.whl", hash = "sha256:03acb0718f4efb2458c62eb8a2c888294c5b5bf2db31e0efc15a57ecc9eb3c2e"}, + {file = "polyfactory-2.13.0.tar.gz", hash = "sha256:d1e6d8952789de61dca2c32f3e3c9362d7681cf405cf9a41267915e0e33f7639"}, +] + +[package.dependencies] +faker = "*" +typing-extensions = "*" + +[package.extras] +attrs = ["attrs (>=22.2.0)"] +beanie = ["beanie", "pydantic[email]"] +full = ["attrs", "beanie", "msgspec", "odmantic", "pydantic", "sqlalchemy"] +msgspec = ["msgspec"] +odmantic = ["odmantic (<1.0.0)", "pydantic[email]"] +pydantic = ["pydantic[email]"] +sqlalchemy = ["sqlalchemy (>=1.4.29)"] + [[package]] name = "pre-commit" version = "2.20.0" @@ -1161,6 +1217,24 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + [[package]] name = "pytest-httpx" version = "0.22.0" @@ -1870,4 +1944,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = 
"9eb753456d6c39de50c7312283fe268431a8edd1d64f7f119f5e69ea94491723" +content-hash = "29099842d13dbbf32f4602a8bd74ca0a25d5433b86e5464f24c48a24911dc068" diff --git a/pyproject.toml b/pyproject.toml index 774ecbd0e..0777abbe7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,7 @@ cachetools = "^5.2.0" dictdiffer = "^0.9.0" python-schema-registry-client = "^2.4.1" httpx = "^0.24.1" +networkx = "^3.1" [tool.poetry.group.dev.dependencies] pytest = "^7.1.2" @@ -50,8 +51,10 @@ ruff = "^0.1.7" typer-cli = "^0.0.13" pyright = "^1.1.314" pytest-rerunfailures = "^11.1.2" +pytest-asyncio = "^0.21.1" pytest-httpx = "^0.22.0" pytablewriter = { extras = ["from"], version = "^1.0.0" } +polyfactory = "^2.13.0" [tool.poetry.group.docs] optional = true diff --git a/tests/cli/test_pipeline_steps.py b/tests/cli/test_pipeline_steps.py index f9a345ae7..fe0cfe68e 100644 --- a/tests/cli/test_pipeline_steps.py +++ b/tests/cli/test_pipeline_steps.py @@ -1,35 +1,45 @@ -from dataclasses import dataclass -from typing import cast from unittest.mock import MagicMock import pytest +from polyfactory.factories.pydantic_factory import ModelFactory from pytest_mock import MockerFixture from kpops.cli.main import FilterType, get_steps_to_apply +from kpops.component_handlers import ( + ComponentHandlers, +) from kpops.components import PipelineComponent +from kpops.components.base_components.models.from_section import FromSection +from kpops.components.base_components.models.to_section import ToSection from kpops.pipeline import Pipeline PREFIX = "example-prefix-" -@dataclass -class TestComponent: - __test__ = False - name: str - prefix: str = PREFIX +class TestComponentFactory(ModelFactory[PipelineComponent]): + to = ToSection() + from_ = FromSection() + enrich = False + validate = False + handlers = ComponentHandlers(None, MagicMock(), MagicMock()) -test_component_1 = TestComponent("example1") -test_component_2 = TestComponent("example2") -test_component_3 = TestComponent("example3") +run_validation = False +test_component_1 = TestComponentFactory.build(run_validation) +test_component_2 = TestComponentFactory.build(run_validation) +test_component_3 = TestComponentFactory.build(run_validation) + +test_component_1.name = "example1" +test_component_2.name = "example2" +test_component_3.name = "example3" @pytest.fixture(autouse=True) def pipeline() -> Pipeline: pipeline = Pipeline() - pipeline.add(cast(PipelineComponent, test_component_1)) - pipeline.add(cast(PipelineComponent, test_component_2)) - pipeline.add(cast(PipelineComponent, test_component_3)) + pipeline.add(test_component_1) + pipeline.add(test_component_2) + pipeline.add(test_component_3) return pipeline diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py index cdc7e9d9d..6740c72a2 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py +++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py @@ -2,7 +2,7 @@ from pathlib import Path from textwrap import dedent from unittest import mock -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from pytest_mock import MockerFixture @@ -36,6 +36,10 @@ def mock_execute(self, mocker: MockerFixture) -> MagicMock: mock_execute.return_value = "" return mock_execute + @pytest.fixture() + def run_command_async(self, mocker: MockerFixture) -> MagicMock: + return mocker.patch.object(Helm, "_Helm__async_execute") + @pytest.fixture() def log_warning_mock(self, mocker: 
MockerFixture) -> MagicMock: return mocker.patch("kpops.component_handlers.helm_wrapper.helm.log.warning") @@ -50,10 +54,11 @@ def mock_get_version(self, mocker: MockerFixture) -> MagicMock: def helm(self, mock_get_version: MagicMock) -> Helm: return Helm(helm_config=HelmConfig()) - def test_should_call_run_command_method_when_helm_install_with_defaults( - self, helm: Helm, mock_execute: MagicMock + @pytest.mark.asyncio() + async def test_should_call_run_command_method_when_helm_install_with_defaults( + self, helm: Helm, run_command_async: AsyncMock ): - helm.upgrade_install( + await helm.upgrade_install( release_name="test-release", chart=f"bakdata-streams-bootstrap/{AppType.STREAMS_APP.value}", dry_run=False, @@ -61,7 +66,8 @@ def test_should_call_run_command_method_when_helm_install_with_defaults( values={"commandLine": "test"}, flags=HelmUpgradeInstallFlags(), ) - mock_execute.assert_called_once_with( + + run_command_async.assert_called_once_with( [ "helm", "upgrade", @@ -134,10 +140,11 @@ def test_should_include_configured_tls_parameters_on_add_when_version_is_new( ), ] - def test_should_include_configured_tls_parameters_on_update( - self, helm: Helm, mock_execute: MagicMock + @pytest.mark.asyncio() + async def test_should_include_configured_tls_parameters_on_update( + self, helm: Helm, run_command_async: AsyncMock ): - helm.upgrade_install( + await helm.upgrade_install( release_name="test-release", chart="test-repository/test-chart", dry_run=False, @@ -149,7 +156,7 @@ def test_should_include_configured_tls_parameters_on_update( ), ) - mock_execute.assert_called_once_with( + run_command_async.assert_called_once_with( [ "helm", "upgrade", @@ -169,10 +176,11 @@ def test_should_include_configured_tls_parameters_on_update( ], ) - def test_should_call_run_command_method_when_helm_install_with_non_defaults( - self, helm: Helm, mock_execute: MagicMock + @pytest.mark.asyncio() + async def test_should_call_run_command_method_when_helm_install_with_non_defaults( + self, helm: Helm, run_command_async: AsyncMock ): - helm.upgrade_install( + await helm.upgrade_install( release_name="test-release", chart="test-repository/streams-app", namespace="test-namespace", @@ -188,7 +196,7 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults( version="2.4.2", ), ) - mock_execute.assert_called_once_with( + run_command_async.assert_called_once_with( [ "helm", "upgrade", @@ -213,26 +221,28 @@ def test_should_call_run_command_method_when_helm_install_with_non_defaults( ], ) - def test_should_call_run_command_method_when_uninstalling_streams_app( - self, helm: Helm, mock_execute: MagicMock + @pytest.mark.asyncio() + async def test_should_call_run_command_method_when_uninstalling_streams_app( + self, helm: Helm, run_command_async: AsyncMock ): - helm.uninstall( + await helm.uninstall( namespace="test-namespace", release_name="test-release", dry_run=False, ) - mock_execute.assert_called_once_with( + run_command_async.assert_called_once_with( ["helm", "uninstall", "test-release", "--namespace", "test-namespace"], ) - def test_should_log_warning_when_release_not_found( + @pytest.mark.asyncio() + async def test_should_log_warning_when_release_not_found( self, + run_command_async: AsyncMock, helm: Helm, - mock_execute: MagicMock, log_warning_mock: MagicMock, ): - mock_execute.side_effect = ReleaseNotFoundException() - helm.uninstall( + run_command_async.side_effect = ReleaseNotFoundException() + await helm.uninstall( namespace="test-namespace", release_name="test-release", dry_run=False, @@ 
-242,15 +252,16 @@ def test_should_log_warning_when_release_not_found( "Release with name test-release not found. Could not uninstall app." ) - def test_should_call_run_command_method_when_installing_streams_app__with_dry_run( - self, helm: Helm, mock_execute: MagicMock + @pytest.mark.asyncio() + async def test_should_call_run_command_method_when_installing_streams_app__with_dry_run( + self, helm: Helm, run_command_async: AsyncMock ): - helm.uninstall( + await helm.uninstall( namespace="test-namespace", release_name="test-release", dry_run=True, ) - mock_execute.assert_called_once_with( + run_command_async.assert_called_once_with( [ "helm", "uninstall", diff --git a/tests/component_handlers/kafka_connect/test_connect_handler.py b/tests/component_handlers/kafka_connect/test_connect_handler.py index db64690e9..cb0a1e78e 100644 --- a/tests/component_handlers/kafka_connect/test_connect_handler.py +++ b/tests/component_handlers/kafka_connect/test_connect_handler.py @@ -1,5 +1,5 @@ from unittest import mock -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from pytest_mock import MockerFixture @@ -40,6 +40,10 @@ def log_error_mock(self, mocker: MockerFixture) -> MagicMock: "kpops.component_handlers.kafka_connect.kafka_connect_handler.log.error" ) + @pytest.fixture() + def connector_wrapper(self) -> AsyncMock: + return AsyncMock() + @pytest.fixture() def renderer_diff_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch( @@ -62,17 +66,18 @@ def connector_config(self) -> KafkaConnectorConfig: } ) - def test_should_create_connector_in_dry_run( + @pytest.mark.asyncio() + async def test_should_create_connector_in_dry_run( self, connector_config: KafkaConnectorConfig, renderer_diff_mock: MagicMock, log_info_mock: MagicMock, + connector_wrapper: AsyncMock, ): - connector_wrapper = MagicMock() handler = self.connector_handler(connector_wrapper) renderer_diff_mock.return_value = None - handler.create_connector(connector_config, dry_run=True) + await handler.create_connector(connector_config, dry_run=True) connector_wrapper.get_connector.assert_called_once_with(CONNECTOR_NAME) connector_wrapper.validate_connector_config.assert_called_once_with( connector_config @@ -87,11 +92,10 @@ def test_should_create_connector_in_dry_run( ), ] - def test_should_log_correct_message_when_create_connector_and_connector_not_exists_in_dry_run( - self, - log_info_mock: MagicMock, + @pytest.mark.asyncio() + async def test_should_log_correct_message_when_create_connector_and_connector_not_exists_in_dry_run( + self, log_info_mock: MagicMock, connector_wrapper: AsyncMock ): - connector_wrapper = MagicMock() handler = self.connector_handler(connector_wrapper) connector_wrapper.get_connector.side_effect = ConnectorNotFoundException() @@ -102,8 +106,9 @@ def test_should_log_correct_message_when_create_connector_and_connector_not_exis "tasks.max": "1", "topics": TOPIC_NAME, } + config = KafkaConnectorConfig(**configs) - handler.create_connector(config, dry_run=True) + await handler.create_connector(config, dry_run=True) connector_wrapper.get_connector.assert_called_once_with(CONNECTOR_NAME) connector_wrapper.validate_connector_config.assert_called_once_with(config) @@ -116,11 +121,10 @@ def test_should_log_correct_message_when_create_connector_and_connector_not_exis ), ] - def test_should_log_correct_message_when_create_connector_and_connector_exists_in_dry_run( - self, - log_info_mock: MagicMock, + @pytest.mark.asyncio() + async def 
test_should_log_correct_message_when_create_connector_and_connector_exists_in_dry_run( + self, log_info_mock: MagicMock, connector_wrapper: AsyncMock ): - connector_wrapper = MagicMock() handler = self.connector_handler(connector_wrapper) actual_response = { @@ -143,8 +147,9 @@ def test_should_log_correct_message_when_create_connector_and_connector_exists_i "tasks.max": "2", "topics": TOPIC_NAME, } + connector_config = KafkaConnectorConfig(**configs) - handler.create_connector(connector_config, dry_run=True) + await handler.create_connector(connector_config, dry_run=True) connector_wrapper.get_connector.assert_called_once_with(CONNECTOR_NAME) connector_wrapper.validate_connector_config.assert_called_once_with( connector_config @@ -162,11 +167,11 @@ def test_should_log_correct_message_when_create_connector_and_connector_exists_i ), ] - def test_should_log_invalid_config_when_create_connector_in_dry_run( + @pytest.mark.asyncio() + async def test_should_log_invalid_config_when_create_connector_in_dry_run( self, connector_config: KafkaConnectorConfig, renderer_diff_mock: MagicMock ): - connector_wrapper = MagicMock() - + connector_wrapper = AsyncMock() errors = [ "Missing required configuration file which has no default value.", "Missing connector name.", @@ -181,46 +186,48 @@ def test_should_log_invalid_config_when_create_connector_in_dry_run( ConnectorStateException, match=f"Connector Creation: validating the connector config for connector {CONNECTOR_NAME} resulted in the following errors: {formatted_errors}", ): - handler.create_connector(connector_config, dry_run=True) + await handler.create_connector(connector_config, dry_run=True) connector_wrapper.validate_connector_config.assert_called_once_with( connector_config ) - def test_should_call_update_connector_config_when_connector_exists_not_dry_run( + @pytest.mark.asyncio() + async def test_should_call_update_connector_config_when_connector_exists_not_dry_run( self, connector_config: KafkaConnectorConfig ): - connector_wrapper = MagicMock() + connector_wrapper = AsyncMock() + handler = self.connector_handler(connector_wrapper) - handler.create_connector(connector_config, dry_run=False) + await handler.create_connector(connector_config, dry_run=False) assert connector_wrapper.mock_calls == [ mock.call.get_connector(CONNECTOR_NAME), mock.call.update_connector_config(connector_config), ] - def test_should_call_create_connector_when_connector_does_not_exists_not_dry_run( + @pytest.mark.asyncio() + async def test_should_call_create_connector_when_connector_does_not_exists_not_dry_run( self, connector_config: KafkaConnectorConfig ): - connector_wrapper = MagicMock() + connector_wrapper = AsyncMock() handler = self.connector_handler(connector_wrapper) connector_wrapper.get_connector.side_effect = ConnectorNotFoundException() - handler.create_connector(connector_config, dry_run=False) + + await handler.create_connector(connector_config, dry_run=False) connector_wrapper.create_connector.assert_called_once_with(connector_config) - def test_should_print_correct_log_when_destroying_connector_in_dry_run( - self, - log_info_mock: MagicMock, + @pytest.mark.asyncio() + async def test_should_print_correct_log_when_destroying_connector_in_dry_run( + self, log_info_mock: MagicMock, connector_wrapper: AsyncMock ): - connector_wrapper = MagicMock() - handler = self.connector_handler(connector_wrapper) - handler.destroy_connector(CONNECTOR_NAME, dry_run=True) + await handler.destroy_connector(CONNECTOR_NAME, dry_run=True) log_info_mock.assert_called_once_with( 
magentaify( @@ -228,43 +235,43 @@ def test_should_print_correct_log_when_destroying_connector_in_dry_run( ) ) - def test_should_print_correct_warning_log_when_destroying_connector_and_connector_exists_in_dry_run( - self, - log_warning_mock: MagicMock, + @pytest.mark.asyncio() + async def test_should_print_correct_warning_log_when_destroying_connector_and_connector_exists_in_dry_run( + self, log_warning_mock: MagicMock, connector_wrapper: AsyncMock ): - connector_wrapper = MagicMock() + connector_wrapper = AsyncMock() + connector_wrapper.get_connector.side_effect = ConnectorNotFoundException() handler = self.connector_handler(connector_wrapper) - handler.destroy_connector(CONNECTOR_NAME, dry_run=True) + await handler.destroy_connector(CONNECTOR_NAME, dry_run=True) log_warning_mock.assert_called_once_with( f"Connector Destruction: connector {CONNECTOR_NAME} does not exist and cannot be deleted. Skipping." ) - def test_should_call_delete_connector_when_destroying_existing_connector_not_dry_run( - self, + @pytest.mark.asyncio() + async def test_should_call_delete_connector_when_destroying_existing_connector_not_dry_run( + self, connector_wrapper: AsyncMock ): - connector_wrapper = MagicMock() handler = self.connector_handler(connector_wrapper) - handler.destroy_connector(CONNECTOR_NAME, dry_run=False) + await handler.destroy_connector(CONNECTOR_NAME, dry_run=False) assert connector_wrapper.mock_calls == [ mock.call.get_connector(CONNECTOR_NAME), mock.call.delete_connector(CONNECTOR_NAME), ] - def test_should_print_correct_warning_log_when_destroying_connector_and_connector_exists_not_dry_run( - self, - log_warning_mock: MagicMock, + @pytest.mark.asyncio() + async def test_should_print_correct_warning_log_when_destroying_connector_and_connector_exists_not_dry_run( + self, log_warning_mock: MagicMock, connector_wrapper: AsyncMock ): - connector_wrapper = MagicMock() connector_wrapper.get_connector.side_effect = ConnectorNotFoundException() handler = self.connector_handler(connector_wrapper) - handler.destroy_connector(CONNECTOR_NAME, dry_run=False) + await handler.destroy_connector(CONNECTOR_NAME, dry_run=False) log_warning_mock.assert_called_once_with( f"Connector Destruction: the connector {CONNECTOR_NAME} does not exist. Skipping." 
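        )

The connector wrapper tests that follow now await timeout(...) with a coroutine instead of handing it a lambda, matching the async rework of kpops/component_handlers/kafka_connect/timeout.py in this patch's diffstat. Below is a minimal sketch of such a helper, assuming it wraps asyncio.wait_for; the logger name, the secs=0 "no limit" convention, and the log-and-return behaviour on timeout are assumptions for illustration, not the patch's actual code.

import asyncio
import logging
from collections.abc import Coroutine
from typing import Any, TypeVar

log = logging.getLogger("Timeout")
T = TypeVar("T")


async def timeout(coro: Coroutine[Any, Any, T], *, secs: int = 0) -> T | None:
    """Await a coroutine, aborting it once the time limit in seconds is exceeded."""
    try:
        if secs == 0:
            return await coro  # no limit configured, just await the wrapped coroutine
        return await asyncio.wait_for(coro, timeout=secs)
    except asyncio.TimeoutError:
        # The real helper may exit or re-raise here; logging keeps the sketch simple.
        log.exception("Operation timed out after %s seconds.", secs)

With this shape a test can pass self.connect_wrapper.create_connector(connector_config) in directly and tune the time budget per call through secs, exactly as the call sites below do.

The layering idea behind Pipeline.build_execution_graph_from, added to kpops/pipeline.py earlier in this patch, can also be seen in a self-contained form. The toy graph, the __root__ sentinel, and the print-based runner are illustrative assumptions; unlike the real code, which skips graph nodes whose _component_index entry is None (topics), this sketch runs every node.

import asyncio
from collections.abc import Callable, Coroutine
from typing import Any

import networkx as nx


async def run_layers(
    graph: nx.DiGraph,
    runner: Callable[[str], Coroutine[Any, Any, None]],
    reverse: bool = False,
) -> None:
    # Attach a synthetic root to every source node so a single BFS covers the DAG.
    root = "__root__"
    layered: nx.DiGraph = graph.copy()
    layered.add_node(root)
    for node in graph:
        if not list(graph.predecessors(node)):
            layered.add_edge(root, node)

    layers = list(nx.bfs_layers(layered, root))[1:]  # drop the synthetic root's layer
    if reverse:  # tear-down style commands process the graph in reverse order
        layers.reverse()

    for layer in layers:
        # Nodes in one BFS layer do not depend on each other, so run them concurrently.
        await asyncio.gather(*(runner(node) for node in layer))


async def deploy(node: str) -> None:
    print(f"deploying {node}")


# input-topic -> app1 -> topic-a -> {app2, app3}
graph = nx.DiGraph(
    [
        ("input-topic", "app1"),
        ("app1", "topic-a"),
        ("topic-a", "app2"),
        ("topic-a", "app3"),
    ]
)
asyncio.run(run_layers(graph, deploy))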
diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 86eb0690b..cec146e9b 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -1,9 +1,10 @@ import json import sys from pathlib import Path -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest +import pytest_asyncio from pytest_httpx import HTTPXMock from kpops.component_handlers.kafka_connect.connect_wrapper import ConnectWrapper @@ -25,7 +26,7 @@ class TestConnectorApiWrapper: - @pytest.fixture(autouse=True) + @pytest_asyncio.fixture(autouse=True) def _setup(self): config = KpopsConfig(defaults_path=DEFAULTS_PATH) self.connect_wrapper = ConnectWrapper(config.kafka_connect) @@ -39,9 +40,10 @@ def connector_config(self) -> KafkaConnectorConfig: } ) - @patch("httpx.post") - def test_should_create_post_requests_for_given_connector_configuration( - self, mock_post: MagicMock + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.post") + async def test_should_create_post_requests_for_given_connector_configuration( + self, mock_post: AsyncMock ): configs = { "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", @@ -55,7 +57,7 @@ def test_should_create_post_requests_for_given_connector_configuration( } with pytest.raises(KafkaConnectError): - self.connect_wrapper.create_connector(KafkaConnectorConfig(**configs)) + await self.connect_wrapper.create_connector(KafkaConnectorConfig(**configs)) mock_post.assert_called_with( url=f"{DEFAULT_HOST}/connectors", @@ -66,7 +68,8 @@ def test_should_create_post_requests_for_given_connector_configuration( }, ) - def test_should_return_correct_response_when_connector_created( + @pytest.mark.asyncio() + async def test_should_return_correct_response_when_connector_created( self, httpx_mock: HTTPXMock, connector_config: KafkaConnectorConfig ): actual_response = { @@ -95,11 +98,16 @@ def test_should_return_correct_response_when_connector_created( json=actual_response, status_code=201, ) - expected_response = self.connect_wrapper.create_connector(connector_config) + + expected_response = await self.connect_wrapper.create_connector( + connector_config + ) + assert KafkaConnectResponse(**actual_response) == expected_response + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") - def test_should_raise_connector_exists_exception_when_connector_exists( + async def test_should_raise_connector_exists_exception_when_connector_exists( self, log_warning: MagicMock, httpx_mock: HTTPXMock, @@ -112,20 +120,23 @@ def test_should_raise_connector_exists_exception_when_connector_exists( status_code=409, ) - timeout( - lambda: self.connect_wrapper.create_connector(connector_config), - secs=1, + await timeout( + self.connect_wrapper.create_connector(connector_config), + secs=10, ) log_warning.assert_called_with( "Rebalancing in progress while creating a connector... Retrying..." 
) - @patch("httpx.get") - def test_should_create_correct_get_connector_request(self, mock_get: MagicMock): + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.get") + async def test_should_create_correct_get_connector_request( + self, mock_get: AsyncMock + ): connector_name = "test-connector" with pytest.raises(KafkaConnectError): - self.connect_wrapper.get_connector(connector_name) + await self.connect_wrapper.get_connector(connector_name) mock_get.assert_called_with( url=f"{DEFAULT_HOST}/connectors/{connector_name}", @@ -133,8 +144,9 @@ def test_should_create_correct_get_connector_request(self, mock_get: MagicMock): ) @pytest.mark.flaky(reruns=5, condition=sys.platform.startswith("win32")) + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") - def test_should_return_correct_response_when_getting_connector( + async def test_should_return_correct_response_when_getting_connector( self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -164,12 +176,13 @@ def test_should_return_correct_response_when_getting_connector( json=actual_response, status_code=200, ) - expected_response = self.connect_wrapper.get_connector(connector_name) + expected_response = await self.connect_wrapper.get_connector(connector_name) assert KafkaConnectResponse(**actual_response) == expected_response log_info.assert_called_once_with(f"Connector {connector_name} exists.") + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") - def test_should_raise_connector_not_found_when_getting_connector( + async def test_should_raise_connector_not_found_when_getting_connector( self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -182,14 +195,15 @@ def test_should_raise_connector_not_found_when_getting_connector( status_code=404, ) with pytest.raises(ConnectorNotFoundException): - self.connect_wrapper.get_connector(connector_name) + await self.connect_wrapper.get_connector(connector_name) log_info.assert_called_once_with( f"The named connector {connector_name} does not exists." ) + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") - def test_should_raise_rebalance_in_progress_when_getting_connector( + async def test_should_raise_rebalance_in_progress_when_getting_connector( self, log_warning: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -202,8 +216,8 @@ def test_should_raise_rebalance_in_progress_when_getting_connector( status_code=409, ) - timeout( - lambda: self.connect_wrapper.get_connector(connector_name), + await timeout( + self.connect_wrapper.get_connector(connector_name), secs=1, ) @@ -211,8 +225,11 @@ def test_should_raise_rebalance_in_progress_when_getting_connector( "Rebalancing in progress while getting a connector... Retrying..." 
) - @patch("httpx.put") - def test_should_create_correct_update_connector_request(self, mock_put: MagicMock): + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.put") + async def test_should_create_correct_update_connector_request( + self, mock_put: AsyncMock + ): connector_name = "test-connector" configs = { "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", @@ -225,7 +242,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc "connection.password": "fake-password", } with pytest.raises(KafkaConnectError): - self.connect_wrapper.update_connector_config( + await self.connect_wrapper.update_connector_config( KafkaConnectorConfig(**configs) ) @@ -235,8 +252,9 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc json=KafkaConnectorConfig(**configs).model_dump(), ) + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") - def test_should_return_correct_response_when_update_connector( + async def test_should_return_correct_response_when_update_connector( self, log_info: MagicMock, httpx_mock: HTTPXMock, @@ -269,7 +287,8 @@ def test_should_return_correct_response_when_update_connector( json=actual_response, status_code=200, ) - expected_response = self.connect_wrapper.update_connector_config( + + expected_response = await self.connect_wrapper.update_connector_config( connector_config ) assert KafkaConnectResponse(**actual_response) == expected_response @@ -277,8 +296,9 @@ def test_should_return_correct_response_when_update_connector( f"Config for connector {connector_name} updated." ) + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") - def test_should_return_correct_response_when_update_connector_created( + async def test_should_return_correct_response_when_update_connector_created( self, log_info: MagicMock, httpx_mock: HTTPXMock, @@ -311,14 +331,15 @@ def test_should_return_correct_response_when_update_connector_created( json=actual_response, status_code=201, ) - expected_response = self.connect_wrapper.update_connector_config( + expected_response = await self.connect_wrapper.update_connector_config( connector_config ) assert KafkaConnectResponse(**actual_response) == expected_response log_info.assert_called_once_with(f"Connector {connector_name} created.") + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") - def test_should_raise_connector_exists_exception_when_update_connector( + async def test_should_raise_connector_exists_exception_when_update_connector( self, log_warning: MagicMock, httpx_mock: HTTPXMock, @@ -334,8 +355,8 @@ def test_should_raise_connector_exists_exception_when_update_connector( status_code=409, ) - timeout( - lambda: self.connect_wrapper.update_connector_config(connector_config), + await timeout( + self.connect_wrapper.update_connector_config(connector_config), secs=1, ) @@ -343,21 +364,23 @@ def test_should_raise_connector_exists_exception_when_update_connector( "Rebalancing in progress while updating a connector... Retrying..." 
) - @patch("httpx.delete") - def test_should_create_correct_delete_connector_request( - self, mock_delete: MagicMock + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.delete") + async def test_should_create_correct_delete_connector_request( + self, mock_delete: AsyncMock ): connector_name = "test-connector" with pytest.raises(KafkaConnectError): - self.connect_wrapper.delete_connector(connector_name) + await self.connect_wrapper.delete_connector(connector_name) mock_delete.assert_called_with( url=f"{DEFAULT_HOST}/connectors/{connector_name}", headers=HEADERS, ) + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") - def test_should_return_correct_response_when_deleting_connector( + async def test_should_return_correct_response_when_deleting_connector( self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -387,12 +410,13 @@ def test_should_return_correct_response_when_deleting_connector( json=actual_response, status_code=204, ) - self.connect_wrapper.delete_connector(connector_name) + await self.connect_wrapper.delete_connector(connector_name) log_info.assert_called_once_with(f"Connector {connector_name} deleted.") + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") - def test_should_raise_connector_not_found_when_deleting_connector( + async def test_should_raise_connector_not_found_when_deleting_connector( self, log_info: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -405,14 +429,15 @@ def test_should_raise_connector_not_found_when_deleting_connector( status_code=404, ) with pytest.raises(ConnectorNotFoundException): - self.connect_wrapper.delete_connector(connector_name) + await self.connect_wrapper.delete_connector(connector_name) log_info.assert_called_once_with( f"The named connector {connector_name} does not exists." ) + @pytest.mark.asyncio() @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.warning") - def test_should_raise_rebalance_in_progress_when_deleting_connector( + async def test_should_raise_rebalance_in_progress_when_deleting_connector( self, log_warning: MagicMock, httpx_mock: HTTPXMock ): connector_name = "test-connector" @@ -425,8 +450,8 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector( status_code=409, ) - timeout( - lambda: self.connect_wrapper.delete_connector(connector_name), + await timeout( + self.connect_wrapper.delete_connector(connector_name), secs=1, ) @@ -434,9 +459,10 @@ def test_should_raise_rebalance_in_progress_when_deleting_connector( "Rebalancing in progress while deleting a connector... Retrying..." 
) - @patch("httpx.put") - def test_should_create_correct_validate_connector_config_request( - self, mock_put: MagicMock + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.put") + async def test_should_create_correct_validate_connector_config_request( + self, mock_put: AsyncMock ): connector_config = KafkaConnectorConfig( **{ @@ -447,7 +473,7 @@ def test_should_create_correct_validate_connector_config_request( } ) with pytest.raises(KafkaConnectError): - self.connect_wrapper.validate_connector_config(connector_config) + await self.connect_wrapper.validate_connector_config(connector_config) mock_put.assert_called_with( url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", @@ -455,9 +481,10 @@ def test_should_create_correct_validate_connector_config_request( json=connector_config.model_dump(), ) - @patch("httpx.put") - def test_should_create_correct_validate_connector_config_and_name_gets_added( - self, mock_put: MagicMock + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.put") + async def test_should_create_correct_validate_connector_config_and_name_gets_added( + self, mock_put: AsyncMock ): connector_name = "FileStreamSinkConnector" configs = { @@ -467,7 +494,7 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( "topics": "test-topic", } with pytest.raises(KafkaConnectError): - self.connect_wrapper.validate_connector_config( + await self.connect_wrapper.validate_connector_config( KafkaConnectorConfig(**configs) ) @@ -479,11 +506,13 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( ).model_dump(), ) - def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): + @pytest.mark.asyncio() + async def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): with Path( DEFAULTS_PATH / "connect_validation_response.json", ).open() as f: actual_response = json.load(f) + httpx_mock.add_response( method="PUT", url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", @@ -498,7 +527,7 @@ def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): "tasks.max": "1", "topics": "test-topic", } - errors = self.connect_wrapper.validate_connector_config( + errors = await self.connect_wrapper.validate_connector_config( KafkaConnectorConfig(**configs) ) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 901559958..db4611ee8 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -1,7 +1,7 @@ import json import logging from unittest import mock -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from pydantic import AnyHttpUrl, BaseModel, TypeAdapter @@ -57,11 +57,12 @@ def find_class_mock(mocker: MockerFixture) -> MagicMock: @pytest.fixture(autouse=True) -def schema_registry_mock(mocker: MockerFixture) -> MagicMock: - schema_registry_mock = mocker.patch( - "kpops.component_handlers.schema_handler.schema_handler.SchemaRegistryClient" +def schema_registry_mock(mocker: MockerFixture) -> AsyncMock: + schema_registry_mock_constructor = mocker.patch( + "kpops.component_handlers.schema_handler.schema_handler.AsyncSchemaRegistryClient", ) - return schema_registry_mock.return_value + schema_registry_mock_constructor.return_value = AsyncMock() + return schema_registry_mock_constructor.return_value @pytest.fixture() @@ -165,17 
+166,18 @@ def test_should_raise_value_error_when_schema_provider_is_called_and_components_ ) -def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( +@pytest.mark.asyncio() +async def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( to_section: ToSection, log_info_mock: MagicMock, - schema_registry_mock: MagicMock, + schema_registry_mock: AsyncMock, kpops_config: KpopsConfig, ): schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [] - schema_handler.submit_schemas(to_section, True) + await schema_handler.submit_schemas(to_section, True) log_info_mock.assert_called_once_with( greenify("Schema Submission: The subject topic-X-value will be submitted.") @@ -183,11 +185,12 @@ def test_should_log_info_when_submit_schemas_that_not_exists_and_dry_run_true( schema_registry_mock.register.assert_not_called() -def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( +@pytest.mark.asyncio() +async def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( topic_config: TopicConfig, to_section: ToSection, log_info_mock: MagicMock, - schema_registry_mock: MagicMock, + schema_registry_mock: AsyncMock, kpops_config: KpopsConfig, ): schema_handler = SchemaHandler(kpops_config) @@ -196,7 +199,7 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( schema_registry_mock.check_version.return_value = None schema_registry_mock.test_compatibility.return_value = True - schema_handler.submit_schemas(to_section, True) + await schema_handler.submit_schemas(to_section, True) log_info_mock.assert_called_once_with( f"Schema Submission: compatible schema for topic-X-value with model {topic_config.value_schema}." @@ -204,10 +207,11 @@ def test_should_log_info_when_submit_schemas_that_exists_and_dry_run_true( schema_registry_mock.register.assert_not_called() -def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatible_and_dry_run_true( +@pytest.mark.asyncio() +async def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatible_and_dry_run_true( topic_config: TopicConfig, to_section: ToSection, - schema_registry_mock: MagicMock, + schema_registry_mock: AsyncMock, kpops_config: KpopsConfig, ): schema_provider = TestSchemaProvider() @@ -219,7 +223,7 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl schema_registry_mock.test_compatibility.return_value = False with pytest.raises(Exception, match="Schema is not compatible for") as exception: - schema_handler.submit_schemas(to_section, True) + await schema_handler.submit_schemas(to_section, True) EXPECTED_SCHEMA = { "type": "record", @@ -240,12 +244,13 @@ def test_should_raise_exception_when_submit_schema_that_exists_and_not_compatibl schema_registry_mock.register.assert_not_called() -def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_version_and_dry_run_true( +@pytest.mark.asyncio() +async def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_version_and_dry_run_true( topic_config: TopicConfig, to_section: ToSection, log_info_mock: MagicMock, log_debug_mock: MagicMock, - schema_registry_mock: MagicMock, + schema_registry_mock: AsyncMock, kpops_config: KpopsConfig, ): schema_provider = TestSchemaProvider() @@ -257,7 +262,7 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve schema_registry_mock.get_versions.return_value = [1] schema_registry_mock.check_version.return_value = 
registered_version - schema_handler.submit_schemas(to_section, True) + await schema_handler.submit_schemas(to_section, True) assert log_info_mock.mock_calls == [ mock.call( @@ -274,11 +279,12 @@ def test_should_log_debug_when_submit_schema_that_exists_and_registered_under_ve schema_registry_mock.register.assert_not_called() -def test_should_submit_non_existing_schema_when_not_dry( +@pytest.mark.asyncio() +async def test_should_submit_non_existing_schema_when_not_dry( topic_config: TopicConfig, to_section: ToSection, log_info_mock: MagicMock, - schema_registry_mock: MagicMock, + schema_registry_mock: AsyncMock, kpops_config: KpopsConfig, ): schema_provider = TestSchemaProvider() @@ -288,7 +294,7 @@ def test_should_submit_non_existing_schema_when_not_dry( schema_registry_mock.get_versions.return_value = [] - schema_handler.submit_schemas(to_section, False) + await schema_handler.submit_schemas(to_section, False) subject = "topic-X-value" log_info_mock.assert_called_once_with( @@ -301,17 +307,18 @@ def test_should_submit_non_existing_schema_when_not_dry( ) -def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( +@pytest.mark.asyncio() +async def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( to_section: ToSection, log_info_mock: MagicMock, - schema_registry_mock: MagicMock, + schema_registry_mock: AsyncMock, kpops_config: KpopsConfig, ): schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [] - schema_handler.delete_schemas(to_section, True) + await schema_handler.delete_schemas(to_section, True) log_info_mock.assert_called_once_with( magentaify("Schema Deletion: will delete subject topic-X-value.") @@ -320,16 +327,17 @@ def test_should_log_correct_message_when_delete_schemas_and_in_dry_run( schema_registry_mock.delete_subject.assert_not_called() -def test_should_delete_schemas_when_not_in_dry_run( +@pytest.mark.asyncio() +async def test_should_delete_schemas_when_not_in_dry_run( to_section: ToSection, - schema_registry_mock: MagicMock, + schema_registry_mock: AsyncMock, kpops_config: KpopsConfig, ): schema_handler = SchemaHandler(kpops_config) schema_registry_mock.get_versions.return_value = [] - schema_handler.delete_schemas(to_section, False) + await schema_handler.delete_schemas(to_section, False) schema_registry_mock.delete_subject.assert_called_once_with("topic-X-value") diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index f46c4b87f..3cffa5302 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -1,9 +1,10 @@ import json from pathlib import Path from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest +import pytest_asyncio from pydantic import AnyHttpUrl from pytest_httpx import HTTPXMock from pytest_mock import MockerFixture @@ -30,11 +31,10 @@ def log_info_mock(self, mocker: MockerFixture) -> MagicMock: def log_debug_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.component_handlers.topic.proxy_wrapper.log.debug") - @pytest.fixture(autouse=True) - def _setup(self, httpx_mock: HTTPXMock): + @pytest_asyncio.fixture(autouse=True) + async def _setup(self, httpx_mock: HTTPXMock): config = KpopsConfig(defaults_path=DEFAULTS_PATH) self.proxy_wrapper = ProxyWrapper(config.kafka_rest) - with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses" / "cluster-info.json", 
).open() as f: @@ -49,9 +49,10 @@ def _setup(self, httpx_mock: HTTPXMock): assert self.proxy_wrapper.url == AnyHttpUrl(DEFAULT_HOST) assert self.proxy_wrapper.cluster_id == "cluster-1" - @patch("httpx.post") - def test_should_create_topic_with_all_topic_configuration( - self, mock_post: MagicMock + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.post") + async def test_should_create_topic_with_all_topic_configuration( + self, mock_post: AsyncMock ): topic_spec = { "topic_name": "topic-X", @@ -64,7 +65,7 @@ def test_should_create_topic_with_all_topic_configuration( } with pytest.raises(KafkaRestProxyError): - self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) + await self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) mock_post.assert_called_with( url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", @@ -72,12 +73,15 @@ def test_should_create_topic_with_all_topic_configuration( json=topic_spec, ) - @patch("httpx.post") - def test_should_create_topic_with_no_configuration(self, mock_post: MagicMock): + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.post") + async def test_should_create_topic_with_no_configuration( + self, mock_post: AsyncMock + ): topic_spec: dict[str, Any] = {"topic_name": "topic-X"} with pytest.raises(KafkaRestProxyError): - self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) + await self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) mock_post.assert_called_with( url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics", @@ -85,24 +89,26 @@ def test_should_create_topic_with_no_configuration(self, mock_post: MagicMock): json=topic_spec, ) - @patch("httpx.get") - def test_should_call_get_topic(self, mock_get: MagicMock): + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.get") + async def test_should_call_get_topic(self, mock_get: AsyncMock): topic_name = "topic-X" with pytest.raises(KafkaRestProxyError): - self.proxy_wrapper.get_topic(topic_name=topic_name) + await self.proxy_wrapper.get_topic(topic_name=topic_name) mock_get.assert_called_with( url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", headers=HEADERS, ) - @patch("httpx.post") - def test_should_call_batch_alter_topic_config(self, mock_put: MagicMock): + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.post") + async def test_should_call_batch_alter_topic_config(self, mock_put: AsyncMock): topic_name = "topic-X" with pytest.raises(KafkaRestProxyError): - self.proxy_wrapper.batch_alter_topic_config( + await self.proxy_wrapper.batch_alter_topic_config( topic_name=topic_name, json_body=[ {"name": "cleanup.policy", "operation": "DELETE"}, @@ -121,29 +127,32 @@ def test_should_call_batch_alter_topic_config(self, mock_put: MagicMock): }, ) - @patch("httpx.delete") - def test_should_call_delete_topic(self, mock_delete: MagicMock): + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.delete") + async def test_should_call_delete_topic(self, mock_delete: AsyncMock): topic_name = "topic-X" with pytest.raises(KafkaRestProxyError): - self.proxy_wrapper.delete_topic(topic_name=topic_name) + await self.proxy_wrapper.delete_topic(topic_name=topic_name) mock_delete.assert_called_with( url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/topics/{topic_name}", headers=HEADERS, ) - @patch("httpx.get") - def test_should_call_get_broker_config(self, mock_get: MagicMock): + @pytest.mark.asyncio() + @patch("httpx.AsyncClient.get") + async def test_should_call_get_broker_config(self, 
mock_get: AsyncMock): with pytest.raises(KafkaRestProxyError): - self.proxy_wrapper.get_broker_config() + await self.proxy_wrapper.get_broker_config() mock_get.assert_called_with( url=f"{DEFAULT_HOST}/v3/clusters/{self.proxy_wrapper.cluster_id}/brokers/-/configs", headers=HEADERS, ) - def test_should_log_topic_creation( + @pytest.mark.asyncio() + async def test_should_log_topic_creation( self, log_info_mock: MagicMock, httpx_mock: HTTPXMock ): topic_spec = { @@ -163,10 +172,11 @@ def test_should_log_topic_creation( headers=HEADERS, status_code=201, ) - self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) + await self.proxy_wrapper.create_topic(topic_spec=TopicSpec(**topic_spec)) log_info_mock.assert_called_once_with("Topic topic-X created.") - def test_should_log_topic_deletion( + @pytest.mark.asyncio() + async def test_should_log_topic_deletion( self, log_info_mock: MagicMock, httpx_mock: HTTPXMock ): topic_name = "topic-X" @@ -177,10 +187,13 @@ def test_should_log_topic_deletion( headers=HEADERS, status_code=204, ) - self.proxy_wrapper.delete_topic(topic_name=topic_name) + await self.proxy_wrapper.delete_topic(topic_name=topic_name) log_info_mock.assert_called_once_with("Topic topic-X deleted.") - def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock): + @pytest.mark.asyncio() + async def test_should_get_topic( + self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock + ): res = { "kind": "KafkaTopic", "metadata": { @@ -208,12 +221,13 @@ def test_should_get_topic(self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock json=res, ) - get_topic_response = self.proxy_wrapper.get_topic(topic_name=topic_name) + get_topic_response = await self.proxy_wrapper.get_topic(topic_name=topic_name) log_debug_mock.assert_any_call("Topic topic-X found.") assert get_topic_response == topic_response - def test_should_rais_topic_not_found_exception_get_topic( + @pytest.mark.asyncio() + async def test_should_rais_topic_not_found_exception_get_topic( self, log_debug_mock: MagicMock, httpx_mock: HTTPXMock ): topic_name = "topic-X" @@ -229,10 +243,11 @@ def test_should_rais_topic_not_found_exception_get_topic( }, ) with pytest.raises(TopicNotFoundException): - self.proxy_wrapper.get_topic(topic_name=topic_name) + await self.proxy_wrapper.get_topic(topic_name=topic_name) log_debug_mock.assert_any_call("Topic topic-X not found.") - def test_should_log_reset_default_topic_config_when_deleted( + @pytest.mark.asyncio() + async def test_should_log_reset_default_topic_config_when_deleted( self, log_info_mock: MagicMock, httpx_mock: HTTPXMock ): topic_name = "topic-X" @@ -246,7 +261,7 @@ def test_should_log_reset_default_topic_config_when_deleted( status_code=204, ) - self.proxy_wrapper.batch_alter_topic_config( + await self.proxy_wrapper.batch_alter_topic_config( topic_name=topic_name, json_body=[{"name": config_name, "operation": "DELETE"}], ) diff --git a/tests/component_handlers/topic/test_topic_handler.py b/tests/component_handlers/topic/test_topic_handler.py index 6b1b017fc..7286932c0 100644 --- a/tests/component_handlers/topic/test_topic_handler.py +++ b/tests/component_handlers/topic/test_topic_handler.py @@ -2,9 +2,10 @@ import logging from pathlib import Path from unittest import mock -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest +import pytest_asyncio from pytest_mock import MockerFixture from kpops.component_handlers.topic.exception import ( @@ -49,8 +50,8 @@ def log_warning_mock(self, mocker: MockerFixture) -> 
MagicMock: def log_error_mock(self, mocker: MockerFixture) -> MagicMock: return mocker.patch("kpops.component_handlers.topic.handler.log.error") - @pytest.fixture(autouse=True) - def get_topic_response_mock(self) -> MagicMock: + @pytest_asyncio.fixture(autouse=True) + async def get_topic_response_mock(self) -> MagicMock: with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses/get_topic_response.json", ).open() as f: @@ -66,7 +67,7 @@ def get_topic_response_mock(self) -> MagicMock: ).open() as f: response_topic_config = json.load(f) - wrapper = MagicMock() + wrapper = AsyncMock() wrapper.get_topic.return_value = TopicResponse(**response) wrapper.get_broker_config.return_value = BrokerConfigResponse(**broker_response) wrapper.get_topic_config.return_value = TopicConfigResponse( @@ -74,8 +75,8 @@ def get_topic_response_mock(self) -> MagicMock: ) return wrapper - @pytest.fixture(autouse=True) - def get_default_topic_response_mock(self) -> MagicMock: + @pytest_asyncio.fixture(autouse=True) + async def get_default_topic_response_mock(self) -> MagicMock: with Path( DEFAULTS_PATH / "kafka_rest_proxy_responses/get_default_topic_response.json", @@ -87,13 +88,14 @@ def get_default_topic_response_mock(self) -> MagicMock: ).open() as f: broker_response = json.load(f) - wrapper = MagicMock() + wrapper = AsyncMock() wrapper.get_topic.return_value = TopicResponse(**response) wrapper.get_broker_config.return_value = BrokerConfigResponse(**broker_response) return wrapper - def test_should_call_create_topic_with_dry_run_false(self): - wrapper = MagicMock() + @pytest.mark.asyncio() + async def test_should_call_create_topic_with_dry_run_false(self): + wrapper = AsyncMock() wrapper.get_topic.side_effect = TopicNotFoundException() topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -105,7 +107,7 @@ def test_should_call_create_topic_with_dry_run_false(self): ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=False) + await topic_handler.create_topics(to_section=to_section, dry_run=False) topic_spec = { "topic_name": "topic-X", @@ -120,7 +122,8 @@ def test_should_call_create_topic_with_dry_run_false(self): wrapper.create_topic.assert_called_once_with(topic_spec=TopicSpec(**topic_spec)) wrapper.__dry_run_topic_creation.assert_not_called() - def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_false( + @pytest.mark.asyncio() + async def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_false( self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -134,7 +137,7 @@ def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_fals ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=False) + await topic_handler.create_topics(to_section=to_section, dry_run=False) wrapper.batch_alter_topic_config.assert_called_once_with( topic_name="topic-X", @@ -146,7 +149,8 @@ def test_should_call_update_topic_config_when_topic_exists_and_with_dry_run_fals ) wrapper.__dry_run_topic_creation.assert_not_called() - def test_should_update_topic_config_when_one_config_changed( + @pytest.mark.asyncio() + async def test_should_update_topic_config_when_one_config_changed( self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -161,14 +165,15 @@ def test_should_update_topic_config_when_one_config_changed( ) to_section = 
ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=False) + await topic_handler.create_topics(to_section=to_section, dry_run=False) wrapper.batch_alter_topic_config.assert_called_once_with( topic_name="topic-X", json_body=[{"name": "cleanup.policy", "value": "delete"}], ) - def test_should_not_update_topic_config_when_config_not_changed( + @pytest.mark.asyncio() + async def test_should_not_update_topic_config_when_config_not_changed( self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -183,14 +188,15 @@ def test_should_not_update_topic_config_when_config_not_changed( ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=False) + await topic_handler.create_topics(to_section=to_section, dry_run=False) wrapper.batch_alter_topic_config.assert_not_called() log_info_mock.assert_called_once_with( "Topic Creation: config of topic topic-X didn't change. Skipping update." ) - def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( + @pytest.mark.asyncio() + async def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -204,14 +210,15 @@ def test_should_not_update_topic_config_when_config_not_changed_and_not_ordered( ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=False) + await topic_handler.create_topics(to_section=to_section, dry_run=False) wrapper.batch_alter_topic_config.assert_not_called() log_info_mock.assert_called_once_with( "Topic Creation: config of topic topic-X didn't change. Skipping update." 
) - def test_should_call_reset_topic_config_when_topic_exists_dry_run_false_and_topic_configs_change( + @pytest.mark.asyncio() + async def test_should_call_reset_topic_config_when_topic_exists_dry_run_false_and_topic_configs_change( self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -226,7 +233,7 @@ def test_should_call_reset_topic_config_when_topic_exists_dry_run_false_and_topi ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=False) + await topic_handler.create_topics(to_section=to_section, dry_run=False) wrapper.batch_alter_topic_config.assert_called_once_with( topic_name="topic-X", @@ -234,7 +241,10 @@ def test_should_call_reset_topic_config_when_topic_exists_dry_run_false_and_topi ) wrapper.__dry_run_topic_creation.assert_not_called() - def test_should_not_call_create_topics_with_dry_run_true_and_topic_not_exists(self): + @pytest.mark.asyncio() + async def test_should_not_call_create_topics_with_dry_run_true_and_topic_not_exists( + self, + ): wrapper = MagicMock() wrapper.get_topic.side_effect = TopicNotFoundException() topic_handler = TopicHandler(proxy_wrapper=wrapper) @@ -247,11 +257,12 @@ def test_should_not_call_create_topics_with_dry_run_true_and_topic_not_exists(se ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=True) + await topic_handler.create_topics(to_section=to_section, dry_run=True) wrapper.create_topic.assert_not_called() - def test_should_print_message_with_dry_run_true_and_topic_not_exists( + @pytest.mark.asyncio() + async def test_should_print_message_with_dry_run_true_and_topic_not_exists( self, log_info_mock: MagicMock ): wrapper = MagicMock() @@ -268,7 +279,7 @@ def test_should_print_message_with_dry_run_true_and_topic_not_exists( ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=True) + await topic_handler.create_topics(to_section=to_section, dry_run=True) log_info_mock.assert_called_once_with( greenify( @@ -276,7 +287,8 @@ def test_should_print_message_with_dry_run_true_and_topic_not_exists( ) ) - def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_count_and_replication_factor( + @pytest.mark.asyncio() + async def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_count_and_replication_factor( self, log_info_mock: MagicMock, log_debug_mock: MagicMock, @@ -293,7 +305,7 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_co ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=True) + await topic_handler.create_topics(to_section=to_section, dry_run=True) wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff assert log_info_mock.mock_calls == [ mock.call("Topic Creation: topic-X already exists in cluster.") @@ -312,7 +324,8 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_same_partition_co ), ] - def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition_count_and_replication_factor( + @pytest.mark.asyncio() + async def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition_count_and_replication_factor( self, log_info_mock: MagicMock, log_debug_mock: MagicMock, @@ -327,7 +340,7 @@ def 
test_should_print_message_if_dry_run_and_topic_exists_with_default_partition ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.create_topics(to_section=to_section, dry_run=True) + await topic_handler.create_topics(to_section=to_section, dry_run=True) wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff assert log_info_mock.mock_calls == [ mock.call("Config changes for topic topic-X:"), @@ -350,7 +363,8 @@ def test_should_print_message_if_dry_run_and_topic_exists_with_default_partition ), ] - def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( + @pytest.mark.asyncio() + async def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -369,10 +383,11 @@ def test_should_exit_if_dry_run_and_topic_exists_different_partition_count( TopicTransactionError, match="Topic Creation: partition count of topic topic-X changed! Partitions count of topic topic-X is 10. The given partitions count 200.", ): - topic_handler.create_topics(to_section=to_section, dry_run=True) + await topic_handler.create_topics(to_section=to_section, dry_run=True) wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff - def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( + @pytest.mark.asyncio() + async def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( self, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -391,10 +406,11 @@ def test_should_exit_if_dry_run_and_topic_exists_different_replication_factor( TopicTransactionError, match="Topic Creation: replication factor of topic topic-X changed! Replication factor of topic topic-X is 3. 
The given replication count 300.", ): - topic_handler.create_topics(to_section=to_section, dry_run=True) + await topic_handler.create_topics(to_section=to_section, dry_run=True) wrapper.get_topic_config.assert_called_once() # dry run requests the config to create the diff - def test_should_log_correct_message_when_delete_existing_topic_dry_run( + @pytest.mark.asyncio() + async def test_should_log_correct_message_when_delete_existing_topic_dry_run( self, log_info_mock: MagicMock, get_topic_response_mock: MagicMock ): wrapper = get_topic_response_mock @@ -409,7 +425,7 @@ def test_should_log_correct_message_when_delete_existing_topic_dry_run( ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.delete_topics(to_section, True) + await topic_handler.delete_topics(to_section, True) wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_info_mock.assert_called_once_with( @@ -418,7 +434,8 @@ def test_should_log_correct_message_when_delete_existing_topic_dry_run( ) ) - def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( + @pytest.mark.asyncio() + async def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( self, log_warning_mock: MagicMock ): wrapper = MagicMock() @@ -434,15 +451,16 @@ def test_should_log_correct_message_when_delete_non_existing_topic_dry_run( ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.delete_topics(to_section, True) + await topic_handler.delete_topics(to_section, True) wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_warning_mock.assert_called_once_with( "Topic Deletion: topic topic-X does not exist in the cluster and cannot be deleted. Skipping." ) - def test_should_call_delete_topic_not_dry_run(self): - wrapper = MagicMock() + @pytest.mark.asyncio() + async def test_should_call_delete_topic_not_dry_run(self): + wrapper = AsyncMock() topic_handler = TopicHandler(proxy_wrapper=wrapper) topic_config = TopicConfig( @@ -453,14 +471,15 @@ def test_should_call_delete_topic_not_dry_run(self): ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.delete_topics(to_section, False) + await topic_handler.delete_topics(to_section, False) assert wrapper.mock_calls == [ mock.call.get_topic(topic_name="topic-X"), mock.call.delete_topic(topic_name="topic-X"), ] - def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_not_dry_run( + @pytest.mark.asyncio() + async def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_not_dry_run( self, log_warning_mock: MagicMock ): wrapper = MagicMock() @@ -475,7 +494,7 @@ def test_should_print_correct_warning_when_deleting_topic_that_does_not_exists_n configs={"cleanup.policy": "compact", "compression.type": "gzip"}, ) to_section = ToSection(topics={TopicName("topic-X"): topic_config}) - topic_handler.delete_topics(to_section, False) + await topic_handler.delete_topics(to_section, False) wrapper.get_topic.assert_called_once_with(topic_name="topic-X") log_warning_mock.assert_called_once_with( diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index f01f30d10..30752190f 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -1,5 +1,5 @@ from pathlib import Path -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from pytest_mock import MockerFixture @@ -30,15 +30,16 @@ def config(self) -> KpopsConfig: @pytest.fixture() def 
handlers(self) -> ComponentHandlers: return ComponentHandlers( - schema_handler=MagicMock(), - connector_handler=MagicMock(), - topic_handler=MagicMock(), + schema_handler=AsyncMock(), + connector_handler=AsyncMock(), + topic_handler=AsyncMock(), ) @pytest.fixture() def helm_mock(self, mocker: MockerFixture) -> MagicMock: + async_mock = AsyncMock() return mocker.patch( - "kpops.components.base_components.helm_app.Helm" + "kpops.components.base_components.helm_app.Helm", return_value=async_mock ).return_value @pytest.fixture() @@ -70,7 +71,8 @@ def helm_app( repo_config=repo_config, ) - def test_should_lazy_load_helm_wrapper_and_not_repo_add( + @pytest.mark.asyncio() + async def test_should_lazy_load_helm_wrapper_and_not_repo_add( self, helm_app: HelmApp, mocker: MockerFixture, @@ -85,7 +87,7 @@ def test_should_lazy_load_helm_wrapper_and_not_repo_add( new_callable=mocker.PropertyMock, ) - helm_app.deploy(False) + await helm_app.deploy(False) helm_mock.upgrade_install.assert_called_once_with( "${pipeline.name}-test-helm-app", @@ -99,7 +101,8 @@ def test_should_lazy_load_helm_wrapper_and_not_repo_add( HelmUpgradeInstallFlags(), ) - def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( + @pytest.mark.asyncio() + async def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -127,7 +130,7 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( new_callable=mocker.PropertyMock, ) - helm_app.deploy(dry_run=False) + await helm_app.deploy(dry_run=False) assert helm_mock.mock_calls == [ mocker.call.add_repo( @@ -148,7 +151,8 @@ def test_should_lazy_load_helm_wrapper_and_call_repo_add_when_implemented( ), ] - def test_should_deploy_app_with_local_helm_chart( + @pytest.mark.asyncio() + async def test_should_deploy_app_with_local_helm_chart( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -171,7 +175,7 @@ def helm_chart(self) -> str: namespace="test-namespace", ) - app_with_local_chart.deploy(dry_run=False) + await app_with_local_chart.deploy(dry_run=False) helm_mock.add_repo.assert_not_called() @@ -187,20 +191,22 @@ def helm_chart(self) -> str: HelmUpgradeInstallFlags(), ) - def test_should_raise_not_implemented_error_when_helm_chart_is_not_set( + @pytest.mark.asyncio() + async def test_should_raise_not_implemented_error_when_helm_chart_is_not_set( self, helm_app: HelmApp, helm_mock: MagicMock, ): with pytest.raises(NotImplementedError) as error: - helm_app.deploy(True) + await helm_app.deploy(True) helm_mock.add_repo.assert_called() assert ( str(error.value) == "Please implement the helm_chart property of the kpops.components.base_components.helm_app module." 
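
Note that AsyncMock records each await as a call as well, which is why the pre-existing assert_called_once_with assertions in these tests keep passing after the migration; assert_awaited_once_with additionally verifies the coroutine was actually awaited. A compact demonstration (the namespace and release names are illustrative):

    import asyncio
    from unittest.mock import AsyncMock

    async def main() -> None:
        helm = AsyncMock()
        await helm.uninstall("test-namespace", "my-release", True)
        # The await shows up in both the call and the await bookkeeping.
        helm.uninstall.assert_called_once_with("test-namespace", "my-release", True)
        helm.uninstall.assert_awaited_once_with("test-namespace", "my-release", True)

    asyncio.run(main())
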
) - def test_should_call_helm_uninstall_when_destroying_helm_app( + @pytest.mark.asyncio() + async def test_should_call_helm_uninstall_when_destroying_helm_app( self, helm_app: HelmApp, helm_mock: MagicMock, @@ -209,7 +215,7 @@ def test_should_call_helm_uninstall_when_destroying_helm_app( stdout = 'HelmApp - release "test-helm-app" uninstalled' helm_mock.uninstall.return_value = stdout - helm_app.destroy(True) + await helm_app.destroy(True) helm_mock.uninstall.assert_called_once_with( "test-namespace", "${pipeline.name}-test-helm-app", True diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 16d178f02..e0170d6b6 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -1,6 +1,6 @@ import re from pathlib import Path -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from pytest_mock import MockerFixture @@ -38,15 +38,17 @@ def config(self) -> KpopsConfig: @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( - schema_handler=MagicMock(), - connector_handler=MagicMock(), - topic_handler=MagicMock(), + schema_handler=AsyncMock(), + connector_handler=AsyncMock(), + topic_handler=AsyncMock(), ) @pytest.fixture(autouse=True) def helm_mock(self, mocker: MockerFixture) -> MagicMock: + async_mock = AsyncMock() return mocker.patch( - "kpops.components.base_components.helm_app.Helm" + "kpops.components.base_components.helm_app.Helm", + return_value=async_mock, ).return_value @pytest.fixture() diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index ef4f7caa3..381e70a62 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -148,7 +148,8 @@ def test_from_section_parsing_input_pattern( ) assert getattr(connector.app, "topics.regex") == topic_pattern - def test_deploy_order( + @pytest.mark.asyncio() + async def test_deploy_order( self, connector: KafkaSinkConnector, mocker: MockerFixture, @@ -160,16 +161,17 @@ def test_deploy_order( connector.handlers.connector_handler, "create_connector" ) - mock = mocker.MagicMock() + mock = mocker.AsyncMock() mock.attach_mock(mock_create_topics, "mock_create_topics") mock.attach_mock(mock_create_connector, "mock_create_connector") - connector.deploy(dry_run=True) + await connector.deploy(dry_run=True) assert mock.mock_calls == [ mocker.call.mock_create_topics(to_section=connector.to, dry_run=True), mocker.call.mock_create_connector(connector.app, dry_run=True), ] - def test_destroy( + @pytest.mark.asyncio() + async def test_destroy( self, connector: KafkaSinkConnector, mocker: MockerFixture, @@ -178,23 +180,25 @@ def test_destroy( connector.handlers.connector_handler, "destroy_connector" ) - connector.destroy(dry_run=True) + await connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( CONNECTOR_FULL_NAME, dry_run=True ) - def test_reset_when_dry_run_is_true( + @pytest.mark.asyncio() + async def test_reset_when_dry_run_is_true( self, connector: KafkaSinkConnector, dry_run_handler_mock: MagicMock, ): dry_run = True - connector.reset(dry_run=dry_run) + await connector.reset(dry_run=dry_run) dry_run_handler_mock.print_helm_diff.assert_called_once() - def test_reset_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_reset_when_dry_run_is_false( self, connector: KafkaSinkConnector, dry_run_handler_mock: MagicMock, @@ -214,7 +218,8 @@ def 
test_reset_when_dry_run_is_false( mock.attach_mock(helm_mock, "helm") dry_run = False - connector.reset(dry_run=dry_run) + + await connector.reset(dry_run=dry_run) mock_resetter_reset.assert_called_once_with(dry_run) mock.assert_has_calls( @@ -264,16 +269,19 @@ def test_reset_when_dry_run_is_false( dry_run_handler_mock.print_helm_diff.assert_not_called() mock_delete_topics.assert_not_called() - def test_clean_when_dry_run_is_true( + @pytest.mark.asyncio() + async def test_clean_when_dry_run_is_true( self, connector: KafkaSinkConnector, dry_run_handler_mock: MagicMock, ): dry_run = True - connector.clean(dry_run=dry_run) + + await connector.clean(dry_run=dry_run) dry_run_handler_mock.print_helm_diff.assert_called_once() - def test_clean_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_clean_when_dry_run_is_false( self, connector: KafkaSinkConnector, helm_mock: MagicMock, @@ -294,7 +302,7 @@ def test_clean_when_dry_run_is_false( mock.attach_mock(helm_mock, "helm") dry_run = False - connector.clean(dry_run=dry_run) + await connector.clean(dry_run=dry_run) assert log_info_mock.mock_calls == [ call.log_info( @@ -354,7 +362,8 @@ def test_clean_when_dry_run_is_false( ] dry_run_handler_mock.print_helm_diff.assert_not_called() - def test_clean_without_to_when_dry_run_is_true( + @pytest.mark.asyncio() + async def test_clean_without_to_when_dry_run_is_true( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -370,10 +379,12 @@ def test_clean_without_to_when_dry_run_is_true( ) dry_run = True - connector.clean(dry_run) + + await connector.clean(dry_run) dry_run_handler_mock.print_helm_diff.assert_called_once() - def test_clean_without_to_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_clean_without_to_when_dry_run_is_false( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -402,7 +413,7 @@ def test_clean_without_to_when_dry_run_is_false( mock.attach_mock(helm_mock, "helm") dry_run = False - connector.clean(dry_run) + await connector.clean(dry_run) assert mock.mock_calls == [ mocker.call.helm.add_repo( diff --git a/tests/components/test_kafka_source_connector.py b/tests/components/test_kafka_source_connector.py index 31511e81f..a8b1911dc 100644 --- a/tests/components/test_kafka_source_connector.py +++ b/tests/components/test_kafka_source_connector.py @@ -95,7 +95,8 @@ def test_from_section_raises_exception( ), ) - def test_deploy_order( + @pytest.mark.asyncio() + async def test_deploy_order( self, connector: KafkaSourceConnector, mocker: MockerFixture, @@ -108,16 +109,17 @@ def test_deploy_order( connector.handlers.connector_handler, "create_connector" ) - mock = mocker.MagicMock() + mock = mocker.AsyncMock() mock.attach_mock(mock_create_topics, "mock_create_topics") mock.attach_mock(mock_create_connector, "mock_create_connector") - connector.deploy(dry_run=True) + await connector.deploy(dry_run=True) assert mock.mock_calls == [ mocker.call.mock_create_topics(to_section=connector.to, dry_run=True), mocker.call.mock_create_connector(connector.app, dry_run=True), ] - def test_destroy( + @pytest.mark.asyncio() + async def test_destroy( self, connector: KafkaSourceConnector, mocker: MockerFixture, @@ -129,24 +131,26 @@ def test_destroy( connector.handlers.connector_handler, "destroy_connector" ) - connector.destroy(dry_run=True) + await connector.destroy(dry_run=True) mock_destroy_connector.assert_called_once_with( CONNECTOR_FULL_NAME, dry_run=True ) - def test_reset_when_dry_run_is_true( + @pytest.mark.asyncio() + async def test_reset_when_dry_run_is_true( 
self, connector: KafkaSourceConnector, dry_run_handler_mock: MagicMock, ): assert connector.handlers.connector_handler - connector.reset(dry_run=True) + await connector.reset(dry_run=True) dry_run_handler_mock.print_helm_diff.assert_called_once() - def test_reset_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_reset_when_dry_run_is_false( self, connector: KafkaSourceConnector, dry_run_handler_mock: MagicMock, @@ -165,7 +169,7 @@ def test_reset_when_dry_run_is_false( mock.attach_mock(mock_clean_connector, "mock_clean_connector") mock.attach_mock(helm_mock, "helm") - connector.reset(dry_run=False) + await connector.reset(dry_run=False) assert mock.mock_calls == [ mocker.call.helm.add_repo( @@ -211,18 +215,20 @@ def test_reset_when_dry_run_is_false( mock_delete_topics.assert_not_called() dry_run_handler_mock.print_helm_diff.assert_not_called() - def test_clean_when_dry_run_is_true( + @pytest.mark.asyncio() + async def test_clean_when_dry_run_is_true( self, connector: KafkaSourceConnector, dry_run_handler_mock: MagicMock, ): assert connector.handlers.connector_handler - connector.clean(dry_run=True) + await connector.clean(dry_run=True) dry_run_handler_mock.print_helm_diff.assert_called_once() - def test_clean_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_clean_when_dry_run_is_false( self, connector: KafkaSourceConnector, helm_mock: MagicMock, @@ -244,7 +250,7 @@ def test_clean_when_dry_run_is_false( mock.attach_mock(helm_mock, "helm") dry_run = False - connector.clean(dry_run) + await connector.clean(dry_run) assert mock.mock_calls == [ mocker.call.mock_delete_topics(connector.to, dry_run=dry_run), @@ -291,7 +297,8 @@ def test_clean_when_dry_run_is_false( dry_run_handler_mock.print_helm_diff.assert_not_called() - def test_clean_without_to_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_clean_without_to_when_dry_run_is_false( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -325,7 +332,7 @@ def test_clean_without_to_when_dry_run_is_false( mock.attach_mock(helm_mock, "helm") dry_run = False - connector.clean(dry_run) + await connector.clean(dry_run) assert mock.mock_calls == [ mocker.call.helm.add_repo( @@ -372,7 +379,8 @@ def test_clean_without_to_when_dry_run_is_false( mock_delete_topics.assert_not_called() dry_run_handler_mock.print_helm_diff.assert_not_called() - def test_clean_without_to_when_dry_run_is_true( + @pytest.mark.asyncio() + async def test_clean_without_to_when_dry_run_is_true( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -391,6 +399,6 @@ def test_clean_without_to_when_dry_run_is_true( assert connector.handlers.connector_handler - connector.clean(dry_run=True) + await connector.clean(dry_run=True) dry_run_handler_mock.print_helm_diff.assert_called_once() diff --git a/tests/components/test_kubernetes_app.py b/tests/components/test_kubernetes_app.py index c949f9832..16373d7d5 100644 --- a/tests/components/test_kubernetes_app.py +++ b/tests/components/test_kubernetes_app.py @@ -1,5 +1,5 @@ from pathlib import Path -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest from pytest_mock import MockerFixture @@ -32,9 +32,9 @@ def config(self) -> KpopsConfig: @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( - schema_handler=MagicMock(), - connector_handler=MagicMock(), - topic_handler=MagicMock(), + schema_handler=AsyncMock(), + connector_handler=AsyncMock(), + topic_handler=AsyncMock(), ) @pytest.fixture() diff --git 
a/tests/components/test_producer_app.py b/tests/components/test_producer_app.py index e143e3f74..9109f28a8 100644 --- a/tests/components/test_producer_app.py +++ b/tests/components/test_producer_app.py @@ -1,6 +1,6 @@ import logging from pathlib import Path -from unittest.mock import ANY, MagicMock +from unittest.mock import ANY, AsyncMock import pytest from pytest_mock import MockerFixture @@ -33,9 +33,9 @@ def test_release_name(self): @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( - schema_handler=MagicMock(), - connector_handler=MagicMock(), - topic_handler=MagicMock(), + schema_handler=AsyncMock(), + connector_handler=AsyncMock(), + topic_handler=AsyncMock(), ) @pytest.fixture() @@ -103,7 +103,8 @@ def test_output_topics(self, config: KpopsConfig, handlers: ComponentHandlers): "first-extra-topic": "extra-topic-1" } - def test_deploy_order_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_deploy_order_when_dry_run_is_false( self, producer_app: ProducerApp, mocker: MockerFixture, @@ -116,11 +117,11 @@ def test_deploy_order_when_dry_run_is_false( producer_app.helm, "upgrade_install" ) - mock = mocker.MagicMock() + mock = mocker.AsyncMock() mock.attach_mock(mock_create_topics, "mock_create_topics") mock.attach_mock(mock_helm_upgrade_install, "mock_helm_upgrade_install") - producer_app.deploy(dry_run=False) + await producer_app.deploy(dry_run=False) assert mock.mock_calls == [ mocker.call.mock_create_topics(to_section=producer_app.to, dry_run=False), mocker.call.mock_helm_upgrade_install( @@ -149,20 +150,22 @@ def test_deploy_order_when_dry_run_is_false( ), ] - def test_destroy( + @pytest.mark.asyncio() + async def test_destroy( self, producer_app: ProducerApp, mocker: MockerFixture, ): mock_helm_uninstall = mocker.patch.object(producer_app.helm, "uninstall") - producer_app.destroy(dry_run=True) + await producer_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( "test-namespace", PRODUCER_APP_RELEASE_NAME, True ) - def test_should_not_reset_producer_app( + @pytest.mark.asyncio() + async def test_should_not_reset_producer_app( self, producer_app: ProducerApp, mocker: MockerFixture, @@ -182,7 +185,7 @@ def test_should_not_reset_producer_app( mock.attach_mock(mock_helm_uninstall, "helm_uninstall") mock.attach_mock(mock_helm_print_helm_diff, "print_helm_diff") - producer_app.clean(dry_run=True) + await producer_app.clean(dry_run=True) mock.assert_has_calls( [ @@ -224,7 +227,8 @@ def test_should_not_reset_producer_app( ] ) - def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( + @pytest.mark.asyncio() + async def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_with_dry_run_false( self, mocker: MockerFixture, producer_app: ProducerApp ): mock_helm_upgrade_install = mocker.patch.object( @@ -238,7 +242,7 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w mock.attach_mock(mock_helm_upgrade_install, "helm_upgrade_install") mock.attach_mock(mock_helm_uninstall, "helm_uninstall") - producer_app.clean(dry_run=False) + await producer_app.clean(dry_run=False) mock.assert_has_calls( [ @@ -274,3 +278,39 @@ def test_should_clean_producer_app_and_deploy_clean_up_job_and_delete_clean_up_w ANY, # __str__ ] ) + + def test_get_output_topics( + self, + config: KpopsConfig, + handlers: ComponentHandlers, + ): + producer_app = ProducerApp( + name="my-producer", + config=config, + handlers=handlers, + **{ + "namespace": "test-namespace", + 
"app": { + "namespace": "test-namespace", + "streams": {"brokers": "fake-broker:9092"}, + }, + "to": { + "topics": { + "${output_topic_name}": TopicConfig( + type=OutputTopicTypes.OUTPUT, partitions_count=10 + ), + "extra-topic-1": TopicConfig( + role="first-extra-topic", + partitions_count=10, + ), + } + }, + }, + ) + assert producer_app.output_topic == "${output_topic_name}" + assert producer_app.extra_output_topics == { + "first-extra-topic": "extra-topic-1" + } + assert producer_app.input_topics == [] + assert list(producer_app.inputs) == [] + assert list(producer_app.outputs) == ["${output_topic_name}", "extra-topic-1"] diff --git a/tests/components/test_streams_app.py b/tests/components/test_streams_app.py index e76973773..307c2123e 100644 --- a/tests/components/test_streams_app.py +++ b/tests/components/test_streams_app.py @@ -1,5 +1,5 @@ from pathlib import Path -from unittest.mock import ANY, MagicMock +from unittest.mock import ANY, AsyncMock import pytest from pytest_mock import MockerFixture @@ -38,9 +38,9 @@ def test_release_name(self): @pytest.fixture() def handlers(self) -> ComponentHandlers: return ComponentHandlers( - schema_handler=MagicMock(), - connector_handler=MagicMock(), - topic_handler=MagicMock(), + schema_handler=AsyncMock(), + connector_handler=AsyncMock(), + topic_handler=AsyncMock(), ) @pytest.fixture() @@ -276,7 +276,8 @@ def test_weave_inputs_from_prev_component( assert streams_app.app.streams.input_topics == ["prev-output-topic", "b", "a"] - def test_deploy_order_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_deploy_order_when_dry_run_is_false( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -318,12 +319,12 @@ def test_deploy_order_when_dry_run_is_false( streams_app.helm, "upgrade_install" ) - mock = mocker.MagicMock() + mock = mocker.AsyncMock() mock.attach_mock(mock_create_topics, "mock_create_topics") mock.attach_mock(mock_helm_upgrade_install, "mock_helm_upgrade_install") dry_run = False - streams_app.deploy(dry_run=dry_run) + await streams_app.deploy(dry_run=dry_run) assert mock.mock_calls == [ mocker.call.mock_create_topics(to_section=streams_app.to, dry_run=dry_run), @@ -359,16 +360,18 @@ def test_deploy_order_when_dry_run_is_false( ), ] - def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): + @pytest.mark.asyncio() + async def test_destroy(self, streams_app: StreamsApp, mocker: MockerFixture): mock_helm_uninstall = mocker.patch.object(streams_app.helm, "uninstall") - streams_app.destroy(dry_run=True) + await streams_app.destroy(dry_run=True) mock_helm_uninstall.assert_called_once_with( "test-namespace", STREAMS_APP_RELEASE_NAME, True ) - def test_reset_when_dry_run_is_false( + @pytest.mark.asyncio() + async def test_reset_when_dry_run_is_false( self, streams_app: StreamsApp, mocker: MockerFixture ): cleaner = streams_app._cleaner @@ -382,7 +385,7 @@ def test_reset_when_dry_run_is_false( mock.attach_mock(mock_helm_uninstall, "helm_uninstall") dry_run = False - streams_app.reset(dry_run=dry_run) + await streams_app.reset(dry_run=dry_run) mock.assert_has_calls( [ @@ -420,7 +423,8 @@ def test_reset_when_dry_run_is_false( ] ) - def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( + @pytest.mark.asyncio() + async def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( self, streams_app: StreamsApp, mocker: MockerFixture, @@ -437,7 +441,7 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( mock.attach_mock(mock_helm_uninstall, 
"helm_uninstall") dry_run = False - streams_app.clean(dry_run=dry_run) + await streams_app.clean(dry_run=dry_run) mock.assert_has_calls( [ @@ -474,3 +478,57 @@ def test_should_clean_streams_app_and_deploy_clean_up_job_and_delete_clean_up( ANY, # __str__ ] ) + + @pytest.mark.asyncio() + async def test_get_input_output_topics( + self, config: KpopsConfig, handlers: ComponentHandlers + ): + streams_app = StreamsApp( + name="my-app", + config=config, + handlers=handlers, + **{ + "namespace": "test-namespace", + "app": { + "streams": {"brokers": "fake-broker:9092"}, + }, + "from": { + "topics": { + "example-input": {"type": "input"}, + "b": {"type": "input"}, + "a": {"type": "input"}, + "topic-extra2": {"role": "role2"}, + "topic-extra3": {"role": "role2"}, + "topic-extra": {"role": "role1"}, + ".*": {"type": "pattern"}, + "example.*": { + "type": "pattern", + "role": "another-pattern", + }, + } + }, + "to": { + "topics": { + "example-output": {"type": "output"}, + "extra-topic": {"role": "fake-role"}, + } + }, + }, + ) + + assert streams_app.input_topics == ["example-input", "b", "a"] + assert streams_app.extra_input_topics == { + "role1": ["topic-extra"], + "role2": ["topic-extra2", "topic-extra3"], + } + assert streams_app.output_topic == "example-output" + assert streams_app.extra_output_topics == {"fake-role": "extra-topic"} + assert list(streams_app.outputs) == ["example-output", "extra-topic"] + assert list(streams_app.inputs) == [ + "example-input", + "b", + "a", + "topic-extra2", + "topic-extra3", + "topic-extra", + ] diff --git a/tests/components/test_streams_bootstrap.py b/tests/components/test_streams_bootstrap.py index 127485e30..2587aa2e3 100644 --- a/tests/components/test_streams_bootstrap.py +++ b/tests/components/test_streams_bootstrap.py @@ -50,7 +50,8 @@ def test_default_configs(self, config: KpopsConfig, handlers: ComponentHandlers) assert streams_bootstrap.version == "2.9.0" assert streams_bootstrap.namespace == "test-namespace" - def test_should_deploy_streams_bootstrap_app( + @pytest.mark.asyncio() + async def test_should_deploy_streams_bootstrap_app( self, config: KpopsConfig, handlers: ComponentHandlers, @@ -84,7 +85,7 @@ def test_should_deploy_streams_bootstrap_app( new_callable=mocker.PropertyMock, ) - streams_bootstrap.deploy(dry_run=True) + await streams_bootstrap.deploy(dry_run=True) print_helm_diff.assert_called_once() helm_upgrade_install.assert_called_once_with( diff --git a/tests/pipeline/resources/parallel-pipeline/config.yaml b/tests/pipeline/resources/parallel-pipeline/config.yaml new file mode 100644 index 000000000..1c3b4443f --- /dev/null +++ b/tests/pipeline/resources/parallel-pipeline/config.yaml @@ -0,0 +1,15 @@ +topic_name_config: + default_error_topic_name: ${component.name}-dead-letter-topic + default_output_topic_name: ${component.name}-test-topic + +schema_registry: + enabled: true + url: "http://localhost:8081" + +kafka_connect: + url: "http://kafka_connect_url:8083" +kafka_rest: + url: "http://kafka_rest_url:8082" + +defaults_path: .. 
+kafka_brokers: "broker:9092" diff --git a/tests/pipeline/resources/parallel-pipeline/defaults.yaml b/tests/pipeline/resources/parallel-pipeline/defaults.yaml new file mode 100644 index 000000000..4b6cd0c91 --- /dev/null +++ b/tests/pipeline/resources/parallel-pipeline/defaults.yaml @@ -0,0 +1,27 @@ +pipeline-component: + prefix: "" + +kubernetes-app: + namespace: ${NAMESPACE} + +kafka-connector: + namespace: ${NAMESPACE} + +kafka-app: + app: + streams: + brokers: ${config.kafka_brokers} + schemaRegistryUrl: ${config.schema_registry.url} + +streams-app: + app: + labels: + pipeline: ${pipeline.name} + to: + topics: + ${error_topic_name}: + type: error + partitions_count: 1 + ${output_topic_name}: + type: output + partitions_count: 3 diff --git a/tests/pipeline/resources/parallel-pipeline/pipeline.yaml b/tests/pipeline/resources/parallel-pipeline/pipeline.yaml new file mode 100644 index 000000000..1c461c65d --- /dev/null +++ b/tests/pipeline/resources/parallel-pipeline/pipeline.yaml @@ -0,0 +1,64 @@ +- type: producer-app + name: transaction-avro-producer-1 + to: + topics: + my-output-topic-with-multiple-producers: + type: output + partitions_count: 3 + +- type: producer-app + name: transaction-avro-producer-2 + to: + topics: + my-output-topic-with-multiple-producers: + type: output + partitions_count: 3 + +- type: producer-app + name: transaction-avro-producer-3 + to: + topics: + my-output-topic-with-multiple-producers: + type: output + partitions_count: 3 + +- type: streams-app + name: transaction-joiner + +- type: streams-app + name: fraud-detector + +- type: streams-app + name: account-linker + from: + components: + fraud-detector: + type: input + +- type: kafka-sink-connector + name: s3-connector-1 + from: + topics: + account-linker-test-topic: + type: input + app: + connector.class: io.confluent.connect.s3.S3SinkConnector + + +- type: kafka-sink-connector + name: s3-connector-2 + from: + topics: + account-linker-test-topic: + type: input + app: + connector.class: io.confluent.connect.s3.S3SinkConnector + +- type: kafka-sink-connector + name: s3-connector-3 + from: + topics: + account-linker-test-topic: + type: input + app: + connector.class: io.confluent.connect.s3.S3SinkConnector diff --git a/tests/pipeline/resources/pipeline-with-loop/defaults.yaml b/tests/pipeline/resources/pipeline-with-loop/defaults.yaml new file mode 100644 index 000000000..777933d94 --- /dev/null +++ b/tests/pipeline/resources/pipeline-with-loop/defaults.yaml @@ -0,0 +1,19 @@ +pipeline-component: + prefix: "" + +kubernetes-app: + namespace: example-namespace + +kafka-connector: + namespace: example-namespace + +kafka-app: + app: + streams: + brokers: 127.0.0.1:9092 + schemaRegistryUrl: 127.0.0.1:8081 + +streams-app: + app: + labels: + pipeline: ${pipeline.name} diff --git a/tests/pipeline/resources/pipeline-with-loop/pipeline.yaml b/tests/pipeline/resources/pipeline-with-loop/pipeline.yaml new file mode 100644 index 000000000..b8f2866f6 --- /dev/null +++ b/tests/pipeline/resources/pipeline-with-loop/pipeline.yaml @@ -0,0 +1,34 @@ +- type: producer-app + name: app1 + app: + image: producer-image + to: + topics: + my-output-topic: + type: output + +- type: streams-app + name: app2 + app: + image: app2-image + from: + topics: + my-output-topic: + type: input + to: + topics: + my-app2-topic: + type: output + +- type: streams-app + name: app3 + app: + image: app3-image + from: + topics: + my-app2-topic: + type: input + to: + topics: + my-output-topic: + type: output diff --git 
a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml index cf3b4831b..48cacc6cc 100644 --- a/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-short-topics/defaults.yaml @@ -6,9 +6,16 @@ kafka-app: app: streams: brokers: "${config.kafka_brokers}" - schema_registry_url: "${schema_registry_url}" + schema_registry_url: "${config.schema_registry.url}" version: "2.4.2" +producer-app: + to: + topics: + ${output_topic_name}: + partitions_count: 3 + + streams-app: # inherits from kafka-app app: streams: @@ -19,7 +26,7 @@ streams-app: # inherits from kafka-app type: error-topic: type: error - extra-topic: + extra-topic-output: role: role from: topics: diff --git a/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/config.yaml b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/config.yaml new file mode 100644 index 000000000..be0d3fcdf --- /dev/null +++ b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/config.yaml @@ -0,0 +1,12 @@ + +schema_registry: + enabled: false + url: "http://localhost:8081" + +kafka_connect: + url: "http://kafka_connect_url:8083" +kafka_rest: + url: "http://kafka_rest_url:8082" + +defaults_path: .. +kafka_brokers: "broker:9092" diff --git a/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml new file mode 100644 index 000000000..719059611 --- /dev/null +++ b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/defaults.yaml @@ -0,0 +1,27 @@ +pipeline-component: + prefix: "" + +kubernetes-app: + namespace: example-namespace + +kafka-connector: + namespace: example-namespace + +kafka-app: + app: + streams: + brokers: 127.0.0.1:9092 + schemaRegistryUrl: 127.0.0.1:8081 + +streams-app: + app: + labels: + pipeline: ${pipeline.name} + +producer: + to: + topics: + ${output_topic_name}: + type: output + configs: + cleanup.policy: compact,delete diff --git a/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml new file mode 100644 index 000000000..5e578f0a2 --- /dev/null +++ b/tests/pipeline/resources/pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml @@ -0,0 +1,8 @@ +- type: streams-app + name: app2-processor + app: + image: some-image + to: + topics: + app2-processor: + type: output diff --git a/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/config.yaml b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/config.yaml new file mode 100644 index 000000000..be0d3fcdf --- /dev/null +++ b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/config.yaml @@ -0,0 +1,12 @@ + +schema_registry: + enabled: false + url: "http://localhost:8081" + +kafka_connect: + url: "http://kafka_connect_url:8083" +kafka_rest: + url: "http://kafka_rest_url:8082" + +defaults_path: .. 
+kafka_brokers: "broker:9092" diff --git a/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml new file mode 100644 index 000000000..88eebd4c0 --- /dev/null +++ b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/defaults.yaml @@ -0,0 +1,28 @@ +pipeline-component: + prefix: "" + +kubernetes-app: + namespace: example-namespace + +kafka-connector: + namespace: example-namespace + +kafka-app: + app: + streams: + brokers: 127.0.0.1:9092 + schemaRegistryUrl: 127.0.0.1:8081 + + +streams-app: + app: + labels: + pipeline: ${pipeline.name} + +producer-app: + to: + topics: + ${output_topic_name}: + type: output + configs: + cleanup.policy: compact,delete diff --git a/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml new file mode 100644 index 000000000..03d9bee6d --- /dev/null +++ b/tests/pipeline/resources/pipelines-with-graphs/simple-pipeline/pipeline.yaml @@ -0,0 +1,6 @@ +- type: producer-app + name: app1 + +- type: streams-app + name: app2 + diff --git a/tests/pipeline/test_generate.py b/tests/pipeline/test_generate.py index 4db284208..801f1553e 100644 --- a/tests/pipeline/test_generate.py +++ b/tests/pipeline/test_generate.py @@ -1,4 +1,7 @@ +import asyncio from pathlib import Path +from unittest import mock +from unittest.mock import AsyncMock import pytest import yaml @@ -7,6 +10,7 @@ import kpops from kpops.cli.main import app +from kpops.components import PipelineComponent from kpops.pipeline import ParsingException, ValidationError runner = CliRunner() @@ -140,7 +144,7 @@ def test_substitute_in_component(self, snapshot: SnapshotTest): snapshot.assert_match(enriched_pipeline, "test-pipeline") - @pytest.mark.timeout(0.5) + @pytest.mark.timeout(2) def test_substitute_in_component_infinite_loop(self): with pytest.raises((ValueError, ParsingException)): runner.invoke( @@ -574,10 +578,10 @@ def test_short_topic_definition(self): input_components = enriched_pipeline[4]["from"]["components"] assert "type" not in output_topics["output-topic"] assert output_topics["error-topic"]["type"] == "error" - assert "type" not in output_topics["extra-topic"] + assert "type" not in output_topics["extra-topic-output"] assert "role" not in output_topics["output-topic"] assert "role" not in output_topics["error-topic"] - assert output_topics["extra-topic"]["role"] == "role" + assert output_topics["extra-topic-output"]["role"] == "role" assert "type" not in ["input-topic"] assert "type" not in input_topics["extra-topic"] @@ -629,6 +633,162 @@ def test_validate_unique_step_names(self): catch_exceptions=False, ) + def test_validate_loops_on_pipeline(self): + with pytest.raises(ValueError, match="Pipeline is not a valid DAG."): + runner.invoke( + app, + [ + "generate", + str(RESOURCE_PATH / "pipeline-with-loop/pipeline.yaml"), + "--defaults", + str(RESOURCE_PATH / "pipeline-with-loop"), + ], + catch_exceptions=False, + ) + + def test_validate_simple_graph(self): + pipeline = kpops.generate( + RESOURCE_PATH / "pipelines-with-graphs/simple-pipeline/pipeline.yaml", + defaults=RESOURCE_PATH / "pipelines-with-graphs" / "simple-pipeline", + ) + assert len(pipeline.components) == 2 + assert len(pipeline.graph.nodes) == 3 + assert len(pipeline.graph.edges) == 2 + node_components = list( + filter(lambda node_id: "component" in node_id, pipeline.graph.nodes) + ) + assert len(pipeline.components) == 
len(node_components) + + def test_validate_topic_and_component_same_name(self): + pipeline = kpops.generate( + RESOURCE_PATH + / "pipelines-with-graphs/same-topic-and-component-name/pipeline.yaml", + defaults=RESOURCE_PATH + / "pipelines-with-graphs" + / "same-topic-and-component-name", + ) + component, topic = list(pipeline.graph.nodes) + edges = list(pipeline.graph.edges) + assert component == f"component-{topic}" + assert (component, topic) in edges + + @pytest.mark.asyncio() + async def test_parallel_execution_graph(self): + pipeline = kpops.generate( + RESOURCE_PATH / "parallel-pipeline/pipeline.yaml", + defaults=RESOURCE_PATH / "parallel-pipeline", + config=RESOURCE_PATH / "parallel-pipeline", + ) + + called_component = AsyncMock() + + sleep_table_components = { + "transaction-avro-producer-1": 1, + "transaction-avro-producer-2": 0, + "transaction-avro-producer-3": 2, + "transaction-joiner": 3, + "fraud-detector": 2, + "account-linker": 0, + "s3-connector-1": 2, + "s3-connector-2": 1, + "s3-connector-3": 0, + } + + async def name_runner(component: PipelineComponent): + await asyncio.sleep(sleep_table_components[component.name]) + await called_component(component.name) + + execution_graph = pipeline.build_execution_graph_from( + list(pipeline.components), False, name_runner + ) + + await execution_graph + + assert called_component.mock_calls == [ + mock.call("transaction-avro-producer-2"), + mock.call("transaction-avro-producer-1"), + mock.call("transaction-avro-producer-3"), + mock.call("transaction-joiner"), + mock.call("fraud-detector"), + mock.call("account-linker"), + mock.call("s3-connector-3"), + mock.call("s3-connector-2"), + mock.call("s3-connector-1"), + ] + + @pytest.mark.asyncio() + async def test_subgraph_execution(self): + pipeline = kpops.generate( + RESOURCE_PATH / "parallel-pipeline/pipeline.yaml", + defaults=RESOURCE_PATH / "parallel-pipeline", + config=RESOURCE_PATH / "parallel-pipeline", + ) + + list_of_components = list(pipeline.components) + + called_component = AsyncMock() + + async def name_runner(component: PipelineComponent): + await called_component(component.name) + + execution_graph = pipeline.build_execution_graph_from( + [list_of_components[0], list_of_components[3], list_of_components[6]], + False, + name_runner, + ) + + await execution_graph + + assert called_component.mock_calls == [ + mock.call("transaction-avro-producer-1"), + mock.call("s3-connector-1"), + mock.call("transaction-joiner"), + ] + + @pytest.mark.asyncio() + async def test_parallel_execution_graph_reverse(self): + pipeline = kpops.generate( + RESOURCE_PATH / "parallel-pipeline/pipeline.yaml", + defaults=RESOURCE_PATH / "parallel-pipeline", + config=RESOURCE_PATH / "parallel-pipeline", + ) + + called_component = AsyncMock() + + sleep_table_components = { + "transaction-avro-producer-1": 1, + "transaction-avro-producer-2": 0, + "transaction-avro-producer-3": 2, + "transaction-joiner": 3, + "fraud-detector": 2, + "account-linker": 0, + "s3-connector-1": 2, + "s3-connector-2": 1, + "s3-connector-3": 0, + } + + async def name_runner(component: PipelineComponent): + await asyncio.sleep(sleep_table_components[component.name]) + await called_component(component.name) + + execution_graph = pipeline.build_execution_graph_from( + list(pipeline.components), True, name_runner + ) + + await execution_graph + + assert called_component.mock_calls == [ + mock.call("s3-connector-3"), + mock.call("s3-connector-2"), + mock.call("s3-connector-1"), + mock.call("account-linker"), + 
mock.call("fraud-detector"), + mock.call("transaction-joiner"), + mock.call("transaction-avro-producer-2"), + mock.call("transaction-avro-producer-1"), + mock.call("transaction-avro-producer-3"), + ] + def test_temp_trim_release_name(self): result = runner.invoke( app,