diff --git a/.github/actions/update-docs/action.yml b/.github/actions/update-docs/action.yml index 7ad9ab442..c748666bf 100644 --- a/.github/actions/update-docs/action.yml +++ b/.github/actions/update-docs/action.yml @@ -27,8 +27,8 @@ runs: - name: Install Python and set up Poetry uses: bakdata/ci-templates/actions/python-setup-poetry@v1.5.2 with: + python-version: "3.11" poetry-version: "1.5.1" - python-version: "3.10" - name: Install docs dependencies shell: bash diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c7f214209..3b8b2f627 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,7 +21,7 @@ jobs: os: - ubuntu-22.04 - windows-2022 - python-version: ["3.10", "3.11", "3.12"] + python-version: ["3.11", "3.12"] runs-on: ${{ matrix.os }} steps: @@ -32,8 +32,8 @@ jobs: - name: Install Python and set up Poetry uses: bakdata/ci-templates/actions/python-setup-poetry@v1.5.3 with: - poetry-version: "1.7.1" python-version: ${{ matrix.python-version }} + poetry-version: "1.7.1" - name: Check Poetry lock file consistency run: poetry lock --check @@ -43,7 +43,7 @@ jobs: - name: Lint (ruff) run: | - if [[ "$RUNNER_OS" == "Linux" && "${{ matrix.python-version }}" == "3.10" ]] + if [[ "$RUNNER_OS" == "Linux" && "${{ matrix.python-version }}" == "3.11" ]] then poetry run ruff check . --config pyproject.toml --output-format=github --no-fix else @@ -55,7 +55,7 @@ jobs: - name: Typing (pyright) run: | - if [[ "$RUNNER_OS" == "Linux" && "${{ matrix.python-version }}" == "3.10" ]] + if [[ "$RUNNER_OS" == "Linux" && "${{ matrix.python-version }}" == "3.11" ]] then echo "::add-matcher::.github/pyright-matcher.json" poetry run pre-commit run pyright --all-files @@ -93,6 +93,7 @@ jobs: needs: [test] uses: bakdata/ci-templates/.github/workflows/python-poetry-publish-snapshot.yaml@1.40.4 with: + python-version: "3.11" poetry-version: "1.7.1" secrets: pypi-token: ${{ secrets.TEST_PYPI_TOKEN }} diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index fd66ef0cd..1a9d4a2df 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -9,6 +9,7 @@ jobs: uses: bakdata/ci-templates/.github/workflows/python-poetry-publish-pypi.yaml@1.40.4 with: publish-to-test: false + python-version: "3.11" poetry-version: "1.7.1" secrets: pypi-token: "${{ secrets.PYPI_TOKEN }}" diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 88a40d1c8..3acaeccd7 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -19,6 +19,7 @@ jobs: name: Release with: release-type: ${{ inputs.release-type }} + python-version: "3.11" poetry-version: "1.7.1" changelog: true changelog-config: "./.github/changelog-config.json" diff --git a/config.yaml b/config.yaml index 359b51a21..aa32d7d3c 100644 --- a/config.yaml +++ b/config.yaml @@ -1,2 +1,5 @@ kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" pipeline_base_dir: tests/pipeline +strimzi_topic: + label: + strimzi.io/cluster: my-cluster diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env index 21436ded7..2aca45807 100644 --- a/docs/docs/resources/variables/cli_env_vars.env +++ b/docs/docs/resources/variables/cli_env_vars.env @@ -14,6 +14,8 @@ KPOPS_DOTENV_PATH # No default value, not required # Suffix your environment files with this value (e.g. # defaults_development.yaml for environment=development). 
KPOPS_ENVIRONMENT # No default value, not required +# How KPOps should operate. +KPOPS_OPERATION_MODE=managed # Paths to dir containing 'pipeline.yaml' or files named # 'pipeline.yaml'. KPOPS_PIPELINE_PATHS # No default value, required diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md index da6a2d994..b4cead3ce 100644 --- a/docs/docs/resources/variables/cli_env_vars.md +++ b/docs/docs/resources/variables/cli_env_vars.md @@ -5,5 +5,6 @@ These variables take precedence over the commands' flags. If a variable is set, |KPOPS_CONFIG_PATH |. |False |Path to the dir containing config.yaml files | |KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. | |KPOPS_ENVIRONMENT | |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).| +|KPOPS_OPERATION_MODE|managed |False |How KPOps should operate. | |KPOPS_PIPELINE_PATHS| |True |Paths to dir containing 'pipeline.yaml' or files named 'pipeline.yaml'. | |KPOPS_PIPELINE_STEPS| |False |Comma separated list of steps to apply the command on | diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index f558d4d19..15d42bfc3 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -58,3 +58,9 @@ KPOPS_HELM_DIFF_CONFIG__IGNORE # No default value, required # Whether to retain clean up jobs in the cluster or uninstall the, # after completion. KPOPS_RETAIN_CLEAN_JOBS=False +# strimzi_topic +# Configuration for Strimzi Kafka Topics. +KPOPS_STRIMZI_TOPIC # No default value, not required +# operation_mode +# The operation mode of KPOps (managed, manifest, argo). +KPOPS_OPERATION_MODE=managed diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 8685acba0..5c96440bf 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -19,3 +19,5 @@ These variables take precedence over the settings in `config.yaml`. Variables ma |KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for `Capabilities.APIVersions` |helm_config.api_version | |KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore | |KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion.|retain_clean_jobs | +|KPOPS_STRIMZI_TOPIC | |False |Configuration for Strimzi Kafka Topics. |strimzi_topic | +|KPOPS_OPERATION_MODE |managed |False |The operation mode of KPOps (managed, manifest, argo). |operation_mode | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 949c42791..6ed30ef70 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -153,6 +153,25 @@ "title": "SchemaRegistryConfig", "type": "object" }, + "StrimziTopicConfig": { + "additionalProperties": false, + "description": "Configuration for Strimzi Kafka Topics.", + "properties": { + "label": { + "additionalProperties": { + "type": "string" + }, + "description": "The label to identify the KafkaTopic resources managed by the Topic Operator. This does not have to be the name of the Kafka cluster. It can be the label assigned to the KafkaTopic resource. 
If you deploy more than one Topic Operator, the labels must be unique for each. That is, the operators cannot manage the same resources.", "title": "Label", "type": "object" } }, "required": [ "label" ], "title": "StrimziTopicConfig", "type": "object" }, "TopicNameConfig": { "additionalProperties": false, "description": "Configure the topic name variables you can use in the pipeline definition.", "properties": { @@ -265,6 +284,18 @@ }, "description": "Configuration for Schema Registry." }, + "strimzi_topic": { + "anyOf": [ + { + "$ref": "#/$defs/StrimziTopicConfig" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Configuration for Strimzi Kafka Topics." + }, "topic_name_config": { "allOf": [ { diff --git a/docs/docs/user/core-concepts/operation-mode.md b/docs/docs/user/core-concepts/operation-mode.md new file mode 100644 index 000000000..484f27860 --- /dev/null +++ b/docs/docs/user/core-concepts/operation-mode.md @@ -0,0 +1,119 @@
+# Operation Modes in KPOps
+
+KPOps supports three operation modes: `managed`, `manifest`, and `argo`. These modes determine how resources are managed and let you tailor your deployment strategy.
+
+- **Managed Mode** (default): KPOps uses Helm and communicates with services such as the Kafka REST Proxy and Kafka Connect under the hood to manage the installation and (graceful) deletion of applications, the creation and deletion of Kafka topics, and the creation and deletion of connectors defined in your `pipeline.yaml`.
+- **Manifest Mode**: Generates Kubernetes manifests for each pipeline step instead of applying them, so you can deploy them with any tool.
+- **Argo Mode**: Extends manifest generation with ArgoCD-specific hooks for certain operations, facilitating GitOps workflows with automated cleanup and reset tasks.
+
+---
+
+## Configuring Operation Modes
+
+You can configure the operation mode using one of the following methods:
+
+1. **Command-Line Option**: Pass the `--operation-mode <mode>` flag (one of `managed`, `manifest`, or `argo`) when running a CLI command. Refer to the [CLI commands documentation](https://bakdata.github.io/kpops/9.0/user/references/cli-commands/#kpops-deploy) for more details.
+
+2. **Environment Variable**: Set the operation mode by defining the `KPOPS_OPERATION_MODE` environment variable.
+
+---
+
+## Generated Resources by Mode and Operation
+
+### `deploy`
+
+#### **Manifest Mode**
+
+- **streams-bootstrap Applications**:
+  - Depending on your pipeline configuration, Kubernetes `Job`, `Deployment`, `ConfigMap`, and `Service` resources.
+  - Please refer to [streams-bootstrap Helm Charts](https://github.com/bakdata/streams-bootstrap/tree/master/charts).
+- **Topics**:
+  - Strimzi `KafkaTopic` CRDs.
+
+#### **Argo Mode**
+
+- **streams-bootstrap Applications**:
+  - Depending on your pipeline configuration, Kubernetes `Job`, `Deployment`, `ConfigMap`, and `Service` resources.
+  - An additional Argo `sync-wave` annotation ensures Kafka topics are created first (default `sync-wave=0`) before the apps are deployed (lower priority `sync-wave>0`). All components of each sync wave are deployed in parallel by Argo.
+  - Please refer to [streams-bootstrap Helm Charts](https://github.com/bakdata/streams-bootstrap/tree/master/charts).
+- **Topics**:
+  - Strimzi `KafkaTopic` CRDs.
+- **Cleanup Jobs**:
+  - Kubernetes `Job` resources configured **with** ArgoCD `PostDelete` hooks, ensuring cleanup tasks are executed after ArgoCD application deletion.
+
+---
+
+### `reset`
+
+#### **Manifest Mode**
+
+- **Topics**:
+  - Strimzi `KafkaTopic` CRDs.
+- **Reset Jobs**:
+  - Kubernetes `Job` resources for resetting Kafka Streams application states.
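+
+For example, these reset resources can be rendered to stdout and captured for use in your own workflow. A minimal sketch (the pipeline path and output file are assumptions):
+
+```console
+$ kpops reset ./pipeline.yaml --operation-mode manifest > reset-manifests.yaml
+```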
+
+#### **Argo Mode**
+
+- **Topics**:
+  - Strimzi `KafkaTopic` CRDs.
+- **Reset Jobs**:
+  - Kubernetes `Job` resources **without** ArgoCD `PostDelete` hooks, providing a simpler reset process.
+
+---
+
+### `clean`
+
+#### **Manifest Mode**
+
+- **Cleanup Jobs**:
+  - Kubernetes `Job` resources for cleaning up temporary resources or artifacts using application container images.
+
+#### **Argo Mode**
+
+- **Not Applicable**:
+  - The `clean` command is not supported in Argo mode. Cleanup is instead achieved through the cleanup job hooks manifested during the `deploy` command.
+
+---
+
+### `destroy`
+
+#### **Manifest Mode**
+
+- **Topics**:
+  - Strimzi `KafkaTopic` CRDs.
+
+#### **Argo Mode**
+
+- **Topics**:
+  - Strimzi `KafkaTopic` CRDs.
+
+---
+
+## Use Cases for Each Mode
+
+### Manifest Mode
+
+- **Flexibility**: Use the generated manifests in manual workflows or integrate with any Kubernetes deployment tool.
+- **Version Control**: Commit generated manifests to a Git repository for tracking changes and rollback.
+
+### Argo Mode
+
+- **GitOps Integration**: Simplifies workflows when using ArgoCD for automated deployments and lifecycle management.
+- **PostDelete Hooks**: Automatically cleans up resources after deletion of ArgoCD applications.
+
+---
+
+## Summary of Resource Generation by Operation and Mode

| Resource Type | `deploy`                         | `reset`                          | `clean`             | `destroy`           |
| ------------- | -------------------------------- | -------------------------------- | ------------------- | ------------------- |
| Producer Apps | Manifest: Generated              | N/A                              | N/A                 | N/A                 |
|               | Argo: Generated                  |                                  |                     |                     |
| Streams Apps  | Manifest: Generated              | N/A                              | N/A                 | N/A                 |
|               | Argo: Generated                  |                                  |                     |                     |
| Topics        | Manifest: Generated              | Manifest: Generated              | N/A                 | Manifest: Generated |
|               | Argo: Generated                  | Argo: Generated                  |                     | Argo: Generated     |
| Cleanup Jobs  | Manifest: N/A                    | N/A                              | Manifest: Generated | N/A                 |
|               | Argo: With `PostDelete` hooks    |                                  | Argo: N/A           |                     |
| Reset Jobs    | Manifest: N/A                    | Manifest: Generated              | N/A                 | N/A                 |
|               | Argo: N/A                        | Argo: Without `PostDelete` hooks |                     |                     | diff --git a/docs/docs/user/migration-guide/v8-v9.md b/docs/docs/user/migration-guide/v8-v9.md new file mode 100644 index 000000000..69004e5a9 --- /dev/null +++ b/docs/docs/user/migration-guide/v8-v9.md @@ -0,0 +1,42 @@
+# Migrate from V8 to V9
+
+## [Introduce KPOps operation and manifest resources for deployment](https://github.com/bakdata/kpops/pull/541)
+
+The `kpops manifest` command and `kpops.manifest()` API have been **removed**.
+
+Resource manifesting is now integrated into the _operation_ commands (`deploy`, `destroy`, `reset`, `clean`) through the new **operation mode** feature.
+
+To manifest resources, you can:
+
+- Pass `--operation-mode manifest` when executing `kpops` commands.
+- Set the operation mode by defining the `KPOPS_OPERATION_MODE` environment variable.
+
+## [Manifest toSection with Strimzi KafkaTopic](https://github.com/bakdata/kpops/pull/545)
+
+KPOps now supports generating valid Kubernetes KafkaTopic resources compatible with [Strimzi](https://github.com/strimzi/strimzi-kafka-operator/blob/main/examples/topic/kafka-topic.yaml). When using `manifest` or `argo` as the `operation_mode`, you must specify the Strimzi cluster label to ensure the topics are recognized by the deployed Strimzi Topic Operator.
+
+```diff
+operation_mode: manifest
+
++ strimzi_topic:
++   label:
++     strimzi.io/cluster: my-cluster
+
+# rest of your config
+```
+
+
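+With this label in place, every topic from your pipeline's `to` section is manifested as a `KafkaTopic` resource carrying the configured cluster label. A minimal sketch of one manifested topic (the topic name, partitions, and replicas are illustrative assumptions):
+
+```yaml
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaTopic
+metadata:
+  name: my-output-topic
+  labels:
+    strimzi.io/cluster: my-cluster
+spec:
+  partitions: 1
+  replicas: 1
+```
+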
+!!! info "Standalone topic operator deployment"
    Refer to the [Strimzi documentation on deploying a standalone topic operator](https://strimzi.io/docs/operators/latest/deploying#deploying-the-topic-operator-standalone-str) for more details.



+## [Drop support for Python 3.10](https://github.com/bakdata/kpops/pull/561)
+
+KPOps V9 no longer supports Python 3.10. Ensure your environment is running Python 3.11 or 3.12.
+
+#### Action Required:
+
+Upgrade your Python version to a supported version (3.11 or 3.12).
+Update your virtual environments and CI pipelines to reflect this change. diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index d10ba44cc..20e871574 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -20,7 +20,6 @@ $ kpops [OPTIONS] COMMAND [ARGS]... * `destroy`: Destroy pipeline steps * `generate`: Generate enriched pipeline representation * `init`: Initialize a new KPOps project. -* `manifest`: Render final resource representation * `reset`: Reset pipeline steps * `schema`: Generate JSON schema. @@ -48,6 +47,7 @@ $ kpops clean [OPTIONS] PIPELINE_PATHS... * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] * `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] +* `--operation-mode [argo|manifest|managed]`: How KPOps should operate. [env var: KPOPS_OPERATION_MODE; default: managed] * `--help`: Show this message and exit. ## `kpops deploy` @@ -74,6 +74,7 @@ $ kpops deploy [OPTIONS] PIPELINE_PATHS... * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] * `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] +* `--operation-mode [argo|manifest|managed]`: How KPOps should operate. [env var: KPOPS_OPERATION_MODE; default: managed] * `--help`: Show this message and exit. ## `kpops destroy` @@ -100,6 +101,7 @@ $ kpops destroy [OPTIONS] PIPELINE_PATHS... * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] * `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] +* `--operation-mode [argo|manifest|managed]`: How KPOps should operate. [env var: KPOPS_OPERATION_MODE; default: managed] * `--help`: Show this message and exit. ## `kpops generate` @@ -142,31 +144,7 @@ $ kpops init [OPTIONS] PATH **Options**: -* `--config-include-opt / --no-config-include-opt`: Whether to include non-required settings in the generated 'config.yaml' [default: no-config-include-opt] -* `--help`: Show this message and exit. - -## `kpops manifest` - -In addition to generate, render final resource representation for each pipeline step, e.g. Kubernetes manifests. - -**Usage**: - -```console -$ kpops manifest [OPTIONS] PIPELINE_PATHS...
-``` - -**Arguments**: - -* `PIPELINE_PATHS...`: Paths to dir containing 'pipeline.yaml' or files named 'pipeline.yaml'. [env var: KPOPS_PIPELINE_PATHS;required] - -**Options**: - -* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] -* `--config DIRECTORY`: Path to the dir containing config.yaml files [env var: KPOPS_CONFIG_PATH; default: .] -* `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] -* `--filter-type [include|exclude]`: Whether the --steps option should include/exclude the steps [default: include] -* `--environment TEXT`: The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). [env var: KPOPS_ENVIRONMENT] -* `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] +* `--config-include-optional / --no-config-include-optional`: Whether to include non-required settings in the generated 'config.yaml' [default: no-config-include-optional] * `--help`: Show this message and exit. ## `kpops reset` @@ -193,6 +171,7 @@ $ kpops reset [OPTIONS] PIPELINE_PATHS... * `--dry-run / --execute`: Whether to dry run the command or execute it [default: dry-run] * `--verbose / --no-verbose`: Enable verbose printing [default: no-verbose] * `--parallel / --no-parallel`: Enable or disable parallel execution of pipeline steps. If enabled, multiple steps can be processed concurrently. If disabled, steps will be processed sequentially. [default: no-parallel] +* `--operation-mode [argo|manifest|managed]`: How KPOps should operate. [env var: KPOPS_OPERATION_MODE; default: managed] * `--help`: Show this message and exit. ## `kpops schema` diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 720d74582..1b39c8e9d 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -131,6 +131,7 @@ nav: - Migrate from v5 to v6: user/migration-guide/v5-v6.md - Migrate from v6 to v7: user/migration-guide/v6-v7.md - Migrate from v7 to v8: user/migration-guide/v7-v8.md + - Migrate from v8 to v9: user/migration-guide/v8-v9.md - CLI usage: user/references/cli-commands.md - Editor integration: user/references/editor-integration.md - CI integration: diff --git a/hooks/gen_docs/__init__.py b/hooks/gen_docs/__init__.py index 5a0d63a28..fda5d4d19 100644 --- a/hooks/gen_docs/__init__.py +++ b/hooks/gen_docs/__init__.py @@ -1,10 +1,10 @@ """Documentation generation.""" from collections.abc import Iterator -from enum import Enum +from enum import StrEnum -class IterableStrEnum(str, Enum): +class IterableStrEnum(StrEnum): """Polyfill that also introduces dict-like behavior. 
Introduces constructors that return a ``Iterator`` object diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 4b1297d0c..3b3e02a56 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -9,7 +9,7 @@ from dataclasses import dataclass from pathlib import Path from textwrap import fill -from typing import Any +from typing import Any, Self from pydantic_core import PydanticUndefined from pytablewriter import MarkdownTableWriter @@ -70,9 +70,7 @@ class EnvVar: corresponding_setting_name: str | None @classmethod - def from_record( - cls, record: dict[str, Any] - ) -> EnvVar: # TODO: typing.Self for Python 3.11+ + def from_record(cls, record: dict[str, Any]) -> Self: """Construct an ``EnvVar`` instance from a specific dict. Reads a dict that contains keys equivalent to the diff --git a/kpops/api/__init__.py b/kpops/api/__init__.py index f376af704..21b047ce9 100644 --- a/kpops/api/__init__.py +++ b/kpops/api/__init__.py @@ -1,10 +1,12 @@ from __future__ import annotations import asyncio +from collections.abc import Iterator from pathlib import Path from typing import TYPE_CHECKING from kpops.api.logs import log, log_action +from kpops.api.operation import OperationMode from kpops.api.options import FilterType from kpops.api.registry import Registry from kpops.component_handlers import ComponentHandlers @@ -15,6 +17,7 @@ from kpops.component_handlers.topic.handler import TopicHandler from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper from kpops.config import KpopsConfig +from kpops.manifests.kubernetes import KubernetesManifest from kpops.pipeline import ( Pipeline, PipelineGenerator, @@ -22,7 +25,6 @@ from kpops.utils.cli_commands import init_project if TYPE_CHECKING: - from kpops.components.base_components.models.resource import Resource from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.config import KpopsConfig @@ -35,6 +37,7 @@ def generate( filter_type: FilterType = FilterType.INCLUDE, environment: str | None = None, verbose: bool = False, + operation_mode: OperationMode = OperationMode.MANAGED, ) -> Pipeline: """Generate enriched pipeline representation. @@ -45,13 +48,11 @@ def generate( :param filter_type: Whether `steps` should include/exclude the steps. :param environment: The environment to generate and deploy the pipeline to. :param verbose: Enable verbose printing. + :param operation_mode: How KPOps should operate. :return: Generated `Pipeline` object. """ kpops_config = KpopsConfig.create( - config, - dotenv, - environment, - verbose, + config, dotenv, environment, verbose, operation_mode ) pipeline = _create_pipeline(pipeline_path, kpops_config, environment) log.info(f"Picked up pipeline '{pipeline_path.parent.name}'") @@ -69,26 +70,91 @@ def generate( return pipeline -def manifest( +def manifest_deploy( pipeline_path: Path, dotenv: list[Path] | None = None, config: Path = Path(), steps: set[str] | None = None, filter_type: FilterType = FilterType.INCLUDE, environment: str | None = None, - verbose: bool = False, -) -> list[Resource]: - """Generate pipeline, return final resource representations for each step. 
+ verbose: bool = True, + operation_mode: OperationMode = OperationMode.MANIFEST, +) -> Iterator[tuple[KubernetesManifest, ...]]: + pipeline = generate( + pipeline_path=pipeline_path, + dotenv=dotenv, + config=config, + steps=steps, + filter_type=filter_type, + environment=environment, + verbose=verbose, + operation_mode=operation_mode, + ) + for component in pipeline.components: + resource = component.manifest_deploy() + yield resource - :param pipeline_path: Path to pipeline definition yaml file. - :param dotenv: Paths to dotenv files. - :param config: Path to the dir containing config.yaml files. - :param steps: Set of steps (components) to apply the command on. - :param filter_type: Whether `steps` should include/exclude the steps. - :param environment: The environment to generate and deploy the pipeline to. - :param verbose: Enable verbose printing. - :return: Resources. - """ + +def manifest_destroy( + pipeline_path: Path, + dotenv: list[Path] | None = None, + config: Path = Path(), + steps: set[str] | None = None, + filter_type: FilterType = FilterType.INCLUDE, + environment: str | None = None, + verbose: bool = True, + operation_mode: OperationMode = OperationMode.MANIFEST, +) -> Iterator[tuple[KubernetesManifest, ...]]: + pipeline = generate( + pipeline_path=pipeline_path, + dotenv=dotenv, + config=config, + steps=steps, + filter_type=filter_type, + environment=environment, + verbose=verbose, + operation_mode=operation_mode, + ) + for component in pipeline.components: + resource = component.manifest_destroy() + yield resource + + +def manifest_reset( + pipeline_path: Path, + dotenv: list[Path] | None = None, + config: Path = Path(), + steps: set[str] | None = None, + filter_type: FilterType = FilterType.INCLUDE, + environment: str | None = None, + verbose: bool = True, + operation_mode: OperationMode = OperationMode.MANIFEST, +) -> Iterator[tuple[KubernetesManifest, ...]]: + pipeline = generate( + pipeline_path=pipeline_path, + dotenv=dotenv, + config=config, + steps=steps, + filter_type=filter_type, + environment=environment, + verbose=verbose, + operation_mode=operation_mode, + ) + for component in pipeline.components: + resource = component.manifest_reset() + yield resource + + +def manifest_clean( + pipeline_path: Path, + dotenv: list[Path] | None = None, + config: Path = Path(), + steps: set[str] | None = None, + filter_type: FilterType = FilterType.INCLUDE, + environment: str | None = None, + verbose: bool = True, + operation_mode: OperationMode = OperationMode.MANIFEST, +) -> Iterator[tuple[KubernetesManifest, ...]]: pipeline = generate( pipeline_path=pipeline_path, dotenv=dotenv, @@ -97,12 +163,11 @@ def manifest( filter_type=filter_type, environment=environment, verbose=verbose, + operation_mode=operation_mode, ) - resources: list[Resource] = [] for component in pipeline.components: - resource = component.manifest() - resources.append(resource) - return resources + resource = component.manifest_clean() + yield resource def deploy( @@ -301,12 +366,12 @@ async def async_clean(): def init( path: Path, - config_include_opt: bool = False, + config_include_optional: bool = False, ): """Initiate a default empty project. :param path: Directory in which the project should be initiated. - :param conf_incl_opt: Whether to include non-required settings + :param config_include_optional: Whether to include non-required settings in the generated config file. 
""" if not path.exists(): @@ -314,7 +379,7 @@ def init( elif next(path.iterdir(), False): log.warning("Please provide a path to an empty directory.") return - init_project(path, config_include_opt) + init_project(path, config_include_optional) def _create_pipeline( diff --git a/kpops/api/operation.py b/kpops/api/operation.py new file mode 100644 index 000000000..6b7558398 --- /dev/null +++ b/kpops/api/operation.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +import enum + + +class OperationMode(str, enum.Enum): + ARGO = "argo" + MANIFEST = "manifest" + MANAGED = "managed" diff --git a/kpops/api/options.py b/kpops/api/options.py index 22fda2542..19c352e1a 100644 --- a/kpops/api/options.py +++ b/kpops/api/options.py @@ -1,6 +1,6 @@ from __future__ import annotations -from enum import Enum +from enum import StrEnum from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -8,7 +8,7 @@ from kpops.pipeline import ComponentFilterPredicate -class FilterType(str, Enum): +class FilterType(StrEnum): INCLUDE = "include" EXCLUDE = "exclude" diff --git a/kpops/cli/main.py b/kpops/cli/main.py index ff0a77338..f372c7603 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -5,6 +5,8 @@ import typer import kpops.api as kpops +from kpops.api import log +from kpops.api.operation import OperationMode from kpops.api.options import FilterType from kpops.cli.utils import ( collect_pipeline_paths, @@ -110,6 +112,11 @@ "Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). " ), ) +OPERATION_MODE_OPTION: OperationMode = typer.Option( + default=OperationMode.MANAGED, + envvar=f"{ENV_PREFIX}OPERATION_MODE", + help="How KPOps should operate.", +) def parse_steps(steps: str | None) -> set[str] | None: @@ -119,9 +126,9 @@ def parse_steps(steps: str | None) -> set[str] | None: @app.command(help="Initialize a new KPOps project.") def init( path: Path = PROJECT_PATH, - config_include_opt: bool = CONFIG_INCLUDE_OPTIONAL, + config_include_optional: bool = CONFIG_INCLUDE_OPTIONAL, ): - kpops.init(path, config_include_opt=config_include_opt) + kpops.init(path, config_include_optional=config_include_optional) @app.command( @@ -180,34 +187,6 @@ def generate( print_yaml(pipeline.to_yaml()) -@app.command( - short_help="Render final resource representation", - help="In addition to generate, render final resource representation for each pipeline step, e.g. 
Kubernetes manifests.", -) -def manifest( - pipeline_paths: list[Path] = PIPELINE_PATHS_ARG, - dotenv: list[Path] | None = DOTENV_PATH_OPTION, - config: Path = CONFIG_PATH_OPTION, - steps: str | None = PIPELINE_STEPS, - filter_type: FilterType = FILTER_TYPE, - environment: str | None = ENVIRONMENT, - verbose: bool = VERBOSE_OPTION, -): - for pipeline_file_path in collect_pipeline_paths(pipeline_paths): - resources = kpops.manifest( - pipeline_path=pipeline_file_path, - dotenv=dotenv, - config=config, - steps=parse_steps(steps), - filter_type=filter_type, - environment=environment, - verbose=verbose, - ) - for resource in resources: - for rendered_manifest in resource: - print_yaml(rendered_manifest) - - @app.command(help="Deploy pipeline steps") def deploy( pipeline_paths: list[Path] = PIPELINE_PATHS_ARG, @@ -219,19 +198,37 @@ def deploy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, parallel: bool = PARALLEL, + operation_mode: OperationMode = OPERATION_MODE_OPTION, ): - for pipeline_file_path in collect_pipeline_paths(pipeline_paths): - kpops.deploy( - pipeline_path=pipeline_file_path, - dotenv=dotenv, - config=config, - steps=parse_steps(steps), - filter_type=filter_type, - environment=environment, - dry_run=dry_run, - verbose=verbose, - parallel=parallel, - ) + match operation_mode: + case OperationMode.MANAGED: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + kpops.deploy( + pipeline_path=pipeline_file_path, + dotenv=dotenv, + config=config, + steps=parse_steps(steps), + filter_type=filter_type, + environment=environment, + dry_run=dry_run, + verbose=verbose, + parallel=parallel, + ) + case _: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + resources = kpops.manifest_deploy( + pipeline_file_path, + dotenv, + config, + parse_steps(steps), + filter_type, + environment, + verbose, + operation_mode, + ) + for resource in resources: + for rendered_manifest in resource: + print_yaml(rendered_manifest.model_dump()) @app.command(help="Destroy pipeline steps") @@ -245,19 +242,37 @@ def destroy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, parallel: bool = PARALLEL, + operation_mode: OperationMode = OPERATION_MODE_OPTION, ): - for pipeline_file_path in collect_pipeline_paths(pipeline_paths): - kpops.destroy( - pipeline_path=pipeline_file_path, - dotenv=dotenv, - config=config, - steps=parse_steps(steps), - filter_type=filter_type, - environment=environment, - dry_run=dry_run, - verbose=verbose, - parallel=parallel, - ) + match operation_mode: + case OperationMode.MANAGED: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + kpops.destroy( + pipeline_path=pipeline_file_path, + dotenv=dotenv, + config=config, + steps=parse_steps(steps), + filter_type=filter_type, + environment=environment, + dry_run=dry_run, + verbose=verbose, + parallel=parallel, + ) + case _: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + resources = kpops.manifest_destroy( + pipeline_file_path, + dotenv, + config, + parse_steps(steps), + filter_type, + environment, + verbose, + operation_mode, + ) + for resource in resources: + for rendered_manifest in resource: + print_yaml(rendered_manifest.model_dump()) @app.command(help="Reset pipeline steps") @@ -271,19 +286,37 @@ def reset( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, parallel: bool = PARALLEL, + operation_mode: OperationMode = OPERATION_MODE_OPTION, ): - for pipeline_file_path in collect_pipeline_paths(pipeline_paths): - kpops.reset( - 
pipeline_path=pipeline_file_path, - dotenv=dotenv, - config=config, - steps=parse_steps(steps), - filter_type=filter_type, - environment=environment, - dry_run=dry_run, - verbose=verbose, - parallel=parallel, - ) + match operation_mode: + case OperationMode.MANAGED: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + kpops.reset( + pipeline_path=pipeline_file_path, + dotenv=dotenv, + config=config, + steps=parse_steps(steps), + filter_type=filter_type, + environment=environment, + dry_run=dry_run, + verbose=verbose, + parallel=parallel, + ) + case _: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + resources = kpops.manifest_reset( + pipeline_file_path, + dotenv, + config, + parse_steps(steps), + filter_type, + environment, + verbose, + operation_mode, + ) + for resource in resources: + for rendered_manifest in resource: + print_yaml(rendered_manifest.model_dump()) @app.command(help="Clean pipeline steps") def clean( pipeline_paths: list[Path] = PIPELINE_PATHS_ARG, dotenv: list[Path] | None = DOTENV_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: str | None = PIPELINE_STEPS, filter_type: FilterType = FILTER_TYPE, environment: str | None = ENVIRONMENT, @@ -297,19 +330,42 @@ def clean( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, parallel: bool = PARALLEL, + operation_mode: OperationMode = OPERATION_MODE_OPTION, ): - for pipeline_file_path in collect_pipeline_paths(pipeline_paths): - kpops.clean( - pipeline_path=pipeline_file_path, - dotenv=dotenv, - config=config, - steps=parse_steps(steps), - filter_type=filter_type, - environment=environment, - dry_run=dry_run, - verbose=verbose, - parallel=parallel, - ) + match operation_mode: + case OperationMode.MANAGED: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + kpops.clean( + pipeline_path=pipeline_file_path, + dotenv=dotenv, + config=config, + steps=parse_steps(steps), + filter_type=filter_type, + environment=environment, + dry_run=dry_run, + verbose=verbose, + parallel=parallel, + ) + case OperationMode.MANIFEST: + for pipeline_file_path in collect_pipeline_paths(pipeline_paths): + resources = kpops.manifest_clean( + pipeline_file_path, + dotenv, + config, + parse_steps(steps), + filter_type, + environment, + verbose, + operation_mode, + ) + for resource in resources: + for rendered_manifest in resource: + print_yaml(rendered_manifest.model_dump()) + case OperationMode.ARGO: + log.warning( + "No cleanup jobs are manifested in Argo mode. The cleanup jobs with Argo hooks are manifested with the 'deploy' command. \n" + " If you wish to see the cleanup job manifests, use the 'manifest' operation mode." + ) def version_callback(show_version: bool) -> None: diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 5ff76fb81..d4b519c27 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -20,12 +20,11 @@ RepoAuthFlags, Version, ) -from kpops.component_handlers.kubernetes.model import KubernetesManifest +from kpops.manifests.kubernetes import KubernetesManifest if TYPE_CHECKING: from collections.abc import Iterable, Iterator - from kpops.components.base_components.models.resource import Resource log = logging.getLogger("Helm") @@ -161,7 +160,7 @@ def template( namespace: str, values: dict[str, Any], flags: HelmTemplateFlags | None = None, - ) -> Resource: + ) -> tuple[KubernetesManifest, ...]: """From Helm: Render chart templates locally and display the output. 
Any values that would normally be looked up or retrieved in-cluster will @@ -192,7 +191,7 @@ def template( command.extend(flags.to_command()) output = self.__execute(command) manifests = KubernetesManifest.from_yaml(output) - return list(manifests) + return tuple(manifests) def get_manifest(self, release_name: str, namespace: str) -> Iterable[HelmTemplate]: command = [ diff --git a/kpops/component_handlers/helm_wrapper/helm_diff.py b/kpops/component_handlers/helm_wrapper/helm_diff.py index e90edc433..5004ee52c 100644 --- a/kpops/component_handlers/helm_wrapper/helm_diff.py +++ b/kpops/component_handlers/helm_wrapper/helm_diff.py @@ -2,7 +2,7 @@ from collections.abc import Iterable, Iterator from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate -from kpops.component_handlers.kubernetes.model import KubernetesManifest +from kpops.manifests.kubernetes import KubernetesManifest from kpops.utils.dict_differ import Change, render_diff log = logging.getLogger("HelmDiff") @@ -16,7 +16,7 @@ def __init__(self, config: HelmDiffConfig) -> None: def calculate_changes( current_release: Iterable[HelmTemplate], new_release: Iterable[HelmTemplate], - ) -> Iterator[Change[KubernetesManifest, KubernetesManifest]]: + ) -> Iterator[Change[KubernetesManifest | None, KubernetesManifest | None]]: """Compare 2 releases and generate a Change object for each difference. :param current_release: Iterable containing HelmTemplate objects for the current release @@ -33,12 +33,15 @@ def calculate_changes( new_resource = new_release_index.pop(current_resource.filepath, None) yield Change( current_resource.manifest, - new_resource.manifest if new_resource else KubernetesManifest(), + new_resource.manifest if new_resource else None, ) # collect added files for new_resource in new_release_index.values(): - yield Change(KubernetesManifest(), new_resource.manifest) + yield Change( + None, + new_resource.manifest, + ) def log_helm_diff( self, @@ -48,8 +51,8 @@ def log_helm_diff( ) -> None: for change in self.calculate_changes(current_release, new_release): if diff := render_diff( - change.old_value.data, - change.new_value.data, + change.old_value.model_dump() if change.old_value else {}, + change.new_value.model_dump() if change.new_value else {}, ignore=self.config.ignore, ): logger.info("\n" + diff) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index b81328e46..d23f2c299 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -6,7 +6,7 @@ from typing_extensions import override from kpops.component_handlers.helm_wrapper.exception import ParseError -from kpops.component_handlers.kubernetes.model import KubernetesManifest +from kpops.manifests.kubernetes import KubernetesManifest from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import DescConfigModel diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py index aa618f6a1..add4b8bcc 100644 --- a/kpops/component_handlers/helm_wrapper/utils.py +++ b/kpops/component_handlers/helm_wrapper/utils.py @@ -1,5 +1,5 @@ -from kpops.component_handlers.kubernetes.model import K8S_LABEL_MAX_LEN from kpops.component_handlers.kubernetes.utils import trim +from kpops.manifests.kubernetes import K8S_LABEL_MAX_LEN RELEASE_NAME_MAX_LEN = 53 diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py 
b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index dcfd41b75..3cd27c867 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Self from kpops.component_handlers.kafka_connect.connect_wrapper import ConnectWrapper from kpops.component_handlers.kafka_connect.exception import ( @@ -12,11 +12,6 @@ from kpops.utils.dict_differ import render_diff if TYPE_CHECKING: - try: - from typing import Self # pyright: ignore[reportAttributeAccessIssue] - except ImportError: - from typing_extensions import Self - from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig from kpops.config import KpopsConfig diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 93ff76043..d975deaba 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import StrEnum from typing import Any import pydantic @@ -21,7 +21,7 @@ ) -class KafkaConnectorType(str, Enum): +class KafkaConnectorType(StrEnum): SINK = "sink" SOURCE = "source" diff --git a/kpops/component_handlers/kafka_connect/timeout.py b/kpops/component_handlers/kafka_connect/timeout.py index bb036ed29..30503b3e2 100644 --- a/kpops/component_handlers/kafka_connect/timeout.py +++ b/kpops/component_handlers/kafka_connect/timeout.py @@ -1,6 +1,6 @@ import asyncio +import builtins import logging -from asyncio import TimeoutError from collections.abc import Coroutine from typing import Any, TypeVar @@ -21,7 +21,7 @@ async def timeout(coro: Coroutine[Any, Any, T], *, secs: int = 0) -> T | None: return await task else: return await asyncio.wait_for(task, timeout=secs) - except TimeoutError: + except builtins.TimeoutError: log.exception( f"Kafka Connect operation {coro.__name__} timed out after {secs} seconds. To increase the duration, set the `timeout` option in config.yaml." 
) diff --git a/kpops/component_handlers/kubernetes/model.py b/kpops/component_handlers/kubernetes/model.py deleted file mode 100644 index e2f7fece7..000000000 --- a/kpops/component_handlers/kubernetes/model.py +++ /dev/null @@ -1,33 +0,0 @@ -from __future__ import annotations - -import json -from collections import UserDict -from collections.abc import Iterator - -import yaml - -from kpops.utils.types import JsonType - -K8S_LABEL_MAX_LEN = 63 - -# https://kubernetes.io/docs/concepts/workloads/controllers/cron-jobs -K8S_CRON_JOB_NAME_MAX_LEN = 52 - - -class KubernetesManifest(UserDict[str, JsonType]): - """Representation of a Kubernetes API object as YAML/JSON mapping.""" - - @classmethod - def from_yaml( - cls, /, content: str - ) -> Iterator[KubernetesManifest]: # TODO: typing.Self for Python 3.11+ - manifests: Iterator[dict[str, JsonType]] = yaml.load_all(content, yaml.Loader) - for manifest in manifests: - yield cls(manifest) - - @classmethod - def from_json( - cls, /, content: str - ) -> KubernetesManifest: # TODO: typing.Self for Python 3.11+ - manifest: dict[str, JsonType] = json.loads(content) - return cls(manifest) diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index 5c0cf024d..db1fcf7fa 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import StrEnum from typing import Any from pydantic import BaseModel, ConfigDict @@ -28,7 +28,7 @@ class TopicResponse(BaseModel): ) -class KafkaTopicConfigSource(str, Enum): +class KafkaTopicConfigSource(StrEnum): DYNAMIC_TOPIC_CONFIG = "DYNAMIC_TOPIC_CONFIG" DEFAULT_CONFIG = "DEFAULT_CONFIG" STATIC_BROKER_CONFIG = "STATIC_BROKER_CONFIG" @@ -68,7 +68,7 @@ class TopicConfigResponse(BaseModel): ) -class KafkaBrokerConfigSource(str, Enum): +class KafkaBrokerConfigSource(StrEnum): STATIC_BROKER_CONFIG = "STATIC_BROKER_CONFIG" DYNAMIC_BROKER_CONFIG = "DYNAMIC_BROKER_CONFIG" DEFAULT_CONFIG = "DEFAULT_CONFIG" diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 89b8d212c..06ce7702d 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -7,7 +7,7 @@ from dataclasses import asdict from functools import cached_property from pathlib import Path -from typing import Any, TypeVar +from typing import Any, Self, TypeVar import pydantic import typer @@ -34,11 +34,6 @@ from kpops.utils.types import JsonType from kpops.utils.yaml import load_yaml_file, substitute_nested -try: - from typing import Self # pyright: ignore[reportAttributeAccessIssue] -except ImportError: - from typing_extensions import Self - log = logging.getLogger("BaseDefaultsComponent") diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index 3990b3f1c..38d768359 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -8,6 +8,7 @@ from pydantic import Field, model_serializer from typing_extensions import override +from kpops.api import OperationMode from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler from kpops.component_handlers.helm_wrapper.helm import Helm from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff @@ -21,13 +22,13 @@ create_helm_name_override, create_helm_release_name, ) -from kpops.component_handlers.kubernetes.model 
import K8S_LABEL_MAX_LEN from kpops.components.base_components.kubernetes_app import ( KubernetesApp, KubernetesAppValues, ) -from kpops.components.base_components.models.resource import Resource from kpops.config import get_config +from kpops.manifests.argo import ArgoSyncWave, enrich_annotations +from kpops.manifests.kubernetes import K8S_LABEL_MAX_LEN, KubernetesManifest from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import exclude_by_name @@ -142,12 +143,17 @@ def template_flags(self) -> HelmTemplateFlags: ) @override - def manifest(self) -> Resource: + def manifest_deploy(self) -> tuple[KubernetesManifest, ...]: + values = self.to_helm_values() + if get_config().operation_mode is OperationMode.ARGO: + sync_wave = ArgoSyncWave(sync_wave=1) + values = enrich_annotations(values, sync_wave.key, sync_wave.value) + return self.helm.template( self.helm_release_name, self.helm_chart, self.namespace, - self.to_helm_values(), + values, self.template_flags, ) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 5453c004b..731032879 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -3,7 +3,7 @@ import logging from abc import ABC from functools import cached_property -from typing import Any, Literal, NoReturn +from typing import Any, Literal, NoReturn, Self import pydantic from pydantic import Field, PrivateAttr, ValidationInfo, computed_field, field_validator @@ -27,11 +27,6 @@ from kpops.utils.docstring import describe_attr from kpops.utils.pydantic import CamelCaseConfigModel -try: - from typing import Self # pyright: ignore[reportAttributeAccessIssue] -except ImportError: - from typing_extensions import Self - log = logging.getLogger("KafkaConnector") diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index df6d7bd1e..aefa0c252 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import StrEnum from typing import Any, NewType from pydantic import ConfigDict, Field, model_validator @@ -8,7 +8,7 @@ from kpops.utils.pydantic import DescConfigModel -class InputTopicTypes(str, Enum): +class InputTopicTypes(StrEnum): """Input topic types. - INPUT: input topic diff --git a/kpops/components/base_components/models/resource.py b/kpops/components/base_components/models/resource.py index 08c01f344..e69de29bb 100644 --- a/kpops/components/base_components/models/resource.py +++ b/kpops/components/base_components/models/resource.py @@ -1,5 +0,0 @@ -from collections.abc import Mapping, Sequence -from typing import Any, TypeAlias - -# representation of final resource for component, e.g. 
a list of Kubernetes manifests -Resource: TypeAlias = Sequence[Mapping[str, Any]] diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index 5b6798af6..b37a1da83 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -13,7 +13,6 @@ FromTopic, InputTopicTypes, ) -from kpops.components.base_components.models.resource import Resource from kpops.components.base_components.models.to_section import ( ToSection, ) @@ -22,6 +21,7 @@ OutputTopicTypes, TopicConfig, ) +from kpops.manifests.kubernetes import KubernetesManifest from kpops.utils.docstring import describe_attr @@ -229,9 +229,21 @@ def inflate(self) -> list[PipelineComponent]: """ return [self] - def manifest(self) -> Resource: - """Render final component resources, e.g. Kubernetes manifests.""" - return [] + def manifest_deploy(self) -> tuple[KubernetesManifest, ...]: + """Render Kubernetes manifests for deploy.""" + return () + + def manifest_destroy(self) -> tuple[KubernetesManifest, ...]: + """Render Kubernetes manifests for destroy.""" + return () + + def manifest_reset(self) -> tuple[KubernetesManifest, ...]: + """Render Kubernetes manifests for reset.""" + return () + + def manifest_clean(self) -> tuple[KubernetesManifest, ...]: + """Render Kubernetes manifests for clean.""" + return () async def deploy(self, dry_run: bool) -> None: """Deploy component, e.g. to Kubernetes cluster. diff --git a/kpops/components/common/kubernetes_model.py b/kpops/components/common/kubernetes_model.py index 967182218..1b4a688b7 100644 --- a/kpops/components/common/kubernetes_model.py +++ b/kpops/components/common/kubernetes_model.py @@ -15,10 +15,7 @@ -try: -    from typing import Self  # pyright: ignore[reportAttributeAccessIssue] -except ImportError: -    from typing_extensions import Self +from typing import Self class ServiceType(str, enum.Enum): diff --git a/kpops/components/common/topic.py b/kpops/components/common/topic.py index 94dd7838e..ec0f99170 100644 --- a/kpops/components/common/topic.py +++ b/kpops/components/common/topic.py @@ -1,7 +1,7 @@ from __future__ import annotations from collections.abc import Iterable -from enum import Enum +from enum import StrEnum from typing import Annotated, Any import pydantic @@ -11,7 +11,7 @@ from kpops.utils.pydantic import DescConfigModel, to_str -class OutputTopicTypes(str, Enum): +class OutputTopicTypes(StrEnum): """Types of output topic. 
- OUTPUT: output topic diff --git a/kpops/components/streams_bootstrap/base.py b/kpops/components/streams_bootstrap/base.py index 5657eee41..03405b14a 100644 --- a/kpops/components/streams_bootstrap/base.py +++ b/kpops/components/streams_bootstrap/base.py @@ -3,23 +3,20 @@ import logging import re from abc import ABC -from typing import TYPE_CHECKING +from typing import Self import pydantic from pydantic import Field +from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import HelmRepoConfig from kpops.components.base_components import KafkaApp from kpops.components.base_components.helm_app import HelmApp from kpops.components.streams_bootstrap.model import StreamsBootstrapValues +from kpops.manifests.kubernetes import KubernetesManifest +from kpops.manifests.strimzi.kafka_topic import StrimziKafkaTopic from kpops.utils.docstring import describe_attr -if TYPE_CHECKING: - try: - from typing import Self # pyright: ignore[reportAttributeAccessIssue] - except ImportError: - from typing_extensions import Self - STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig( repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/", @@ -83,3 +80,21 @@ def warning_for_latest_image_tag(self) -> Self: f"The image tag for component '{self.name}' is set or defaulted to 'latest'. Please, consider providing a stable image tag." ) return self + + @override + def manifest_deploy(self) -> tuple[KubernetesManifest, ...]: + resource = super().manifest_deploy() + if self.to: + resource = resource + tuple( + StrimziKafkaTopic.from_topic(topic) for topic in self.to.kafka_topics + ) + + return resource + + @override + def manifest_destroy(self) -> tuple[KubernetesManifest, ...]: + if self.to: + return tuple( + StrimziKafkaTopic.from_topic(topic) for topic in self.to.kafka_topics + ) + return () diff --git a/kpops/components/streams_bootstrap/producer/producer_app.py b/kpops/components/streams_bootstrap/producer/producer_app.py index be445079d..4c59262a1 100644 --- a/kpops/components/streams_bootstrap/producer/producer_app.py +++ b/kpops/components/streams_bootstrap/producer/producer_app.py @@ -4,7 +4,7 @@ from pydantic import Field, ValidationError, computed_field from typing_extensions import override -from kpops.component_handlers.kubernetes.model import K8S_CRON_JOB_NAME_MAX_LEN +from kpops.api import OperationMode from kpops.component_handlers.kubernetes.utils import trim from kpops.components.base_components.kafka_app import KafkaAppCleaner from kpops.components.common.app_type import AppType @@ -17,7 +17,11 @@ StreamsBootstrap, ) from kpops.components.streams_bootstrap.producer.model import ProducerAppValues +from kpops.config import get_config from kpops.const.file_type import DEFAULTS_YAML, PIPELINE_YAML +from kpops.manifests.argo import ArgoHook, enrich_annotations +from kpops.manifests.kubernetes import K8S_CRON_JOB_NAME_MAX_LEN, KubernetesManifest +from kpops.manifests.strimzi.kafka_topic import StrimziKafkaTopic from kpops.utils.docstring import describe_attr log = logging.getLogger("ProducerApp") @@ -33,6 +37,21 @@ def helm_chart(self) -> str: f"{self.repo_config.repository_name}/{AppType.CLEANUP_PRODUCER_APP.value}" ) + @override + def manifest_deploy(self) -> tuple[KubernetesManifest, ...]: + values = self.to_helm_values() + if get_config().operation_mode is OperationMode.ARGO: + post_delete = ArgoHook.POST_DELETE + values = enrich_annotations(values, post_delete.key, post_delete.value) + + return self.helm.template( + self.helm_release_name, 
+ self.helm_chart, + self.namespace, + values, + self.template_flags, + ) + class ProducerApp(StreamsBootstrap): """Producer component. @@ -68,7 +87,9 @@ def _cleaner(self) -> ProducerAppCleaner: for name in self.model_fields_set if name not in {"_cleaner", "from_", "to"} } - return ProducerAppCleaner.model_validate(kwargs) + cleaner = ProducerAppCleaner.model_validate(kwargs) + cleaner.values.name_override = None + return cleaner @override def apply_to_outputs(self, name: str, topic: TopicConfig) -> None: @@ -139,3 +160,27 @@ async def clean(self, dry_run: bool) -> None: """Destroy and clean.""" await super().clean(dry_run) await self._cleaner.clean(dry_run) + + @override + def manifest_deploy(self) -> tuple[KubernetesManifest, ...]: + manifests = super().manifest_deploy() + operation_mode = get_config().operation_mode + + if operation_mode is OperationMode.ARGO: + manifests = manifests + self._cleaner.manifest_deploy() + + return manifests + + @override + def manifest_reset(self) -> tuple[KubernetesManifest, ...]: + if self.to: + return tuple( + StrimziKafkaTopic.from_topic(topic) for topic in self.to.kafka_topics + ) + return () + + @override + def manifest_clean(self) -> tuple[KubernetesManifest, ...]: + if get_config().operation_mode is OperationMode.MANIFEST: + return self._cleaner.manifest_deploy() + return () diff --git a/kpops/components/streams_bootstrap/streams/streams_app.py b/kpops/components/streams_bootstrap/streams/streams_app.py index 6e5d8c037..55de9f648 100644 --- a/kpops/components/streams_bootstrap/streams/streams_app.py +++ b/kpops/components/streams_bootstrap/streams/streams_app.py @@ -4,6 +4,7 @@ from pydantic import Field, ValidationError, computed_field from typing_extensions import override +from kpops.api.operation import OperationMode from kpops.component_handlers.kubernetes.pvc_handler import PVCHandler from kpops.components.base_components.helm_app import HelmApp from kpops.components.base_components.kafka_app import KafkaAppCleaner @@ -15,7 +16,11 @@ from kpops.components.streams_bootstrap.streams.model import ( StreamsAppValues, ) +from kpops.config import get_config from kpops.const.file_type import DEFAULTS_YAML, PIPELINE_YAML +from kpops.manifests.argo import ArgoHook, enrich_annotations +from kpops.manifests.kubernetes import KubernetesManifest +from kpops.manifests.strimzi.kafka_topic import StrimziKafkaTopic from kpops.utils.docstring import describe_attr log = logging.getLogger("StreamsApp") @@ -48,6 +53,33 @@ async def clean(self, dry_run: bool) -> None: ): await self.clean_pvcs(dry_run) + @override + def manifest_deploy(self) -> tuple[KubernetesManifest, ...]: + values = self.to_helm_values() + if get_config().operation_mode is OperationMode.ARGO: + post_delete = ArgoHook.POST_DELETE + values = enrich_annotations(values, post_delete.key, post_delete.value) + return self.helm.template( + self.helm_release_name, + self.helm_chart, + self.namespace, + values, + self.template_flags, + ) + + @override + def manifest_reset(self) -> tuple[KubernetesManifest, ...]: + self.values.kafka.delete_output = False + values = self.to_helm_values() + + return self.helm.template( + self.helm_release_name, + self.helm_chart, + self.namespace, + values, + self.template_flags, + ) + async def clean_pvcs(self, dry_run: bool) -> None: app_full_name = super(HelmApp, self).full_name pvc_handler = PVCHandler(app_full_name, self.namespace) @@ -72,7 +104,9 @@ def _cleaner(self) -> StreamsAppCleaner: for name in self.model_fields_set if name not in {"_cleaner", "from_", "to"} } 
- return StreamsAppCleaner.model_validate(kwargs) + cleaner = StreamsAppCleaner.model_validate(kwargs) + cleaner.values.name_override = None + return cleaner @property @override @@ -159,3 +193,26 @@ async def clean(self, dry_run: bool) -> None: """Destroy and clean.""" await super().clean(dry_run) await self._cleaner.clean(dry_run) + + @override + def manifest_deploy(self) -> tuple[KubernetesManifest, ...]: + manifests = super().manifest_deploy() + if get_config().operation_mode is OperationMode.ARGO: + manifests = manifests + self._cleaner.manifest_deploy() + + return manifests + + @override + def manifest_reset(self) -> tuple[KubernetesManifest, ...]: + resource = self._cleaner.manifest_reset() + if self.to: + resource = resource + tuple( + StrimziKafkaTopic.from_topic(topic) for topic in self.to.kafka_topics + ) + return resource + + @override + def manifest_clean(self) -> tuple[KubernetesManifest, ...]: + if get_config().operation_mode is OperationMode.MANIFEST: + return self._cleaner.manifest_deploy() + return () diff --git a/kpops/components/streams_bootstrap_v2/base.py b/kpops/components/streams_bootstrap_v2/base.py index 2fa44354b..1b5d4cd2c 100644 --- a/kpops/components/streams_bootstrap_v2/base.py +++ b/kpops/components/streams_bootstrap_v2/base.py @@ -2,7 +2,7 @@ import logging from abc import ABC -from typing import TYPE_CHECKING, Any +from typing import Any, Self import pydantic from pydantic import AliasChoices, ConfigDict, Field @@ -23,12 +23,6 @@ exclude_defaults, ) -if TYPE_CHECKING: - try: - from typing import Self # pyright: ignore[reportAttributeAccessIssue] - except ImportError: - from typing_extensions import Self - STREAMS_BOOTSTRAP_HELM_REPO = HelmRepoConfig( repository_name="bakdata-streams-bootstrap", url="https://bakdata.github.io/streams-bootstrap/", diff --git a/kpops/components/streams_bootstrap_v2/streams/model.py b/kpops/components/streams_bootstrap_v2/streams/model.py index e733bb91c..2274f2ac5 100644 --- a/kpops/components/streams_bootstrap_v2/streams/model.py +++ b/kpops/components/streams_bootstrap_v2/streams/model.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any +from typing import Any, Self import pydantic from pydantic import BaseModel, ConfigDict, Field, model_validator @@ -194,9 +194,7 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): model_config = ConfigDict(extra="allow") @model_validator(mode="after") - def validate_mandatory_fields_are_set( - self: StreamsAppAutoScaling, - ) -> StreamsAppAutoScaling: # TODO: typing.Self for Python 3.11+ + def validate_mandatory_fields_are_set(self) -> Self: if self.enabled and (self.consumer_group is None or self.lag_threshold is None): msg = ( "If app.autoscaling.enabled is set to true, " @@ -228,9 +226,7 @@ class PersistenceConfig(BaseModel): ) @model_validator(mode="after") - def validate_mandatory_fields_are_set( - self: PersistenceConfig, - ) -> PersistenceConfig: # TODO: typing.Self for Python 3.11+ + def validate_mandatory_fields_are_set(self) -> Self: if self.enabled and self.size is None: msg = ( "If app.persistence.enabled is set to true, " diff --git a/kpops/config/__init__.py b/kpops/config/__init__.py index a7d0dc281..9e4f269b8 100644 --- a/kpops/config/__init__.py +++ b/kpops/config/__init__.py @@ -4,7 +4,9 @@ from pathlib import Path from typing import ClassVar +import pydantic from pydantic import AnyHttpUrl, Field, PrivateAttr, TypeAdapter +from pydantic.json_schema import SkipJsonSchema from pydantic_settings import ( BaseSettings, 
PydanticBaseSettingsSource, @@ -12,12 +14,43 @@ ) from typing_extensions import override +from kpops.api.exception import ValidationError +from kpops.api.operation import OperationMode from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object from kpops.utils.pydantic import YamlConfigSettingsSource ENV_PREFIX = "KPOPS_" +log = logging.getLogger("KPOpsConfig") + + +class StrimziTopicConfig(BaseSettings): + """Configuration for Strimzi Kafka Topics.""" + + label_: dict[str, str] = Field( + alias="label", + description="The label to identify the KafkaTopic resources managed by the Topic Operator. This does not have to be the name of the Kafka cluster. It can be the label assigned to the KafkaTopic resource. If you deploy more than one Topic Operator, the labels must be unique for each. That is, the operators cannot manage the same resources.", + ) + + @property + def cluster_labels(self) -> tuple[str, str]: + """Return the defined strimzi_topic.label as a tuple.""" + return next(iter(self.label_.items())) + + @pydantic.field_validator("label_", mode="after") + @classmethod + def label_validator(cls, label: dict[str, str]) -> dict[str, str]: + if len(label) == 0: + msg = "'strimzi_topic.label' must contain a single key-value pair." + raise ValidationError(msg) + if len(label) > 1: + log.warning( + "'strimzi_topic.label' only reads the first entry in the dictionary. Other defined labels will be ignored." + ) + + return label + class TopicNameConfig(BaseSettings): """Configure the topic name variables you can use in the pipeline definition.""" @@ -120,8 +153,21 @@ class KpopsConfig(BaseSettings): default=False, description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.", ) + strimzi_topic: StrimziTopicConfig | None = Field( + default=None, + description=describe_object(StrimziTopicConfig.__doc__), + ) + operation_mode: SkipJsonSchema[OperationMode] = Field( + default=OperationMode.MANAGED, + description="The operation mode of KPOps (managed, manifest, argo).", + exclude=True, + ) - model_config = SettingsConfigDict(env_prefix=ENV_PREFIX, env_nested_delimiter="__") + model_config = SettingsConfigDict( + env_prefix=ENV_PREFIX, + env_nested_delimiter="__", + use_enum_values=True, + ) @classmethod def create( @@ -130,6 +176,7 @@ def create( dotenv: list[Path] | None = None, environment: str | None = None, verbose: bool = False, + operation_mode: OperationMode | None = None, ) -> KpopsConfig: cls.setup_logging_level(verbose) YamlConfigSettingsSource.config_dir = config_dir @@ -137,6 +184,8 @@ def create( cls._instance = KpopsConfig( _env_file=dotenv # pyright: ignore[reportCallIssue] ) + if operation_mode: + cls._instance.operation_mode = operation_mode return cls._instance @staticmethod diff --git a/kpops/const/file_type.py b/kpops/const/file_type.py index 3e170be96..b0ff54312 100644 --- a/kpops/const/file_type.py +++ b/kpops/const/file_type.py @@ -1,11 +1,11 @@ from __future__ import annotations -from enum import Enum +from enum import StrEnum FILE_EXTENSION = ".yaml" -class KpopsFileType(str, Enum): +class KpopsFileType(StrEnum): """Enum representing different types of KPOps file naming conventions.
Attributes: diff --git a/tests/cli/snapshots/test_init/test_init_project/defaults.yaml b/kpops/manifests/__init__.py similarity index 100% rename from tests/cli/snapshots/test_init/test_init_project/defaults.yaml rename to kpops/manifests/__init__.py diff --git a/kpops/manifests/argo.py b/kpops/manifests/argo.py new file mode 100644 index 000000000..3ecabdaf9 --- /dev/null +++ b/kpops/manifests/argo.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +import enum +from typing import Any + +from pydantic import BaseModel + + +def enrich_annotations( + helm_values: dict[str, Any], key: str, value: str +) -> dict[str, Any]: + """Set the given annotation on the Helm values, creating the 'annotations' mapping if necessary.""" + annotations = helm_values.setdefault("annotations", {}) + annotations[key] = value + return helm_values + + +class ArgoHook(str, enum.Enum): + POST_DELETE = "PostDelete" + + @property + def key(self) -> str: + return "argocd.argoproj.io/hook" + + +class ArgoSyncWave(BaseModel): + sync_wave: int = 0 + + @property + def key(self) -> str: + return "argocd.argoproj.io/sync-wave" + + @property + def value(self) -> str: + return str(self.sync_wave) diff --git a/kpops/manifests/kubernetes.py b/kpops/manifests/kubernetes.py new file mode 100644 index 000000000..f442e6b63 --- /dev/null +++ b/kpops/manifests/kubernetes.py @@ -0,0 +1,81 @@ +from collections.abc import Iterator +from typing import Any, Self + +import pydantic +import yaml +from pydantic import ConfigDict, Field +from typing_extensions import override +from yaml.loader import Loader + +from kpops.utils.pydantic import CamelCaseConfigModel, by_alias + +K8S_LABEL_MAX_LEN = 63 +# https://kubernetes.io/docs/concepts/workloads/controllers/cron-jobs +K8S_CRON_JOB_NAME_MAX_LEN = 52 + + +class ObjectMeta(CamelCaseConfigModel): + """Metadata for all Kubernetes objects. + + https://gtsystem.github.io/lightkube-models/1.19/models/meta_v1/#objectmeta + + """ + + annotations: dict[str, str] | None = None + creation_timestamp: str | None = Field( + default=None, description="Timestamp in RFC3339 format" + ) + finalizers: list[str] | None = None + labels: dict[str, str] | None = None + name: str | None = None + namespace: str | None = None + resource_version: str | None = None + uid: str | None = None + + model_config = ConfigDict(extra="allow") + + @pydantic.model_serializer(mode="wrap", when_used="always") + def serialize_model( + self, + default_serialize_handler: pydantic.SerializerFunctionWrapHandler, + info: pydantic.SerializationInfo, + ) -> dict[str, Any]: + result = default_serialize_handler(self) + return { + by_alias(self, name): value + for name, value in result.items() + if name in self.model_fields_set + } + + +class KubernetesManifest(CamelCaseConfigModel): + api_version: str + kind: str + metadata: ObjectMeta + _required: set[str] = pydantic.PrivateAttr({"api_version", "kind"}) + + model_config = ConfigDict(extra="allow") + + @classmethod + def from_yaml(cls, /, content: str) -> Iterator[Self]: + manifests: Iterator[dict[str, Any]] = yaml.load_all(content, Loader) + for manifest in manifests: + yield cls(**manifest) + + @pydantic.model_serializer(mode="wrap", when_used="always") + def serialize_model( + self, + default_serialize_handler: pydantic.SerializerFunctionWrapHandler, + info: pydantic.SerializationInfo, + ) -> dict[str, Any]: + include = self._required | self.model_fields_set + result = default_serialize_handler(self) + return { + by_alias(self, name): value + for name, value in result.items() + if name in include + } + + @override + def model_dump(self, **_: Any) -> dict[str, Any]: + return super().model_dump(mode="json")
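To make the serialization contract above concrete, a minimal sketch (assumed usage, consistent with the unit tests added later in this changeset): from_yaml parses multi-document YAML into manifests, and model_dump emits a JSON-mode dict containing the required apiVersion/kind plus only the explicitly set fields.

from kpops.manifests.kubernetes import KubernetesManifest

content = """
apiVersion: v1
kind: Service
metadata:
  name: test-service
---
apiVersion: v1
kind: Pod
metadata:
  name: test-pod
"""
manifests = list(KubernetesManifest.from_yaml(content))
assert manifests[0].kind == "Service"
# model_dump serializes in JSON mode and keeps required fields plus explicitly set ones.
assert manifests[0].model_dump() == {
    "apiVersion": "v1",
    "kind": "Service",
    "metadata": {"name": "test-service"},
}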
diff --git a/tests/cli/snapshots/test_init/test_init_project/pipeline.yaml b/kpops/manifests/strimzi/__init__.py similarity index 100% rename from tests/cli/snapshots/test_init/test_init_project/pipeline.yaml rename to kpops/manifests/strimzi/__init__.py diff --git a/kpops/manifests/strimzi/kafka_topic.py b/kpops/manifests/strimzi/kafka_topic.py new file mode 100644 index 000000000..43c4536e0 --- /dev/null +++ b/kpops/manifests/strimzi/kafka_topic.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +from typing import Any, Self + +from pydantic import ConfigDict, Field, model_validator + +from kpops.api.exception import ValidationError +from kpops.components.common.topic import KafkaTopic +from kpops.config import get_config +from kpops.manifests.kubernetes import KubernetesManifest, ObjectMeta +from kpops.utils.docstring import describe_attr +from kpops.utils.pydantic import CamelCaseConfigModel + + +class TopicSpec(CamelCaseConfigModel): + """Specification of a Kafka topic. + + :param partitions: The number of partitions the topic should have. This cannot be decreased after topic creation. It can be increased after topic creation, but it is important to understand the consequences that has, especially for topics with semantic partitioning. KPOps defaults this to 1 when unset; in plain Strimzi an absent value would instead fall back to the broker configuration for `num.partitions`. + :param replicas: The number of replicas the topic should have. KPOps defaults this to 1 when unset; in plain Strimzi an absent value would instead fall back to the broker configuration for `default.replication.factor`. + :param config: The topic configuration. Topic config reference: https://docs.confluent.io/platform/current/installation/configuration/topic-configs.html + + """ + + partitions: int = Field( + default=1, ge=1, description=describe_attr("partitions", __doc__) + ) + replicas: int = Field( + default=1, ge=1, le=32767, description=describe_attr("replicas", __doc__) + ) + config: dict[str, Any] | None = Field( + default=None, description=describe_attr("config", __doc__) + ) + + model_config = ConfigDict(extra="allow") + + @model_validator(mode="before") + @classmethod + def set_defaults_if_none(cls, values: Any) -> Any: + if values.get("partitions") is None: + values["partitions"] = 1 + if values.get("replicas") is None: + values["replicas"] = 1 + return values
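# Illustrative aside (not part of kafka_topic.py): `set_defaults_if_none` exists because
# `from_topic` below passes `topic.config.partitions_count` and `replication_factor`,
# which may be None; explicit None values are coerced to the KPOps default of 1,
# matching the `partitions: 1` / `replicas: 1` seen in the manifest snapshots.
spec = TopicSpec.model_validate({"partitions": None, "replicas": None, "config": {}})
assert spec.partitions == 1
assert spec.replicas == 1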
+ + +class StrimziKafkaTopic(KubernetesManifest): + """Represents a Strimzi Kafka Topic CRD. + + CRD definition: https://github.com/strimzi/strimzi-kafka-operator/blob/main/install/cluster-operator/043-Crd-kafkatopic.yaml + example: https://github.com/strimzi/strimzi-kafka-operator/blob/main/examples/topic/kafka-topic.yaml + """ + + api_version: str = "kafka.strimzi.io/v1beta2" + kind: str = "KafkaTopic" + metadata: ObjectMeta + spec: TopicSpec + status: dict[str, Any] | None = None + + @classmethod + def from_topic(cls, topic: KafkaTopic) -> Self: + strimzi_topic = get_config().strimzi_topic + if not strimzi_topic: + msg = "When manifesting a KafkaTopic you must define 'strimzi_topic.label' in config.yaml" + raise ValidationError(msg) + cluster_domain, cluster_name = strimzi_topic.cluster_labels + + metadata = ObjectMeta.model_validate( + { + "name": topic.name, + "labels": {cluster_domain: cluster_name}, + } + ) + spec = TopicSpec.model_validate( + { + "partitions": topic.config.partitions_count, + "replicas": topic.config.replication_factor, + "config": topic.config.configs, + } + ) + return cls( + metadata=metadata, + spec=spec, + ) diff --git a/kpops/utils/cli_commands.py b/kpops/utils/cli_commands.py index ba1b295ad..2f994b494 100644 --- a/kpops/utils/cli_commands.py +++ b/kpops/utils/cli_commands.py @@ -52,7 +52,7 @@ def create_config(file_name: str, dir_path: Path, include_optional: bool) -> Non file_path = Path(dir_path / (file_name + ".yaml")) file_path.touch(exist_ok=False) with file_path.open(mode="w") as conf: - conf.write("# " + describe_object(KpopsConfig.__doc__)) # Write title + conf.write(f"# {describe_object(KpopsConfig.__doc__)}") # Write title non_required = extract_config_fields_for_yaml( collect_fields(KpopsConfig), False ) @@ -60,10 +60,16 @@ def create_config(file_name: str, dir_path: Path, include_optional: bool) -> Non for k in non_required: required.pop(k, None) conf.write("\n\n# Required fields\n") - conf.write(yaml.dump(required)) + conf.write(yaml.safe_dump(required)) + if include_optional: + dump = KpopsConfig(**non_required).model_dump( + mode="json", exclude_none=False + ) + for k in required: + dump.pop(k, None) conf.write("\n# Non-required fields\n") - conf.write(yaml.dump(non_required)) + conf.write(yaml.safe_dump(dump)) def init_project(path: Path, conf_incl_opt: bool): diff --git a/kpops/utils/dict_differ.py b/kpops/utils/dict_differ.py index 67b6d98c4..51d29bf7a 100644 --- a/kpops/utils/dict_differ.py +++ b/kpops/utils/dict_differ.py @@ -3,8 +3,8 @@ from collections.abc import Mapping, MutableMapping from dataclasses import dataclass from difflib import Differ -from enum import Enum -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from enum import StrEnum +from typing import TYPE_CHECKING, Any, Generic, NamedTuple, TypeVar import typer import yaml @@ -16,7 +16,7 @@ differ = Differ() -class DiffType(str, Enum): +class DiffType(StrEnum): ADD = "add" CHANGE = "change" REMOVE = "remove" @@ -30,8 +30,7 @@ def from_str(label: str) -> DiffType: _O = TypeVar("_O") _N = TypeVar("_N") -@dataclass -class Change(Generic[_O, _N]): # Generic NamedTuple requires Python 3.11+ +class Change(NamedTuple, Generic[_O, _N]): old_value: _O new_value: _N @@ -41,11 +40,11 @@ def factory( ) -> Change[_O | None, _N | None]: match type: case DiffType.ADD: - return Change(None, change) + return Change(None, change) # pyright: ignore[reportReturnType] case DiffType.REMOVE: - return Change(change, None) + return Change(change, None) # pyright: ignore[reportReturnType] case DiffType.CHANGE if isinstance(change, tuple): - return Change(*change) # pyright: 
ignore[reportUnknownArgumentType] + return Change(*change) # pyright: ignore[reportReturnType] msg = f"{type} is not part of {DiffType}" raise ValueError(msg) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 539e2cf3a..13b8c4b62 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,3 +1,6 @@ +# FIXME: pyright breaks here. Investigate why this is happening. +# type: ignore[reportGeneralTypeIssues] + import inspect import json import logging diff --git a/poetry.lock b/poetry.lock index a773e4d0c..54e505815 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "aiofiles" @@ -34,10 +34,8 @@ files = [ ] [package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] @@ -252,20 +250,6 @@ files = [ {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, ] -[[package]] -name = "exceptiongroup" -version = "1.0.4" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, -] - -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "faker" version = "22.0.0" @@ -1279,11 +1263,9 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -1799,17 +1781,6 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "typepy" version = "1.3.1" @@ -1951,5 +1922,5 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" -python-versions = ">=3.10, <3.13" -content-hash = "e307ba934678d208018611cc9567ef9bfa779fc7c024308d2c6fc6d01a32cafe" +python-versions = ">=3.11, <3.13" +content-hash = "82d4f691096d3aaa7cea1e1469b5ebec3daca4d649376c493af809d1978198e6" \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 8ab9f772a..16559f3a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ kpops = "kpops.cli.main:app" [tool.poetry.dependencies] -python = ">=3.10, <3.13" +python = ">=3.11, <3.13" anyio = "^4.3.0" pydantic = 
"^2.5.3" pydantic-settings = "^2.0.3" @@ -73,16 +73,13 @@ reportUnknownLambdaType = "warning" reportUnknownVariableType = "warning" reportUnknownMemberType = "warning" -reportIncompatibleVariableOverride = false -reportIncompatibleMethodOverride = false -# FIXME: causes issues on Python 3.10 -# reportIncompatibleVariableOverride = "warning" -# reportIncompatibleMethodOverride = "warning" +reportIncompatibleVariableOverride = "warning" +reportIncompatibleMethodOverride = "warning" [tool.ruff] output-format = "grouped" show-fixes = true -target-version = "py310" +target-version = "py311" extend-exclude = ["tests/*snapshots/*"] [tool.ruff.lint] diff --git a/tests/cli/snapshots/test_init/test_init_project/config_exclude_opt.yaml b/tests/cli/snapshots/test_init/test_init_project_exclude_optional/config.yaml similarity index 100% rename from tests/cli/snapshots/test_init/test_init_project/config_exclude_opt.yaml rename to tests/cli/snapshots/test_init/test_init_project_exclude_optional/config.yaml diff --git a/tests/cli/snapshots/test_init/test_init_project_exclude_optional/defaults.yaml b/tests/cli/snapshots/test_init/test_init_project_exclude_optional/defaults.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/tests/cli/snapshots/test_init/test_init_project_exclude_optional/pipeline.yaml b/tests/cli/snapshots/test_init/test_init_project_exclude_optional/pipeline.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/tests/cli/snapshots/test_init/test_init_project/config_include_opt.yaml b/tests/cli/snapshots/test_init/test_init_project_include_optional/config.yaml similarity index 91% rename from tests/cli/snapshots/test_init/test_init_project/config_include_opt.yaml rename to tests/cli/snapshots/test_init/test_init_project_include_optional/config.yaml index 3c86a269a..984340481 100644 --- a/tests/cli/snapshots/test_init/test_init_project/config_include_opt.yaml +++ b/tests/cli/snapshots/test_init/test_init_project_include_optional/config.yaml @@ -9,7 +9,8 @@ helm_config: api_version: null context: null debug: false -helm_diff_config: {} +helm_diff_config: + ignore: [] kafka_connect: timeout: 30 url: http://localhost:8083/ @@ -22,6 +23,7 @@ schema_registry: enabled: false timeout: 30 url: http://localhost:8081/ +strimzi_topic: null topic_name_config: default_error_topic_name: ${pipeline.name}-${component.name}-error default_output_topic_name: ${pipeline.name}-${component.name} diff --git a/tests/cli/snapshots/test_init/test_init_project_include_optional/defaults.yaml b/tests/cli/snapshots/test_init/test_init_project_include_optional/defaults.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/tests/cli/snapshots/test_init/test_init_project_include_optional/pipeline.yaml b/tests/cli/snapshots/test_init/test_init_project_include_optional/pipeline.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/tests/cli/test_init.py b/tests/cli/test_init.py index 3109d16fc..6a5c61303 100644 --- a/tests/cli/test_init.py +++ b/tests/cli/test_init.py @@ -1,5 +1,6 @@ from pathlib import Path +import pytest from pytest_snapshot.plugin import Snapshot from typer.testing import CliRunner @@ -22,23 +23,24 @@ def test_create_config(tmp_path: Path): assert len(opt_conf.read_text()) > len(req_conf.read_text()) -def test_init_project(tmp_path: Path, snapshot: Snapshot): +@pytest.mark.usefixtures("mock_env", "load_yaml_file_clear_cache", "clear_kpops_config") +def test_init_project_exclude_optional(tmp_path: Path, snapshot: Snapshot): + req_path = tmp_path / "req" + 
req_path.mkdir() + kpops.init(req_path, config_include_optional=False) + snapshot.assert_match(Path(req_path / "config.yaml").read_text(), "config.yaml") + snapshot.assert_match(Path(req_path / "pipeline.yaml").read_text(), "pipeline.yaml") + snapshot.assert_match(Path(req_path / "defaults.yaml").read_text(), "defaults.yaml") + + +def test_init_project_include_optional(tmp_path: Path, snapshot: Snapshot): opt_path = tmp_path / "opt" opt_path.mkdir() - kpops.init(opt_path, config_include_opt=False) - snapshot.assert_match( - Path(opt_path / "config.yaml").read_text(), "config_exclude_opt.yaml" - ) + kpops.init(opt_path, config_include_optional=True) + snapshot.assert_match(Path(opt_path / "config.yaml").read_text(), "config.yaml") snapshot.assert_match(Path(opt_path / "pipeline.yaml").read_text(), "pipeline.yaml") snapshot.assert_match(Path(opt_path / "defaults.yaml").read_text(), "defaults.yaml") - req_path = tmp_path / "req" - req_path.mkdir() - kpops.init(req_path, config_include_opt=True) - snapshot.assert_match( - Path(req_path / "config.yaml").read_text(), "config_include_opt.yaml" - ) - def test_init_project_from_cli_with_bad_path(tmp_path: Path): bad_path = Path(tmp_path / "random_file.yaml") diff --git a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py index 0e05ad09f..b33411001 100644 --- a/tests/component_handlers/helm_wrapper/test_dry_run_handler.py +++ b/tests/component_handlers/helm_wrapper/test_dry_run_handler.py @@ -8,7 +8,7 @@ from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler from kpops.component_handlers.helm_wrapper.model import HelmTemplate -from kpops.component_handlers.kubernetes.model import KubernetesManifest +from kpops.manifests.kubernetes import KubernetesManifest log = Logger("TestLogger") @@ -35,7 +35,14 @@ def test_should_print_helm_diff_when_release_is_new( ): helm_mock.get_manifest.return_value = iter(()) new_release = iter( - [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))] + [ + HelmTemplate( + Path("path.yaml"), + KubernetesManifest.model_validate( + {"apiVersion": "v1", "kind": "Deployment", "metadata": {}} + ), + ) + ] ) mock_load_manifest = mocker.patch( "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest", @@ -61,12 +68,24 @@ def test_should_print_helm_diff_when_release_exists( caplog: LogCaptureFixture, ): current_release = [ - HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1})) + HelmTemplate( + Path("path.yaml"), + KubernetesManifest.model_validate( + {"apiVersion": "v1", "kind": "Deployment", "metadata": {}} + ), + ) ] helm_mock.get_manifest.return_value = iter(current_release) new_release = iter( - [HelmTemplate(Path("path.yaml"), KubernetesManifest({"a": 1}))] + [ + HelmTemplate( + Path("path.yaml"), + KubernetesManifest.model_validate( + {"apiVersion": "v1", "kind": "Deployment", "metadata": {}} + ), + ) + ] ) mock_load_manifest = mocker.patch( "kpops.component_handlers.helm_wrapper.dry_run_handler.Helm.load_manifest", diff --git a/tests/component_handlers/helm_wrapper/test_helm_diff.py b/tests/component_handlers/helm_wrapper/test_helm_diff.py index ce64ec4ae..8373b5f0c 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_diff.py +++ b/tests/component_handlers/helm_wrapper/test_helm_diff.py @@ -6,7 +6,7 @@ from kpops.component_handlers.helm_wrapper.helm_diff import HelmDiff from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig, HelmTemplate -from 
kpops.component_handlers.kubernetes.model import KubernetesManifest +from kpops.manifests.kubernetes import KubernetesManifest, ObjectMeta from kpops.utils.dict_differ import Change logger = logging.getLogger("TestHelmDiff") @@ -18,11 +18,34 @@ def helm_diff(self) -> HelmDiff: return HelmDiff(HelmDiffConfig()) def test_calculate_changes_unchanged(self, helm_diff: HelmDiff): - templates = [HelmTemplate(Path("a.yaml"), KubernetesManifest())] + templates = [ + HelmTemplate( + Path("a.yaml"), + KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({}), + } + ), + ) + ] assert list(helm_diff.calculate_changes(templates, templates)) == [ Change( - old_value={}, - new_value={}, + old_value=KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({}), + } + ), + new_value=KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({}), + } + ), ), ] @@ -31,26 +54,86 @@ def test_calculate_changes_matching(self, helm_diff: HelmDiff): assert list( helm_diff.calculate_changes( [ - HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 1})), - HelmTemplate(Path("b.yaml"), KubernetesManifest({"b": 1})), + HelmTemplate( + Path("a.yaml"), + KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"a": "1"}), + } + ), + ), + HelmTemplate( + Path("b.yaml"), + KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"b": "1"}), + } + ), + ), ], [ - HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 2})), - HelmTemplate(Path("c.yaml"), KubernetesManifest({"c": 1})), + HelmTemplate( + Path("a.yaml"), + KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"a": "2"}), + } + ), + ), + HelmTemplate( + Path("c.yaml"), + KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"c": "1"}), + } + ), + ), ], ) ) == [ Change( - old_value={"a": 1}, - new_value={"a": 2}, + old_value=KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"a": "1"}), + } + ), + new_value=KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"a": "2"}), + } + ), ), Change( - old_value={"b": 1}, - new_value={}, + old_value=KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"b": "1"}), + } + ), + new_value=None, ), Change( - old_value={}, - new_value={"c": 1}, + old_value=None, + new_value=KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"c": "1"}), + } + ), ), ] @@ -58,12 +141,30 @@ def test_calculate_changes_new_release(self, helm_diff: HelmDiff): # test no current release assert list( helm_diff.calculate_changes( - (), [HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 1}))] + (), + [ + HelmTemplate( + Path("a.yaml"), + KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"a": "1"}), + } + ), + ) + ], ) ) == [ Change( - old_value={}, - new_value={"a": 1}, + old_value=None, + 
new_value=KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"a": "1"}), + } + ), ), ] @@ -71,6 +172,24 @@ def test_log_helm_diff(self, helm_diff: HelmDiff, caplog: LogCaptureFixture): helm_diff.log_helm_diff( logger, (), - [HelmTemplate(Path("a.yaml"), KubernetesManifest({"a": 1}))], + [ + HelmTemplate( + Path("a.yaml"), + KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": ObjectMeta.model_validate({"a": "1"}), + } + ), + ) + ], ) - assert caplog.messages == ["\n\x1b[32m+ a: 1\n\x1b[0m"] + assert caplog.messages == [ + "\n" + "\x1b[32m+ apiVersion: v1\n" + "\x1b[0m\x1b[32m+ kind: Deployment\n" + "\x1b[0m\x1b[32m+ metadata:\n" + "\x1b[0m\x1b[32m+ a: '1'\n" + "\x1b[0m" + ] diff --git a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py index 23c855251..3d80e59b2 100644 --- a/tests/component_handlers/helm_wrapper/test_helm_wrapper.py +++ b/tests/component_handlers/helm_wrapper/test_helm_wrapper.py @@ -13,12 +13,12 @@ HelmConfig, HelmTemplateFlags, HelmUpgradeInstallFlags, - KubernetesManifest, ParseError, RepoAuthFlags, Version, ) from kpops.components.common.app_type import AppType +from kpops.manifests.kubernetes import KubernetesManifest class TestHelmWrapper: @@ -292,7 +292,7 @@ def test_validate_console_output(self): def test_helm_template(self): path = Path("test2.yaml") - manifest = KubernetesManifest( + manifest = KubernetesManifest.model_validate( { "apiVersion": "v1", "kind": "ServiceAccount", @@ -309,12 +309,16 @@ def test_load_manifest_with_no_notes(self): MANIFEST: --- # Source: chart/templates/test3a.yaml - data: - - a: 1 - - b: 2 + apiVersion: v1 + kind: Pod + metadata: + name: test-3a --- # Source: chart/templates/test3b.yaml - foo: bar + apiVersion: v1 + kind: Pod + metadata: + name: test-3b """ ) helm_templates = list(Helm.load_manifest(stdout)) @@ -323,11 +327,21 @@ def test_load_manifest_with_no_notes(self): isinstance(helm_template, HelmTemplate) for helm_template in helm_templates ) assert helm_templates[0].filepath == Path("chart/templates/test3a.yaml") - assert helm_templates[0].manifest == KubernetesManifest( - {"data": [{"a": 1}, {"b": 2}]} + assert helm_templates[0].manifest == KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Pod", + "metadata": {"name": "test-3a"}, + } ) assert helm_templates[1].filepath == Path("chart/templates/test3b.yaml") - assert helm_templates[1].manifest == KubernetesManifest({"foo": "bar"}) + assert helm_templates[1].manifest == KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Pod", + "metadata": {"name": "test-3b"}, + } + ) def test_raise_parse_error_when_helm_content_is_invalid(self): stdout = dedent( @@ -372,12 +386,16 @@ def test_load_manifest(self): MANIFEST: --- # Source: chart/templates/test3a.yaml - data: - - a: 1 - - b: 2 + apiVersion: v1 + kind: Pod + metadata: + name: test-3a --- # Source: chart/templates/test3b.yaml - foo: bar + apiVersion: v1 + kind: Pod + metadata: + name: test-3b NOTES: 1. 
Get the application URL by running these commands: @@ -393,20 +411,31 @@ def test_load_manifest(self): isinstance(helm_template, HelmTemplate) for helm_template in helm_templates ) assert helm_templates[0].filepath == Path("chart/templates/test3a.yaml") - assert helm_templates[0].manifest == KubernetesManifest( - {"data": [{"a": 1}, {"b": 2}]} + assert helm_templates[0].manifest == KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Pod", + "metadata": {"name": "test-3a"}, + } ) assert helm_templates[1].filepath == Path("chart/templates/test3b.yaml") - assert helm_templates[1].manifest == KubernetesManifest({"foo": "bar"}) + assert helm_templates[1].manifest == KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Pod", + "metadata": {"name": "test-3b"}, + } + ) def test_helm_get_manifest(self, helm: Helm, mock_execute: MagicMock): mock_execute.return_value = dedent( """ --- # Source: chart/templates/test.yaml - data: - - a: 1 - - b: 2 + apiVersion: v1 + kind: Pod + metadata: + name: my-pod """ ) helm_templates = list(helm.get_manifest("test-release", "test-namespace")) @@ -422,8 +451,12 @@ def test_helm_get_manifest(self, helm: Helm, mock_execute: MagicMock): ) assert len(helm_templates) == 1 assert helm_templates[0].filepath == Path("chart/templates/test.yaml") - assert helm_templates[0].manifest == KubernetesManifest( - {"data": [{"a": 1}, {"b": 2}]} + assert helm_templates[0].manifest == KubernetesManifest.model_validate( + { + "apiVersion": "v1", + "kind": "Pod", + "metadata": {"name": "my-pod"}, + } ) mock_execute.side_effect = ReleaseNotFoundException() diff --git a/tests/component_handlers/kubernetes/model.py b/tests/component_handlers/kubernetes/model.py index 334c1f937..06c492def 100644 --- a/tests/component_handlers/kubernetes/model.py +++ b/tests/component_handlers/kubernetes/model.py @@ -2,7 +2,7 @@ import pytest -from kpops.component_handlers.kubernetes.model import KubernetesManifest +from kpops.manifests.kubernetes import KubernetesManifest class TestKubernetesManifest: @@ -23,7 +23,7 @@ class TestKubernetesManifest: ), [ KubernetesManifest( - { + **{ "apiVersion": "v1", "kind": "ServiceAccount", "metadata": {"labels": {"foo": "bar"}}, diff --git a/tests/components/test_helm_app.py b/tests/components/test_helm_app.py index 0d0649747..f72514299 100644 --- a/tests/components/test_helm_app.py +++ b/tests/components/test_helm_app.py @@ -9,8 +9,8 @@ HelmUpgradeInstallFlags, RepoAuthFlags, ) -from kpops.component_handlers.kubernetes.model import K8S_LABEL_MAX_LEN from kpops.components.base_components.helm_app import HelmApp, HelmAppValues +from kpops.manifests.kubernetes import K8S_LABEL_MAX_LEN from kpops.utils.colorify import magentaify diff --git a/tests/conftest.py b/tests/conftest.py index a819c26af..777c87416 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,6 @@ import pytest -from kpops.config import KpopsConfig from kpops.utils.environment import ENV, Environment from kpops.utils.yaml import load_yaml_file @@ -56,8 +55,10 @@ def custom_components() -> Iterator[None]: @pytest.fixture(scope="module") def clear_kpops_config() -> Iterator[None]: - yield + from kpops.config import KpopsConfig + KpopsConfig._instance = None + yield KUBECONFIG = """ diff --git a/tests/manifests/__init__.py b/tests/manifests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/manifests/strimzi/__init__.py b/tests/manifests/strimzi/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/tests/manifests/strimzi/test_kafka_topic.py b/tests/manifests/strimzi/test_kafka_topic.py new file mode 100644 index 000000000..a519c2e94 --- /dev/null +++ b/tests/manifests/strimzi/test_kafka_topic.py @@ -0,0 +1,77 @@ +from unittest.mock import MagicMock + +import pytest +from pydantic import ValidationError as PydanticValidationError +from pytest_mock import MockerFixture + +from kpops.api.exception import ValidationError +from kpops.components.common.topic import KafkaTopic, TopicConfig +from kpops.manifests.strimzi.kafka_topic import StrimziKafkaTopic, TopicSpec + + +@pytest.fixture +def kafka_topic() -> KafkaTopic: + return KafkaTopic( + name="test-topic", + config=TopicConfig.model_validate( + { + "partitions_count": 3, + "replication_factor": 2, + "configs": {"cleanup.policy": "compact"}, + }, + ), + ) + + +def test_topic_spec_defaults(): + spec = TopicSpec() + assert spec.partitions == 1 + assert spec.replicas == 1 + assert spec.config is None + + +def test_topic_spec_custom_values(): + spec = TopicSpec(partitions=3, replicas=2, config={"retention.ms": "60000"}) + assert spec.partitions == 3 + assert spec.replicas == 2 + assert spec.config == {"retention.ms": "60000"} + + +def test_topic_spec_validation(): + with pytest.raises(PydanticValidationError): + TopicSpec(partitions=0) # Less than 1, should raise validation error + + with pytest.raises(PydanticValidationError): + TopicSpec(replicas=40000) # Exceeds max value, should raise validation error + + +def test_strimzi_kafka_topic_from_topic(kafka_topic: KafkaTopic, mocker: MockerFixture): + mock_config = MagicMock() + mock_config.strimzi_topic.cluster_labels = ("bakdata.com/cluster", "my-cluster") + mocker.patch( + "kpops.manifests.strimzi.kafka_topic.get_config", return_value=mock_config + ) + + strimzi_topic = StrimziKafkaTopic.from_topic(kafka_topic) + + # Check metadata + assert strimzi_topic.metadata.name == kafka_topic.name + assert strimzi_topic.metadata.labels == {"bakdata.com/cluster": "my-cluster"} + + # Check spec + assert strimzi_topic.spec.partitions == kafka_topic.config.partitions_count + assert strimzi_topic.spec.replicas == kafka_topic.config.replication_factor + assert strimzi_topic.spec.config == kafka_topic.config.configs + + +def test_strimzi_kafka_topic_missing_config(kafka_topic: KafkaTopic, mocker: MockerFixture): + mock_config = MagicMock() + mock_config.strimzi_topic = None + mocker.patch( + "kpops.manifests.strimzi.kafka_topic.get_config", return_value=mock_config + ) + + with pytest.raises( + ValidationError, match="must define 'strimzi_topic.label'" + ): + StrimziKafkaTopic.from_topic(kafka_topic)
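For orientation, a hypothetical end-to-end sketch (topic name and cluster label are illustrative) of how a constructed topic serializes into the YAML shown in the pipeline snapshots further below:

import yaml

from kpops.manifests.kubernetes import ObjectMeta
from kpops.manifests.strimzi.kafka_topic import StrimziKafkaTopic, TopicSpec

topic = StrimziKafkaTopic(
    metadata=ObjectMeta.model_validate(
        {"name": "my-output-topic", "labels": {"strimzi.io/cluster": "my-cluster"}}
    ),
    spec=TopicSpec(partitions=1, replicas=1, config={}),
)
# model_dump keeps apiVersion/kind (always required) plus the explicitly set fields.
print(yaml.safe_dump(topic.model_dump()))
# apiVersion: kafka.strimzi.io/v1beta2
# kind: KafkaTopic
# metadata:
#   labels:
#     strimzi.io/cluster: my-cluster
#   name: my-output-topic
# spec:
#   config: {}
#   partitions: 1
#   replicas: 1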
diff --git a/tests/manifests/test_argo_enricher.py b/tests/manifests/test_argo_enricher.py new file mode 100644 index 000000000..0ee4165a1 --- /dev/null +++ b/tests/manifests/test_argo_enricher.py @@ -0,0 +1,67 @@ +from typing import Any + +import pytest + +from kpops.manifests.argo import ArgoHook, ArgoSyncWave, enrich_annotations + + +@pytest.fixture +def empty_manifest() -> dict[str, Any]: + return {} + + +@pytest.fixture +def manifest_with_annotations() -> dict[str, Any]: + return {"annotations": {"existing-annotation": "annotation-value"}} + + +def test_argo_hook_enrich_empty_manifest(empty_manifest: dict[str, Any]): + hook = ArgoHook.POST_DELETE + enriched_manifest = enrich_annotations(empty_manifest, hook.key, hook.value) + assert enriched_manifest["annotations"][hook.key] == hook.value + assert len(enriched_manifest["annotations"]) == 1 + + +def test_argo_hook_enrich_existing_annotations( + manifest_with_annotations: dict[str, Any], +): + hook = ArgoHook.POST_DELETE + enriched_manifest = enrich_annotations( + manifest_with_annotations, hook.key, hook.value + ) + assert enriched_manifest["annotations"][hook.key] == hook.value + assert enriched_manifest["annotations"]["existing-annotation"] == "annotation-value" + + +def test_argo_sync_wave_enrich_empty_manifest(empty_manifest: dict[str, Any]): + sync_wave = ArgoSyncWave(sync_wave=1) + enriched_manifest = enrich_annotations( + empty_manifest, sync_wave.key, sync_wave.value + ) + assert enriched_manifest["annotations"][sync_wave.key] == sync_wave.value + assert len(enriched_manifest["annotations"]) == 1 + + +def test_argo_sync_wave_enrich_existing_annotations( + manifest_with_annotations: dict[str, Any], +): + sync_wave = ArgoSyncWave(sync_wave=2) + enriched_manifest = enrich_annotations( + manifest_with_annotations, sync_wave.key, sync_wave.value + ) + assert enriched_manifest["annotations"][sync_wave.key] == sync_wave.value + assert enriched_manifest["annotations"]["existing-annotation"] == "annotation-value" + + +def test_argo_sync_wave_multiple_enrichments(empty_manifest: dict[str, Any]): + sync_wave_1 = ArgoSyncWave(sync_wave=1) + sync_wave_2 = ArgoSyncWave(sync_wave=2) + enriched_manifest = enrich_annotations( + empty_manifest, sync_wave_1.key, sync_wave_1.value + ) + enriched_manifest = enrich_annotations( + enriched_manifest, sync_wave_2.key, sync_wave_2.value + ) + # Both sync waves share the same annotation key, so the second enrichment overwrites the first. + assert enriched_manifest["annotations"][sync_wave_1.key] == sync_wave_2.value + assert len(enriched_manifest["annotations"]) == 1 diff --git a/tests/manifests/test_kubernetes_model.py b/tests/manifests/test_kubernetes_model.py new file mode 100644 index 000000000..1b8aad46e --- /dev/null +++ b/tests/manifests/test_kubernetes_model.py @@ -0,0 +1,127 @@ +from textwrap import dedent + +import pytest + +from kpops.manifests.kubernetes import KubernetesManifest, ObjectMeta + + +class TestCRD(KubernetesManifest): + api_version: str = "v1" + kind: str = "TestCRD" + + +@pytest.fixture +def crd_manifest() -> TestCRD: + return TestCRD(metadata=ObjectMeta.model_validate({"foo": "bar"})) + + +@pytest.fixture +def example_manifest() -> KubernetesManifest: + """Fixture providing an example KubernetesManifest instance.""" + metadata = ObjectMeta( + name="example", + namespace="default", + labels={"app": "test"}, + ) + return KubernetesManifest( + api_version="v1", + kind="Deployment", + metadata=metadata, + ) + + +def test_serialize_model_include_required_fields(crd_manifest: TestCRD): + """Test that required fields are serialized even when left at their defaults.""" + serialized = crd_manifest.model_dump() + expected_serialized = { + "apiVersion": "v1", + "kind": "TestCRD", + "metadata": {"foo": "bar"}, + } + assert serialized == expected_serialized + + +def test_serialize_model_excludes_none(example_manifest: KubernetesManifest): + """Test that the serialize_model method excludes unset fields.""" + serialized = example_manifest.model_dump() + expected_serialized = { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": { + "name": "example", + "namespace": "default", + "labels": {"app": "test"}, + }, + } + assert serialized == expected_serialized + + +def test_serialize_model_includes_required_fields(): + """Test that required fields are always included in serialization.""" + metadata = ObjectMeta(name="example", namespace="default") + manifest = KubernetesManifest(api_version="v1", kind="Pod", metadata=metadata) + serialized = manifest.model_dump() + assert "apiVersion" in serialized + assert "kind" in serialized + assert 
"metadata" in serialized + + +def test_from_yaml_parsing(): + """Test the from_yaml method parses YAML into KubernetesManifest objects.""" + yaml_content = dedent( + """ + --- + apiVersion: v1 + kind: Service + metadata: + name: test-service + namespace: test-namespace + + --- + apiVersion: v1 + kind: Pod + metadata: + name: test-pod + namespace: test-namespace + """ + ) + manifests = list(KubernetesManifest.from_yaml(yaml_content)) + assert len(manifests) == 2 + assert manifests[0].api_version == "v1" + assert manifests[0].kind == "Service" + assert manifests[0].metadata.name == "test-service" + assert manifests[1].kind == "Pod" + assert manifests[1].metadata.name == "test-pod" + + +def test_model_dump_json_output(example_manifest: KubernetesManifest): + """Test the model_dump method for JSON output.""" + dumped = example_manifest.model_dump() + expected_dumped = { + "apiVersion": "v1", + "kind": "Deployment", + "metadata": { + "name": "example", + "namespace": "default", + "labels": {"app": "test"}, + }, + } + assert dumped == expected_dumped + + +def test_objectmeta_serialization(): + """Test ObjectMeta serialization with optional fields.""" + metadata = ObjectMeta( + name="example", + namespace="default", + labels={"app": "test"}, + annotations=None, # This field should be included + ) + serialized = metadata.model_dump() + expected_serialized = { + "annotations": None, + "name": "example", + "namespace": "default", + "labels": {"app": "test"}, + } + assert serialized == expected_serialized diff --git a/tests/pipeline/resources/manifest-pipeline/defaults.yaml b/tests/pipeline/resources/manifest-pipeline/defaults.yaml new file mode 100644 index 000000000..d24ae3b07 --- /dev/null +++ b/tests/pipeline/resources/manifest-pipeline/defaults.yaml @@ -0,0 +1,22 @@ +streams-bootstrap: + version: "3.0.3" + values: + kafka: + bootstrapServers: ${config.kafka_brokers} + schemaRegistryUrl: ${config.schema_registry.url} + +producer-app: {} # inherits from streams-bootstrap + +streams-app: # inherits from streams-bootstrap + values: + prometheus: + jmx: + enabled: false + to: + topics: + ${error_topic_name}: + type: error + value_schema: com.bakdata.kafka.DeadLetter + partitions_count: 1 + configs: + cleanup.policy: compact,delete diff --git a/tests/pipeline/resources/manifest-pipeline/pipeline.yaml b/tests/pipeline/resources/manifest-pipeline/pipeline.yaml new file mode 100644 index 000000000..cf5f7342c --- /dev/null +++ b/tests/pipeline/resources/manifest-pipeline/pipeline.yaml @@ -0,0 +1,40 @@ +- type: my-producer-app + values: + image: "my-registry/my-producer-image" + imageTag: "1.0.0" + + to: + topics: + my-producer-app-output-topic: + type: output + my-labeled-producer-app-topic-output: + label: my-producer-app-output-topic-label + + +- type: my-streams-app + values: + image: "my-registry/my-streams-app-image" + imageTag: "1.0.0" + kafka: + applicationId: "my-streams-app-id" + + from: + topics: + my-input-topic: + type: input + my-labeled-input-topic: + label: my-input-topic-label + my-input-pattern: + type: pattern + my-labeled-input-pattern: + type: pattern + label: my-input-topic-labeled-pattern + + to: + topics: + my-output-topic: + type: output + my-error-topic: + type: error + my-labeled-topic-output: + label: my-output-topic-label diff --git a/tests/pipeline/snapshots/test_manifest/test_deploy_argo_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_deploy_argo_mode/manifest.yaml new file mode 100644 index 000000000..336d3d726 --- /dev/null +++ 
b/tests/pipeline/snapshots/test_manifest/test_deploy_argo_mode/manifest.yaml @@ -0,0 +1,291 @@ +--- +apiVersion: batch/v1 +kind: Job +metadata: + annotations: + argocd.argoproj.io/sync-wave: '1' + labels: + app: resources-manifest-pipeline-my-producer-app + chart: producer-app-3.0.3 + release: resources-manifest-pipeline-my-producer-app + name: resources-manifest-pipeline-my-producer-app +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-producer-app + release: resources-manifest-pipeline-my-producer-app + spec: + containers: + - env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_OUTPUT_TOPIC + value: my-producer-app-output-topic + - name: APP_LABELED_OUTPUT_TOPICS + value: my-producer-app-output-topic-label=my-labeled-producer-app-topic-output, + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-producer-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-producer-app + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: batch/v1 +kind: Job +metadata: + annotations: + argocd.argoproj.io/hook: PostDelete + labels: + app: resources-manifest-pipeline-my-producer-app-clean + chart: producer-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-producer-app-clean + name: resources-manifest-pipeline-my-producer-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-producer-app-clean + release: resources-manifest-pipeline-my-producer-app-clean + spec: + containers: + - args: + - clean + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_OUTPUT_TOPIC + value: my-producer-app-output-topic + - name: APP_LABELED_OUTPUT_TOPICS + value: my-producer-app-output-topic-label=my-labeled-producer-app-topic-output, + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-producer-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-producer-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: + argocd.argoproj.io/sync-wave: '1' + consumerGroup: my-streams-app-id + labels: + app: resources-manifest-pipeline-my-streams-app + chart: streams-app-3.0.3 + release: resources-manifest-pipeline-my-streams-app + name: resources-manifest-pipeline-my-streams-app +spec: + replicas: 1 + selector: + matchLabels: + app: resources-manifest-pipeline-my-streams-app + release: resources-manifest-pipeline-my-streams-app + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app + 
release: resources-manifest-pipeline-my-streams-app + spec: + containers: + - env: + - name: ENV_PREFIX + value: APP_ + - name: APP_VOLATILE_GROUP_INSTANCE_ID + value: 'true' + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-Dcom.sun.management.jmxremote.port=5555 -Dcom.sun.management.jmxremote.authenticate=false + -Dcom.sun.management.jmxremote.ssl=false -XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + terminationGracePeriodSeconds: 300 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + +--- +apiVersion: batch/v1 +kind: Job +metadata: + annotations: + argocd.argoproj.io/hook: PostDelete + labels: + app: resources-manifest-pipeline-my-streams-app-clean + chart: streams-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-streams-app-clean + name: resources-manifest-pipeline-my-streams-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + release: resources-manifest-pipeline-my-streams-app-clean + spec: + containers: + - args: + - reset + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: 
JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + diff --git a/tests/pipeline/snapshots/test_manifest/test_deploy_manifest_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_deploy_manifest_mode/manifest.yaml new file mode 100644 index 000000000..7b6d5b8aa --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_deploy_manifest_mode/manifest.yaml @@ -0,0 +1,180 @@ +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-producer-app + chart: producer-app-3.0.3 + release: resources-manifest-pipeline-my-producer-app + name: resources-manifest-pipeline-my-producer-app +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-producer-app + release: resources-manifest-pipeline-my-producer-app + spec: + containers: + - env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_OUTPUT_TOPIC + value: my-producer-app-output-topic + - name: APP_LABELED_OUTPUT_TOPICS + value: my-producer-app-output-topic-label=my-labeled-producer-app-topic-output, + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-producer-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-producer-app + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: + consumerGroup: my-streams-app-id + labels: + app: resources-manifest-pipeline-my-streams-app + chart: streams-app-3.0.3 + release: resources-manifest-pipeline-my-streams-app + name: resources-manifest-pipeline-my-streams-app +spec: + replicas: 1 + selector: + matchLabels: + app: resources-manifest-pipeline-my-streams-app + release: resources-manifest-pipeline-my-streams-app + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app + release: resources-manifest-pipeline-my-streams-app + spec: + containers: + - env: + - name: ENV_PREFIX + value: APP_ + - name: APP_VOLATILE_GROUP_INSTANCE_ID + value: 'true' + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: 
APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-Dcom.sun.management.jmxremote.port=5555 -Dcom.sun.management.jmxremote.authenticate=false + -Dcom.sun.management.jmxremote.ssl=false -XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + terminationGracePeriodSeconds: 300 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_clean_argo_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_clean_argo_mode/manifest.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_clean_manifest_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_clean_manifest_mode/manifest.yaml new file mode 100644 index 000000000..e5a2a9a14 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_clean_manifest_mode/manifest.yaml @@ -0,0 +1,104 @@ +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-producer-app-clean + chart: producer-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-producer-app-clean + name: resources-manifest-pipeline-my-producer-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-producer-app-clean + release: resources-manifest-pipeline-my-producer-app-clean + spec: + containers: + - args: + - clean + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_OUTPUT_TOPIC + value: my-producer-app-output-topic + - name: APP_LABELED_OUTPUT_TOPICS + value: my-producer-app-output-topic-label=my-labeled-producer-app-topic-output, + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-producer-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-producer-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + chart: streams-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-streams-app-clean + name: resources-manifest-pipeline-my-streams-app-clean +spec: + backoffLimit: 6 + 
template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + release: resources-manifest-pipeline-my-streams-app-clean + spec: + containers: + - args: + - reset + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_clean_python_api/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_clean_python_api/manifest.yaml new file mode 100644 index 000000000..e5a2a9a14 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_clean_python_api/manifest.yaml @@ -0,0 +1,104 @@ +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-producer-app-clean + chart: producer-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-producer-app-clean + name: resources-manifest-pipeline-my-producer-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-producer-app-clean + release: resources-manifest-pipeline-my-producer-app-clean + spec: + containers: + - args: + - clean + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_OUTPUT_TOPIC + value: my-producer-app-output-topic + - name: APP_LABELED_OUTPUT_TOPICS + value: my-producer-app-output-topic-label=my-labeled-producer-app-topic-output, + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-producer-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-producer-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + chart: streams-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-streams-app-clean + name: resources-manifest-pipeline-my-streams-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + release: resources-manifest-pipeline-my-streams-app-clean + spec: + containers: + - args: + - reset + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: 
APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_deploy_python_api/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_deploy_python_api/manifest.yaml new file mode 100644 index 000000000..7b6d5b8aa --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_deploy_python_api/manifest.yaml @@ -0,0 +1,180 @@ +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-producer-app + chart: producer-app-3.0.3 + release: resources-manifest-pipeline-my-producer-app + name: resources-manifest-pipeline-my-producer-app +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-producer-app + release: resources-manifest-pipeline-my-producer-app + spec: + containers: + - env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_OUTPUT_TOPIC + value: my-producer-app-output-topic + - name: APP_LABELED_OUTPUT_TOPICS + value: my-producer-app-output-topic-label=my-labeled-producer-app-topic-output, + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-producer-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-producer-app + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: + consumerGroup: my-streams-app-id + labels: + app: resources-manifest-pipeline-my-streams-app + chart: streams-app-3.0.3 + release: resources-manifest-pipeline-my-streams-app + name: resources-manifest-pipeline-my-streams-app +spec: + replicas: 1 + selector: + matchLabels: + app: resources-manifest-pipeline-my-streams-app + release: resources-manifest-pipeline-my-streams-app + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app + release: resources-manifest-pipeline-my-streams-app + spec: + containers: + - env: + - name: ENV_PREFIX + value: APP_ + - name: 
APP_VOLATILE_GROUP_INSTANCE_ID + value: 'true' + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-Dcom.sun.management.jmxremote.port=5555 -Dcom.sun.management.jmxremote.authenticate=false + -Dcom.sun.management.jmxremote.ssl=false -XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + terminationGracePeriodSeconds: 300 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_argo_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_argo_mode/manifest.yaml new file mode 100644 index 000000000..a27b34a41 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_argo_mode/manifest.yaml @@ -0,0 +1,73 @@ +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + 
name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_manifest_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_manifest_mode/manifest.yaml new file mode 100644 index 000000000..a27b34a41 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_manifest_mode/manifest.yaml @@ -0,0 +1,73 @@ +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_python_api/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_python_api/manifest.yaml new file mode 100644 index 000000000..a27b34a41 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_destroy_python_api/manifest.yaml @@ -0,0 +1,73 @@ +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_reset_argo_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_reset_argo_mode/manifest.yaml new file 
mode 100644 index 000000000..fb5a29c61 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_reset_argo_mode/manifest.yaml @@ -0,0 +1,131 @@ +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + chart: streams-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-streams-app-clean + name: resources-manifest-pipeline-my-streams-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + release: resources-manifest-pipeline-my-streams-app-clean + spec: + containers: + - args: + - reset + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_reset_manifest_mode/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_reset_manifest_mode/manifest.yaml new file mode 100644 index 000000000..fb5a29c61 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_reset_manifest_mode/manifest.yaml @@ -0,0 +1,131 @@ +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: 
+ config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + chart: streams-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-streams-app-clean + name: resources-manifest-pipeline-my-streams-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + release: resources-manifest-pipeline-my-streams-app-clean + spec: + containers: + - args: + - reset + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_manifest_reset_python_api/manifest.yaml b/tests/pipeline/snapshots/test_manifest/test_manifest_reset_python_api/manifest.yaml new file mode 100644 index 000000000..fb5a29c61 --- /dev/null +++ b/tests/pipeline/snapshots/test_manifest/test_manifest_reset_python_api/manifest.yaml @@ -0,0 +1,131 @@ +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: batch/v1 +kind: Job +metadata: + labels: + app: 
resources-manifest-pipeline-my-streams-app-clean + chart: streams-app-cleanup-job-3.0.3 + release: resources-manifest-pipeline-my-streams-app-clean + name: resources-manifest-pipeline-my-streams-app-clean +spec: + backoffLimit: 6 + template: + metadata: + labels: + app: resources-manifest-pipeline-my-streams-app-clean + release: resources-manifest-pipeline-my-streams-app-clean + spec: + containers: + - args: + - reset + env: + - name: ENV_PREFIX + value: APP_ + - name: APP_BOOTSTRAP_SERVERS + value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 + - name: APP_SCHEMA_REGISTRY_URL + value: http://localhost:8081/ + - name: APP_INPUT_TOPICS + value: my-input-topic + - name: APP_INPUT_PATTERN + value: my-input-pattern + - name: APP_OUTPUT_TOPIC + value: my-output-topic + - name: APP_ERROR_TOPIC + value: resources-manifest-pipeline-my-streams-app-error + - name: APP_LABELED_OUTPUT_TOPICS + value: my-output-topic-label=my-labeled-topic-output, + - name: APP_LABELED_INPUT_TOPICS + value: my-input-topic-label=my-labeled-input-topic, + - name: APP_LABELED_INPUT_PATTERNS + value: my-input-topic-labeled-pattern=my-labeled-input-pattern, + - name: APP_APPLICATION_ID + value: my-streams-app-id + - name: JAVA_TOOL_OPTIONS + value: '-XX:MaxRAMPercentage=75.0 ' + image: my-registry/my-streams-app-image:1.0.0 + imagePullPolicy: Always + name: resources-manifest-pipeline-my-streams-app-clean + resources: + limits: + cpu: 500m + memory: 2G + requests: + cpu: 200m + memory: 300Mi + restartPolicy: OnFailure + ttlSecondsAfterFinished: 30 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-manifest-pipeline-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/snapshots/test_manifest/test_python_api/resources b/tests/pipeline/snapshots/test_manifest/test_python_api/resources deleted file mode 100644 index eca5af9eb..000000000 --- a/tests/pipeline/snapshots/test_manifest/test_python_api/resources +++ /dev/null @@ -1,165 +0,0 @@ -- !!python/object:kpops.component_handlers.kubernetes.model.KubernetesManifest - data: - apiVersion: batch/v1 - kind: Job - metadata: - labels: - app: resources-custom-config-app1 - chart: producer-app-2.9.0 - release: resources-custom-config-app1 - name: resources-custom-config-app1 - spec: - backoffLimit: 6 - template: - metadata: - labels: - app: resources-custom-config-app1 - release: resources-custom-config-app1 - spec: - affinity: null - containers: - - env: - - name: ENV_PREFIX - value: APP_ - - name: APP_BROKERS - value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 - - name: APP_SCHEMA_REGISTRY_URL - value: http://localhost:8081/ - - name: APP_DEBUG - value: 'false' - - name: APP_OUTPUT_TOPIC - value: resources-custom-config-app1 - - name: JAVA_TOOL_OPTIONS - value: '-XX:MaxRAMPercentage=75.0 ' - image: producerApp:latest - 
imagePullPolicy: Always - name: resources-custom-config-app1 - resources: - limits: - cpu: 500m - memory: 2G - requests: - cpu: 200m - memory: 2G - restartPolicy: OnFailure ---- -- !!python/object:kpops.component_handlers.kubernetes.model.KubernetesManifest - data: - apiVersion: v1 - data: - jmx-kafka-streams-app-prometheus.yml: "jmxUrl: service:jmx:rmi:///jndi/rmi://localhost:5555/jmxrmi\n\ - lowercaseOutputName: true\nlowercaseOutputLabelNames: true\nssl: false\nrules:\n\ - \ - pattern: \".*\"\n" - kind: ConfigMap - metadata: - labels: - app: resources-custom-config-app2 - chart: streams-app-2.9.0 - heritage: Helm - release: resources-custom-config-app2 - name: resources-custom-config-app2-jmx-configmap -- !!python/object:kpops.component_handlers.kubernetes.model.KubernetesManifest - data: - apiVersion: apps/v1 - kind: Deployment - metadata: - labels: - app: resources-custom-config-app2 - chart: streams-app-2.9.0 - pipeline: resources-custom-config - release: resources-custom-config-app2 - name: resources-custom-config-app2 - spec: - replicas: 1 - selector: - matchLabels: - app: resources-custom-config-app2 - release: resources-custom-config-app2 - template: - metadata: - annotations: - prometheus.io/port: '5556' - prometheus.io/scrape: 'true' - labels: - app: resources-custom-config-app2 - pipeline: resources-custom-config - release: resources-custom-config-app2 - spec: - affinity: - podAntiAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - - podAffinityTerm: - labelSelector: - matchExpressions: - - key: app - operator: In - values: - - resources-custom-config-app2 - topologyKey: kubernetes.io/hostname - weight: 1 - containers: - - env: - - name: ENV_PREFIX - value: APP_ - - name: STREAMS_LARGE_MESSAGE_ID_GENERATOR - value: com.bakdata.kafka.MurmurHashIdGenerator - - name: KAFKA_JMX_PORT - value: '5555' - - name: APP_VOLATILE_GROUP_INSTANCE_ID - value: 'true' - - name: APP_BROKERS - value: http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092 - - name: APP_SCHEMA_REGISTRY_URL - value: http://localhost:8081/ - - name: APP_DEBUG - value: 'false' - - name: APP_INPUT_TOPICS - value: resources-custom-config-app1 - - name: APP_OUTPUT_TOPIC - value: resources-custom-config-app2 - - name: APP_ERROR_TOPIC - value: resources-custom-config-app2-error - - name: JAVA_TOOL_OPTIONS - value: '-Dcom.sun.management.jmxremote.port=5555 -Dcom.sun.management.jmxremote.authenticate=false - -Dcom.sun.management.jmxremote.ssl=false -XX:MaxRAMPercentage=75.0 ' - image: some-image:latest - imagePullPolicy: Always - name: resources-custom-config-app2 - ports: - - containerPort: 5555 - name: jmx - resources: - limits: - cpu: 500m - memory: 2G - requests: - cpu: 200m - memory: 300Mi - - command: - - java - - -XX:+UnlockExperimentalVMOptions - - -XX:+UseCGroupMemoryLimitForHeap - - -XX:MaxRAMFraction=1 - - -XshowSettings:vm - - -jar - - jmx_prometheus_httpserver.jar - - '5556' - - /etc/jmx-streams-app/jmx-kafka-streams-app-prometheus.yml - image: solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143 - name: prometheus-jmx-exporter - ports: - - containerPort: 5556 - resources: - limits: - cpu: 300m - memory: 2G - requests: - cpu: 100m - memory: 500Mi - volumeMounts: - - mountPath: /etc/jmx-streams-app - name: jmx-config - volumes: - - configMap: - name: resources-custom-config-app2-jmx-configmap - name: jmx-config diff --git a/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap/manifest.yaml 
b/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap/manifest.yaml index 84354ab8e..605d5909a 100644 --- a/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap/manifest.yaml +++ b/tests/pipeline/snapshots/test_manifest/test_streams_bootstrap/manifest.yaml @@ -70,6 +70,30 @@ spec: successfulJobsHistoryLimit: 1 suspend: false +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-producer-app-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-producer-app-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + --- apiVersion: v1 data: @@ -222,3 +246,52 @@ spec: name: resources-streams-bootstrap-my-streams-app name: config +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-output-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-error-topic +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: my-labeled-topic-output +spec: + config: {} + partitions: 1 + replicas: 1 + +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: my-cluster + name: resources-streams-bootstrap-my-streams-app-error +spec: + config: + cleanup.policy: compact,delete + partitions: 1 + replicas: 1 + diff --git a/tests/pipeline/test_manifest.py b/tests/pipeline/test_manifest.py index 84b742ae0..87fe9b5a0 100644 --- a/tests/pipeline/test_manifest.py +++ b/tests/pipeline/test_manifest.py @@ -1,8 +1,9 @@ +from collections.abc import Iterator from pathlib import Path from unittest.mock import ANY, MagicMock import pytest -import yaml +from _pytest.capture import CaptureFixture from pytest_mock import MockerFixture from pytest_snapshot.plugin import Snapshot from typer.testing import CliRunner @@ -12,6 +13,8 @@ from kpops.component_handlers.helm_wrapper.helm import Helm from kpops.component_handlers.helm_wrapper.model import HelmConfig, Version from kpops.const.file_type import PIPELINE_YAML +from kpops.manifests.kubernetes import KubernetesManifest +from kpops.utils.yaml import print_yaml MANIFEST_YAML = "manifest.yaml" @@ -42,10 +45,12 @@ def test_default_config(self, mock_execute: MagicMock): result = runner.invoke( app, [ - "manifest", + "deploy", str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--environment", "development", + "--operation-mode", + "manifest", ], catch_exceptions=False, ) @@ -72,12 +77,14 @@ def test_custom_config(self, mock_execute: MagicMock): result = runner.invoke( app, [ - "manifest", + "deploy", str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--config", str(RESOURCE_PATH / "custom-config"), "--environment", "development", + "--operation-mode", + "manifest", ], catch_exceptions=False, ) @@ -106,33 +113,212 @@ def test_manifest_command(self, snapshot: Snapshot): result = runner.invoke( app, [ - "manifest", + "deploy", str(RESOURCE_PATH / "custom-config/pipeline.yaml"), "--environment", "development", + "--operation-mode", + "manifest", ], catch_exceptions=False, ) assert result.exit_code == 0, result.stdout snapshot.assert_match(result.stdout, MANIFEST_YAML) - def 
test_python_api(self, snapshot: Snapshot): - resources = kpops.manifest( - RESOURCE_PATH / "custom-config/pipeline.yaml", + def test_manifest_deploy_python_api( + self, capsys: CaptureFixture, snapshot: Snapshot + ): + generator = kpops.manifest_deploy( + RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML, environment="development", ) - assert isinstance(resources, list) + assert isinstance(generator, Iterator) + resources = list(generator) assert len(resources) == 2 - snapshot.assert_match(yaml.dump_all(resources), "resources") + for resource in resources: + for manifest in resource: + assert isinstance(manifest, KubernetesManifest) + print_yaml(manifest.model_dump()) + + captured = capsys.readouterr() + snapshot.assert_match(captured.out, MANIFEST_YAML) def test_streams_bootstrap(self, snapshot: Snapshot): result = runner.invoke( app, [ - "manifest", + "deploy", str(RESOURCE_PATH / "streams-bootstrap" / PIPELINE_YAML), + "--operation-mode", + "manifest", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_deploy_manifest_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "deploy", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + "--operation-mode", + "manifest", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_deploy_argo_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "deploy", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + "--operation-mode", + "argo", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_manifest_destroy_manifest_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "destroy", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + "--operation-mode", + "manifest", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_manifest_destroy_argo_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "destroy", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + "--operation-mode", + "argo", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_manifest_destroy_python_api( + self, capsys: CaptureFixture, snapshot: Snapshot + ): + generator = kpops.manifest_destroy( + RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML, + environment="development", + ) + assert isinstance(generator, Iterator) + resources = list(generator) + assert len(resources) == 2 + for resource in resources: + for manifest in resource: + assert isinstance(manifest, KubernetesManifest) + print_yaml(manifest.model_dump()) + + captured = capsys.readouterr() + snapshot.assert_match(captured.out, MANIFEST_YAML) + + def test_manifest_reset_manifest_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "reset", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + "--operation-mode", + "manifest", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_manifest_reset_argo_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "reset", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + 
"--operation-mode", + "argo", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_manifest_reset_python_api( + self, capsys: CaptureFixture, snapshot: Snapshot + ): + generator = kpops.manifest_reset( + RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML, + environment="development", + ) + assert isinstance(generator, Iterator) + resources = list(generator) + assert len(resources) == 2 + for resource in resources: + for manifest in resource: + assert isinstance(manifest, KubernetesManifest) + print_yaml(manifest.model_dump()) + + captured = capsys.readouterr() + snapshot.assert_match(captured.out, MANIFEST_YAML) + + def test_manifest_clean_manifest_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "clean", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + "--operation-mode", + "manifest", ], catch_exceptions=False, ) assert result.exit_code == 0, result.stdout snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_manifest_clean_argo_mode(self, snapshot: Snapshot): + result = runner.invoke( + app, + [ + "clean", + str(RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML), + "--operation-mode", + "argo", + ], + catch_exceptions=False, + ) + assert result.exit_code == 0, result.stdout + snapshot.assert_match(result.stdout, MANIFEST_YAML) + + def test_manifest_clean_python_api( + self, capsys: CaptureFixture, snapshot: Snapshot + ): + generator = kpops.manifest_clean( + RESOURCE_PATH / "manifest-pipeline" / PIPELINE_YAML, + environment="development", + ) + assert isinstance(generator, Iterator) + resources = list(generator) + assert len(resources) == 2 + for resource in resources: + for manifest in resource: + assert isinstance(manifest, KubernetesManifest) + print_yaml(manifest.model_dump()) + + captured = capsys.readouterr() + snapshot.assert_match(captured.out, MANIFEST_YAML) diff --git a/tests/test_kpops_config.py b/tests/test_kpops_config.py index 0f35ddf37..e86096b21 100644 --- a/tests/test_kpops_config.py +++ b/tests/test_kpops_config.py @@ -1,14 +1,17 @@ import re from pathlib import Path +import pydantic import pytest -from pydantic import AnyHttpUrl, AnyUrl, TypeAdapter, ValidationError +from pydantic import AnyHttpUrl, AnyUrl, TypeAdapter +from kpops.api.exception import ValidationError from kpops.config import ( KafkaConnectConfig, KafkaRestConfig, KpopsConfig, SchemaRegistryConfig, + StrimziTopicConfig, get_config, set_config, ) @@ -43,7 +46,7 @@ def test_kpops_config_with_default_values(): def test_kpops_config_with_different_invalid_urls(): - with pytest.raises(ValidationError): + with pytest.raises(pydantic.ValidationError): KpopsConfig( kafka_brokers="http://broker:9092", kafka_connect=KafkaConnectConfig( @@ -51,7 +54,7 @@ def test_kpops_config_with_different_invalid_urls(): ), ) - with pytest.raises(ValidationError): + with pytest.raises(pydantic.ValidationError): KpopsConfig( kafka_brokers="http://broker:9092", kafka_rest=KafkaRestConfig( @@ -59,7 +62,7 @@ def test_kpops_config_with_different_invalid_urls(): ), ) - with pytest.raises(ValidationError): + with pytest.raises(pydantic.ValidationError): KpopsConfig( kafka_brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( @@ -69,6 +72,7 @@ def test_kpops_config_with_different_invalid_urls(): ) +@pytest.mark.usefixtures("clear_kpops_config") def test_global_kpops_config_not_initialized_error(): with pytest.raises( RuntimeError, @@ -90,3 +94,16 @@ def 
test_set_global_kpops_config(): ) set_config(config) assert get_config() == config + + +def test_strimzi_topic_config_valid(): + config = StrimziTopicConfig.model_validate({"label": {"key": "value"}}) + assert config.cluster_labels == ("key", "value") + + +def test_strimzi_topic_config_empty_label(): + with pytest.raises( + ValidationError, + match="'strimzi_topic.label' must contain a single key-value pair.", + ): + StrimziTopicConfig.model_validate({"label": {}})
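

The two tests above pin down the contract of the new StrimziTopicConfig: `label` must hold exactly one key-value pair, it is exposed as a `cluster_labels` tuple, and an empty mapping is rejected with kpops' own ValidationError. A minimal sketch of a model satisfying that contract (the field shape and validator behavior are inferred from the tests and the config.yaml example, not copied from the source):

import pydantic

from kpops.api.exception import ValidationError


class StrimziTopicConfig(pydantic.BaseModel):
    """Configuration for Strimzi Kafka Topics (sketch inferred from the tests)."""

    model_config = pydantic.ConfigDict(extra="forbid")

    # e.g. {"strimzi.io/cluster": "my-cluster"}, as set under strimzi_topic.label
    label: dict[str, str]

    @pydantic.model_validator(mode="after")
    def _check_single_label(self) -> "StrimziTopicConfig":
        # Assumption: kpops' ValidationError is not a ValueError subclass, so it
        # propagates out of pydantic validation unwrapped, as the test expects.
        if len(self.label) != 1:
            raise ValidationError(
                "'strimzi_topic.label' must contain a single key-value pair."
            )
        return self

    @property
    def cluster_labels(self) -> tuple[str, str]:
        """The single configured label as a (key, value) tuple."""
        return next(iter(self.label.items()))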
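

Every KafkaTopic document in the snapshots follows the same shape: apiVersion kafka.strimzi.io/v1beta2, the cluster label from strimzi_topic.label under metadata, and a spec of config/partitions/replicas. A helper reproducing that shape (the function name is hypothetical; the real implementation presumably returns a KubernetesManifest model rather than a plain dict):

from typing import Any


def manifest_kafka_topic(
    name: str,
    cluster_label: dict[str, str],
    partitions: int = 1,
    replicas: int = 1,
    config: dict[str, Any] | None = None,
) -> dict[str, Any]:
    # Mirrors the KafkaTopic resources rendered in the snapshots above.
    return {
        "apiVersion": "kafka.strimzi.io/v1beta2",
        "kind": "KafkaTopic",
        "metadata": {"labels": cluster_label, "name": name},
        "spec": {
            "config": config or {},
            "partitions": partitions,
            "replicas": replicas,
        },
    }


# manifest_kafka_topic("my-output-topic", {"strimzi.io/cluster": "my-cluster"})
# reproduces the first KafkaTopic document of the deploy snapshot; the error
# topic additionally sets config={"cleanup.policy": "compact,delete"}.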
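

The test_manifest_*_python_api tests also show how the new Python API is meant to be consumed: each of kpops.manifest_deploy, manifest_destroy, manifest_reset, and manifest_clean returns a lazy iterator that yields one batch of KubernetesManifest objects per pipeline component. A usage sketch lifted from those tests (the pipeline path is the test fixture and purely illustrative):

from pathlib import Path

import kpops
from kpops.utils.yaml import print_yaml

pipeline = Path("tests/pipeline/resources/manifest-pipeline/pipeline.yaml")

# Nothing is rendered until the generator is consumed.
for resource in kpops.manifest_deploy(pipeline, environment="development"):
    for manifest in resource:
        print_yaml(manifest.model_dump())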