From 7a20bdbc08bf84333ac473e22223c812aa2cb689 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 28 Aug 2023 21:04:46 +0300 Subject: [PATCH 01/96] refactor(PipelineConfig): migrate to pydantic v2 --- kpops/cli/pipeline_config.py | 44 +++++--- kpops/cli/settings_sources.py | 1 + kpops/utils/yaml_loading.py | 2 +- poetry.lock | 202 ++++++++++++++++++++++++++-------- pyproject.toml | 3 +- 5 files changed, 190 insertions(+), 62 deletions(-) create mode 100644 kpops/cli/settings_sources.py diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index f03f419aa..775dbfeb9 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -1,6 +1,7 @@ from pathlib import Path -from pydantic import BaseConfig, BaseSettings, Field +from pydantic import ConfigDict, Field, BaseConfig +from pydantic_settings import BaseSettings, SettingsConfigDict, PydanticBaseSettingsSource from pydantic.env_settings import SettingsSourceCallable from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig @@ -27,22 +28,27 @@ class PipelineConfig(BaseSettings): defaults_path: Path = Field( default=Path("."), - example="defaults", + examples=["defaults", "."], description="The path to the folder containing the defaults.yaml file and the environment defaults files. " "Paths can either be absolute or relative to `config.yaml`", ) environment: str = Field( default=..., - env=f"{ENV_PREFIX}ENVIRONMENT", + validation_alias="ENVIRONMENT", + examples=[
+ "development",
+ "production",
+ ], description="The environment you want to generate and deploy the pipeline to. " "Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", ) brokers: str = Field( default=..., - env=f"{ENV_PREFIX}KAFKA_BROKERS", + validation_alias="KAFKA_BROKERS", + examples=[
+ "broker1:9092,broker2:9092,broker3:9092",
+ ], description="The comma-separated Kafka broker addresses.", ) defaults_filename_prefix: str = Field( default="defaults", @@ -54,25 +60,31 @@ class PipelineConfig(BaseSettings): ) schema_registry_url: str | None = Field( default=None, - example="http://localhost:8081", + examples=[
+ "http://localhost:8081",
+ ],
+ validation_alias="SCHEMA_REGISTRY_URL", description="Address of the Schema Registry.", ) kafka_rest_host: str | None = Field( default=None, - env=f"{ENV_PREFIX}REST_PROXY_HOST", + validation_alias="REST_PROXY_HOST", + examples=[
+ "http://localhost:8082",
+ ], description="Address of the Kafka REST Proxy.", ) kafka_connect_host: str | None = Field( default=None, - env=f"{ENV_PREFIX}CONNECT_HOST", + validation_alias="CONNECT_HOST", + examples=[
+ "http://localhost:8083",
+ ], description="Address of Kafka Connect.", ) timeout: int = Field( default=300, - env=f"{ENV_PREFIX}TIMEOUT", + validation_alias="TIMEOUT", description="The timeout in seconds that specifies when actions like deletion or deploy time out.", ) create_namespace: bool = Field( default=False, @@ -89,10 +101,14 @@ class PipelineConfig(BaseSettings): ) retain_clean_jobs: bool = Field( default=False, - env=f"{ENV_PREFIX}RETAIN_CLEAN_JOBS", + validation_alias="RETAIN_CLEAN_JOBS", description="Whether to retain clean-up jobs in the cluster or uninstall them after completion.", ) + model_config = SettingsConfigDict( + env_prefix='my_prefix_' + ) + class Config(BaseConfig): config_path = 
Path("config.yaml") env_file = ".env" diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py new file mode 100644 index 000000000..dd9f0fead --- /dev/null +++ b/kpops/cli/settings_sources.py @@ -0,0 +1 @@ +"""Will hold the custom YAML settings source""" diff --git a/kpops/utils/yaml_loading.py b/kpops/utils/yaml_loading.py index cb9536200..adafbb884 100644 --- a/kpops/utils/yaml_loading.py +++ b/kpops/utils/yaml_loading.py @@ -20,7 +20,7 @@ def generate_hashkey( def load_yaml_file( file_path: Path, *, substitution: Mapping[str, Any] | None = None ) -> dict | list[dict]: - with open(file_path) as yaml_file: + with file_path.open() as yaml_file: return yaml.load(substitute(yaml_file.read(), substitution), Loader=yaml.Loader) diff --git a/poetry.lock b/poetry.lock index 339e37345..f17e19206 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,17 @@ files = [ {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, ] +[[package]] +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.7" +files = [ + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, +] + [[package]] name = "anyio" version = "3.6.2" @@ -882,56 +893,155 @@ files = [ [[package]] name = "pydantic" -version = "1.10.8" -description = "Data validation and settings management using python type hints" +version = "2.3.0" +description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = 
"sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-2.3.0-py3-none-any.whl", hash = "sha256:45b5e446c6dfaad9444819a293b921a40e1db1aa61ea08aede0522529ce90e81"}, + {file = "pydantic-2.3.0.tar.gz", hash = "sha256:1607cc106602284cd4a00882986570472f193fde9cb1259bceeaedb26aa79a6d"}, ] [package.dependencies] -python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.6.3" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.6.3" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.6.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1a0ddaa723c48af27d19f27f1c73bdc615c73686d763388c8683fe34ae777bad"}, + {file = "pydantic_core-2.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5cfde4fab34dd1e3a3f7f3db38182ab6c95e4ea91cf322242ee0be5c2f7e3d2f"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a7027bfc6b108e17c3383959485087d5942e87eb62bbac69829eae9bc1f7"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84e87c16f582f5c753b7f39a71bd6647255512191be2d2dbf49458c4ef024588"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:522a9c4a4d1924facce7270c84b5134c5cabcb01513213662a2e89cf28c1d309"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaafc776e5edc72b3cad1ccedb5fd869cc5c9a591f1213aa9eba31a781be9ac1"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a750a83b2728299ca12e003d73d1264ad0440f60f4fc9cee54acc489249b728"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e8b374ef41ad5c461efb7a140ce4730661aadf85958b5c6a3e9cf4e040ff4bb"}, + {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b594b64e8568cf09ee5c9501ede37066b9fc41d83d58f55b9952e32141256acd"}, + {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2a20c533cb80466c1d42a43a4521669ccad7cf2967830ac62c2c2f9cece63e7e"}, + {file = "pydantic_core-2.6.3-cp310-none-win32.whl", hash = "sha256:04fe5c0a43dec39aedba0ec9579001061d4653a9b53a1366b113aca4a3c05ca7"}, + {file = "pydantic_core-2.6.3-cp310-none-win_amd64.whl", hash = "sha256:6bf7d610ac8f0065a286002a23bcce241ea8248c71988bda538edcc90e0c39ad"}, + {file = "pydantic_core-2.6.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bcc1ad776fffe25ea5c187a028991c031a00ff92d012ca1cc4714087e575973"}, + {file = "pydantic_core-2.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:df14f6332834444b4a37685810216cc8fe1fe91f447332cd56294c984ecbff1c"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b7486d85293f7f0bbc39b34e1d8aa26210b450bbd3d245ec3d732864009819"}, + {file = 
"pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a892b5b1871b301ce20d40b037ffbe33d1407a39639c2b05356acfef5536d26a"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:883daa467865e5766931e07eb20f3e8152324f0adf52658f4d302242c12e2c32"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4eb77df2964b64ba190eee00b2312a1fd7a862af8918ec70fc2d6308f76ac64"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce8c84051fa292a5dc54018a40e2a1926fd17980a9422c973e3ebea017aa8da"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22134a4453bd59b7d1e895c455fe277af9d9d9fbbcb9dc3f4a97b8693e7e2c9b"}, + {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:02e1c385095efbd997311d85c6021d32369675c09bcbfff3b69d84e59dc103f6"}, + {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d79f1f2f7ebdb9b741296b69049ff44aedd95976bfee38eb4848820628a99b50"}, + {file = "pydantic_core-2.6.3-cp311-none-win32.whl", hash = "sha256:430ddd965ffd068dd70ef4e4d74f2c489c3a313adc28e829dd7262cc0d2dd1e8"}, + {file = "pydantic_core-2.6.3-cp311-none-win_amd64.whl", hash = "sha256:84f8bb34fe76c68c9d96b77c60cef093f5e660ef8e43a6cbfcd991017d375950"}, + {file = "pydantic_core-2.6.3-cp311-none-win_arm64.whl", hash = "sha256:5a2a3c9ef904dcdadb550eedf3291ec3f229431b0084666e2c2aa8ff99a103a2"}, + {file = "pydantic_core-2.6.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8421cf496e746cf8d6b677502ed9a0d1e4e956586cd8b221e1312e0841c002d5"}, + {file = "pydantic_core-2.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bb128c30cf1df0ab78166ded1ecf876620fb9aac84d2413e8ea1594b588c735d"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a822f630712817b6ecc09ccc378192ef5ff12e2c9bae97eb5968a6cdf3b862"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:240a015102a0c0cc8114f1cba6444499a8a4d0333e178bc504a5c2196defd456"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f90e5e3afb11268628c89f378f7a1ea3f2fe502a28af4192e30a6cdea1e7d5e"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:340e96c08de1069f3d022a85c2a8c63529fd88709468373b418f4cf2c949fb0e"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1480fa4682e8202b560dcdc9eeec1005f62a15742b813c88cdc01d44e85308e5"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f14546403c2a1d11a130b537dda28f07eb6c1805a43dae4617448074fd49c282"}, + {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a87c54e72aa2ef30189dc74427421e074ab4561cf2bf314589f6af5b37f45e6d"}, + {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f93255b3e4d64785554e544c1c76cd32f4a354fa79e2eeca5d16ac2e7fdd57aa"}, + {file = "pydantic_core-2.6.3-cp312-none-win32.whl", hash = "sha256:f70dc00a91311a1aea124e5f64569ea44c011b58433981313202c46bccbec0e1"}, + {file = "pydantic_core-2.6.3-cp312-none-win_amd64.whl", hash = "sha256:23470a23614c701b37252618e7851e595060a96a23016f9a084f3f92f5ed5881"}, + {file = 
"pydantic_core-2.6.3-cp312-none-win_arm64.whl", hash = "sha256:1ac1750df1b4339b543531ce793b8fd5c16660a95d13aecaab26b44ce11775e9"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a53e3195f134bde03620d87a7e2b2f2046e0e5a8195e66d0f244d6d5b2f6d31b"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:f2969e8f72c6236c51f91fbb79c33821d12a811e2a94b7aa59c65f8dbdfad34a"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:672174480a85386dd2e681cadd7d951471ad0bb028ed744c895f11f9d51b9ebe"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:002d0ea50e17ed982c2d65b480bd975fc41086a5a2f9c924ef8fc54419d1dea3"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ccc13afee44b9006a73d2046068d4df96dc5b333bf3509d9a06d1b42db6d8bf"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:439a0de139556745ae53f9cc9668c6c2053444af940d3ef3ecad95b079bc9987"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d63b7545d489422d417a0cae6f9898618669608750fc5e62156957e609e728a5"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b44c42edc07a50a081672e25dfe6022554b47f91e793066a7b601ca290f71e42"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c721bfc575d57305dd922e6a40a8fe3f762905851d694245807a351ad255c58"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e4a2cf8c4543f37f5dc881de6c190de08096c53986381daebb56a355be5dfe6"}, + {file = "pydantic_core-2.6.3-cp37-none-win32.whl", hash = "sha256:d9b4916b21931b08096efed090327f8fe78e09ae8f5ad44e07f5c72a7eedb51b"}, + {file = "pydantic_core-2.6.3-cp37-none-win_amd64.whl", hash = "sha256:a8acc9dedd304da161eb071cc7ff1326aa5b66aadec9622b2574ad3ffe225525"}, + {file = "pydantic_core-2.6.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5e9c068f36b9f396399d43bfb6defd4cc99c36215f6ff33ac8b9c14ba15bdf6b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e61eae9b31799c32c5f9b7be906be3380e699e74b2db26c227c50a5fc7988698"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85463560c67fc65cd86153a4975d0b720b6d7725cf7ee0b2d291288433fc21b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9616567800bdc83ce136e5847d41008a1d602213d024207b0ff6cab6753fe645"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e9b65a55bbabda7fccd3500192a79f6e474d8d36e78d1685496aad5f9dbd92c"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f468d520f47807d1eb5d27648393519655eadc578d5dd862d06873cce04c4d1b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9680dd23055dd874173a3a63a44e7f5a13885a4cfd7e84814be71be24fba83db"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a718d56c4d55efcfc63f680f207c9f19c8376e5a8a67773535e6f7e80e93170"}, + {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8ecbac050856eb6c3046dea655b39216597e373aa8e50e134c0e202f9c47efec"}, + {file = 
"pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:788be9844a6e5c4612b74512a76b2153f1877cd845410d756841f6c3420230eb"}, + {file = "pydantic_core-2.6.3-cp38-none-win32.whl", hash = "sha256:07a1aec07333bf5adebd8264047d3dc518563d92aca6f2f5b36f505132399efc"}, + {file = "pydantic_core-2.6.3-cp38-none-win_amd64.whl", hash = "sha256:621afe25cc2b3c4ba05fff53525156d5100eb35c6e5a7cf31d66cc9e1963e378"}, + {file = "pydantic_core-2.6.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:813aab5bfb19c98ae370952b6f7190f1e28e565909bfc219a0909db168783465"}, + {file = "pydantic_core-2.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:50555ba3cb58f9861b7a48c493636b996a617db1a72c18da4d7f16d7b1b9952b"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e20f8baedd7d987bd3f8005c146e6bcbda7cdeefc36fad50c66adb2dd2da48"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0a5d7edb76c1c57b95df719af703e796fc8e796447a1da939f97bfa8a918d60"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f06e21ad0b504658a3a9edd3d8530e8cea5723f6ea5d280e8db8efc625b47e49"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea053cefa008fda40f92aab937fb9f183cf8752e41dbc7bc68917884454c6362"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:171a4718860790f66d6c2eda1d95dd1edf64f864d2e9f9115840840cf5b5713f"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ed7ceca6aba5331ece96c0e328cd52f0dcf942b8895a1ed2642de50800b79d3"}, + {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:acafc4368b289a9f291e204d2c4c75908557d4f36bd3ae937914d4529bf62a76"}, + {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1aa712ba150d5105814e53cb141412217146fedc22621e9acff9236d77d2a5ef"}, + {file = "pydantic_core-2.6.3-cp39-none-win32.whl", hash = "sha256:44b4f937b992394a2e81a5c5ce716f3dcc1237281e81b80c748b2da6dd5cf29a"}, + {file = "pydantic_core-2.6.3-cp39-none-win_amd64.whl", hash = "sha256:9b33bf9658cb29ac1a517c11e865112316d09687d767d7a0e4a63d5c640d1b17"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d7050899026e708fb185e174c63ebc2c4ee7a0c17b0a96ebc50e1f76a231c057"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99faba727727b2e59129c59542284efebbddade4f0ae6a29c8b8d3e1f437beb7"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fa159b902d22b283b680ef52b532b29554ea2a7fc39bf354064751369e9dbd7"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:046af9cfb5384f3684eeb3f58a48698ddab8dd870b4b3f67f825353a14441418"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:930bfe73e665ebce3f0da2c6d64455098aaa67e1a00323c74dc752627879fc67"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85cc4d105747d2aa3c5cf3e37dac50141bff779545ba59a095f4a96b0a460e70"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b25afe9d5c4f60dcbbe2b277a79be114e2e65a16598db8abee2a2dcde24f162b"}, + {file = 
"pydantic_core-2.6.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e49ce7dc9f925e1fb010fc3d555250139df61fa6e5a0a95ce356329602c11ea9"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2dd50d6a1aef0426a1d0199190c6c43ec89812b1f409e7fe44cb0fbf6dfa733c"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6595b0d8c8711e8e1dc389d52648b923b809f68ac1c6f0baa525c6440aa0daa"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ef724a059396751aef71e847178d66ad7fc3fc969a1a40c29f5aac1aa5f8784"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3c8945a105f1589ce8a693753b908815e0748f6279959a4530f6742e1994dcb6"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c8c6660089a25d45333cb9db56bb9e347241a6d7509838dbbd1931d0e19dbc7f"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:692b4ff5c4e828a38716cfa92667661a39886e71136c97b7dac26edef18767f7"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f1a5d8f18877474c80b7711d870db0eeef9442691fcdb00adabfc97e183ee0b0"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3796a6152c545339d3b1652183e786df648ecdf7c4f9347e1d30e6750907f5bb"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b962700962f6e7a6bd77e5f37320cabac24b4c0f76afeac05e9f93cf0c620014"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ea80269077003eaa59723bac1d8bacd2cd15ae30456f2890811efc1e3d4413"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c0ebbebae71ed1e385f7dfd9b74c1cff09fed24a6df43d326dd7f12339ec34"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:252851b38bad3bfda47b104ffd077d4f9604a10cb06fe09d020016a25107bf98"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6656a0ae383d8cd7cc94e91de4e526407b3726049ce8d7939049cbfa426518c8"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9140ded382a5b04a1c030b593ed9bf3088243a0a8b7fa9f071a5736498c5483"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d38bbcef58220f9c81e42c255ef0bf99735d8f11edef69ab0b499da77105158a"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c9d469204abcca28926cbc28ce98f28e50e488767b084fb3fbdf21af11d3de26"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48c1ed8b02ffea4d5c9c220eda27af02b8149fe58526359b3c07eb391cb353a2"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2b1bfed698fa410ab81982f681f5b1996d3d994ae8073286515ac4d165c2e7"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf9d42a71a4d7a7c1f14f629e5c30eac451a6fc81827d2beefd57d014c006c4a"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4292ca56751aebbe63a84bbfc3b5717abb09b14d4b4442cc43fd7c49a1529efd"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:7dc2ce039c7290b4ef64334ec7e6ca6494de6eecc81e21cb4f73b9b39991408c"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:615a31b1629e12445c0e9fc8339b41aaa6cc60bd53bf802d5fe3d2c0cda2ae8d"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1fa1f6312fb84e8c281f32b39affe81984ccd484da6e9d65b3d18c202c666149"}, + {file = "pydantic_core-2.6.3.tar.gz", hash = "sha256:1508f37ba9e3ddc0189e6ff4e2228bd2d3c3a4641cbe8c07177162f76ed696c7"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.0.3" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_settings-2.0.3-py3-none-any.whl", hash = "sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625"}, + {file = "pydantic_settings-2.0.3.tar.gz", hash = "sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945"}, +] + +[package.dependencies] +pydantic = ">=2.0.1" +python-dotenv = ">=0.21.0" [[package]] name = "pyflakes" @@ -1661,13 +1771,13 @@ typer = ">=0.4.0,<=0.7.0" [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] @@ -1809,4 +1919,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "a97ca69785c7aeb0e04fad209cfd5694c54b5993faca02540679072c7179a875" +content-hash = "6637234a3c7d3bbcb0f7dcfc4b8ac27bc8b17498a7d0dd12419b5b8e02e62158" diff --git a/pyproject.toml b/pyproject.toml index a177064d0..5fb13367b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,8 @@ kpops = "kpops.cli.main:app" [tool.poetry.dependencies] python = "^3.10" -pydantic = { extras = ["dotenv"], version = "^1.10.8" } +pydantic = { extras = ["dotenv"], version = "^2.3.0" } +pydantic-settings = "^2.0.3" rich = "^12.4.4" PyYAML = "^6.0" typer = { extras = ["all"], version = "^0.6.1" } From 07a5e0fbd5930215ecdcef6efb894d6cdb249ed4 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 28 Aug 2023 21:08:17 +0300 Subject: [PATCH 02/96] refactor(helm): migrate to pydantic v2 --- kpops/component_handlers/helm_wrapper/model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index a8aaf8906..101046459 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -15,7 +15,7 @@ class HelmDiffConfig(BaseModel): ignore: set[str] = Field( default_factory=set, description="Set of keys that should not be checked.", - example="- name\n- imageTag", + examples=["- name\n- imageTag"], ) @@ -95,7 +95,7 @@ class HelmConfig(BaseModel): context: str | None = Field( default=None, description=describe_attr("context", __doc__), - 
example="dev-storage", + examples=["dev-storage"], ) debug: bool = Field( default=False, From cd91350b22239dfb85a9bc792fb36311aefff29b Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 28 Aug 2023 22:04:08 +0300 Subject: [PATCH 03/96] refactor(kafka connect): migrate to pydantic v2 --- kpops/component_handlers/kafka_connect/model.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 9feed448f..4fa2a565c 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -16,7 +16,12 @@ class KafkaConnectorConfig(BaseModel): """Settings specific to Kafka Connectors""" connector_class: str - name: str = Field(default=..., hidden_from_schema=True) + name: str = Field( + default=..., + json_schema_extra={ + "hidden_from_schema": True, + }, + ) class Config(DescConfig): extra = Extra.allow From c8dd3ababc2a848cab0161f3c3311a6deaa296dd Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 28 Aug 2023 22:04:51 +0300 Subject: [PATCH 04/96] refactor(base defaults component): migrate to pydantic v2 --- .../base_components/base_defaults_component.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 99dec42f2..0caab5150 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -43,26 +43,34 @@ class BaseDefaultsComponent(BaseModel): default=False, description=describe_attr("enrich", __doc__), exclude=True, - hidden_from_schema=True, + json_schema_extra={ + "hidden_from_schema": True, + }, ) config: PipelineConfig = Field( default=..., description=describe_attr("config", __doc__), exclude=True, - hidden_from_schema=True, + json_schema_extra={ + "hidden_from_schema": True, + }, ) handlers: ComponentHandlers = Field( default=..., description=describe_attr("handlers", __doc__), exclude=True, - hidden_from_schema=True, + json_schema_extra={ + "hidden_from_schema": True, + }, ) validate_: bool = Field( alias="validate", default=True, description=describe_attr("validate", __doc__), exclude=True, - hidden_from_schema=True, + json_schema_extra={ + "hidden_from_schema": True, + }, ) class Config(DescConfig): From c29d5aab4687cc65ca3625577095aada40a31c02 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 28 Aug 2023 22:05:02 +0300 Subject: [PATCH 05/96] refactor(gen schema): migrate to pydantic v2 --- kpops/utils/gen_schema.py | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index f66e575fd..fb7d72f88 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,10 +1,12 @@ import logging from enum import Enum -from typing import Annotated, Any, Literal, Sequence, Union +from typing import Annotated, Any, Final, Literal, Sequence, Union -from pydantic import BaseConfig, Field, schema, schema_json_of -from pydantic.fields import FieldInfo, ModelField -from pydantic.schema import SkipField +from pydantic import BaseConfig, Field, schema_json_of +from pydantic.v1 import schema +from pydantic.fields import FieldInfo +from pydantic.v1.fields import ModelField +from pydantic.v1.schema import SkipField from kpops.cli.pipeline_config import PipelineConfig from 
kpops.cli.registry import _find_classes @@ -21,8 +23,8 @@ class SchemaScope(str, Enum): # adapted from https://github.com/tiangolo/fastapi/issues/1378#issuecomment-764966955 -def field_schema(field: ModelField, **kwargs: Any) -> Any: - if field.field_info.extra.get("hidden_from_schema"): +def field_schema(field, **kwargs: Any) -> Any: + if field.field_info.json_schema_extra.get("hidden_from_schema"): raise SkipField(f"{field.name} field is being hidden") else: return original_field_schema(field, **kwargs) @@ -102,18 +104,15 @@ def gen_pipeline_schema( # re-assign component type as Literal to work as discriminator for component in components: - component.__fields__["type"] = ModelField( - name="type", + component.model_fields["type"] = FieldInfo( + serialization_alias="type", type_=Literal[component.type], # type: ignore - required=False, default=component.type, - final=True, - field_info=FieldInfo( - title="Component type", - description=describe_object(component.__doc__), - ), - model_config=BaseConfig, - class_validators=None, + # final=True, + title="Component type", + description=describe_object(component.__doc__), + # model_config=BaseConfig, + # class_validators=None, ) AnnotatedPipelineComponents = Annotated[ From a1084aee79da9a1b6182aa35717632a7dd8b7dec Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 29 Aug 2023 17:39:27 +0300 Subject: [PATCH 06/96] refactor(components): migrate to pydantic v2 --- kpops/components/base_components/models/from_section.py | 2 +- kpops/components/base_components/models/to_section.py | 2 +- kpops/components/streams_bootstrap/streams/model.py | 5 ++--- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index a3188a17b..96a7b8cca 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -35,7 +35,7 @@ class Config(DescConfig): extra = Extra.forbid use_enum_values = True - @root_validator + @root_validator(skip_on_failure=True) def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: """Ensure that cls.role is used correctly, assign type if needed""" if values["type"] == InputTopicTypes.INPUT and values["role"]: diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index cbad0987a..72be26eea 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -63,7 +63,7 @@ class Config(DescConfig): allow_population_by_field_name = True use_enum_values = True - @root_validator + @root_validator(skip_on_failure=True) def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: """Ensure that cls.role is used correctly, assign type if needed""" if values["type"] and values["role"]: diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index aabbe8237..0b7ed67de 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -75,8 +75,8 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: def dict( self, *, - include: None | AbstractSet[int | str] | Mapping[int | str, Any] = None, - exclude: None | AbstractSet[int | str] | Mapping[int | str, Any] = None, + include: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None, + exclude: set[int] | set[str] | 
dict[int, Any] | dict[str, Any] | None, by_alias: bool = False, skip_defaults: bool | None = None, exclude_unset: bool = False, @@ -96,7 +96,6 @@ def dict( include=include, exclude=exclude, by_alias=by_alias, - skip_defaults=skip_defaults, exclude_unset=exclude_unset, # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values exclude_defaults=True, From fe1147a76e878b2f8cb4c61ffd3df13f3b0aa92a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 10:17:58 +0300 Subject: [PATCH 07/96] refactor: config classes --- kpops/utils/pydantic.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 2eb0fa641..5d8ffb834 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -1,7 +1,7 @@ from typing import Any import humps -from pydantic import BaseConfig, BaseModel +from pydantic import BaseConfig, BaseModel, ConfigDict from kpops.utils.docstring import describe_object @@ -20,13 +20,16 @@ def to_dot(s: str) -> str: """Convert snake_case to dot.notation.""" return s.replace("_", ".") +def schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: + schema["description"] = describe_object(model.__doc__) -class CamelCaseConfig(BaseConfig): - alias_generator = to_camel - allow_population_by_field_name = True +class CamelCaseConfigModel(BaseModel): + model_config = ConfigDict( + alias_generator=to_camel, + populate_by_name=True, + ) - -class DescConfig(BaseConfig): - @classmethod - def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None: - schema["description"] = describe_object(model.__doc__) +class DescConfigModel(BaseModel): + model_config = ConfigDict( + json_schema_extra=schema_extra + ) From 808107bb30cc69b217fc607b84255ef953775f86 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 10:22:51 +0300 Subject: [PATCH 08/96] refactor: kafka connect models --- .../component_handlers/kafka_connect/model.py | 28 ++++++------------- 1 file changed, 9 insertions(+), 19 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 4fa2a565c..d18917a14 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -1,10 +1,10 @@ from enum import Enum from typing import Any, Literal -from pydantic import BaseConfig, BaseModel, Extra, Field, validator +from pydantic import BaseConfig, BaseModel, ConfigDict, Extra, Field, validator from typing_extensions import override -from kpops.utils.pydantic import CamelCaseConfig, DescConfig, to_dot +from kpops.utils.pydantic import CamelCaseConfigModel, to_dot class KafkaConnectorType(str, Enum): @@ -23,15 +23,11 @@ class KafkaConnectorConfig(BaseModel): }, ) - class Config(DescConfig): - extra = Extra.allow - alias_generator = to_dot - - @override - @classmethod - def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None: - super().schema_extra(schema, model) - schema["additionalProperties"] = {"type": "string"} + model_config = ConfigDict( + extra=Extra.allow, + alias_generator=to_dot, + json_schema_extra={"additional_properties": {"type": "string"}}, + ) @validator("connector_class") def connector_class_must_contain_dot(cls, connector_class: str) -> str: @@ -78,24 +74,18 @@ class KafkaConnectConfigErrorResponse(BaseModel): configs: list[KafkaConnectConfigDescription] -class 
KafkaConnectResetterConfig(BaseModel): +class KafkaConnectResetterConfig(CamelCaseConfigModel): brokers: str connector: str delete_consumer_group: bool | None = None offset_topic: str | None = None - class Config(CamelCaseConfig): - pass - -class KafkaConnectResetterValues(BaseModel): +class KafkaConnectResetterValues(CamelCaseConfigModel): connector_type: Literal["source", "sink"] config: KafkaConnectResetterConfig name_override: str - class Config(CamelCaseConfig): - pass - @override def dict(self, **_) -> dict[str, Any]: return super().dict(by_alias=True, exclude_none=True) From 9b033255c82a907fc8dfededcf39792f57a8710d Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 10:23:18 +0300 Subject: [PATCH 09/96] refactor(base_defaults_component): config --- .../base_components/base_defaults_component.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 0caab5150..54751dc5f 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -4,10 +4,10 @@ from collections.abc import Sequence from functools import cached_property from pathlib import Path -from typing import TypeVar +from typing import TypeVar, ClassVar import typer -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers @@ -15,7 +15,7 @@ from kpops.utils.dict_ops import update_nested from kpops.utils.docstring import describe_attr from kpops.utils.environment import ENV -from kpops.utils.pydantic import DescConfig, to_dash +from kpops.utils.pydantic import DescConfigModel, to_dash from kpops.utils.yaml_loading import load_yaml_file try: @@ -26,7 +26,7 @@ log = logging.getLogger("BaseDefaultsComponent") -class BaseDefaultsComponent(BaseModel): +class BaseDefaultsComponent(DescConfigModel): """Base for all components, handles defaults. 
Component defaults are usually provided in a yaml file called @@ -39,6 +39,11 @@ class BaseDefaultsComponent(BaseModel): :param validate: Whether to run custom validation on the component, defaults to True """ + model_config = ConfigDict( + arbitrary_types_allowed=True, + ignored_types=(cached_property, cached_classproperty), + ) + enrich: bool = Field( default=False, description=describe_attr("enrich", __doc__), @@ -73,10 +78,6 @@ class BaseDefaultsComponent(BaseModel): }, ) - class Config(DescConfig): - arbitrary_types_allowed = True - keep_untouched = (cached_property, cached_classproperty) - def __init__(self, **kwargs) -> None: if kwargs.get("enrich", True): kwargs = self.extend_with_defaults(**kwargs) From 0c292a80a03ba4b6e4742ad7b22fb56aaebda810 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 10:25:38 +0300 Subject: [PATCH 10/96] refactor(helm_handler): config --- .../component_handlers/helm_wrapper/model.py | 25 ++++++------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 101046459..53bd0cfe0 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -3,12 +3,12 @@ from typing import Iterator import yaml -from pydantic import BaseConfig, BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Extra, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.exception import ParseError from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class HelmDiffConfig(BaseModel): @@ -19,7 +19,7 @@ class HelmDiffConfig(BaseModel): ) -class RepoAuthFlags(BaseModel): +class RepoAuthFlags(DescConfigModel): """Authorisation-related flags for `helm repo` :param username: Username, defaults to None @@ -46,9 +46,6 @@ class RepoAuthFlags(BaseModel): default=False, description=describe_attr("insecure_skip_tls_verify", __doc__) ) - class Config(DescConfig): - pass - def to_command(self) -> list[str]: command: list[str] = [] if self.username: @@ -64,7 +61,7 @@ def to_command(self) -> list[str]: return command -class HelmRepoConfig(BaseModel): +class HelmRepoConfig(DescConfigModel): """Helm repository configuration :param repository_name: Name of the Helm repository @@ -80,11 +77,8 @@ class HelmRepoConfig(BaseModel): default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__) ) - class Config(DescConfig): - pass - -class HelmConfig(BaseModel): +class HelmConfig(DescConfigModel): """Global Helm configuration :param context: Name of kubeconfig context (`--kube-context`) @@ -107,10 +101,6 @@ class HelmConfig(BaseModel): description=describe_attr("api_version", __doc__), ) - class Config(DescConfig): - pass - - class HelmFlags(RepoAuthFlags): set_file: dict[str, Path] = Field(default_factory=dict) create_namespace: bool = False @@ -120,8 +110,9 @@ class HelmFlags(RepoAuthFlags): wait: bool = True wait_for_jobs: bool = False - class Config(BaseConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) @override def to_command(self) -> list[str]: From afa8ad3bac7092b0721a1eb75bbb37266a6cd052 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 10:28:43 +0300 Subject: [PATCH 11/96] refactor(component_handlers): config --- kpops/component_handlers/topic/model.py | 32 +++++++++++-------- .../base_defaults_component.py | 4 +-- 2 
files changed, 21 insertions(+), 15 deletions(-) diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index b58445f81..80544180b 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -1,6 +1,6 @@ from enum import Enum -from pydantic import BaseConfig, BaseModel, Extra +from pydantic import BaseModel, ConfigDict, Extra class TopicSpec(BaseModel): @@ -43,8 +43,9 @@ class KafkaTopicConfigSynonyms(BaseModel): value: str source: KafkaTopicConfigSource - class Config(BaseConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) class KafkaTopicConfig(BaseModel): @@ -53,15 +54,17 @@ class KafkaTopicConfig(BaseModel): value: str name: str - class Config(BaseConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) class TopicConfigResponse(BaseModel): data: list[KafkaTopicConfig] - class Config(BaseConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) class KafkaBrokerConfigSource(str, Enum): @@ -75,8 +78,9 @@ class KafkaBrokerConfigSynonyms(BaseModel): value: str | None source: KafkaBrokerConfigSource - class Config(BaseConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) class KafkaBrokerConfig(BaseModel): @@ -85,12 +89,14 @@ class KafkaBrokerConfig(BaseModel): value: str | None name: str - class Config(BaseConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) class BrokerConfigResponse(BaseModel): data: list[KafkaBrokerConfig] - class Config(BaseConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 54751dc5f..f7f0ca4ac 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -4,10 +4,10 @@ from collections.abc import Sequence from functools import cached_property from pathlib import Path -from typing import TypeVar, ClassVar +from typing import TypeVar import typer -from pydantic import BaseModel, ConfigDict, Field +from pydantic import ConfigDict, Field from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers From 120ba5af1b88bb60ecd0d939a8ad64e0b680d52a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 11:43:37 +0300 Subject: [PATCH 12/96] refactor: migrate components to pydantic v2 --- kpops/components/base_components/kafka_app.py | 11 ++++++----- kpops/components/base_components/kubernetes_app.py | 11 ++++++----- .../components/base_components/pipeline_component.py | 11 ++++++----- kpops/components/streams_bootstrap/producer/model.py | 3 --- kpops/components/streams_bootstrap/streams/model.py | 10 ++-------- 5 files changed, 20 insertions(+), 26 deletions(-) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index c6eb8be38..a131ade29 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -2,7 +2,7 @@ import logging -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Extra, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( @@ -15,12 +15,12 @@ KubernetesAppConfig, ) from kpops.utils.docstring import describe_attr -from 
kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel log = logging.getLogger("KafkaApp") -class KafkaStreamsConfig(BaseModel): +class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): """Kafka Streams config :param brokers: Brokers @@ -32,8 +32,9 @@ class KafkaStreamsConfig(BaseModel): default=None, description=describe_attr("schema_registry_url", __doc__) ) - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) class KafkaAppConfig(KubernetesAppConfig): diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index a18943d9e..34c07491d 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import Any -from pydantic import BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Extra, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -20,7 +20,7 @@ from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel log = logging.getLogger("KubernetesAppComponent") @@ -29,11 +29,12 @@ ) -class KubernetesAppConfig(BaseModel): +class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): """Settings specific to Kubernetes Apps""" - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) class KubernetesApp(PipelineComponent): diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index b60faff5e..b88c0ab3e 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -1,6 +1,6 @@ from __future__ import annotations -from pydantic import Extra, Field +from pydantic import ConfigDict, Extra, Field from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, @@ -16,10 +16,10 @@ ToSection, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel -class PipelineComponent(BaseDefaultsComponent): +class PipelineComponent(BaseDefaultsComponent, DescConfigModel): """Base class for all components :param name: Component name @@ -48,8 +48,9 @@ class PipelineComponent(BaseDefaultsComponent): description=describe_attr("to", __doc__), ) - class Config(DescConfig): - extra = Extra.allow + model_config=ConfigDict( + extra = Extra.allow, + ) def __init__(self, **kwargs) -> None: super().__init__(**kwargs) diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 3c4ae6e46..6d792367a 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -31,6 +31,3 @@ class ProducerValues(KafkaAppConfig): streams: ProducerStreamsConfig = Field( default=..., description=describe_attr("streams", __doc__) ) - - class Config(BaseConfig): - extra = Extra.allow diff --git 
a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 0b7ed67de..6fbdc4e5f 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -9,7 +9,7 @@ KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel class StreamsConfig(KafkaStreamsConfig): @@ -103,7 +103,7 @@ def dict( ) -class StreamsAppAutoScaling(BaseModel): +class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config :param enabled: Whether to enable auto-scaling using KEDA., defaults to False @@ -183,9 +183,6 @@ class StreamsAppAutoScaling(BaseModel): description=describe_attr("topics", __doc__), ) - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow - class StreamsAppConfig(KafkaAppConfig): """StreamsBoostrap app configurations. @@ -204,6 +201,3 @@ class StreamsAppConfig(KafkaAppConfig): default=None, description=describe_attr("autoscaling", __doc__), ) - - class Config(BaseConfig): - extra = Extra.allow From 11947d8e495c3230d62706be9b5e119e422b86bb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 13:54:04 +0300 Subject: [PATCH 13/96] refactor: Add YAML settings source --- kpops/cli/settings_sources.py | 37 ++++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index dd9f0fead..fabebac05 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -1 +1,36 @@ -"""Will hold the custom YAML settings source""" +from pathlib import Path +from typing import Any + +from pydantic.fields import FieldInfo +from pydantic_settings import PydanticBaseSettingsSource + +from kpops.utils.yaml_loading import load_yaml_file + + +class YamlConfigSettingsSource(PydanticBaseSettingsSource): + """Loads variables from a YAML file at the project's root.""" + + path_to_config = Path("config.yaml") + + def get_field_value( + self, field: FieldInfo, field_name: str, # noqa: + ) -> tuple[Any, str, bool]: + if self.path_to_config.exists() and isinstance((file_content_yaml := load_yaml_file(self.path_to_config)), dict): + field_value = file_content_yaml.get(field_name) + return field_value, field_name, False + return None, field_name, False + + def __call__(self) -> dict[str, Any]: + d: dict[str, Any] = {} + + for field_name, field in self.settings_cls.model_fields.items(): + field_value, field_key, value_is_complex = self.get_field_value( + field, field_name, + ) + field_value = self.prepare_field_value( + field_name, field, field_value, value_is_complex, + ) + if field_value is not None: + d[field_key] = field_value + + return d From 94bcd340c0ebe11d72d4cb0397e434ab0c160c49 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 13:54:53 +0300 Subject: [PATCH 14/96] refactor: use `model_config` instead of `Config` --- kpops/components/base_components/kafka_app.py | 2 +- .../base_components/kubernetes_app.py | 2 +- .../base_components/models/from_section.py | 19 +++++++++------- .../base_components/models/to_section.py | 22 ++++++++++--------- .../streams_bootstrap/producer/model.py | 2 +- .../streams_bootstrap/streams/model.py | 4 ++-- kpops/utils/gen_schema.py | 7 +++--- kpops/utils/pydantic.py | 2 +- 8 files changed, 32 insertions(+), 28 deletions(-) diff --git 
a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index a131ade29..4e5d4a711 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -2,7 +2,7 @@ import logging -from pydantic import BaseModel, ConfigDict, Extra, Field +from pydantic import ConfigDict, Extra, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 34c07491d..19a17c242 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import Any -from pydantic import BaseModel, ConfigDict, Extra, Field +from pydantic import ConfigDict, Extra, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 96a7b8cca..1d38e93bb 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -1,11 +1,11 @@ from enum import Enum from typing import Any, NewType -from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import BaseModel, ConfigDict, Extra, Field, root_validator from kpops.components.base_components.models import TopicName from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class InputTopicTypes(str, Enum): @@ -31,9 +31,10 @@ class FromTopic(BaseModel): ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) - class Config(DescConfig): - extra = Extra.forbid - use_enum_values = True + model_config = ConfigDict( + extra = Extra.forbid, + use_enum_values = True, + ) @root_validator(skip_on_failure=True) def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: @@ -46,7 +47,7 @@ def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: ComponentName = NewType("ComponentName", str) -class FromSection(BaseModel): +class FromSection(DescConfigModel): """Holds multiple input topics :param topics: Input topics @@ -62,5 +63,7 @@ class FromSection(BaseModel): description=describe_attr("components", __doc__), ) - class Config(DescConfig): - extra = Extra.forbid + model_config = ConfigDict( + extra = Extra.forbid, + use_enum_values = True, + ) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 72be26eea..0706d2f71 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -1,11 +1,11 @@ from enum import Enum from typing import Any -from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import ConfigDict, Extra, Field, root_validator from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class OutputTopicTypes(str, Enum): @@ -18,7 +18,7 @@ class OutputTopicTypes(str, Enum): ERROR = "error" -class TopicConfig(BaseModel): +class TopicConfig(DescConfigModel): """Configure an output topic 
:param type: Topic type @@ -58,10 +58,11 @@ class TopicConfig(BaseModel): ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) - class Config(DescConfig): - extra = Extra.forbid - allow_population_by_field_name = True - use_enum_values = True + model_config = ConfigDict( + extra = Extra.forbid, + use_enum_values = True, + populate_by_name = True, + ) @root_validator(skip_on_failure=True) def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: @@ -71,7 +72,7 @@ def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: return values -class ToSection(BaseModel): +class ToSection(DescConfigModel): """Holds multiple output topics :param topics: Output topics @@ -85,5 +86,6 @@ class ToSection(BaseModel): default={}, description=describe_attr("models", __doc__) ) - class Config(DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra = Extra.forbid, + ) diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 6d792367a..4be4ec295 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -1,4 +1,4 @@ -from pydantic import BaseConfig, Extra, Field +from pydantic import Field from kpops.components.base_components.kafka_app import ( KafkaAppConfig, diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 6fbdc4e5f..7ce6e1f57 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,6 +1,6 @@ -from typing import AbstractSet, Any, Mapping +from typing import Any -from pydantic import BaseConfig, BaseModel, Extra, Field +from pydantic import Field from typing_extensions import override from kpops.components.base_components.base_defaults_component import deduplicate diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index fb7d72f88..29105c523 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,11 +1,10 @@ import logging from enum import Enum -from typing import Annotated, Any, Final, Literal, Sequence, Union +from typing import Annotated, Any, Literal, Sequence, Union -from pydantic import BaseConfig, Field, schema_json_of -from pydantic.v1 import schema +from pydantic import Field, schema_json_of from pydantic.fields import FieldInfo -from pydantic.v1.fields import ModelField +from pydantic.v1 import schema from pydantic.v1.schema import SkipField from kpops.cli.pipeline_config import PipelineConfig diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 5d8ffb834..94e21bd94 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -1,7 +1,7 @@ from typing import Any import humps -from pydantic import BaseConfig, BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict from kpops.utils.docstring import describe_object From 0b69e793a40897d729c5599441cdc9c038afb5bb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 30 Aug 2023 21:55:56 +0300 Subject: [PATCH 15/96] fix: env vars --- kpops/cli/pipeline_config.py | 64 +++++++++++++----------------------- 1 file changed, 23 insertions(+), 41 deletions(-) diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index 775dbfeb9..9b8b416ed 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -1,11 +1,10 @@ from pathlib import Path -from pydantic import ConfigDict, Field, BaseConfig -from 
pydantic_settings import BaseSettings, SettingsConfigDict, PydanticBaseSettingsSource -from pydantic.env_settings import SettingsSourceCallable +from pydantic import AliasChoices, Field +from pydantic_settings import BaseSettings, PydanticBaseSettingsSource +from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig -from kpops.utils.yaml_loading import load_yaml_file ENV_PREFIX = "KPOPS_" @@ -34,7 +33,6 @@ class PipelineConfig(BaseSettings): ) environment: str = Field( default=..., - validation_alias="ENVIRONMENT", examples=[ "development", "production", @@ -44,7 +42,6 @@ class PipelineConfig(BaseSettings): ) brokers: str = Field( default=..., - validation_alias="KAFKA_BROKERS", examples=[ "broker1:9092,broker2:9092,broker3:9092", ], @@ -63,12 +60,11 @@ class PipelineConfig(BaseSettings): examples=[ "http://localhost:8081", ], - validation_alias="SCHEMA_REGISTRY_URL", description="Address of the Schema Registry.", ) kafka_rest_host: str | None = Field( default=None, - validation_alias="REST_PROXY_HOST", + validation_alias=AliasChoices(f"{ENV_PREFIX}rest_proxy_host", "kafka_rest_host"), examples=[ "http://localhost:8082", ], @@ -76,7 +72,7 @@ class PipelineConfig(BaseSettings): ) kafka_connect_host: str | None = Field( default=None, - validation_alias="CONNECT_HOST", + validation_alias=AliasChoices(f"{ENV_PREFIX}connect_host", "kafka_connect_host"), examples=[ "http://localhost:8083", ], @@ -84,7 +80,6 @@ class PipelineConfig(BaseSettings): ) timeout: int = Field( default=300, - validation_alias="TIMEOUT", description="The timeout in seconds that specifies when actions like deletion or deploy timeout.", ) create_namespace: bool = Field( @@ -101,36 +96,23 @@ class PipelineConfig(BaseSettings): ) retain_clean_jobs: bool = Field( default=False, - validation_alias="RETAIN_CLEAN_JOBS", description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.", ) - - model_config = SettingsConfigDict( - env_prefix='my_prefix_' - ) - - class Config(BaseConfig): - config_path = Path("config.yaml") - env_file = ".env" - env_file_encoding = "utf-8" - - @classmethod - def customise_sources( - cls, - init_settings: SettingsSourceCallable, - env_settings: SettingsSourceCallable, - file_secret_settings: SettingsSourceCallable, - ): - return ( - init_settings, - yaml_config_settings_source, - env_settings, - file_secret_settings, - ) - - -def yaml_config_settings_source(settings: PipelineConfig) -> dict | list: - path_to_config = settings.Config.config_path - if path_to_config.exists(): - return load_yaml_file(path_to_config) - return {} + + @classmethod + def settings_customise_sources( + cls, + settings_cls: type[BaseSettings], + init_settings: PydanticBaseSettingsSource, + env_settings: PydanticBaseSettingsSource, + dotenv_settings: PydanticBaseSettingsSource, + file_secret_settings: PydanticBaseSettingsSource, + ): + return ( + init_settings, + YamlConfigSettingsSource(settings_cls), + dotenv_settings, + env_settings, + file_secret_settings, + ) + From 5fe318c0df6f612aa9c1b0ad9c6120042c7245a4 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 31 Aug 2023 14:18:20 +0300 Subject: [PATCH 16/96] WIP --- kpops/cli/main.py | 3 ++- kpops/cli/settings_sources.py | 3 +++ kpops/component_handlers/kafka_connect/model.py | 12 +++++++----- kpops/component_handlers/topic/model.py | 6 +++--- .../base_components/base_defaults_component.py | 4 ++-- .../base_components/models/from_section.py | 1 - 
.../components/base_components/pipeline_component.py | 4 ++-- kpops/pipeline_generator/pipeline.py | 6 ++---- kpops/utils/gen_schema.py | 2 +- 9 files changed, 22 insertions(+), 19 deletions(-) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index f69da7421..3abccb10a 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -12,6 +12,7 @@ from kpops.cli.custom_formatter import CustomFormatter from kpops.cli.pipeline_config import ENV_PREFIX, PipelineConfig from kpops.cli.registry import Registry +from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( KafkaConnectHandler, @@ -195,7 +196,7 @@ def create_pipeline_config( config: Path, defaults: Optional[Path], verbose: bool ) -> PipelineConfig: setup_logging_level(verbose) - PipelineConfig.Config.config_path = config + YamlConfigSettingsSource.path_to_config = config if defaults: pipeline_config = PipelineConfig(defaults_path=defaults) else: diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index fabebac05..d096a76dd 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -20,6 +20,9 @@ def get_field_value( return field_value, field_name, False return None, field_name, False + def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any: + return value + def __call__(self) -> dict[str, Any]: d: dict[str, Any] = {} diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index d18917a14..ec865c4ad 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -4,7 +4,7 @@ from pydantic import BaseConfig, BaseModel, ConfigDict, Extra, Field, validator from typing_extensions import override -from kpops.utils.pydantic import CamelCaseConfigModel, to_dot +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot class KafkaConnectorType(str, Enum): @@ -12,7 +12,7 @@ class KafkaConnectorType(str, Enum): SOURCE = "source" -class KafkaConnectorConfig(BaseModel): +class KafkaConnectorConfig(DescConfigModel): """Settings specific to Kafka Connectors""" connector_class: str @@ -26,6 +26,7 @@ class KafkaConnectorConfig(BaseModel): model_config = ConfigDict( extra=Extra.allow, alias_generator=to_dot, + #TODO(sujuka99): combine with ``json_schema_extra`` of ``DescConfigModel`` json_schema_extra={"additional_properties": {"type": "string"}}, ) @@ -53,10 +54,11 @@ class KafkaConnectResponse(BaseModel): name: str config: dict[str, str] tasks: list[ConnectorTask] - type: str | None + type: str | None = None - class Config(BaseConfig): - extra = Extra.forbid + model_config = ConfigDict( + extra=Extra.forbid + ) class KafkaConnectConfigError(BaseModel): diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index 80544180b..699b83923 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -5,9 +5,9 @@ class TopicSpec(BaseModel): topic_name: str - partitions_count: int | None - replication_factor: int | None - configs: list[dict[str, str]] | None + partitions_count: int | None = None + replication_factor: int | None = None + configs: list[dict[str, str]] | None = None class TopicResponse(BaseModel):
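The `= None` defaults added to `TopicSpec` above are not cosmetic: in pydantic v2 an annotation like `int | None` no longer implies a default, so without one the field is required (it may merely be set to None explicitly). A sketch of the difference, using a hypothetical model that mirrors `TopicSpec`:

from pydantic import BaseModel, ValidationError

class SpecV2(BaseModel):
    topic_name: str
    partitions_count: int | None = None  # optional: explicit default
    replication_factor: int | None       # required in v2, despite allowing None

SpecV2(topic_name="t", replication_factor=None)  # ok

try:
    SpecV2(topic_name="t")
except ValidationError as exc:
    # replication_factor is reported as missing
    assert exc.errors()[0]["loc"] == ("replication_factor",)

diff --git a/kpops/components/base_components/base_defaults_component.py 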
b/kpops/components/base_components/base_defaults_component.py index f7f0ca4ac..eec843851 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -7,7 +7,7 @@ from typing import TypeVar import typer -from pydantic import ConfigDict, Field +from pydantic import AliasChoices, ConfigDict, Field from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers @@ -69,7 +69,7 @@ class BaseDefaultsComponent(DescConfigModel): }, ) validate_: bool = Field( - alias="validate", + validation_alias=AliasChoices("validate", "validate_"), default=True, description=describe_attr("validate", __doc__), exclude=True, diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 1d38e93bb..91f474610 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -65,5 +65,4 @@ class FromSection(DescConfigModel): model_config = ConfigDict( extra = Extra.forbid, - use_enum_values = True, ) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index b88c0ab3e..d62b10a5b 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -1,6 +1,6 @@ from __future__ import annotations -from pydantic import ConfigDict, Extra, Field +from pydantic import AliasChoices, ConfigDict, Extra, Field from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, @@ -39,7 +39,7 @@ class PipelineComponent(BaseDefaultsComponent, DescConfigModel): ) from_: FromSection | None = Field( default=None, - alias="from", + validation_alias=AliasChoices("from", "from_"), title="From", description=describe_attr("from_", __doc__), ) diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 71665e500..a2846e656 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -273,9 +273,7 @@ def __iter__(self) -> Iterator[PipelineComponent]: def __str__(self) -> str: return yaml.dump( - json.loads( # HACK: serialize types on Pydantic model export, which are not serialized by .dict(); e.g. 
pathlib.Path - self.components.json(exclude_none=True, by_alias=True) - ) + self.components.model_dump(by_alias=True, exclude_none=True, mode="json") ) def __len__(self) -> int: @@ -301,7 +299,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: substitution_hardcoded, ) substitution = generate_substitution( - json.loads(config.json()), existing_substitution=component_substitution + json.loads(config.model_dump_json()), existing_substitution=component_substitution ) return json.loads( diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 29105c523..94755e08f 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -104,7 +104,7 @@ def gen_pipeline_schema( # re-assign component type as Literal to work as discriminator for component in components: component.model_fields["type"] = FieldInfo( - serialization_alias="type", + alias="type", type_=Literal[component.type], # type: ignore default=component.type, # final=True, From c6436a52e506fbfd8508729c331796f458e01e6d Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Fri, 1 Sep 2023 09:35:41 +0300 Subject: [PATCH 17/96] fix: WIP model serialization --- .../components/base_components/pipeline_component.py | 1 + kpops/pipeline_generator/pipeline.py | 11 +++++------ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index d62b10a5b..7c0431912 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -39,6 +39,7 @@ class PipelineComponent(BaseDefaultsComponent, DescConfigModel): ) from_: FromSection | None = Field( default=None, + serialization_alias="from", validation_alias=AliasChoices("from", "from_"), title="From", description=describe_attr("from_", __doc__), diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index a2846e656..bd2c7aa4d 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -8,7 +8,7 @@ from pathlib import Path import yaml -from pydantic import BaseModel +from pydantic import BaseModel, SerializeAsAny from rich.console import Console from rich.syntax import Syntax @@ -34,7 +34,7 @@ class ValidationError(Exception): class PipelineComponents(BaseModel): """Stores the pipeline components""" - components: list[PipelineComponent] = [] + components: list[SerializeAsAny[PipelineComponent]] = [] @property def last(self) -> PipelineComponent: @@ -46,7 +46,7 @@ def find(self, component_name: str) -> PipelineComponent: return component raise ValueError(f"Component {component_name} not found") - def add(self, component: PipelineComponent) -> None: + def add(self, component: SerializeAsAny[PipelineComponent]) -> None: self._populate_component_name(component) self.components.append(component) @@ -195,7 +195,6 @@ def apply_component( **component_data, ) component = self.enrich_component(component) - # inflate & enrich components for inflated_component in component.inflate(): # TODO: recursively enriched_component = self.enrich_component(inflated_component) @@ -238,7 +237,7 @@ def enrich_component( env_component_as_dict = update_nested_pair( self.env_components_index.get(component.name, {}), # HACK: Pydantic .dict() doesn't create jsonable dict - json.loads(component.json(by_alias=True)), + component.model_dump(by_alias=True), ) # HACK: make sure component type is set for inflated components, because property is not 
serialized by Pydantic env_component_as_dict["type"] = component.type @@ -273,7 +272,7 @@ def __iter__(self) -> Iterator[PipelineComponent]: def __str__(self) -> str: return yaml.dump( - self.components.model_dump(by_alias=True, exclude_none=True, mode="json") + self.components.model_dump(by_alias=True, exclude_none=True) ) def __len__(self) -> int: From 922ee1d67bd700e1bf935eccd2d2c1dd421f5187 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Sun, 10 Sep 2023 13:21:16 +0300 Subject: [PATCH 18/96] fix: model serialization WIP --- .../resources/variables/config_env_vars.env | 25 ----------------- .../variables/temp_config_env_vars.csv | 1 + kpops/cli/pipeline_config.py | 11 +++++--- kpops/cli/settings_sources.py | 20 +++++++++---- .../component_handlers/helm_wrapper/model.py | 5 ++-- .../component_handlers/kafka_connect/model.py | 6 ++-- kpops/component_handlers/topic/model.py | 24 ++++++++-------- kpops/components/base_components/kafka_app.py | 4 +-- .../base_components/kubernetes_app.py | 4 +-- .../base_components/models/from_section.py | 6 ++-- .../base_components/models/to_section.py | 8 +++--- .../base_components/pipeline_component.py | 4 +-- .../streams_bootstrap/streams/model.py | 28 ++++--------------- kpops/pipeline_generator/pipeline.py | 10 +++---- kpops/utils/pydantic.py | 7 +++-- 15 files changed, 67 insertions(+), 96 deletions(-) create mode 100644 docs/docs/resources/variables/temp_config_env_vars.csv diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 308fb6334..143ec8ba4 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -4,28 +4,3 @@ # alternative to the settings in `config.yaml`. Variables marked as # required can instead be set in the pipeline config. # -# environment -# The environment you want to generate and deploy the pipeline to. -# Suffix your environment files with this value (e.g. -# defaults_development.yaml for environment=development). -KPOPS_ENVIRONMENT # No default value, required -# brokers -# The comma separated Kafka brokers address. -KPOPS_KAFKA_BROKERS # No default value, required -# schema_registry_url -# Address of the Schema Registry. -KPOPS_SCHEMA_REGISTRY_URL # No default value, not required -# kafka_rest_host -# Address of the Kafka REST Proxy. -KPOPS_REST_PROXY_HOST # No default value, not required -# kafka_connect_host -# Address of Kafka Connect. -KPOPS_CONNECT_HOST # No default value, not required -# timeout -# The timeout in seconds that specifies when actions like deletion or -# deploy timeout. -KPOPS_TIMEOUT=300 -# retain_clean_jobs -# Whether to retain clean up jobs in the cluster or uninstall the, -# after completion. 
-KPOPS_RETAIN_CLEAN_JOBS=False diff --git a/docs/docs/resources/variables/temp_config_env_vars.csv b/docs/docs/resources/variables/temp_config_env_vars.csv new file mode 100644 index 000000000..b40ee402d --- /dev/null +++ b/docs/docs/resources/variables/temp_config_env_vars.csv @@ -0,0 +1 @@ +Name,Default Value,Required,Description,Setting name diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index 9b8b416ed..c37c880f0 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -64,7 +64,9 @@ class PipelineConfig(BaseSettings): ) kafka_rest_host: str | None = Field( default=None, - validation_alias=AliasChoices(f"{ENV_PREFIX}rest_proxy_host", "kafka_rest_host"), + validation_alias=AliasChoices( + f"{ENV_PREFIX}rest_proxy_host", "kafka_rest_host" + ), examples=[ "http://localhost:8082", ], @@ -72,7 +74,9 @@ class PipelineConfig(BaseSettings): ) kafka_connect_host: str | None = Field( default=None, - validation_alias=AliasChoices(f"{ENV_PREFIX}connect_host", "kafka_connect_host"), + validation_alias=AliasChoices( + f"{ENV_PREFIX}connect_host", "kafka_connect_host" + ), examples=[ "http://localhost:8083", ], @@ -98,7 +102,7 @@ class PipelineConfig(BaseSettings): default=False, description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.", ) - + @classmethod def settings_customise_sources( cls, @@ -115,4 +119,3 @@ def settings_customise_sources( env_settings, file_secret_settings, ) - diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index d096a76dd..d07f080ef 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -13,14 +13,20 @@ class YamlConfigSettingsSource(PydanticBaseSettingsSource): path_to_config = Path("config.yaml") def get_field_value( - self, field: FieldInfo, field_name: str, # noqa: + self, + field: FieldInfo, + field_name: str, # noqa: ) -> tuple[Any, str, bool]: - if self.path_to_config.exists() and isinstance((file_content_yaml := load_yaml_file(self.path_to_config)), dict): + if self.path_to_config.exists() and isinstance( + (file_content_yaml := load_yaml_file(self.path_to_config)), dict + ): field_value = file_content_yaml.get(field_name) return field_value, field_name, False return None, field_name, False - def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any: + def prepare_field_value( + self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool + ) -> Any: return value def __call__(self) -> dict[str, Any]: @@ -28,10 +34,14 @@ def __call__(self) -> dict[str, Any]: for field_name, field in self.settings_cls.model_fields.items(): field_value, field_key, value_is_complex = self.get_field_value( - field, field_name, + field, + field_name, ) field_value = self.prepare_field_value( - field_name, field, field_value, value_is_complex, + field_name, + field, + field_value, + value_is_complex, ) if field_value is not None: d[field_key] = field_value diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 53bd0cfe0..9b09f7136 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -101,6 +101,7 @@ class HelmConfig(DescConfigModel): description=describe_attr("api_version", __doc__), ) + class HelmFlags(RepoAuthFlags): set_file: dict[str, Path] = Field(default_factory=dict) create_namespace: bool = False @@ -110,8 +111,8 @@ class HelmFlags(RepoAuthFlags): wait: bool = 
True wait_for_jobs: bool = False - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) @override diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index ec865c4ad..381ac993c 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -26,7 +26,7 @@ class KafkaConnectorConfig(DescConfigModel): model_config = ConfigDict( extra=Extra.allow, alias_generator=to_dot, - #TODO(sujuka99): combine with ``json_schema_extra`` of ``DescConfigModel`` + # TODO(sujuka99): combine with ``json_schema_extra`` of ``DescConfigModel`` json_schema_extra={"additional_properties": {"type": "string"}}, ) @@ -56,9 +56,7 @@ class KafkaConnectResponse(BaseModel): tasks: list[ConnectorTask] type: str | None = None - model_config = ConfigDict( - extra=Extra.forbid - ) + model_config = ConfigDict(extra=Extra.forbid) class KafkaConnectConfigError(BaseModel): diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index 699b83923..b551c9f23 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -43,8 +43,8 @@ class KafkaTopicConfigSynonyms(BaseModel): value: str source: KafkaTopicConfigSource - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) @@ -54,16 +54,16 @@ class KafkaTopicConfig(BaseModel): value: str name: str - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) class TopicConfigResponse(BaseModel): data: list[KafkaTopicConfig] - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) @@ -78,8 +78,8 @@ class KafkaBrokerConfigSynonyms(BaseModel): value: str | None source: KafkaBrokerConfigSource - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) @@ -89,14 +89,14 @@ class KafkaBrokerConfig(BaseModel): value: str | None name: str - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) class BrokerConfigResponse(BaseModel): data: list[KafkaBrokerConfig] - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 008e36223..c95b8817a 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -32,8 +32,8 @@ class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): default=None, description=describe_attr("schema_registry_url", __doc__) ) - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 19a17c242..e9578dd1c 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -32,8 +32,8 @@ class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): """Settings specific to Kubernetes Apps""" - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra="allow", )
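A related v2 detail that `KubernetesAppConfig` above already uses and that the next patch rolls out everywhere: `ConfigDict` accepts the plain strings "allow" / "ignore" / "forbid" for `extra`, while the v1 `Extra` enum still validates but is deprecated. A small sketch under that assumption, with hypothetical model and field names:

from pydantic import BaseModel, ConfigDict, ValidationError

class ForbidExtras(BaseModel):
    model_config = ConfigDict(extra="forbid")  # equivalent to extra=Extra.forbid
    name: str

try:
    ForbidExtras(name="ok", surplus=1)
except ValidationError as exc:
    assert exc.errors()[0]["type"] == "extra_forbidden"

diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 91f474610..1c79893f6 100644 --- 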
a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -32,8 +32,8 @@ class FromTopic(BaseModel): role: str | None = Field(default=None, description=describe_attr("role", __doc__)) model_config = ConfigDict( - extra = Extra.forbid, - use_enum_values = True, + extra=Extra.forbid, + use_enum_values=True, ) @root_validator(skip_on_failure=True) @@ -64,5 +64,5 @@ class FromSection(DescConfigModel): ) model_config = ConfigDict( - extra = Extra.forbid, + extra=Extra.forbid, ) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 0706d2f71..2b20ca68b 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -59,9 +59,9 @@ class TopicConfig(DescConfigModel): role: str | None = Field(default=None, description=describe_attr("role", __doc__)) model_config = ConfigDict( - extra = Extra.forbid, - use_enum_values = True, - populate_by_name = True, + extra=Extra.forbid, + use_enum_values=True, + populate_by_name=True, ) @root_validator(skip_on_failure=True) @@ -87,5 +87,5 @@ class ToSection(DescConfigModel): ) model_config = ConfigDict( - extra = Extra.forbid, + extra=Extra.forbid, ) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index 7c0431912..9e1f33c95 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -49,8 +49,8 @@ class PipelineComponent(BaseDefaultsComponent, DescConfigModel): description=describe_attr("to", __doc__), ) - model_config=ConfigDict( - extra = Extra.allow, + model_config = ConfigDict( + extra=Extra.allow, ) def __init__(self, **kwargs) -> None: diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 7ce6e1f57..d8932b312 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,6 +1,6 @@ from typing import Any -from pydantic import Field +from pydantic import ConfigDict, Field from typing_extensions import override from kpops.components.base_components.base_defaults_component import deduplicate @@ -72,34 +72,16 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: ) @override - def dict( + def model_dump( self, - *, - include: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None, - exclude: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None, - by_alias: bool = False, - skip_defaults: bool | None = None, - exclude_unset: bool = False, **kwargs, ) -> dict: - """Generate a dictionary representation of the model - - Optionally, specify which fields to include or exclude. 
- - :param include: Fields to include - :param include: Fields to exclude - :param by_alias: Use the fields' aliases in the dictionary - :param skip_defaults: Whether to skip defaults - :param exclude_unset: Whether to exclude unset fields - """ - return super().dict( - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, + breakpoint() + return super().model_dump( # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values exclude_defaults=True, exclude_none=True, + **kwargs, ) diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index bd2c7aa4d..2a2696e1a 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -16,6 +16,7 @@ from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp from kpops.utils.dict_ops import generate_substitution, update_nested_pair from kpops.utils.environment import ENV from kpops.utils.yaml_loading import load_yaml_file, substitute, substitute_nested @@ -46,7 +47,7 @@ def find(self, component_name: str) -> PipelineComponent: return component raise ValueError(f"Component {component_name} not found") - def add(self, component: SerializeAsAny[PipelineComponent]) -> None: + def add(self, component: PipelineComponent) -> None: self._populate_component_name(component) self.components.append(component) @@ -236,8 +237,7 @@ def enrich_component( component.validate_ = True env_component_as_dict = update_nested_pair( self.env_components_index.get(component.name, {}), - # HACK: Pydantic .dict() doesn't create jsonable dict - component.model_dump(by_alias=True), + component.model_dump(mode="json", by_alias=True), ) # HACK: make sure component type is set for inflated components, because property is not serialized by Pydantic env_component_as_dict["type"] = component.type @@ -272,7 +272,7 @@ def __iter__(self) -> Iterator[PipelineComponent]: def __str__(self) -> str: return yaml.dump( - self.components.model_dump(by_alias=True, exclude_none=True) + self.components.model_dump(mode="jsonb", by_alias=True, exclude_none=True) ) def __len__(self) -> int: @@ -298,7 +298,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: substitution_hardcoded, ) substitution = generate_substitution( - json.loads(config.model_dump_json()), existing_substitution=component_substitution + config.model_dump(mode="json"), existing_substitution=component_substitution ) return json.loads( diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 94e21bd94..1dc5063cd 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -20,16 +20,17 @@ def to_dot(s: str) -> str: """Convert snake_case to dot.notation.""" return s.replace("_", ".") + def schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: schema["description"] = describe_object(model.__doc__) + class CamelCaseConfigModel(BaseModel): model_config = ConfigDict( alias_generator=to_camel, populate_by_name=True, ) + class DescConfigModel(BaseModel): - model_config = ConfigDict( - json_schema_extra=schema_extra - ) + model_config = ConfigDict(json_schema_extra=schema_extra) From 5e7fec8f3e274af09f36111992ec27351cc534b1 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 11 Sep 2023 23:50:01 +0300 
Subject: [PATCH 19/96] fix: StreamsConfig serialization WIP --- .../streams_bootstrap/streams/model.py | 38 ++++++++++++------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index d8932b312..2e356c6de 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,6 +1,7 @@ from typing import Any -from pydantic import ConfigDict, Field +from pydantic import ConfigDict, Field, model_serializer +from pydantic.alias_generators import to_snake from typing_extensions import override from kpops.components.base_components.base_defaults_component import deduplicate @@ -71,18 +72,29 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: self.extra_input_topics.get(role, []) + topics ) - @override - def model_dump( - self, - **kwargs, - ) -> dict: - breakpoint() - return super().model_dump( - # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values - exclude_defaults=True, - exclude_none=True, - **kwargs, - ) + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler) -> dict[str, Any]: + result = handler(self) + extra_fields = set() + if self.model_extra is not None: + extra_fields = set(self.model_extra.keys()) + fields = extra_fields.union(self.model_fields_set) + filtered_result_extra_set = {k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields)} + unfiltered_result = {k: v for k, v in result.items() if result[k] or k in extra_fields} + return filtered_result_extra_set + + # @override + # def model_dump( + # self, + # **kwargs, + # ) -> dict: + # breakpoint() + # return super().model_dump( + # # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values + # exclude_defaults=True, + # exclude_none=True, + # **kwargs, + # ) class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): From b3344572083e32d978cb8a17010b29365c95cd56 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 11 Sep 2023 23:50:54 +0300 Subject: [PATCH 20/96] chore: formatting --- kpops/components/streams_bootstrap/streams/model.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 2e356c6de..7e4baa854 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -79,8 +79,12 @@ def serialize_model(self, handler) -> dict[str, Any]: if self.model_extra is not None: extra_fields = set(self.model_extra.keys()) fields = extra_fields.union(self.model_fields_set) - filtered_result_extra_set = {k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields)} - unfiltered_result = {k: v for k, v in result.items() if result[k] or k in extra_fields} + filtered_result_extra_set = { + k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) + } + unfiltered_result = { + k: v for k, v in result.items() if result[k] or k in extra_fields + } return filtered_result_extra_set # @override From dea8a78f8afddd87622dc54f90a0994d804bbb2f Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 12 Sep 2023 17:45:38 +0300 Subject: [PATCH 21/96] fix: model serialization WIP --- 
.../component_handlers/helm_wrapper/model.py | 2 +- .../component_handlers/kafka_connect/model.py | 4 ++-- kpops/component_handlers/topic/model.py | 12 +++++------ kpops/components/base_components/kafka_app.py | 2 +- .../base_components/kubernetes_app.py | 15 +++++++++++--- .../base_components/models/from_section.py | 15 +++++++------- .../base_components/models/to_section.py | 12 +++++------ .../base_components/pipeline_component.py | 5 ++--- .../streams_bootstrap/producer/model.py | 6 +++++- .../streams_bootstrap/streams/model.py | 20 ++++--------------- kpops/pipeline_generator/pipeline.py | 2 +- 11 files changed, 48 insertions(+), 47 deletions(-) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 9b09f7136..0d2836941 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -112,7 +112,7 @@ class HelmFlags(RepoAuthFlags): wait_for_jobs: bool = False model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) @override diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 381ac993c..e55dd51a6 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -24,7 +24,7 @@ class KafkaConnectorConfig(DescConfigModel): ) model_config = ConfigDict( - extra=Extra.allow, + extra="allow", alias_generator=to_dot, # TODO(sujuka99): combine with ``json_schema_extra`` of ``DescConfigModel`` json_schema_extra={"additional_properties": {"type": "string"}}, @@ -56,7 +56,7 @@ class KafkaConnectResponse(BaseModel): tasks: list[ConnectorTask] type: str | None = None - model_config = ConfigDict(extra=Extra.forbid) + model_config = ConfigDict(extra="forbid") class KafkaConnectConfigError(BaseModel): diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index b551c9f23..27ce813b7 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -44,7 +44,7 @@ class KafkaTopicConfigSynonyms(BaseModel): source: KafkaTopicConfigSource model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) @@ -55,7 +55,7 @@ class KafkaTopicConfig(BaseModel): name: str model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) @@ -63,7 +63,7 @@ class TopicConfigResponse(BaseModel): data: list[KafkaTopicConfig] model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) @@ -79,7 +79,7 @@ class KafkaBrokerConfigSynonyms(BaseModel): source: KafkaBrokerConfigSource model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) @@ -90,7 +90,7 @@ class KafkaBrokerConfig(BaseModel): name: str model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) @@ -98,5 +98,5 @@ class BrokerConfigResponse(BaseModel): data: list[KafkaBrokerConfig] model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 008e36223..c95b8817a 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -33,7 +33,7 @@ class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): ) model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index e9578dd1c..e9d512ccb 100644 --- 
a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -162,7 +162,7 @@ def to_helm_values(self) -> dict: :returns: The values to be used by Helm """ - return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True) + return self.app.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True) def print_helm_diff(self, stdout: str) -> None: """Print the diff of the last and current release of this component @@ -195,10 +195,19 @@ def validate_kubernetes_name(name: str) -> None: raise ValueError(f"The component name {name} is invalid for Kubernetes.") @override - def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]: + def model_dump(self, *, exclude=None, **kwargs) -> dict[str, Any]: # HACK: workaround for Pydantic to exclude cached properties during model export if exclude is None: exclude = set() exclude.add("helm") exclude.add("helm_diff") - return super().dict(exclude=exclude, **kwargs) + return super().model_dump(exclude=exclude, **kwargs) + + # @model_serializer(mode="wrap", when_used="always") + # def serialize_model(self, handler) -> dict[str, Any]: + # # breakpoint() + # result = handler(self) + # filtered_result = { + # k: v for k, v in result.items() if k != "helm" and k != "helm_diff" + # } + # return filtered_result
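What `to_helm_values` relies on after this change: `model_dump(by_alias=True, exclude_none=True, exclude_defaults=True)` emits only explicitly set values, so everything else falls back to the chart's own values.yaml. A sketch of that effect with an invented `ChartValues` model:

from pydantic import BaseModel, ConfigDict, Field

class ChartValues(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    replica_count: int = Field(default=1, alias="replicaCount")
    image_tag: str | None = Field(default=None, alias="imageTag")

values = ChartValues(replica_count=3)
# image_tag stays at its default and is dropped; replica_count was set and survives.
assert values.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True) == {
    "replicaCount": 3
}

diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 1c79893f6..ceab067eb 100644 --- 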
TopicName from kpops.utils.docstring import describe_attr @@ -59,15 +59,15 @@ class TopicConfig(DescConfigModel): role: str | None = Field(default=None, description=describe_attr("role", __doc__)) model_config = ConfigDict( - extra=Extra.forbid, + extra="forbid", use_enum_values=True, populate_by_name=True, ) - @root_validator(skip_on_failure=True) - def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: + @model_validator(mode="after") + def extra_topic_role(cls, values: Any) -> Any: """Ensure that cls.role is used correctly, assign type if needed""" - if values["type"] and values["role"]: + if values.type and values.role: raise ValueError("Define `role` only if `type` is undefined") return values @@ -87,5 +87,5 @@ class ToSection(DescConfigModel): ) model_config = ConfigDict( - extra=Extra.forbid, + extra="forbid", ) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index 9e1f33c95..db059febe 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -16,10 +16,9 @@ ToSection, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfigModel -class PipelineComponent(BaseDefaultsComponent, DescConfigModel): +class PipelineComponent(BaseDefaultsComponent): """Base class for all components :param name: Component name @@ -50,7 +49,7 @@ class PipelineComponent(BaseDefaultsComponent, DescConfigModel): ) model_config = ConfigDict( - extra=Extra.allow, + extra="allow", ) def __init__(self, **kwargs) -> None: diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 4be4ec295..7f4062e0a 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -1,4 +1,4 @@ -from pydantic import Field +from pydantic import ConfigDict, Field from kpops.components.base_components.kafka_app import ( KafkaAppConfig, @@ -31,3 +31,7 @@ class ProducerValues(KafkaAppConfig): streams: ProducerStreamsConfig = Field( default=..., description=describe_attr("streams", __doc__) ) + + model_config = ConfigDict( + extra="allow" + ) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 7e4baa854..dca2491a9 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -75,6 +75,8 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: @model_serializer(mode="wrap", when_used="always") def serialize_model(self, handler) -> dict[str, Any]: result = handler(self) + # if dict(result.items()).get("extraInputTopics"): + # breakpoint() extra_fields = set() if self.model_extra is not None: extra_fields = set(self.model_extra.keys()) @@ -82,24 +84,8 @@ def serialize_model(self, handler) -> dict[str, Any]: filtered_result_extra_set = { k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) } - unfiltered_result = { - k: v for k, v in result.items() if result[k] or k in extra_fields - } return filtered_result_extra_set - # @override - # def model_dump( - # self, - # **kwargs, - # ) -> dict: - # breakpoint() - # return super().model_dump( - # # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values - # exclude_defaults=True, - # exclude_none=True, - # 
**kwargs, - # ) - class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config @@ -180,6 +166,7 @@ class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): default=[], description=describe_attr("topics", __doc__), ) + model_config = ConfigDict(extra="allow") class StreamsAppConfig(KafkaAppConfig): @@ -199,3 +186,4 @@ class StreamsAppConfig(KafkaAppConfig): default=None, description=describe_attr("autoscaling", __doc__), ) + model_config = ConfigDict(extra="allow") diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index 2a2696e1a..d995f7deb 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -272,7 +272,7 @@ def __iter__(self) -> Iterator[PipelineComponent]: def __str__(self) -> str: return yaml.dump( - self.components.model_dump(mode="jsonb", by_alias=True, exclude_none=True) + self.components.model_dump(mode="json", by_alias=True, exclude_none=True) ) def __len__(self) -> int: From 3576aa7e9ca94324158b8e63891c8d126b0ea3cb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 3 Oct 2023 17:01:50 +0300 Subject: [PATCH 22/96] tests: check if serialization works --- .../base_components/models/from_section.py | 2 +- tests/defaults.yaml | 2 + tests/test_model_serialization.py | 110 ++++++++++++++++++ 3 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 tests/defaults.yaml create mode 100644 tests/test_model_serialization.py diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index ceab067eb..f181db359 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -1,7 +1,7 @@ from enum import Enum from typing import Any, NewType -from pydantic import BaseModel, ConfigDict, Extra, Field, model_validator, root_validator +from pydantic import ConfigDict, Extra, Field, model_validator from kpops.components.base_components.models import TopicName from kpops.utils.docstring import describe_attr diff --git a/tests/defaults.yaml b/tests/defaults.yaml new file mode 100644 index 000000000..09fd863b3 --- /dev/null +++ b/tests/defaults.yaml @@ -0,0 +1,2 @@ +streams-app: + namespace: "namespace" diff --git a/tests/test_model_serialization.py b/tests/test_model_serialization.py new file mode 100644 index 000000000..61e13bfeb --- /dev/null +++ b/tests/test_model_serialization.py @@ -0,0 +1,110 @@ +from pathlib import Path +from unittest.mock import MagicMock +import pytest +import yaml +from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig +from kpops.component_handlers import ComponentHandlers +from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig +from kpops.components.streams_bootstrap.streams.model import StreamsConfig, StreamsAppConfig +from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp + +@pytest.fixture() +def streams_config() -> StreamsConfig: + return StreamsConfig( + brokers="", + extra_input_patterns={ + "eip1k": "eip1v", + "eip2k": "eip2v", + }, + extra_input_topics={ + "eit1k": ["eit1v"], + "eit2k": ["eit2v"], + }, + ) + +@pytest.fixture() +def streams_app_config(streams_config: StreamsConfig) -> StreamsAppConfig: + return StreamsAppConfig(streams=streams_config) + +STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" +STREAMS_APP_CLEAN_NAME = "test-streams-app-with-long-name-0123456789abcd-clean" 
+DEFAULTS_PATH: Path = Path(__file__).parent + +@pytest.fixture +def handlers() -> ComponentHandlers: + return ComponentHandlers( + schema_handler=MagicMock(), + connector_handler=MagicMock(), + topic_handler=MagicMock(), + ) + +@pytest.fixture +def config() -> PipelineConfig: + return PipelineConfig( + defaults_path=DEFAULTS_PATH, + environment="development", + topic_name_config=TopicNameConfig( + default_error_topic_name="${component_type}-error-topic", + default_output_topic_name="${component_type}-output-topic", + ), + helm_diff_config=HelmDiffConfig(), + ) + +@pytest.fixture() +def streams_app(streams_app_config: StreamsAppConfig, config: PipelineConfig, handlers: ComponentHandlers) -> StreamsApp: + return StreamsApp( + name=STREAMS_APP_NAME, + namespace="namespace", + config=config, + handlers=handlers, + app=streams_app_config, + ) + +def test_streams_config(streams_config: StreamsConfig): + assert streams_config.model_dump() == { + "brokers": "", + "extra_input_patterns": { + "eip1k": "eip1v", + "eip2k": "eip2v", + }, + "extra_input_topics": { + "eit1k": ["eit1v"], + "eit2k": ["eit2v"], + }, + } + +def test_streams_app_config(streams_app_config: StreamsAppConfig): + assert streams_app_config.model_dump() == { + "autoscaling": None, + "name_override": None, + "streams": { + "brokers": "", + "extra_input_patterns": { + "eip1k": "eip1v", + "eip2k": "eip2v", + }, + "extra_input_topics": { + "eit1k": ["eit1v"], + "eit2k": ["eit2v"], + }, + } + } + +def test_streams_app(streams_app): + assert streams_app.model_dump() == { + "app": { + "autoscaling": None, + "name_override": None, + "streams": { + "brokers": "", + "extra_input_patterns": { + "eip1k": "eip1v", + "eip2k": "eip2v", + }, + "extra_input_topics": { + "eit1k": ["eit1v"], + "eit2k": ["eit2v"], + }, + } + }, + } From 5a069e3cdd4a3ed801cdf6336d2640c7daf959c0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 09:45:32 +0300 Subject: [PATCH 23/96] fix: hardcode checks for extra topics --- .../streams_bootstrap/streams/model.py | 42 ++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index dca2491a9..e5ad8f1f0 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -3,6 +3,7 @@ from pydantic import ConfigDict, Field, model_serializer from pydantic.alias_generators import to_snake from typing_extensions import override +from pydantic_core.core_schema import SerializationInfo from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( @@ -73,7 +74,40 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: ) @model_serializer(mode="wrap", when_used="always") - def serialize_model(self, handler) -> dict[str, Any]: + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + # class _SerInfoClone: + # def __init__( + # self, + # include, + # exclude, + # mode, + # by_alias, + # exclude_unset, + # exclude_default, + # exclude_none, + # round_trip, + # ): + # self.include = include + # self.exclude = exclude + # self.mode = mode + # self.by_alias = by_alias + # self.exclude_unset = exclude_unset + # self.exclude_default = exclude_default + # self.exclude_none = exclude_none + # self.round_trip = round_trip + + # info2: _SerInfoClone = _SerInfoClone( + # info.include, + # info.exclude, + # info.mode, + # 
info.by_alias, + # True, + # True, + # True, + # info.round_trip, + # ) + # breakpoint() + # return handler(self, info2) result = handler(self) # if dict(result.items()).get("extraInputTopics"): # breakpoint() @@ -81,6 +115,12 @@ def serialize_model(self, handler) -> dict[str, Any]: if self.model_extra is not None: extra_fields = set(self.model_extra.keys()) fields = extra_fields.union(self.model_fields_set) + if self.extra_input_topics: + fields.add("extra_input_topics") + if self.extra_input_patterns: + fields.add("extra_input_patterns") + if self.extra_output_topics: + fields.add("extra_output_topics") filtered_result_extra_set = { k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) } From 332a4a71165e903a5342d5d797a2884711e63699 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 10:25:48 +0300 Subject: [PATCH 24/96] WIP --- kpops/components/streams_bootstrap/streams/model.py | 1 + 1 file changed, 1 insertion(+) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index e5ad8f1f0..b6880334e 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -114,6 +114,7 @@ def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: extra_fields = set() if self.model_extra is not None: extra_fields = set(self.model_extra.keys()) + # fields = extra_fields.union(self.model_fields_set) fields = extra_fields.union(self.model_fields_set) if self.extra_input_topics: fields.add("extra_input_topics") From b4f6aa0efdffcd6cbdfeef23143c3b9aaff189f6 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 11:51:24 +0300 Subject: [PATCH 25/96] refactor: update connector app validator --- kpops/component_handlers/kafka_connect/model.py | 12 ++++++------ .../components/base_components/kafka_connector.py | 14 ++++++++------ 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index e55dd51a6..4ca11f6d5 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -1,7 +1,7 @@ from enum import Enum from typing import Any, Literal -from pydantic import BaseConfig, BaseModel, ConfigDict, Extra, Field, validator +from pydantic import BaseConfig, BaseModel, ConfigDict, Extra, Field, validator, field_validator from typing_extensions import override from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot @@ -16,8 +16,8 @@ class KafkaConnectorConfig(DescConfigModel): """Settings specific to Kafka Connectors""" connector_class: str - name: str = Field( - default=..., + name: str | None = Field( + default=None, json_schema_extra={ "hidden_from_schema": True, }, @@ -30,7 +30,7 @@ class KafkaConnectorConfig(DescConfigModel): json_schema_extra={"additional_properties": {"type": "string"}}, ) - @validator("connector_class") + @field_validator("connector_class") def connector_class_must_contain_dot(cls, connector_class: str) -> str: if "." 
not in connector_class: raise ValueError(f"Invalid connector class {connector_class}") @@ -41,8 +41,8 @@ def class_name(self) -> str: return self.connector_class.split(".")[-1] @override - def dict(self, **_) -> dict[str, Any]: - return super().dict(by_alias=True, exclude_none=True) + def model_dump(self, **_) -> dict[str, Any]: + return super().model_dump(by_alias=True, exclude_none=True) class ConnectorTask(BaseModel): diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index c97cc987f..97dfd9fd3 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import Any, NoReturn -from pydantic import Field, validator +from pydantic import Field, field_validator, FieldValidationInfo from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -71,19 +71,21 @@ class KafkaConnector(PipelineComponent, ABC): description=describe_attr("resetter_values", __doc__), ) - @validator("app", pre=True) + @field_validator("app") + @classmethod def connector_config_should_have_component_name( cls, app: KafkaConnectorConfig | dict[str, str], - values: dict[str, Any], - ) -> dict[str, str]: + info: FieldValidationInfo, + ) -> Any: if isinstance(app, KafkaConnectorConfig): - app = app.dict() - component_name = values["prefix"] + values["name"] + app = app.model_dump() + component_name = info.data["prefix"] + info.data["name"] connector_name: str | None = app.get("name") if connector_name is not None and connector_name != component_name: raise ValueError("Connector name should be the same as component name") app["name"] = component_name + app = KafkaConnectorConfig(**app) return app @cached_property From 789b1ea86853b182d81c85c1c8e0ce9086eaf50e Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 13:51:09 +0300 Subject: [PATCH 26/96] chore: update deprecated functions to current ones --- .../kafka_connect/connect_wrapper.py | 11 +++++++---- .../kafka_connect/kafka_connect_handler.py | 6 +++--- kpops/component_handlers/kafka_connect/model.py | 4 ++-- kpops/component_handlers/topic/handler.py | 4 ++-- kpops/component_handlers/topic/proxy_wrapper.py | 2 +- kpops/components/base_components/kafka_connector.py | 6 +++--- kpops/components/base_components/kubernetes_app.py | 6 +++--- .../pipeline-with-env-defaults/defaults.yaml | 1 + 8 files changed, 22 insertions(+), 18 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 9a3dd307e..7dff05c2d 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -46,7 +46,7 @@ def create_connector( :param connector_config: The config of the connector :return: The current connector info if successful """ - config_json = connector_config.dict() + config_json = connector_config.model_dump() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( url=f"{self._host}/connectors", headers=HEADERS, json=connect_data @@ -63,13 +63,16 @@ def create_connector( self.create_connector(connector_config) raise KafkaConnectError(response) - def get_connector(self, connector_name: str) -> KafkaConnectResponse: + def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: """ Get information about the 
connector. API Reference:
        https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name)

        :param connector_name: Name of the created connector
        :return: Information about the connector
        """
+        if connector_name is None:
+            msg = "Connector name not set"
+            raise Exception(msg)
        response = httpx.get(
            url=f"{self._host}/connectors/{connector_name}", headers=HEADERS
        )
@@ -97,7 +100,7 @@ def update_connector_config(
        :return: Information about the connector after the change has been made.
        """
        connector_name = connector_config.name
-        config_json = connector_config.dict()
+        config_json = connector_config.model_dump()
        response = httpx.put(
            url=f"{self._host}/connectors/{connector_name}/config",
            headers=HEADERS,
@@ -131,7 +134,7 @@ def validate_connector_config(
        response = httpx.put(
            url=f"{self._host}/connector-plugins/{connector_config.class_name}/config/validate",
            headers=HEADERS,
-            json=connector_config.dict(),
+            json=connector_config.model_dump(),
        )

        if response.status_code == httpx.codes.OK:
diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py
index 14f5af076..4b8ccca47 100644
--- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py
+++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py
@@ -95,14 +95,14 @@ def __dry_run_connector_creation(
            connector = self._connect_wrapper.get_connector(connector_name)
            log.info(f"Connector Creation: connector {connector_name} already exists.")
-            if diff := render_diff(connector.config, connector_config.dict()):
+            if diff := render_diff(connector.config, connector_config.model_dump()):
                log.info(f"Updating config:\n{diff}")
-            log.debug(connector_config.dict())
+            log.debug(connector_config.model_dump())
            log.debug(f"PUT /connectors/{connector_name}/config HTTP/1.1")
            log.debug(f"HOST: {self._connect_wrapper.host}")
        except ConnectorNotFoundException:
-            diff = render_diff({}, connector_config.dict())
+            diff = render_diff({}, connector_config.model_dump())
            log.info(
                f"Connector Creation: connector {connector_name} does not exist.
Creating connector with config:\n{diff}" ) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 4ca11f6d5..5e9bfcfd1 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -87,5 +87,5 @@ class KafkaConnectResetterValues(CamelCaseConfigModel): name_override: str @override - def dict(self, **_) -> dict[str, Any]: - return super().dict(by_alias=True, exclude_none=True) + def model_dump(self, **_) -> dict[str, Any]: + return super().model_dump(by_alias=True, exclude_none=True) diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 1df0d106a..386b8b512 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -131,7 +131,7 @@ def __dry_run_topic_creation( log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1") log.debug(f"Host: {self.proxy_wrapper.host}") log.debug(HEADERS) - log.debug(topic_spec.dict()) + log.debug(topic_spec.model_dump()) @staticmethod def __check_partition_count( @@ -205,7 +205,7 @@ def __prepare_body(cls, topic_name: str, topic_config: TopicConfig) -> TopicSpec :param topic_config: The topic config :return: """ - topic_spec_json: dict = topic_config.dict( + topic_spec_json: dict = topic_config.model_dump( include={ "partitions_count": True, "replication_factor": True, diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index af7914379..34ed3011e 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -64,7 +64,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: response = httpx.post( url=f"{self._host}/v3/clusters/{self.cluster_id}/topics", headers=HEADERS, - json=topic_spec.dict(exclude_none=True), + json=topic_spec.model_dump(exclude_none=True), ) if response.status_code == httpx.codes.CREATED: log.info(f"Topic {topic_spec.topic_name} created.") diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 97dfd9fd3..cb82c4c63 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -121,7 +121,7 @@ def kafka_connect_resetter_chart(self) -> str: def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands""" return HelmFlags( - **self.repo_config.repo_auth_flags.dict(), + **self.repo_config.repo_auth_flags.model_dump(), version=self.version, create_namespace=self.config.create_namespace, ) @@ -130,7 +130,7 @@ def helm_flags(self) -> HelmFlags: def template_flags(self) -> HelmTemplateFlags: """Return flags for Helm template command""" return HelmTemplateFlags( - **self.helm_flags.dict(), + **self.helm_flags.model_dump(), api_version=self.config.helm_config.api_version, ) @@ -273,7 +273,7 @@ def _get_kafka_connect_resetter_values( ), connector_type=connector_type.value, name_override=connector_name, - ).dict(), + ).model_dump(), **self.resetter_values, } diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index e9d512ccb..bec6ec79d 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -104,7 +104,7 @@ def helm_chart(self) -> str: def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands""" return HelmFlags( - 
**self.repo_config.repo_auth_flags.dict(), + **self.repo_config.repo_auth_flags.model_dump(), version=self.version, create_namespace=self.config.create_namespace, ) @@ -113,7 +113,7 @@ def helm_flags(self) -> HelmFlags: def template_flags(self) -> HelmTemplateFlags: """Return flags for Helm template command""" return HelmTemplateFlags( - **self.helm_flags.dict(), + **self.helm_flags.model_dump(), api_version=self.config.helm_config.api_version, ) @@ -131,7 +131,7 @@ def template(self) -> None: @property def deploy_flags(self) -> HelmUpgradeInstallFlags: """Return flags for Helm upgrade install command""" - return HelmUpgradeInstallFlags(**self.helm_flags.dict()) + return HelmUpgradeInstallFlags(**self.helm_flags.model_dump()) @override def deploy(self, dry_run: bool) -> None: diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml index 2564e0012..a1243a3c8 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml @@ -25,6 +25,7 @@ streams-app: # inherits from kafka-app kafka-connector: name: "sink-connector" + namespace: "example-namespace" app: batch.size: "2000" behavior.on.malformed.documents: "warn" From 4cd0544ddfe6dffb852b918e6c17f68218d76838 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 15:41:19 +0300 Subject: [PATCH 27/96] chore: lint --- hooks/gen_docs/gen_docs_components.py | 4 +- hooks/gen_docs/gen_docs_env_vars.py | 26 +- .../component_handlers/helm_wrapper/model.py | 2 +- .../component_handlers/kafka_connect/model.py | 2 +- kpops/component_handlers/topic/model.py | 2 +- kpops/components/base_components/kafka_app.py | 2 +- .../base_components/kafka_connector.py | 2 +- .../base_components/kubernetes_app.py | 8 +- .../base_components/models/from_section.py | 2 +- .../base_components/models/to_section.py | 2 +- .../base_components/pipeline_component.py | 2 +- .../streams_bootstrap/producer/model.py | 4 +- .../streams_bootstrap/streams/model.py | 1 - kpops/pipeline_generator/pipeline.py | 1 - poetry.lock | 226 +++++++++--------- 15 files changed, 141 insertions(+), 145 deletions(-) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 6c0c28693..92d640828 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -6,6 +6,7 @@ from typing import NamedTuple, cast import yaml +from pydantic import FieldInfo from hooks import PATH_ROOT from kpops.cli.registry import _find_classes @@ -40,11 +41,12 @@ ).type for component in KPOPS_COMPONENTS } + KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name for field_name, model in component.__fields__.items() - if not model.field_info.exclude + if not model.exclude ] for component in KPOPS_COMPONENTS } diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 436ba19de..5bd218643 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -250,24 +250,20 @@ def __fill_csv_pipeline_config(target: Path) -> None: """ # NOTE: This does not see nested fields, hence if there are env vars in a class like # TopicConfig(), they wil not be listed. Possible fix with recursion. 
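Editor's aside before the hunk continues: the change below swaps pydantic v1's `PipelineConfig.__fields__` plus `field_info` indirection for v2's `model_fields` mapping, whose values are `FieldInfo` objects. A short sketch of the v2 introspection API; `ToyModel` is illustrative, not a kpops class:

```python
# Editor's sketch of pydantic v2 field introspection, which the docs
# generator below adopts.
from pydantic import BaseModel, Field
from pydantic_core import PydanticUndefined


class ToyModel(BaseModel):
    timeout: int = Field(default=300, description="Timeout in seconds.")
    brokers: str = Field(default=..., description="Kafka brokers address.")


for name, info in ToyModel.model_fields.items():
    # v2 exposes FieldInfo directly; no more __fields__[name].field_info
    print(name, info.description, info.is_required())

# required fields default to the PydanticUndefined sentinel, not None
assert ToyModel.model_fields["brokers"].default is PydanticUndefined
```

Note that `FieldInfo.default` is the `PydanticUndefined` sentinel for required fields, which is why the regenerated `config_env_vars.env` later in this series prints `environment=PydanticUndefined` and `brokers=PydanticUndefined`; the generator would need a check such as `info.is_required()` to suppress those values.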
- config_fields = PipelineConfig.__fields__ - for config_field in config_fields.values(): - config_field_info = PipelineConfig.Config.get_field_info(config_field.name) + config_fields = PipelineConfig.model_fields + for field_name, field_value in config_fields.items(): config_field_description: str = ( - config_field.field_info.description + field_value.description or "No description available, please refer to the pipeline config documentation." ) - config_field_default = None or config_field.field_info.default - if config_env_var := config_field_info.get( - "env", - ) or config_field.field_info.extra.get("env"): - csv_append_env_var( - target, - config_env_var, - config_field_default, - config_field_description, - config_field.name, - ) + config_field_default = field_value.default + csv_append_env_var( + target, + field_value.serialization_alias or field_name, + config_field_default, + config_field_description, + field_name, + ) def __fill_csv_cli(target: Path) -> None: diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 0d2836941..5ff258ddf 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -3,7 +3,7 @@ from typing import Iterator import yaml -from pydantic import BaseModel, ConfigDict, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.exception import ParseError diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 5e9bfcfd1..b6ea1300d 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -1,7 +1,7 @@ from enum import Enum from typing import Any, Literal -from pydantic import BaseConfig, BaseModel, ConfigDict, Extra, Field, validator, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator from typing_extensions import override from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index 27ce813b7..a6c69cba7 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -1,6 +1,6 @@ from enum import Enum -from pydantic import BaseModel, ConfigDict, Extra +from pydantic import BaseModel, ConfigDict class TopicSpec(BaseModel): diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index c95b8817a..28c16c9ac 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -2,7 +2,7 @@ import logging -from pydantic import ConfigDict, Extra, Field +from pydantic import ConfigDict, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index cb82c4c63..7149359f6 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import Any, NoReturn -from pydantic import Field, field_validator, FieldValidationInfo +from pydantic import Field, FieldValidationInfo, field_validator from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import 
DryRunHandler diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index bec6ec79d..4eca9b21d 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import Any -from pydantic import ConfigDict, Extra, Field +from pydantic import ConfigDict, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -162,7 +162,9 @@ def to_helm_values(self) -> dict: :returns: Thte values to be used by Helm """ - return self.app.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True) + return self.app.model_dump( + by_alias=True, exclude_none=True, exclude_defaults=True + ) def print_helm_diff(self, stdout: str) -> None: """Print the diff of the last and current release of this component @@ -202,7 +204,7 @@ def model_dump(self, *, exclude=None, **kwargs) -> dict[str, Any]: exclude.add("helm") exclude.add("helm_diff") return super().model_dump(exclude=exclude, **kwargs) - + # @model_serializer(mode="wrap", when_used="always") # def serialize_model(self, handler) -> dict[str, Any]: # # breakpoint() diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index ceab067eb..a26870abb 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -1,7 +1,7 @@ from enum import Enum from typing import Any, NewType -from pydantic import BaseModel, ConfigDict, Extra, Field, model_validator, root_validator +from pydantic import ConfigDict, Field, model_validator from kpops.components.base_components.models import TopicName from kpops.utils.docstring import describe_attr diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 785c1285b..901792c11 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -1,7 +1,7 @@ from enum import Enum from typing import Any -from pydantic import ConfigDict, Extra, Field, model_validator, root_validator +from pydantic import ConfigDict, Field, model_validator from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.utils.docstring import describe_attr diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index db059febe..ae163ee3b 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -1,6 +1,6 @@ from __future__ import annotations -from pydantic import AliasChoices, ConfigDict, Extra, Field +from pydantic import AliasChoices, ConfigDict, Field from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 7f4062e0a..1d39d8874 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -32,6 +32,4 @@ class ProducerValues(KafkaAppConfig): default=..., description=describe_attr("streams", __doc__) ) - model_config = ConfigDict( - extra="allow" - ) + model_config = ConfigDict(extra="allow") diff --git 
a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index dca2491a9..61a2a48b0 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -2,7 +2,6 @@ from pydantic import ConfigDict, Field, model_serializer from pydantic.alias_generators import to_snake -from typing_extensions import override from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index d995f7deb..06e0ed5fd 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -16,7 +16,6 @@ from kpops.cli.registry import Registry from kpops.component_handlers import ComponentHandlers from kpops.components.base_components.pipeline_component import PipelineComponent -from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp from kpops.utils.dict_ops import generate_substitution, update_nested_pair from kpops.utils.environment import ENV from kpops.utils.yaml_loading import load_yaml_file, substitute, substitute_nested diff --git a/poetry.lock b/poetry.lock index f17e19206..75bae65c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -893,18 +893,18 @@ files = [ [[package]] name = "pydantic" -version = "2.3.0" +version = "2.4.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-2.3.0-py3-none-any.whl", hash = "sha256:45b5e446c6dfaad9444819a293b921a40e1db1aa61ea08aede0522529ce90e81"}, - {file = "pydantic-2.3.0.tar.gz", hash = "sha256:1607cc106602284cd4a00882986570472f193fde9cb1259bceeaedb26aa79a6d"}, + {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, + {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.6.3" +pydantic-core = "2.10.1" typing-extensions = ">=4.6.1" [package.extras] @@ -912,117 +912,117 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.6.3" +version = "2.10.1" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic_core-2.6.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1a0ddaa723c48af27d19f27f1c73bdc615c73686d763388c8683fe34ae777bad"}, - {file = "pydantic_core-2.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5cfde4fab34dd1e3a3f7f3db38182ab6c95e4ea91cf322242ee0be5c2f7e3d2f"}, - {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a7027bfc6b108e17c3383959485087d5942e87eb62bbac69829eae9bc1f7"}, - {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84e87c16f582f5c753b7f39a71bd6647255512191be2d2dbf49458c4ef024588"}, - {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:522a9c4a4d1924facce7270c84b5134c5cabcb01513213662a2e89cf28c1d309"}, - {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:aaafc776e5edc72b3cad1ccedb5fd869cc5c9a591f1213aa9eba31a781be9ac1"}, - {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a750a83b2728299ca12e003d73d1264ad0440f60f4fc9cee54acc489249b728"}, - {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e8b374ef41ad5c461efb7a140ce4730661aadf85958b5c6a3e9cf4e040ff4bb"}, - {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b594b64e8568cf09ee5c9501ede37066b9fc41d83d58f55b9952e32141256acd"}, - {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2a20c533cb80466c1d42a43a4521669ccad7cf2967830ac62c2c2f9cece63e7e"}, - {file = "pydantic_core-2.6.3-cp310-none-win32.whl", hash = "sha256:04fe5c0a43dec39aedba0ec9579001061d4653a9b53a1366b113aca4a3c05ca7"}, - {file = "pydantic_core-2.6.3-cp310-none-win_amd64.whl", hash = "sha256:6bf7d610ac8f0065a286002a23bcce241ea8248c71988bda538edcc90e0c39ad"}, - {file = "pydantic_core-2.6.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bcc1ad776fffe25ea5c187a028991c031a00ff92d012ca1cc4714087e575973"}, - {file = "pydantic_core-2.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:df14f6332834444b4a37685810216cc8fe1fe91f447332cd56294c984ecbff1c"}, - {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b7486d85293f7f0bbc39b34e1d8aa26210b450bbd3d245ec3d732864009819"}, - {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a892b5b1871b301ce20d40b037ffbe33d1407a39639c2b05356acfef5536d26a"}, - {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:883daa467865e5766931e07eb20f3e8152324f0adf52658f4d302242c12e2c32"}, - {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4eb77df2964b64ba190eee00b2312a1fd7a862af8918ec70fc2d6308f76ac64"}, - {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce8c84051fa292a5dc54018a40e2a1926fd17980a9422c973e3ebea017aa8da"}, - {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22134a4453bd59b7d1e895c455fe277af9d9d9fbbcb9dc3f4a97b8693e7e2c9b"}, - {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:02e1c385095efbd997311d85c6021d32369675c09bcbfff3b69d84e59dc103f6"}, - {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d79f1f2f7ebdb9b741296b69049ff44aedd95976bfee38eb4848820628a99b50"}, - {file = "pydantic_core-2.6.3-cp311-none-win32.whl", hash = "sha256:430ddd965ffd068dd70ef4e4d74f2c489c3a313adc28e829dd7262cc0d2dd1e8"}, - {file = "pydantic_core-2.6.3-cp311-none-win_amd64.whl", hash = "sha256:84f8bb34fe76c68c9d96b77c60cef093f5e660ef8e43a6cbfcd991017d375950"}, - {file = "pydantic_core-2.6.3-cp311-none-win_arm64.whl", hash = "sha256:5a2a3c9ef904dcdadb550eedf3291ec3f229431b0084666e2c2aa8ff99a103a2"}, - {file = "pydantic_core-2.6.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8421cf496e746cf8d6b677502ed9a0d1e4e956586cd8b221e1312e0841c002d5"}, - {file = "pydantic_core-2.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bb128c30cf1df0ab78166ded1ecf876620fb9aac84d2413e8ea1594b588c735d"}, - {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:37a822f630712817b6ecc09ccc378192ef5ff12e2c9bae97eb5968a6cdf3b862"}, - {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:240a015102a0c0cc8114f1cba6444499a8a4d0333e178bc504a5c2196defd456"}, - {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f90e5e3afb11268628c89f378f7a1ea3f2fe502a28af4192e30a6cdea1e7d5e"}, - {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:340e96c08de1069f3d022a85c2a8c63529fd88709468373b418f4cf2c949fb0e"}, - {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1480fa4682e8202b560dcdc9eeec1005f62a15742b813c88cdc01d44e85308e5"}, - {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f14546403c2a1d11a130b537dda28f07eb6c1805a43dae4617448074fd49c282"}, - {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a87c54e72aa2ef30189dc74427421e074ab4561cf2bf314589f6af5b37f45e6d"}, - {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f93255b3e4d64785554e544c1c76cd32f4a354fa79e2eeca5d16ac2e7fdd57aa"}, - {file = "pydantic_core-2.6.3-cp312-none-win32.whl", hash = "sha256:f70dc00a91311a1aea124e5f64569ea44c011b58433981313202c46bccbec0e1"}, - {file = "pydantic_core-2.6.3-cp312-none-win_amd64.whl", hash = "sha256:23470a23614c701b37252618e7851e595060a96a23016f9a084f3f92f5ed5881"}, - {file = "pydantic_core-2.6.3-cp312-none-win_arm64.whl", hash = "sha256:1ac1750df1b4339b543531ce793b8fd5c16660a95d13aecaab26b44ce11775e9"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a53e3195f134bde03620d87a7e2b2f2046e0e5a8195e66d0f244d6d5b2f6d31b"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:f2969e8f72c6236c51f91fbb79c33821d12a811e2a94b7aa59c65f8dbdfad34a"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:672174480a85386dd2e681cadd7d951471ad0bb028ed744c895f11f9d51b9ebe"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:002d0ea50e17ed982c2d65b480bd975fc41086a5a2f9c924ef8fc54419d1dea3"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ccc13afee44b9006a73d2046068d4df96dc5b333bf3509d9a06d1b42db6d8bf"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:439a0de139556745ae53f9cc9668c6c2053444af940d3ef3ecad95b079bc9987"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d63b7545d489422d417a0cae6f9898618669608750fc5e62156957e609e728a5"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b44c42edc07a50a081672e25dfe6022554b47f91e793066a7b601ca290f71e42"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c721bfc575d57305dd922e6a40a8fe3f762905851d694245807a351ad255c58"}, - {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e4a2cf8c4543f37f5dc881de6c190de08096c53986381daebb56a355be5dfe6"}, - {file = "pydantic_core-2.6.3-cp37-none-win32.whl", hash = "sha256:d9b4916b21931b08096efed090327f8fe78e09ae8f5ad44e07f5c72a7eedb51b"}, - {file = "pydantic_core-2.6.3-cp37-none-win_amd64.whl", hash = 
"sha256:a8acc9dedd304da161eb071cc7ff1326aa5b66aadec9622b2574ad3ffe225525"}, - {file = "pydantic_core-2.6.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5e9c068f36b9f396399d43bfb6defd4cc99c36215f6ff33ac8b9c14ba15bdf6b"}, - {file = "pydantic_core-2.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e61eae9b31799c32c5f9b7be906be3380e699e74b2db26c227c50a5fc7988698"}, - {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85463560c67fc65cd86153a4975d0b720b6d7725cf7ee0b2d291288433fc21b"}, - {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9616567800bdc83ce136e5847d41008a1d602213d024207b0ff6cab6753fe645"}, - {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e9b65a55bbabda7fccd3500192a79f6e474d8d36e78d1685496aad5f9dbd92c"}, - {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f468d520f47807d1eb5d27648393519655eadc578d5dd862d06873cce04c4d1b"}, - {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9680dd23055dd874173a3a63a44e7f5a13885a4cfd7e84814be71be24fba83db"}, - {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a718d56c4d55efcfc63f680f207c9f19c8376e5a8a67773535e6f7e80e93170"}, - {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8ecbac050856eb6c3046dea655b39216597e373aa8e50e134c0e202f9c47efec"}, - {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:788be9844a6e5c4612b74512a76b2153f1877cd845410d756841f6c3420230eb"}, - {file = "pydantic_core-2.6.3-cp38-none-win32.whl", hash = "sha256:07a1aec07333bf5adebd8264047d3dc518563d92aca6f2f5b36f505132399efc"}, - {file = "pydantic_core-2.6.3-cp38-none-win_amd64.whl", hash = "sha256:621afe25cc2b3c4ba05fff53525156d5100eb35c6e5a7cf31d66cc9e1963e378"}, - {file = "pydantic_core-2.6.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:813aab5bfb19c98ae370952b6f7190f1e28e565909bfc219a0909db168783465"}, - {file = "pydantic_core-2.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:50555ba3cb58f9861b7a48c493636b996a617db1a72c18da4d7f16d7b1b9952b"}, - {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e20f8baedd7d987bd3f8005c146e6bcbda7cdeefc36fad50c66adb2dd2da48"}, - {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0a5d7edb76c1c57b95df719af703e796fc8e796447a1da939f97bfa8a918d60"}, - {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f06e21ad0b504658a3a9edd3d8530e8cea5723f6ea5d280e8db8efc625b47e49"}, - {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea053cefa008fda40f92aab937fb9f183cf8752e41dbc7bc68917884454c6362"}, - {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:171a4718860790f66d6c2eda1d95dd1edf64f864d2e9f9115840840cf5b5713f"}, - {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ed7ceca6aba5331ece96c0e328cd52f0dcf942b8895a1ed2642de50800b79d3"}, - {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:acafc4368b289a9f291e204d2c4c75908557d4f36bd3ae937914d4529bf62a76"}, - {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:1aa712ba150d5105814e53cb141412217146fedc22621e9acff9236d77d2a5ef"}, - {file = "pydantic_core-2.6.3-cp39-none-win32.whl", hash = "sha256:44b4f937b992394a2e81a5c5ce716f3dcc1237281e81b80c748b2da6dd5cf29a"}, - {file = "pydantic_core-2.6.3-cp39-none-win_amd64.whl", hash = "sha256:9b33bf9658cb29ac1a517c11e865112316d09687d767d7a0e4a63d5c640d1b17"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d7050899026e708fb185e174c63ebc2c4ee7a0c17b0a96ebc50e1f76a231c057"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99faba727727b2e59129c59542284efebbddade4f0ae6a29c8b8d3e1f437beb7"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fa159b902d22b283b680ef52b532b29554ea2a7fc39bf354064751369e9dbd7"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:046af9cfb5384f3684eeb3f58a48698ddab8dd870b4b3f67f825353a14441418"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:930bfe73e665ebce3f0da2c6d64455098aaa67e1a00323c74dc752627879fc67"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85cc4d105747d2aa3c5cf3e37dac50141bff779545ba59a095f4a96b0a460e70"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b25afe9d5c4f60dcbbe2b277a79be114e2e65a16598db8abee2a2dcde24f162b"}, - {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e49ce7dc9f925e1fb010fc3d555250139df61fa6e5a0a95ce356329602c11ea9"}, - {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2dd50d6a1aef0426a1d0199190c6c43ec89812b1f409e7fe44cb0fbf6dfa733c"}, - {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6595b0d8c8711e8e1dc389d52648b923b809f68ac1c6f0baa525c6440aa0daa"}, - {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ef724a059396751aef71e847178d66ad7fc3fc969a1a40c29f5aac1aa5f8784"}, - {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3c8945a105f1589ce8a693753b908815e0748f6279959a4530f6742e1994dcb6"}, - {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c8c6660089a25d45333cb9db56bb9e347241a6d7509838dbbd1931d0e19dbc7f"}, - {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:692b4ff5c4e828a38716cfa92667661a39886e71136c97b7dac26edef18767f7"}, - {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f1a5d8f18877474c80b7711d870db0eeef9442691fcdb00adabfc97e183ee0b0"}, - {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3796a6152c545339d3b1652183e786df648ecdf7c4f9347e1d30e6750907f5bb"}, - {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b962700962f6e7a6bd77e5f37320cabac24b4c0f76afeac05e9f93cf0c620014"}, - {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ea80269077003eaa59723bac1d8bacd2cd15ae30456f2890811efc1e3d4413"}, - {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c0ebbebae71ed1e385f7dfd9b74c1cff09fed24a6df43d326dd7f12339ec34"}, - {file = 
"pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:252851b38bad3bfda47b104ffd077d4f9604a10cb06fe09d020016a25107bf98"}, - {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6656a0ae383d8cd7cc94e91de4e526407b3726049ce8d7939049cbfa426518c8"}, - {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9140ded382a5b04a1c030b593ed9bf3088243a0a8b7fa9f071a5736498c5483"}, - {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d38bbcef58220f9c81e42c255ef0bf99735d8f11edef69ab0b499da77105158a"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c9d469204abcca28926cbc28ce98f28e50e488767b084fb3fbdf21af11d3de26"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48c1ed8b02ffea4d5c9c220eda27af02b8149fe58526359b3c07eb391cb353a2"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2b1bfed698fa410ab81982f681f5b1996d3d994ae8073286515ac4d165c2e7"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf9d42a71a4d7a7c1f14f629e5c30eac451a6fc81827d2beefd57d014c006c4a"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4292ca56751aebbe63a84bbfc3b5717abb09b14d4b4442cc43fd7c49a1529efd"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7dc2ce039c7290b4ef64334ec7e6ca6494de6eecc81e21cb4f73b9b39991408c"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:615a31b1629e12445c0e9fc8339b41aaa6cc60bd53bf802d5fe3d2c0cda2ae8d"}, - {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1fa1f6312fb84e8c281f32b39affe81984ccd484da6e9d65b3d18c202c666149"}, - {file = "pydantic_core-2.6.3.tar.gz", hash = "sha256:1508f37ba9e3ddc0189e6ff4e2228bd2d3c3a4641cbe8c07177162f76ed696c7"}, + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, + {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, + {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, + {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, + {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, + {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, + {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, + {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, + {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, + {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, + {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, + {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, + {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, + {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, + {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, + {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, ] [package.dependencies] @@ -1919,4 +1919,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "6637234a3c7d3bbcb0f7dcfc4b8ac27bc8b17498a7d0dd12419b5b8e02e62158" +content-hash = "6bacec8095fc2f4e9d2beea0b278b07977727783e97f044bed449317ad5cbd97" From 9b7840da11ae688e3015a2135de98532de32d0e7 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 15:41:33 +0300 Subject: [PATCH 28/96] chore: upgrade pydantic --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5fb13367b..0cc035402 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ kpops = "kpops.cli.main:app" [tool.poetry.dependencies] python = "^3.10" -pydantic = { extras = ["dotenv"], version = "^2.3.0" } +pydantic = { extras = ["dotenv"], version = "^2.4.2" } pydantic-settings = "^2.0.3" rich = "^12.4.4" PyYAML = "^6.0" From e039fd84329edb1a6a5a5b9ae45439a79c1fb4c0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 15:42:03 +0300 Subject: [PATCH 29/96] fix: env vars doc gen --- .../resources/variables/config_env_vars.env | 48 +++++++++++++++++++ .../resources/variables/config_env_vars.md | 24 ++++++---- .../variables/temp_config_env_vars.csv | 1 - 3 files changed, 63 insertions(+), 10 deletions(-) delete mode 100644 docs/docs/resources/variables/temp_config_env_vars.csv diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 143ec8ba4..6315658c9 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -4,3 +4,51 @@ # alternative to the settings in `config.yaml`. Variables marked as # required can instead be set in the pipeline config. 
# +# defaults_path +# The path to the folder containing the defaults.yaml file and the +# environment defaults files. Paths can either be absolute or relative +# to `config.yaml` +defaults_path=. +# environment +# The environment you want to generate and deploy the pipeline to. +# Suffix your environment files with this value (e.g. +# defaults_development.yaml for environment=development). +environment=PydanticUndefined +# brokers +# The comma separated Kafka brokers address. +brokers=PydanticUndefined +# defaults_filename_prefix +# The name of the defaults file and the prefix of the defaults +# environment file. +defaults_filename_prefix=defaults +# topic_name_config +# Configure the topic name variables you can use in the pipeline +# definition. +topic_name_config=default_output_topic_name='${pipeline_name}-${component_name}' default_error_topic_name='${pipeline_name}-${component_name}-error' +# schema_registry_url +# Address of the Schema Registry. +schema_registry_url # No default value, not required +# kafka_rest_host +# Address of the Kafka REST Proxy. +kafka_rest_host # No default value, not required +# kafka_connect_host +# Address of Kafka Connect. +kafka_connect_host # No default value, not required +# timeout +# The timeout in seconds that specifies when actions like deletion or +# deploy timeout. +timeout=300 +# create_namespace +# Flag for `helm upgrade --install`. Create the release namespace if +# not present. +create_namespace=False +# helm_config +# Global flags for Helm. +helm_config=context=None debug=False api_version=None +# helm_diff_config +# Configure Helm Diff. +helm_diff_config=ignore=set() +# retain_clean_jobs +# Whether to retain clean up jobs in the cluster or uninstall the, +# after completion. +retain_clean_jobs=False diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 889985257..9f3b89926 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,12 +1,18 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name |Default Value|Required| Description | Setting name | -|-------------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------| -|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |brokers | -|KPOPS_SCHEMA_REGISTRY_URL| |False |Address of the Schema Registry. |schema_registry_url| -|KPOPS_REST_PROXY_HOST | |False |Address of the Kafka REST Proxy. |kafka_rest_host | -|KPOPS_CONNECT_HOST | |False |Address of Kafka Connect. |kafka_connect_host | -|KPOPS_TIMEOUT | 300|False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | -|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | +| Name | Default Value |Required| Description | Setting name | +|------------------------|----------------------------------------------------------------------------------------------------------------------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------| +|defaults_path |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml` |defaults_path | +|environment |PydanticUndefined |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | +|brokers |PydanticUndefined |False |The comma separated Kafka brokers address. |brokers | +|defaults_filename_prefix|defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix| +|topic_name_config |default_output_topic_name='${pipeline_name}-${component_name}' default_error_topic_name='${pipeline_name}-${component_name}-error'|False |Configure the topic name variables you can use in the pipeline definition. |topic_name_config | +|schema_registry_url | |False |Address of the Schema Registry. |schema_registry_url | +|kafka_rest_host | |False |Address of the Kafka REST Proxy. |kafka_rest_host | +|kafka_connect_host | |False |Address of Kafka Connect. |kafka_connect_host | +|timeout |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | +|create_namespace |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | +|helm_config |context=None debug=False api_version=None |False |Global flags for Helm. |helm_config | +|helm_diff_config |ignore=set() |False |Configure Helm Diff. |helm_diff_config | +|retain_clean_jobs |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | diff --git a/docs/docs/resources/variables/temp_config_env_vars.csv b/docs/docs/resources/variables/temp_config_env_vars.csv deleted file mode 100644 index b40ee402d..000000000 --- a/docs/docs/resources/variables/temp_config_env_vars.csv +++ /dev/null @@ -1 +0,0 @@ -Name,Default Value,Required,Description,Setting name From 07aa1167d294c70ae11a2b1ac258627fbadb0d47 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 15:45:16 +0300 Subject: [PATCH 30/96] chore: remove unneeded import --- hooks/gen_docs/gen_docs_components.py | 1 - 1 file changed, 1 deletion(-) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 92d640828..6b4f0efe1 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -6,7 +6,6 @@ from typing import NamedTuple, cast import yaml -from pydantic import FieldInfo from hooks import PATH_ROOT from kpops.cli.registry import _find_classes From cd2fba38606494c8b31a8ad28339b482f6c40d87 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 4 Oct 2023 17:18:39 +0300 Subject: [PATCH 31/96] fix: generation scripts WIP --- docs/docs/schema/config.json | 347 ++++---- docs/docs/schema/pipeline.json | 798 ++++++++++-------- hooks/gen_docs/gen_docs_components.py | 2 +- hooks/gen_schema.py | 2 +- .../base_components/kafka_connector.py | 4 +- kpops/utils/gen_schema.py | 33 +- 6 files changed, 634 insertions(+), 552 deletions(-) diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index b77b4e850..b2e37e0ec 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -1,19 +1,36 @@ { - "$ref": "#/definitions/PipelineConfig", - "definitions": { + "$defs": { "HelmConfig": { "description": "Global Helm configuration", "properties": { "api_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Kubernetes API version used for Capabilities.APIVersions", - "title": "API version", - "type": "string" + "title": "API version" }, "context": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Name of kubeconfig context (`--kube-context`)", - "example": "dev-storage", - "title": "Context", - "type": "string" + "examples": [ + "dev-storage" + ], + "title": "Context" }, "debug": { "default": false, @@ -29,7 +46,9 @@ "properties": { "ignore": { "description": "Set of keys that should not be checked.", - "example": "- name\n- imageTag", + "examples": [ + "- name\n- imageTag" + ], "items": { "type": "string" }, @@ -41,165 +60,6 @@ "title": "HelmDiffConfig", "type": "object" }, - "PipelineConfig": { - "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", - "properties": { - "brokers": { - "description": "The comma separated Kafka brokers address.", - "env": "KPOPS_KAFKA_BROKERS", - "env_names": [ - "kpops_kafka_brokers" - ], - "example": "broker1:9092,broker2:9092,broker3:9092", - "title": "Brokers", - "type": "string" - }, - "create_namespace": { - "default": false, - "description": "Flag for `helm upgrade --install`. 
Create the release namespace if not present.", - "env_names": [ - "create_namespace" - ], - "title": "Create Namespace", - "type": "boolean" - }, - "defaults_filename_prefix": { - "default": "defaults", - "description": "The name of the defaults file and the prefix of the defaults environment file.", - "env_names": [ - "defaults_filename_prefix" - ], - "title": "Defaults Filename Prefix", - "type": "string" - }, - "defaults_path": { - "default": ".", - "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", - "env_names": [ - "defaults_path" - ], - "example": "defaults", - "format": "path", - "title": "Defaults Path", - "type": "string" - }, - "environment": { - "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", - "env": "KPOPS_ENVIRONMENT", - "env_names": [ - "kpops_environment" - ], - "example": "development", - "title": "Environment", - "type": "string" - }, - "helm_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmConfig" - } - ], - "default": { - "api_version": null, - "context": null, - "debug": false - }, - "description": "Global flags for Helm.", - "env_names": [ - "helm_config" - ], - "title": "Helm Config" - }, - "helm_diff_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmDiffConfig" - } - ], - "default": { - "ignore": [] - }, - "description": "Configure Helm Diff.", - "env_names": [ - "helm_diff_config" - ], - "title": "Helm Diff Config" - }, - "kafka_connect_host": { - "description": "Address of Kafka Connect.", - "env": "KPOPS_CONNECT_HOST", - "env_names": [ - "kpops_connect_host" - ], - "example": "http://localhost:8083", - "title": "Kafka Connect Host", - "type": "string" - }, - "kafka_rest_host": { - "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS_REST_PROXY_HOST", - "env_names": [ - "kpops_rest_proxy_host" - ], - "example": "http://localhost:8082", - "title": "Kafka Rest Host", - "type": "string" - }, - "retain_clean_jobs": { - "default": false, - "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", - "env": "KPOPS_RETAIN_CLEAN_JOBS", - "env_names": [ - "kpops_retain_clean_jobs" - ], - "title": "Retain Clean Jobs", - "type": "boolean" - }, - "schema_registry_url": { - "description": "Address of the Schema Registry.", - "env": "KPOPS_SCHEMA_REGISTRY_URL", - "env_names": [ - "kpops_schema_registry_url" - ], - "example": "http://localhost:8081", - "title": "Schema Registry Url", - "type": "string" - }, - "timeout": { - "default": 300, - "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", - "env": "KPOPS_TIMEOUT", - "env_names": [ - "kpops_timeout" - ], - "title": "Timeout", - "type": "integer" - }, - "topic_name_config": { - "allOf": [ - { - "$ref": "#/definitions/TopicNameConfig" - } - ], - "default": { - "default_error_topic_name": "${pipeline_name}-${component_name}-error", - "default_output_topic_name": "${pipeline_name}-${component_name}" - }, - "description": "Configure the topic name variables you can use in the pipeline definition.", - "env_names": [ - "topic_name_config" - ], - "title": "Topic Name Config" - } - }, - "required": [ - "environment", - "brokers" - ], - "title": "PipelineConfig", - "type": "object" - }, "TopicNameConfig": { "additionalProperties": false, "description": 
"Configures topic names.", @@ -207,18 +67,12 @@ "default_error_topic_name": { "default": "${pipeline_name}-${component_name}-error", "description": "Configures the value for the variable ${error_topic_name}", - "env_names": [ - "default_error_topic_name" - ], "title": "Default Error Topic Name", "type": "string" }, "default_output_topic_name": { "default": "${pipeline_name}-${component_name}", "description": "Configures the value for the variable ${output_topic_name}", - "env_names": [ - "default_output_topic_name" - ], "title": "Default Output Topic Name", "type": "string" } @@ -227,5 +81,150 @@ "type": "object" } }, - "title": "KPOps config schema" + "additionalProperties": false, + "description": "Pipeline configuration unrelated to the components.", + "properties": { + "KPOPS_connect_host": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of Kafka Connect.", + "examples": [ + "http://localhost:8083" + ], + "title": "Kpops Connect Host" + }, + "KPOPS_rest_proxy_host": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of the Kafka REST Proxy.", + "examples": [ + "http://localhost:8082" + ], + "title": "Kpops Rest Proxy Host" + }, + "brokers": { + "description": "The comma separated Kafka brokers address.", + "examples": [ + "broker1:9092,broker2:9092,broker3:9092" + ], + "title": "Brokers", + "type": "string" + }, + "create_namespace": { + "default": false, + "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", + "title": "Create Namespace", + "type": "boolean" + }, + "defaults_filename_prefix": { + "default": "defaults", + "description": "The name of the defaults file and the prefix of the defaults environment file.", + "title": "Defaults Filename Prefix", + "type": "string" + }, + "defaults_path": { + "default": ".", + "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", + "examples": [ + "defaults", + "." + ], + "format": "path", + "title": "Defaults Path", + "type": "string" + }, + "environment": { + "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", + "examples": [ + "development", + "production" + ], + "title": "Environment", + "type": "string" + }, + "helm_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmConfig" + } + ], + "default": { + "api_version": null, + "context": null, + "debug": false + }, + "description": "Global flags for Helm." + }, + "helm_diff_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmDiffConfig" + } + ], + "default": { + "ignore": [] + }, + "description": "Configure Helm Diff." 
+ }, + "retain_clean_jobs": { + "default": false, + "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", + "title": "Retain Clean Jobs", + "type": "boolean" + }, + "schema_registry_url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of the Schema Registry.", + "examples": [ + "http://localhost:8081" + ], + "title": "Schema Registry Url" + }, + "timeout": { + "default": 300, + "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", + "title": "Timeout", + "type": "integer" + }, + "topic_name_config": { + "allOf": [ + { + "$ref": "#/$defs/TopicNameConfig" + } + ], + "default": { + "default_error_topic_name": "${pipeline_name}-${component_name}-error", + "default_output_topic_name": "${pipeline_name}-${component_name}" + }, + "description": "Configure the topic name variables you can use in the pipeline definition." + } + }, + "required": [ + "environment", + "brokers" + ], + "title": "PipelineConfig", + "type": "object" } diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index cc0000f72..38470b5c8 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -1,12 +1,12 @@ { - "definitions": { + "$defs": { "FromSection": { "additionalProperties": false, "description": "Holds multiple input topics", "properties": { "components": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Components to read from", @@ -15,7 +15,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Input topics", @@ -31,16 +31,28 @@ "description": "Input topic", "properties": { "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/InputTopicTypes" + "$ref": "#/$defs/InputTopicTypes" + }, + { + "type": "null" } ], + "default": null, "description": "Topic type" } }, @@ -53,7 +65,7 @@ "repo_auth_flags": { "allOf": [ { - "$ref": "#/definitions/RepoAuthFlags" + "$ref": "#/$defs/RepoAuthFlags" } ], "default": { @@ -63,8 +75,7 @@ "password": null, "username": null }, - "description": "Authorisation-related flags", - "title": "Repo Auth Flags" + "description": "Authorisation-related flags" }, "repository_name": { "description": "Name of the Helm repository", @@ -94,23 +105,27 @@ "type": "string" }, "KafkaApp": { + "additionalProperties": true, "description": "Base component for Kafka-based components.\nProducer or streaming apps should inherit from this class.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaAppConfig" + "$ref": "#/$defs/KafkaAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -133,7 +148,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], 
"default": { @@ -147,32 +162,32 @@ "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-app", - "description": "Base component for Kafka-based components.\nProducer or streaming apps should inherit from this class.", - "enum": [ - "kafka-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -184,21 +199,29 @@ "type": "object" }, "KafkaAppConfig": { + "additionalProperties": true, "description": "Settings specific to Kafka Apps", "properties": { "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Override name with this value", - "title": "Nameoverride", - "type": "string" + "title": "Nameoverride" }, "streams": { "allOf": [ { - "$ref": "#/definitions/KafkaStreamsConfig" + "$ref": "#/$defs/KafkaStreamsConfig" } ], - "description": "Kafka streams config", - "title": "Streams" + "description": "Kafka streams config" } }, "required": [ @@ -208,23 +231,27 @@ "type": "object" }, "KafkaConnector": { + "additionalProperties": true, "description": "Base class for all Kafka connectors\nShould only be used to set defaults", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -247,7 +274,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -261,8 +288,7 @@ "repository_name": "bakdata-kafka-connect-resetter", "url": "https://bakdata.github.io/kafka-connect-resetter/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "resetter_values": { "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", @@ -270,28 +296,29 @@ "type": "object" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-connector", - "description": "Base class for all Kafka connectors\nShould only be used to set defaults", - "enum": [ - "kafka-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -303,7 +330,8 @@ "type": "object" }, "KafkaConnectorConfig": { - "additionalProperties": { + "additionalProperties": true, + "additional_properties": { "type": "string" }, "description": "Settings specific to Kafka Connectors", @@ -311,6 +339,19 @@ "connector.class": { "title": "Connector.Class", "type": "string" + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "hidden_from_schema": true, + "title": "Name" } }, "required": [ @@ -320,23 +361,27 @@ "type": "object" }, "KafkaSinkConnector": { + "additionalProperties": true, "description": "Kafka sink connector model", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -359,7 +404,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -373,8 +418,7 @@ "repository_name": "bakdata-kafka-connect-resetter", "url": "https://bakdata.github.io/kafka-connect-resetter/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "resetter_values": { "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", @@ -382,28 +426,29 @@ "type": "object" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-sink-connector", - "description": "Kafka sink connector model", - "enum": [ - "kafka-sink-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -415,23 +460,27 @@ "type": "object" }, "KafkaSourceConnector": { + "additionalProperties": true, "description": "Kafka source connector model", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -446,9 +495,17 @@ "type": "string" }, "offset_topic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running", - "title": "Offset Topic", - "type": "string" + "title": "Offset Topic" }, "prefix": { "default": "${pipeline_name}-", @@ -459,7 +516,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -473,8 +530,7 @@ "repository_name": "bakdata-kafka-connect-resetter", "url": "https://bakdata.github.io/kafka-connect-resetter/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "resetter_values": { "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", @@ -482,28 +538,29 @@ "type": "object" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-source-connector", - "description": "Kafka source connector model", - "enum": [ - "kafka-source-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -515,6 +572,7 @@ "type": "object" }, "KafkaStreamsConfig": { + "additionalProperties": true, "description": "Kafka Streams config", "properties": { "brokers": { @@ -523,9 +581,17 @@ "type": "string" }, "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "URL of the schema registry", - "title": "Schemaregistryurl", - "type": "string" + "title": "Schemaregistryurl" } }, "required": [ @@ -535,23 +601,27 @@ "type": "object" }, "KubernetesApp": { + "additionalProperties": true, "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KubernetesAppConfig" + "$ref": "#/$defs/KubernetesAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -574,34 +644,35 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kubernetes-app", - "description": "Base class for all Kubernetes apps.\nAll built-in components are Kubernetes apps, except for the Kafka connectors.", - "enum": [ - "kubernetes-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -614,6 +685,7 @@ "type": "object" }, "KubernetesAppConfig": { + "additionalProperties": true, "description": "Settings specific to Kubernetes Apps", "properties": {}, "title": "KubernetesAppConfig", @@ -629,14 +701,19 @@ "type": "string" }, "PipelineComponent": { + "additionalProperties": true, "description": "Base class for all components", "properties": { "from": { - "allOf": [ + 
"anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -652,22 +729,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "pipeline-component", - "description": "Base class for all components", - "enum": [ - "pipeline-component" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -677,18 +748,19 @@ "type": "object" }, "ProducerApp": { + "additionalProperties": true, "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/ProducerValues" + "$ref": "#/$defs/ProducerValues" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { + "default": null, "description": "Producer doesn't support FromSection", "title": "From", "type": "null" @@ -712,7 +784,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -726,32 +798,32 @@ "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "producer-app", - "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", - "enum": [ - "producer-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -763,6 +835,7 @@ "type": "object" }, "ProducerStreamsConfig": { + "additionalProperties": true, "description": "Kafka Streams settings specific to Producer", "properties": { "brokers": { @@ -780,14 +853,30 @@ "type": "object" }, "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Output topic", - "title": "Outputtopic", - "type": "string" + "title": "Outputtopic" }, "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "URL of the schema registry", - "title": "Schemaregistryurl", - "type": "string" + "title": "Schemaregistryurl" } }, "required": [ @@ -797,21 +886,29 @@ "type": "object" }, "ProducerValues": { + "additionalProperties": true, "description": "Settings specific to producers", "properties": { "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Override name with this value", - "title": "Nameoverride", - "type": "string" + "title": "Nameoverride" }, "streams": { "allOf": [ { - "$ref": "#/definitions/ProducerStreamsConfig" + "$ref": "#/$defs/ProducerStreamsConfig" } ], - "description": "Kafka Streams settings", - "title": "Streams" + "description": "Kafka Streams settings" } }, "required": [ @@ -824,16 +921,32 @@ "description": "Authorisation-related flags for `helm repo`", "properties": { "ca_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Path to CA bundle file to verify certificates of HTTPS-enabled servers", - "format": "path", - "title": "Ca File", - "type": "string" + "title": "Ca File" }, "cert_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Path to SSL certificate file to identify HTTPS client", - "format": "path", - "title": "Cert File", - "type": "string" + "title": "Cert File" }, "insecure_skip_tls_verify": { "default": false, @@ -842,37 +955,57 @@ "type": "boolean" }, "password": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Password", - "title": "Password", - "type": "string" + "title": "Password" }, "username": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Username", - "title": "Username", - "type": "string" + "title": "Username" } }, "title": "RepoAuthFlags", "type": "object" }, "StreamsApp": { + "additionalProperties": true, "description": "StreamsApp component that configures a streams bootstrap app", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/StreamsAppConfig" + "$ref": "#/$defs/StreamsAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": 
"Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -895,7 +1028,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -909,32 +1042,32 @@ "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "streams-app", - "description": "StreamsApp component that configures a streams bootstrap app", - "enum": [ - "streams-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -946,6 +1079,7 @@ "type": "object" }, "StreamsAppAutoScaling": { + "additionalProperties": true, "description": "Kubernetes Event-driven Autoscaling config", "properties": { "consumerGroup": { @@ -961,13 +1095,22 @@ }, "enabled": { "default": false, + "description": "", "title": "Enabled", "type": "boolean" }, "idleReplicas": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "If this property is set, KEDA will scale the resource down to this number of replicas. 
https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", - "title": "Idle replica count", - "type": "integer" + "title": "Idle replica count" }, "lagThreshold": { "description": "Average target value to trigger scaling actions.", @@ -1016,30 +1159,41 @@ "type": "object" }, "StreamsAppConfig": { + "additionalProperties": true, "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/StreamsAppAutoScaling" + }, { - "$ref": "#/definitions/StreamsAppAutoScaling" + "type": "null" } ], - "description": "Kubernetes Event-driven Autoscaling config", - "title": "Autoscaling" + "default": null, + "description": "Kubernetes Event-driven Autoscaling config" }, "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Override name with this value", - "title": "Nameoverride", - "type": "string" + "title": "Nameoverride" }, "streams": { "allOf": [ { - "$ref": "#/definitions/StreamsConfig" + "$ref": "#/$defs/StreamsConfig" } ], - "description": "Streams Bootstrap streams section", - "title": "Streams" + "description": "Streams Bootstrap streams section" } }, "required": [ @@ -1049,89 +1203,12 @@ "type": "object" }, "StreamsConfig": { - "description": "Streams Bootstrap streams section", - "properties": { - "brokers": { - "description": "Brokers", - "title": "Brokers", - "type": "string" - }, - "config": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Configuration", - "title": "Config", - "type": "object" - }, - "errorTopic": { - "description": "Error topic", - "title": "Errortopic", - "type": "string" - }, - "extraInputPatterns": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Extra input patterns", - "title": "Extrainputpatterns", - "type": "object" - }, - "extraInputTopics": { - "additionalProperties": { - "items": { - "type": "string" - }, - "type": "array" - }, - "default": {}, - "description": "Extra input topics", - "title": "Extrainputtopics", - "type": "object" - }, - "extraOutputTopics": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Extra output topics", - "title": "Extraoutputtopics", - "type": "object" - }, - "inputPattern": { - "description": "Input pattern", - "title": "Inputpattern", - "type": "string" - }, - "inputTopics": { - "default": [], - "description": "Input topics", - "items": { - "type": "string" - }, - "title": "Inputtopics", - "type": "array" - }, - "outputTopic": { - "description": "Output topic", - "title": "Outputtopic", - "type": "string" - }, - "schemaRegistryUrl": { - "description": "URL of the schema registry", - "title": "Schemaregistryurl", - "type": "string" - } - }, - "required": [ - "brokers" - ], + "description": "Streams Bootstrap streams section\n\n:param input_topics: Input topics, defaults to []\n:param input_pattern: Input pattern, defaults to None\n:param extra_input_topics: Extra input topics, defaults to {}\n:param extra_input_patterns: Extra input patterns, defaults to {}\n:param extra_output_topics: Extra output topics, defaults to {}\n:param output_topic: Output topic, defaults to None\n:param error_topic: Error topic, defaults to None\n:param config: Configuration, defaults to {}", "title": "StreamsConfig", "type": "object" }, "ToSection": { + 
"additionalProperties": false, "description": "Holds multiple output topics", "properties": { "models": { @@ -1145,7 +1222,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/TopicConfig" + "$ref": "#/$defs/TopicConfig" }, "default": {}, "description": "Output topics", @@ -1177,85 +1254,86 @@ "type": "object" }, "key_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Key schema class name", - "title": "Key schema", - "type": "string" + "title": "Key schema" }, "partitions_count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Number of partitions into which the topic is divided", - "title": "Partitions count", - "type": "integer" + "title": "Partitions count" }, "replication_factor": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Replication factor of the topic", - "title": "Replication factor", - "type": "integer" + "title": "Replication factor" }, "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/OutputTopicTypes" + "$ref": "#/$defs/OutputTopicTypes" + }, + { + "type": "null" } ], + "default": null, "description": "Topic type", "title": "Topic type" }, "value_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Value schema class name", - "title": "Value schema", - "type": "string" + "title": "Value schema" } }, "title": "TopicConfig", "type": "object" } - }, - "items": { - "discriminator": { - "mapping": { - "kafka-app": "#/definitions/KafkaApp", - "kafka-connector": "#/definitions/KafkaConnector", - "kafka-sink-connector": "#/definitions/KafkaSinkConnector", - "kafka-source-connector": "#/definitions/KafkaSourceConnector", - "kubernetes-app": "#/definitions/KubernetesApp", - "pipeline-component": "#/definitions/PipelineComponent", - "producer-app": "#/definitions/ProducerApp", - "streams-app": "#/definitions/StreamsApp" - }, - "propertyName": "type" - }, - "oneOf": [ - { - "$ref": "#/definitions/KafkaApp" - }, - { - "$ref": "#/definitions/KafkaConnector" - }, - { - "$ref": "#/definitions/KafkaSinkConnector" - }, - { - "$ref": "#/definitions/KafkaSourceConnector" - }, - { - "$ref": "#/definitions/KubernetesApp" - }, - { - "$ref": "#/definitions/PipelineComponent" - }, - { - "$ref": "#/definitions/ProducerApp" - }, - { - "$ref": "#/definitions/StreamsApp" - } - ] - }, - "title": "KPOps pipeline schema", - "type": "array" + } } diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 6b4f0efe1..30a35d8d6 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -44,7 +44,7 @@ KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name - for field_name, model in component.__fields__.items() + for field_name, model in component.__fields__.items() # pyright: ignore[reportGeneralTypeIssues] if not model.exclude ] for component in KPOPS_COMPONENTS diff --git a/hooks/gen_schema.py b/hooks/gen_schema.py index 8fc24f938..e72f2bcf5 100644 --- a/hooks/gen_schema.py +++ b/hooks/gen_schema.py @@ -27,4 +27,4 @@ def gen_schema(scope: SchemaScope): if __name__ == "__main__": 
gen_schema(SchemaScope.PIPELINE) - gen_schema(SchemaScope.CONFIG) + # gen_schema(SchemaScope.CONFIG) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 7149359f6..56a6d065d 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import Any, NoReturn -from pydantic import Field, FieldValidationInfo, field_validator +from pydantic import Field, ValidationInfo, field_validator from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -76,7 +76,7 @@ class KafkaConnector(PipelineComponent, ABC): def connector_config_should_have_component_name( cls, app: KafkaConnectorConfig | dict[str, str], - info: FieldValidationInfo, + info: ValidationInfo, ) -> Any: if isinstance(app, KafkaConnectorConfig): app = app.model_dump() diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 94755e08f..64dbf6223 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,9 +1,11 @@ +import json import logging from enum import Enum from typing import Annotated, Any, Literal, Sequence, Union -from pydantic import Field, schema_json_of +from pydantic import Field, GenerateSchema, TypeAdapter, schema_json_of from pydantic.fields import FieldInfo +from pydantic.json_schema import JsonSchemaMode, model_json_schema, models_json_schema from pydantic.v1 import schema from pydantic.v1.schema import SkipField @@ -53,8 +55,9 @@ def _is_valid_component( def _add_components( - components_module: str, components: tuple[type[PipelineComponent]] | None = None -) -> tuple[type[PipelineComponent]]: + components_module: str, + components: tuple[type[PipelineComponent], ...] | None = None, +) -> tuple[type[PipelineComponent], ...]: """Add components to a components tuple If an empty tuple is provided or it is not provided at all, the components @@ -92,14 +95,14 @@ def gen_pipeline_schema( log.warning("No components are provided, no schema is generated.") return # Add stock components if enabled - components: tuple[type[PipelineComponent]] = tuple() + components: tuple[type[PipelineComponent], ...] = tuple() if include_stock_components: components = tuple(_find_classes("kpops.components", PipelineComponent)) # Add custom components if provided if components_module: components = _add_components(components_module, components) # Create a type union that will hold the union of all component types - PipelineComponents = Union[components] # type: ignore[valid-type] + PipelineComponents: Union[type[PipelineComponent], ...] 
= Union[components_moded] # type: ignore[valid-type] # re-assign component type as Literal to work as discriminator for component in components: @@ -113,24 +116,26 @@ def gen_pipeline_schema( # model_config=BaseConfig, # class_validators=None, ) + components_moded = tuple([(component, "serialization") for component in components]) AnnotatedPipelineComponents = Annotated[ PipelineComponents, Field(discriminator="type") ] - schema = schema_json_of( + schema = model_json_schema( Sequence[AnnotatedPipelineComponents], - title="KPOps pipeline schema", + # title="KPOps pipeline schema", by_alias=True, - indent=4, - sort_keys=True, ) - print(schema) + # schema = models_json_schema( + # components_moded, + # # title="KPOps pipeline schema", + # by_alias=True, + # ) + print(json.dumps(schema[1], indent=4, sort_keys=True)) def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config""" - schema = schema_json_of( - PipelineConfig, title="KPOps config schema", indent=4, sort_keys=True - ) - print(schema) + schema = model_json_schema(PipelineConfig) + print(json.dumps(schema, indent=4, sort_keys=True)) From 325a2fe63de8a79e9f3c4b9bad89e4144acb5884 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Fri, 6 Oct 2023 16:09:15 +0300 Subject: [PATCH 32/96] fix: schema WIP --- kpops/utils/gen_schema.py | 33 ++++++++++++------- .../schema_handler/test_schema_handler.py | 2 +- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 64dbf6223..06aa88786 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -101,8 +101,10 @@ def gen_pipeline_schema( # Add custom components if provided if components_module: components = _add_components(components_module, components) + if not components: + raise RuntimeError("No valid components found.") # Create a type union that will hold the union of all component types - PipelineComponents: Union[type[PipelineComponent], ...] 
= Union[components_moded] # type: ignore[valid-type] + PipelineComponents = Union[components] # type: ignore[valid-type] # re-assign component type as Literal to work as discriminator for component in components: @@ -116,26 +118,33 @@ def gen_pipeline_schema( # model_config=BaseConfig, # class_validators=None, ) - components_moded = tuple([(component, "serialization") for component in components]) - + components_moded = tuple([(component, "validation") for component in components]) + # Create a type union that will hold the union of all component types AnnotatedPipelineComponents = Annotated[ PipelineComponents, Field(discriminator="type") ] - schema = model_json_schema( - Sequence[AnnotatedPipelineComponents], + schema = models_json_schema( + components_moded, # title="KPOps pipeline schema", by_alias=True, ) - # schema = models_json_schema( - # components_moded, - # # title="KPOps pipeline schema", - # by_alias=True, - # ) - print(json.dumps(schema[1], indent=4, sort_keys=True)) + print( + json.dumps( + schema, + indent=4, + sort_keys=True, + ) + ) def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config""" schema = model_json_schema(PipelineConfig) - print(json.dumps(schema, indent=4, sort_keys=True)) + print( + json.dumps( + schema, + indent=4, + sort_keys=True + ) + ) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index ccea021c6..ccf75fb61 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -76,7 +76,7 @@ def test_load_schema_handler(): schema_registry_url="http://localhost:8081", ) - config_disable = config_enable.copy() + config_disable = config_enable.model_copy() config_disable.schema_registry_url = None assert ( SchemaHandler.load_schema_handler(TEST_SCHEMA_PROVIDER_MODULE, config_disable) From d9839ad2cbdc938de0e97982540792007adc4097 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 9 Oct 2023 16:17:45 +0300 Subject: [PATCH 33/96] fix: schema generation WIP --- docs/docs/schema/pipeline.json | 610 +++++++++++++++++- .../component_handlers/kafka_connect/model.py | 9 +- .../base_defaults_component.py | 16 +- kpops/utils/gen_schema.py | 92 ++- 4 files changed, 658 insertions(+), 69 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 38470b5c8..cee2ef659 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -59,6 +59,66 @@ "title": "FromTopic", "type": "object" }, + "HelmConfig": { + "description": "Global Helm configuration", + "properties": { + "api_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Kubernetes API version used for Capabilities.APIVersions", + "title": "API version" + }, + "context": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Name of kubeconfig context (`--kube-context`)", + "examples": [ + "dev-storage" + ], + "title": "Context" + }, + "debug": { + "default": false, + "description": "Run Helm in Debug mode", + "title": "Debug", + "type": "boolean" + } + }, + "title": "HelmConfig", + "type": "object" + }, + "HelmDiffConfig": { + "properties": { + "ignore": { + "description": "Set of keys that should not be checked.", + "examples": [ + "- name\n- imageTag" + ], + "items": { + "type": "string" + }, + "title": "Ignore", + "type": 
"array", + "uniqueItems": true + } + }, + "title": "HelmDiffConfig", + "type": "object" + }, "HelmRepoConfig": { "description": "Helm repository configuration", "properties": { @@ -116,6 +176,20 @@ ], "description": "Application-specific settings" }, + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "anyOf": [ { @@ -129,6 +203,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -176,6 +254,12 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" + }, "version": { "anyOf": [ { @@ -191,6 +275,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -242,6 +328,20 @@ ], "description": "Application-specific settings" }, + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "anyOf": [ { @@ -255,6 +355,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -307,6 +411,12 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" + }, "version": { "anyOf": [ { @@ -322,6 +432,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -341,16 +453,7 @@ "type": "string" }, "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], "default": null, - "hidden_from_schema": true, "title": "Name" } }, @@ -372,6 +475,20 @@ ], "description": "Application-specific settings" }, + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "anyOf": [ { @@ -385,6 +502,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -437,6 +558,12 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" + }, "version": { "anyOf": [ { @@ -452,6 +579,8 @@ } }, "required": [ + "config", 
+ "handlers", "name", "namespace", "app" @@ -471,6 +600,20 @@ ], "description": "Application-specific settings" }, + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "anyOf": [ { @@ -484,6 +627,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -549,6 +696,12 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" + }, "version": { "anyOf": [ { @@ -564,6 +717,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -612,6 +767,20 @@ ], "description": "Application-specific settings" }, + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "anyOf": [ { @@ -625,6 +794,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -661,6 +834,12 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" + }, "version": { "anyOf": [ { @@ -676,6 +855,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app", @@ -704,6 +885,20 @@ "additionalProperties": true, "description": "Base class for all components", "properties": { + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "anyOf": [ { @@ -717,6 +912,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -739,14 +938,170 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" } }, "required": [ + "config", + "handlers", "name" ], "title": "PipelineComponent", "type": "object" }, + "PipelineConfig": { + "additionalProperties": false, + "description": "Pipeline configuration unrelated to the components.", + "properties": { + "KPOPS_connect_host": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": 
null, + "description": "Address of Kafka Connect.", + "examples": [ + "http://localhost:8083" + ], + "title": "Kpops Connect Host" + }, + "KPOPS_rest_proxy_host": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of the Kafka REST Proxy.", + "examples": [ + "http://localhost:8082" + ], + "title": "Kpops Rest Proxy Host" + }, + "brokers": { + "description": "The comma separated Kafka brokers address.", + "examples": [ + "broker1:9092,broker2:9092,broker3:9092" + ], + "title": "Brokers", + "type": "string" + }, + "create_namespace": { + "default": false, + "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", + "title": "Create Namespace", + "type": "boolean" + }, + "defaults_filename_prefix": { + "default": "defaults", + "description": "The name of the defaults file and the prefix of the defaults environment file.", + "title": "Defaults Filename Prefix", + "type": "string" + }, + "defaults_path": { + "default": ".", + "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", + "examples": [ + "defaults", + "." + ], + "format": "path", + "title": "Defaults Path", + "type": "string" + }, + "environment": { + "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", + "examples": [ + "development", + "production" + ], + "title": "Environment", + "type": "string" + }, + "helm_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmConfig" + } + ], + "default": { + "api_version": null, + "context": null, + "debug": false + }, + "description": "Global flags for Helm." + }, + "helm_diff_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmDiffConfig" + } + ], + "default": { + "ignore": [] + }, + "description": "Configure Helm Diff." + }, + "retain_clean_jobs": { + "default": false, + "description": "Whether to retain clean up jobs in the cluster or uninstall them after completion.", + "title": "Retain Clean Jobs", + "type": "boolean" + }, + "schema_registry_url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of the Schema Registry.", + "examples": [ + "http://localhost:8081" + ], + "title": "Schema Registry Url" + }, + "timeout": { + "default": 300, + "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", + "title": "Timeout", + "type": "integer" + }, + "topic_name_config": { + "allOf": [ + { + "$ref": "#/$defs/TopicNameConfig" + } + ], + "default": { + "default_error_topic_name": "${pipeline_name}-${component_name}-error", + "default_output_topic_name": "${pipeline_name}-${component_name}" + }, + "description": "Configure the topic name variables you can use in the pipeline definition." + } + }, + "required": [ + "environment", + "brokers" + ], + "title": "PipelineConfig", + "type": "object" + }, "ProducerApp": { "additionalProperties": true, "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", @@ -759,12 +1114,30 @@ ], "description": "Application-specific settings" }, + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "default": null, "description": "Producer doesn't support FromSection", "title": "From", "type": "null" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -812,6 +1185,12 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" + }, "version": { "anyOf": [ { @@ -827,6 +1206,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -996,6 +1377,20 @@ ], "description": "Application-specific settings" }, + "config": { + "allOf": [ + { + "$ref": "#/$defs/PipelineConfig" + } + ], + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich", + "type": "boolean" + }, "from": { "anyOf": [ { @@ -1009,6 +1404,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -1056,6 +1455,12 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate", + "type": "boolean" + }, "version": { "anyOf": [ { @@ -1071,6 +1476,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -1188,12 +1595,7 @@ "title": "Nameoverride" }, "streams": { - "allOf": [ - { - "$ref": "#/$defs/StreamsConfig" - } - ], - "description": "Streams Bootstrap streams section" + "$ref": "#/$defs/StreamsConfig" } }, "required": [ @@ -1203,7 +1605,118 @@ "type": "object" }, "StreamsConfig": { - "description": "Streams Bootstrap streams section\n\n:param input_topics: Input topics, defaults to []\n:param input_pattern: Input pattern, defaults to None\n:param extra_input_topics: Extra input topics, defaults to {}\n:param extra_input_patterns: Extra input patterns, defaults to {}\n:param extra_output_topics: Extra output topics, defaults to {}\n:param output_topic: Output topic, defaults to None\n:param error_topic: Error topic, defaults to None\n:param config: Configuration, defaults to {}", + "additionalProperties": true, + "description": "Streams Bootstrap streams section", + "properties": { + "brokers": { + "description": "Brokers", + "title": "Brokers", + "type": "string" + }, + "config": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Configuration", + "title": "Config", + "type": "object" + }, + "errorTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Error topic", + "title": "Errortopic" + }, + "extraInputPatterns": { + 
"additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra input patterns", + "title": "Extrainputpatterns", + "type": "object" + }, + "extraInputTopics": { + "additionalProperties": { + "items": { + "type": "string" + }, + "type": "array" + }, + "default": {}, + "description": "Extra input topics", + "title": "Extrainputtopics", + "type": "object" + }, + "extraOutputTopics": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra output topics", + "title": "Extraoutputtopics", + "type": "object" + }, + "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Input pattern", + "title": "Inputpattern" + }, + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, + "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Output topic", + "title": "Outputtopic" + }, + "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "URL of the schema registry", + "title": "Schemaregistryurl" + } + }, + "required": [ + "brokers" + ], "title": "StreamsConfig", "type": "object" }, @@ -1334,6 +1847,69 @@ }, "title": "TopicConfig", "type": "object" + }, + "TopicNameConfig": { + "additionalProperties": false, + "description": "Configures topic names.", + "properties": { + "default_error_topic_name": { + "default": "${pipeline_name}-${component_name}-error", + "description": "Configures the value for the variable ${error_topic_name}", + "title": "Default Error Topic Name", + "type": "string" + }, + "default_output_topic_name": { + "default": "${pipeline_name}-${component_name}", + "description": "Configures the value for the variable ${output_topic_name}", + "title": "Default Output Topic Name", + "type": "string" + } + }, + "title": "TopicNameConfig", + "type": "object" } - } + }, + "items": { + "discriminator": { + "mapping": { + "kafka-app": "#/$defs/KafkaApp", + "kafka-connector": "#/$defs/KafkaConnector", + "kafka-sink-connector": "#/$defs/KafkaSinkConnector", + "kafka-source-connector": "#/$defs/KafkaSourceConnector", + "kubernetes-app": "#/$defs/KubernetesApp", + "pipeline-component": "#/$defs/PipelineComponent", + "producer-app": "#/$defs/ProducerApp", + "streams-app": "#/$defs/StreamsApp" + }, + "propertyName": "type" + }, + "oneOf": [ + { + "$ref": "#/$defs/KafkaApp" + }, + { + "$ref": "#/$defs/KafkaConnector" + }, + { + "$ref": "#/$defs/KafkaSinkConnector" + }, + { + "$ref": "#/$defs/KafkaSourceConnector" + }, + { + "$ref": "#/$defs/KubernetesApp" + }, + { + "$ref": "#/$defs/PipelineComponent" + }, + { + "$ref": "#/$defs/ProducerApp" + }, + { + "$ref": "#/$defs/StreamsApp" + } + ], + "type": "array" + }, + "title": "KPOps pipeline schema" } diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index b6ea1300d..9467fabc9 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -2,7 +2,8 @@ from typing import Any, Literal from pydantic import BaseModel, ConfigDict, Field, field_validator -from typing_extensions import override +from pydantic.json_schema import WithJsonSchema +from typing_extensions import Annotated, override from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot @@ 
-16,13 +17,9 @@ class KafkaConnectorConfig(DescConfigModel): """Settings specific to Kafka Connectors""" connector_class: str - name: str | None = Field( + name: Annotated[str | None, WithJsonSchema({})] = Field( default=None, - json_schema_extra={ - "hidden_from_schema": True, - }, ) - model_config = ConfigDict( extra="allow", alias_generator=to_dot, diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index eec843851..e6af5ac0d 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -8,6 +8,8 @@ import typer from pydantic import AliasChoices, ConfigDict, Field +from pydantic.json_schema import WithJsonSchema +from typing_extensions import Annotated from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers @@ -48,34 +50,22 @@ class BaseDefaultsComponent(DescConfigModel): default=False, description=describe_attr("enrich", __doc__), exclude=True, - json_schema_extra={ - "hidden_from_schema": True, - }, ) config: PipelineConfig = Field( default=..., description=describe_attr("config", __doc__), exclude=True, - json_schema_extra={ - "hidden_from_schema": True, - }, ) - handlers: ComponentHandlers = Field( + handlers: Annotated[ComponentHandlers, WithJsonSchema({})] = Field( default=..., description=describe_attr("handlers", __doc__), exclude=True, - json_schema_extra={ - "hidden_from_schema": True, - }, ) validate_: bool = Field( validation_alias=AliasChoices("validate", "validate_"), default=True, description=describe_attr("validate", __doc__), exclude=True, - json_schema_extra={ - "hidden_from_schema": True, - }, ) def __init__(self, **kwargs) -> None: diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 06aa88786..81b273c20 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,13 +1,11 @@ import json import logging from enum import Enum -from typing import Annotated, Any, Literal, Sequence, Union +from typing import Literal -from pydantic import Field, GenerateSchema, TypeAdapter, schema_json_of +from pydantic import Field from pydantic.fields import FieldInfo from pydantic.json_schema import JsonSchemaMode, model_json_schema, models_json_schema -from pydantic.v1 import schema -from pydantic.v1.schema import SkipField from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import _find_classes @@ -20,19 +18,6 @@ class SchemaScope(str, Enum): CONFIG = "config" -original_field_schema = schema.field_schema - - -# adapted from https://github.com/tiangolo/fastapi/issues/1378#issuecomment-764966955 -def field_schema(field, **kwargs: Any) -> Any: - if field.field_info.json_schema_extra.get("hidden_from_schema"): - raise SkipField(f"{field.name} field is being hidden") - else: - return original_field_schema(field, **kwargs) - - -schema.field_schema = field_schema - log = logging.getLogger("") @@ -103,12 +88,10 @@ def gen_pipeline_schema( components = _add_components(components_module, components) if not components: raise RuntimeError("No valid components found.") - # Create a type union that will hold the union of all component types - PipelineComponents = Union[components] # type: ignore[valid-type] # re-assign component type as Literal to work as discriminator for component in components: - component.model_fields["type"] = FieldInfo( + component.model_fields["type"] = Field( alias="type", type_=Literal[component.type], 
# type: ignore default=component.type, @@ -119,32 +102,75 @@ def gen_pipeline_schema( # class_validators=None, ) components_moded = tuple([(component, "validation") for component in components]) - # Create a type union that will hold the union of all component types - AnnotatedPipelineComponents = Annotated[ - PipelineComponents, Field(discriminator="type") - ] schema = models_json_schema( components_moded, - # title="KPOps pipeline schema", + title="KPOps pipeline schema", by_alias=True, ) + # breakpoint() + stripped_schema_first_item = {k[0]: v for k, v in schema[0].items()} + schema_first_item_adapted = { + "discriminator": { + "mapping": {}, + "propertyName": "type", + }, + "oneOf": [], + "type": "array", + } + mapping = {} + one_of = [] + for k, v in stripped_schema_first_item.items(): + mapping[k.type] = v["$ref"] + one_of.append(v) + schema_first_item_adapted["discriminator"]["mapping"] = mapping + schema_first_item_adapted["oneOf"] = one_of + complete_schema = schema[1].copy() + complete_schema["items"] = schema_first_item_adapted print( json.dumps( - schema, + complete_schema, indent=4, sort_keys=True, ) ) + """ +"items": { + "discriminator": { + "mapping": { + "empty-pipeline-component": "#/definitions/EmptyPipelineComponent", + "sub-pipeline-component": "#/definitions/SubPipelineComponent", + "sub-pipeline-component-correct": "#/definitions/SubPipelineComponentCorrect", + "sub-pipeline-component-correct-docstr": "#/definitions/SubPipelineComponentCorrectDocstr", + "sub-pipeline-component-no-schema-type-no-type": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + }, + "propertyName": "type" + }, + "oneOf": [ + { + "$ref": "#/definitions/EmptyPipelineComponent" + }, + { + "$ref": "#/definitions/SubPipelineComponent" + }, + { + "$ref": "#/definitions/SubPipelineComponentCorrect" + }, + { + "$ref": "#/definitions/SubPipelineComponentCorrectDocstr" + }, + { + "$ref": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + } + ] +}, +"title": "KPOps pipeline schema", +"type": "array" + """ + def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config""" schema = model_json_schema(PipelineConfig) - print( - json.dumps( - schema, - indent=4, - sort_keys=True - ) - ) + print(json.dumps(schema, indent=4, sort_keys=True)) From b898a9c76d0994078ad527d8e3b616c8e9d1698a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 9 Oct 2023 16:20:20 +0300 Subject: [PATCH 34/96] chore: clean up --- kpops/utils/gen_schema.py | 37 +------------------------------------ 1 file changed, 1 insertion(+), 36 deletions(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 81b273c20..cf99e9845 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -4,8 +4,7 @@ from typing import Literal from pydantic import Field -from pydantic.fields import FieldInfo -from pydantic.json_schema import JsonSchemaMode, model_json_schema, models_json_schema +from pydantic.json_schema import model_json_schema, models_json_schema from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import _find_classes @@ -135,40 +134,6 @@ def gen_pipeline_schema( ) ) - """ -"items": { - "discriminator": { - "mapping": { - "empty-pipeline-component": "#/definitions/EmptyPipelineComponent", - "sub-pipeline-component": "#/definitions/SubPipelineComponent", - "sub-pipeline-component-correct": "#/definitions/SubPipelineComponentCorrect", - "sub-pipeline-component-correct-docstr": "#/definitions/SubPipelineComponentCorrectDocstr", - 
"sub-pipeline-component-no-schema-type-no-type": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" - }, - "propertyName": "type" - }, - "oneOf": [ - { - "$ref": "#/definitions/EmptyPipelineComponent" - }, - { - "$ref": "#/definitions/SubPipelineComponent" - }, - { - "$ref": "#/definitions/SubPipelineComponentCorrect" - }, - { - "$ref": "#/definitions/SubPipelineComponentCorrectDocstr" - }, - { - "$ref": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" - } - ] -}, -"title": "KPOps pipeline schema", -"type": "array" - """ - def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config""" From d97098b691870d13db483d17da8bde0487e89f8a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 9 Oct 2023 16:52:46 +0300 Subject: [PATCH 35/96] fix: pipeline schema WIP --- docs/docs/schema/pipeline.json | 6 +++--- kpops/utils/gen_schema.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index cee2ef659..340afa20a 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -1908,8 +1908,8 @@ { "$ref": "#/$defs/StreamsApp" } - ], - "type": "array" + ] }, - "title": "KPOps pipeline schema" + "title": "KPOps pipeline schema", + "type": "array" } diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index cf99e9845..b5a38ec65 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -115,7 +115,6 @@ def gen_pipeline_schema( "propertyName": "type", }, "oneOf": [], - "type": "array", } mapping = {} one_of = [] @@ -126,6 +125,7 @@ def gen_pipeline_schema( schema_first_item_adapted["oneOf"] = one_of complete_schema = schema[1].copy() complete_schema["items"] = schema_first_item_adapted + complete_schema["type"] = "array" print( json.dumps( complete_schema, From e7b6588fb9f81c7e23ab9b9f00a68d9ecaa4921a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 9 Oct 2023 21:19:16 +0300 Subject: [PATCH 36/96] fix: pieline schema WIP --- docs/docs/schema/pipeline.json | 664 +++------------------------------ kpops/utils/gen_schema.py | 19 +- 2 files changed, 77 insertions(+), 606 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 340afa20a..37d60e796 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -6,7 +6,7 @@ "properties": { "components": { "additionalProperties": { - "$ref": "#/$defs/FromTopic" + "$ref": "#/definitions/FromTopic" }, "default": {}, "description": "Components to read from", @@ -15,7 +15,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/$defs/FromTopic" + "$ref": "#/definitions/FromTopic" }, "default": {}, "description": "Input topics", @@ -46,7 +46,7 @@ "type": { "anyOf": [ { - "$ref": "#/$defs/InputTopicTypes" + "$ref": "#/definitions/InputTopicTypes" }, { "type": "null" @@ -59,73 +59,13 @@ "title": "FromTopic", "type": "object" }, - "HelmConfig": { - "description": "Global Helm configuration", - "properties": { - "api_version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Kubernetes API version used for Capabilities.APIVersions", - "title": "API version" - }, - "context": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Name of kubeconfig context (`--kube-context`)", - "examples": [ - "dev-storage" - ], - "title": "Context" - }, - "debug": { - "default": false, - "description": "Run Helm in Debug mode", - 
"title": "Debug", - "type": "boolean" - } - }, - "title": "HelmConfig", - "type": "object" - }, - "HelmDiffConfig": { - "properties": { - "ignore": { - "description": "Set of keys that should not be checked.", - "examples": [ - "- name\n- imageTag" - ], - "items": { - "type": "string" - }, - "title": "Ignore", - "type": "array", - "uniqueItems": true - } - }, - "title": "HelmDiffConfig", - "type": "object" - }, "HelmRepoConfig": { "description": "Helm repository configuration", "properties": { "repo_auth_flags": { "allOf": [ { - "$ref": "#/$defs/RepoAuthFlags" + "$ref": "#/definitions/RepoAuthFlags" } ], "default": { @@ -171,29 +111,15 @@ "app": { "allOf": [ { - "$ref": "#/$defs/KafkaAppConfig" + "$ref": "#/definitions/KafkaAppConfig" } ], "description": "Application-specific settings" }, - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/definitions/FromSection" }, { "type": "null" @@ -203,10 +129,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -226,7 +148,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/definitions/HelmRepoConfig" } ], "default": { @@ -245,7 +167,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -254,12 +176,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" - }, "version": { "anyOf": [ { @@ -275,8 +191,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -304,7 +218,7 @@ "streams": { "allOf": [ { - "$ref": "#/$defs/KafkaStreamsConfig" + "$ref": "#/definitions/KafkaStreamsConfig" } ], "description": "Kafka streams config" @@ -323,29 +237,15 @@ "app": { "allOf": [ { - "$ref": "#/$defs/KafkaConnectorConfig" + "$ref": "#/definitions/KafkaConnectorConfig" } ], "description": "Application-specific settings" }, - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/definitions/FromSection" }, { "type": "null" @@ -355,10 +255,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -378,7 +274,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/definitions/HelmRepoConfig" } ], "default": { @@ -402,7 +298,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -411,12 +307,6 @@ "default": null, 
"description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" - }, "version": { "anyOf": [ { @@ -432,8 +322,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -470,29 +358,15 @@ "app": { "allOf": [ { - "$ref": "#/$defs/KafkaConnectorConfig" + "$ref": "#/definitions/KafkaConnectorConfig" } ], "description": "Application-specific settings" }, - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/definitions/FromSection" }, { "type": "null" @@ -502,10 +376,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -525,7 +395,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/definitions/HelmRepoConfig" } ], "default": { @@ -549,7 +419,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -558,12 +428,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" - }, "version": { "anyOf": [ { @@ -579,8 +443,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -595,29 +457,15 @@ "app": { "allOf": [ { - "$ref": "#/$defs/KafkaConnectorConfig" + "$ref": "#/definitions/KafkaConnectorConfig" } ], "description": "Application-specific settings" }, - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/definitions/FromSection" }, { "type": "null" @@ -627,10 +475,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -663,7 +507,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/definitions/HelmRepoConfig" } ], "default": { @@ -687,7 +531,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -696,12 +540,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" - }, "version": { "anyOf": [ { @@ -717,8 +555,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -762,29 +598,15 @@ "app": { "allOf": [ { - "$ref": "#/$defs/KubernetesAppConfig" + "$ref": 
"#/definitions/KubernetesAppConfig" } ], "description": "Application-specific settings" }, - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/definitions/FromSection" }, { "type": "null" @@ -794,10 +616,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -817,7 +635,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/definitions/HelmRepoConfig" } ], "description": "Configuration of the Helm chart repo to be used for deploying the component" @@ -825,7 +643,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -834,12 +652,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" - }, "version": { "anyOf": [ { @@ -855,8 +667,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app", @@ -885,24 +695,10 @@ "additionalProperties": true, "description": "Base class for all components", "properties": { - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/definitions/FromSection" }, { "type": "null" @@ -912,10 +708,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -930,7 +722,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -938,170 +730,14 @@ ], "default": null, "description": "Topic(s) into which the component will write output" - }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" } }, "required": [ - "config", - "handlers", "name" ], "title": "PipelineComponent", "type": "object" }, - "PipelineConfig": { - "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", - "properties": { - "KPOPS_connect_host": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Address of Kafka Connect.", - "examples": [ - "http://localhost:8083" - ], - "title": "Kpops Connect Host" - }, - "KPOPS_rest_proxy_host": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Address of the Kafka REST Proxy.", - "examples": [ - "http://localhost:8082" - ], - "title": "Kpops Rest Proxy Host" - }, - "brokers": { - "description": "The comma 
separated Kafka brokers address.", - "examples": [ - "broker1:9092,broker2:9092,broker3:9092" - ], - "title": "Brokers", - "type": "string" - }, - "create_namespace": { - "default": false, - "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", - "title": "Create Namespace", - "type": "boolean" - }, - "defaults_filename_prefix": { - "default": "defaults", - "description": "The name of the defaults file and the prefix of the defaults environment file.", - "title": "Defaults Filename Prefix", - "type": "string" - }, - "defaults_path": { - "default": ".", - "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", - "examples": [ - "defaults", - "." - ], - "format": "path", - "title": "Defaults Path", - "type": "string" - }, - "environment": { - "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", - "examples": [ - "development", - "production" - ], - "title": "Environment", - "type": "string" - }, - "helm_config": { - "allOf": [ - { - "$ref": "#/$defs/HelmConfig" - } - ], - "default": { - "api_version": null, - "context": null, - "debug": false - }, - "description": "Global flags for Helm." - }, - "helm_diff_config": { - "allOf": [ - { - "$ref": "#/$defs/HelmDiffConfig" - } - ], - "default": { - "ignore": [] - }, - "description": "Configure Helm Diff." - }, - "retain_clean_jobs": { - "default": false, - "description": "Whether to retain clean up jobs in the cluster or uninstall them after completion.", - "title": "Retain Clean Jobs", - "type": "boolean" - }, - "schema_registry_url": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Address of the Schema Registry.", - "examples": [ - "http://localhost:8081" - ], - "title": "Schema Registry Url" - }, - "timeout": { - "default": 300, - "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", - "title": "Timeout", - "type": "integer" - }, - "topic_name_config": { - "allOf": [ - { - "$ref": "#/$defs/TopicNameConfig" - } - ], - "default": { - "default_error_topic_name": "${pipeline_name}-${component_name}-error", - "default_output_topic_name": "${pipeline_name}-${component_name}" - }, - "description": "Configure the topic name variables you can use in the pipeline definition." - } - }, - "required": [ - "environment", - "brokers" - ], - "title": "PipelineConfig", - "type": "object" - }, "ProducerApp": { "additionalProperties": true, "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", @@ -1109,35 +745,17 @@ "app": { "allOf": [ { - "$ref": "#/$defs/ProducerValues" + "$ref": "#/definitions/ProducerValues" } ], "description": "Application-specific settings" }, - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "default": null, "description": "Producer doesn't support FromSection", "title": "From", "type": "null" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -1157,7 +775,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/definitions/HelmRepoConfig" } ], "default": { @@ -1176,7 +794,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -1185,12 +803,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" - }, "version": { "anyOf": [ { @@ -1206,8 +818,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -1286,7 +896,7 @@ "streams": { "allOf": [ { - "$ref": "#/$defs/ProducerStreamsConfig" + "$ref": "#/definitions/ProducerStreamsConfig" } ], "description": "Kafka Streams settings" @@ -1372,29 +982,15 @@ "app": { "allOf": [ { - "$ref": "#/$defs/StreamsAppConfig" + "$ref": "#/definitions/StreamsAppConfig" } ], "description": "Application-specific settings" }, - "config": { - "allOf": [ - { - "$ref": "#/$defs/PipelineConfig" - } - ], - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich", - "type": "boolean" - }, "from": { "anyOf": [ { - "$ref": "#/$defs/FromSection" + "$ref": "#/definitions/FromSection" }, { "type": "null" @@ -1404,10 +1000,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -1427,7 +1019,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/$defs/HelmRepoConfig" + "$ref": "#/definitions/HelmRepoConfig" } ], "default": { @@ -1446,7 +1038,7 @@ "to": { "anyOf": [ { - "$ref": "#/$defs/ToSection" + "$ref": "#/definitions/ToSection" }, { "type": "null" @@ -1455,12 +1047,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate", - "type": "boolean" - }, "version": { "anyOf": [ { @@ -1476,8 +1062,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -1572,7 +1156,7 @@ "autoscaling": { "anyOf": [ { - "$ref": "#/$defs/StreamsAppAutoScaling" + "$ref": "#/definitions/StreamsAppAutoScaling" }, { "type": "null" @@ -1595,7 +1179,12 @@ "title": "Nameoverride" }, "streams": { - "$ref": "#/$defs/StreamsConfig" + "allOf": [ + { + "$ref": "#/definitions/StreamsConfig" + } + ], + "description": 
"Streams Bootstrap streams section" } }, "required": [ @@ -1605,118 +1194,7 @@ "type": "object" }, "StreamsConfig": { - "additionalProperties": true, - "description": "Streams Bootstrap streams section", - "properties": { - "brokers": { - "description": "Brokers", - "title": "Brokers", - "type": "string" - }, - "config": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Configuration", - "title": "Config", - "type": "object" - }, - "errorTopic": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Error topic", - "title": "Errortopic" - }, - "extraInputPatterns": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Extra input patterns", - "title": "Extrainputpatterns", - "type": "object" - }, - "extraInputTopics": { - "additionalProperties": { - "items": { - "type": "string" - }, - "type": "array" - }, - "default": {}, - "description": "Extra input topics", - "title": "Extrainputtopics", - "type": "object" - }, - "extraOutputTopics": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Extra output topics", - "title": "Extraoutputtopics", - "type": "object" - }, - "inputPattern": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Input pattern", - "title": "Inputpattern" - }, - "inputTopics": { - "default": [], - "description": "Input topics", - "items": { - "type": "string" - }, - "title": "Inputtopics", - "type": "array" - }, - "outputTopic": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Output topic", - "title": "Outputtopic" - }, - "schemaRegistryUrl": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "URL of the schema registry", - "title": "Schemaregistryurl" - } - }, - "required": [ - "brokers" - ], + "description": "Streams Bootstrap streams section\n\n:param input_topics: Input topics, defaults to []\n:param input_pattern: Input pattern, defaults to None\n:param extra_input_topics: Extra input topics, defaults to {}\n:param extra_input_patterns: Extra input patterns, defaults to {}\n:param extra_output_topics: Extra output topics, defaults to {}\n:param output_topic: Output topic, defaults to None\n:param error_topic: Error topic, defaults to None\n:param config: Configuration, defaults to {}", "title": "StreamsConfig", "type": "object" }, @@ -1735,7 +1213,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/$defs/TopicConfig" + "$ref": "#/definitions/TopicConfig" }, "default": {}, "description": "Output topics", @@ -1821,7 +1299,7 @@ "type": { "anyOf": [ { - "$ref": "#/$defs/OutputTopicTypes" + "$ref": "#/definitions/OutputTopicTypes" }, { "type": "null" @@ -1847,66 +1325,46 @@ }, "title": "TopicConfig", "type": "object" - }, - "TopicNameConfig": { - "additionalProperties": false, - "description": "Configures topic names.", - "properties": { - "default_error_topic_name": { - "default": "${pipeline_name}-${component_name}-error", - "description": "Configures the value for the variable ${error_topic_name}", - "title": "Default Error Topic Name", - "type": "string" - }, - "default_output_topic_name": { - "default": "${pipeline_name}-${component_name}", - "description": "Configures the value for the variable ${output_topic_name}", - "title": "Default Output Topic Name", - "type": "string" - } - }, - "title": "TopicNameConfig", - 
"type": "object" } }, "items": { "discriminator": { "mapping": { - "kafka-app": "#/$defs/KafkaApp", - "kafka-connector": "#/$defs/KafkaConnector", - "kafka-sink-connector": "#/$defs/KafkaSinkConnector", - "kafka-source-connector": "#/$defs/KafkaSourceConnector", - "kubernetes-app": "#/$defs/KubernetesApp", - "pipeline-component": "#/$defs/PipelineComponent", - "producer-app": "#/$defs/ProducerApp", - "streams-app": "#/$defs/StreamsApp" + "kafka-app": "#/definitions/KafkaApp", + "kafka-connector": "#/definitions/KafkaConnector", + "kafka-sink-connector": "#/definitions/KafkaSinkConnector", + "kafka-source-connector": "#/definitions/KafkaSourceConnector", + "kubernetes-app": "#/definitions/KubernetesApp", + "pipeline-component": "#/definitions/PipelineComponent", + "producer-app": "#/definitions/ProducerApp", + "streams-app": "#/definitions/StreamsApp" }, "propertyName": "type" }, "oneOf": [ { - "$ref": "#/$defs/KafkaApp" + "$ref": "#/definitions/KafkaApp" }, { - "$ref": "#/$defs/KafkaConnector" + "$ref": "#/definitions/KafkaConnector" }, { - "$ref": "#/$defs/KafkaSinkConnector" + "$ref": "#/definitions/KafkaSinkConnector" }, { - "$ref": "#/$defs/KafkaSourceConnector" + "$ref": "#/definitions/KafkaSourceConnector" }, { - "$ref": "#/$defs/KubernetesApp" + "$ref": "#/definitions/KubernetesApp" }, { - "$ref": "#/$defs/PipelineComponent" + "$ref": "#/definitions/PipelineComponent" }, { - "$ref": "#/$defs/ProducerApp" + "$ref": "#/definitions/ProducerApp" }, { - "$ref": "#/$defs/StreamsApp" + "$ref": "#/definitions/StreamsApp" } ] }, diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index b5a38ec65..d392b20f2 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,10 +1,11 @@ import json import logging from enum import Enum -from typing import Literal +from typing import Annotated, Literal, Union -from pydantic import Field +from pydantic import Field, TypeAdapter from pydantic.json_schema import model_json_schema, models_json_schema +from pydantic_core import to_json from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import _find_classes @@ -100,12 +101,13 @@ def gen_pipeline_schema( # model_config=BaseConfig, # class_validators=None, ) - components_moded = tuple([(component, "validation") for component in components]) + components_moded = tuple([(component, "serialization") for component in components]) schema = models_json_schema( components_moded, title="KPOps pipeline schema", by_alias=True, + ref_template="#/definitions/{model}", ) # breakpoint() stripped_schema_first_item = {k[0]: v for k, v in schema[0].items()} @@ -134,6 +136,17 @@ def gen_pipeline_schema( ) ) + # Create a type union that will hold the union of all component types + # PipelineComponents = Union[components] # type: ignore[valid-type] + # AnnotatedPipelineComponents = Annotated[ + # PipelineComponents, Field(discriminator="type") + # ] + # DumpablePipelineComponents = TypeAdapter(AnnotatedPipelineComponents) + + # schema = to_json(AnnotatedPipelineComponents) + + # print(schema) + def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config""" From b220442abb73ef2e4541027f28c72d5d05b8f362 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 9 Oct 2023 21:31:45 +0300 Subject: [PATCH 37/96] fix: schema WIP --- docs/docs/schema/pipeline.json | 500 +++++++++++++++++- .../base_defaults_component.py | 6 +- kpops/utils/gen_schema.py | 2 +- 3 files changed, 497 insertions(+), 11 deletions(-) diff --git 
a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 37d60e796..5357a853c 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -59,6 +59,66 @@ "title": "FromTopic", "type": "object" }, + "HelmConfig": { + "description": "Global Helm configuration", + "properties": { + "api_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Kubernetes API version used for Capabilities.APIVersions", + "title": "API version" + }, + "context": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Name of kubeconfig context (`--kube-context`)", + "examples": [ + "dev-storage" + ], + "title": "Context" + }, + "debug": { + "default": false, + "description": "Run Helm in Debug mode", + "title": "Debug", + "type": "boolean" + } + }, + "title": "HelmConfig", + "type": "object" + }, + "HelmDiffConfig": { + "properties": { + "ignore": { + "description": "Set of keys that should not be checked.", + "examples": [ + "- name\n- imageTag" + ], + "items": { + "type": "string" + }, + "title": "Ignore", + "type": "array", + "uniqueItems": true + } + }, + "title": "HelmDiffConfig", + "type": "object" + }, "HelmRepoConfig": { "description": "Helm repository configuration", "properties": { @@ -116,6 +176,14 @@ ], "description": "Application-specific settings" }, + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich" + }, "from": { "anyOf": [ { @@ -129,6 +197,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -176,6 +248,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" + }, "version": { "anyOf": [ { @@ -191,6 +268,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -242,6 +321,14 @@ ], "description": "Application-specific settings" }, + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich" + }, "from": { "anyOf": [ { @@ -255,6 +342,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -307,6 +398,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" + }, "version": { "anyOf": [ { @@ -322,6 +418,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -363,6 +461,14 @@ ], "description": "Application-specific settings" }, + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", 
+ "title": "Enrich" + }, "from": { "anyOf": [ { @@ -376,6 +482,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -428,6 +538,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" + }, "version": { "anyOf": [ { @@ -443,6 +558,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -462,6 +579,14 @@ ], "description": "Application-specific settings" }, + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich" + }, "from": { "anyOf": [ { @@ -475,6 +600,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -540,6 +669,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" + }, "version": { "anyOf": [ { @@ -555,6 +689,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -603,6 +739,14 @@ ], "description": "Application-specific settings" }, + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich" + }, "from": { "anyOf": [ { @@ -616,6 +760,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -652,6 +800,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" + }, "version": { "anyOf": [ { @@ -667,6 +820,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app", @@ -695,6 +850,14 @@ "additionalProperties": true, "description": "Base class for all components", "properties": { + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich" + }, "from": { "anyOf": [ { @@ -708,6 +871,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -730,14 +897,169 @@ ], "default": null, "description": "Topic(s) into which the component will write output" + }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" 
} }, "required": [ + "config", + "handlers", "name" ], "title": "PipelineComponent", "type": "object" }, + "PipelineConfig": { + "additionalProperties": false, + "description": "Pipeline configuration unrelated to the components.", + "properties": { + "KPOPS_connect_host": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of Kafka Connect.", + "examples": [ + "http://localhost:8083" + ], + "title": "Kpops Connect Host" + }, + "KPOPS_rest_proxy_host": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of the Kafka REST Proxy.", + "examples": [ + "http://localhost:8082" + ], + "title": "Kpops Rest Proxy Host" + }, + "brokers": { + "description": "The comma separated Kafka brokers address.", + "examples": [ + "broker1:9092,broker2:9092,broker3:9092" + ], + "title": "Brokers", + "type": "string" + }, + "create_namespace": { + "default": false, + "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", + "title": "Create Namespace", + "type": "boolean" + }, + "defaults_filename_prefix": { + "default": "defaults", + "description": "The name of the defaults file and the prefix of the defaults environment file.", + "title": "Defaults Filename Prefix", + "type": "string" + }, + "defaults_path": { + "default": ".", + "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", + "examples": [ + "defaults", + "." + ], + "format": "path", + "title": "Defaults Path", + "type": "string" + }, + "environment": { + "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", + "examples": [ + "development", + "production" + ], + "title": "Environment", + "type": "string" + }, + "helm_config": { + "allOf": [ + { + "$ref": "#/definitions/HelmConfig" + } + ], + "default": { + "api_version": null, + "context": null, + "debug": false + }, + "description": "Global flags for Helm." + }, + "helm_diff_config": { + "allOf": [ + { + "$ref": "#/definitions/HelmDiffConfig" + } + ], + "default": { + "ignore": [] + }, + "description": "Configure Helm Diff." + }, + "retain_clean_jobs": { + "default": false, + "description": "Whether to retain clean up jobs in the cluster or uninstall them after completion.", + "title": "Retain Clean Jobs", + "type": "boolean" + }, + "schema_registry_url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Address of the Schema Registry.", + "examples": [ + "http://localhost:8081" + ], + "title": "Schema Registry Url" + }, + "timeout": { + "default": 300, + "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", + "title": "Timeout", + "type": "integer" + }, + "topic_name_config": { + "allOf": [ + { + "$ref": "#/definitions/TopicNameConfig" + } + ], + "default": { + "default_error_topic_name": "${pipeline_name}-${component_name}-error", + "default_output_topic_name": "${pipeline_name}-${component_name}" + }, + "description": "Configure the topic name variables you can use in the pipeline definition." 
+ } + }, + "required": [ + "environment", + "brokers" + ], + "title": "PipelineConfig", + "type": "object" + }, "ProducerApp": { "additionalProperties": true, "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", @@ -750,12 +1072,24 @@ ], "description": "Application-specific settings" }, + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich" + }, "from": { "default": null, "description": "Producer doesn't support FromSection", "title": "From", "type": "null" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -803,6 +1137,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" + }, "version": { "anyOf": [ { @@ -818,6 +1157,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -987,6 +1328,14 @@ ], "description": "Application-specific settings" }, + "config": { + "description": "Pipeline configuration to be accessed by this component" + }, + "enrich": { + "default": false, + "description": "Whether to enrich component with defaults", + "title": "Enrich" + }, "from": { "anyOf": [ { @@ -1000,6 +1349,10 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, + "handlers": { + "description": "Component handlers to be accessed by this component", + "title": "Handlers" + }, "name": { "description": "Component name", "title": "Name", @@ -1047,6 +1400,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "validate": { + "default": true, + "description": "Whether to run custom validation on the component", + "title": "Validate" + }, "version": { "anyOf": [ { @@ -1062,6 +1420,8 @@ } }, "required": [ + "config", + "handlers", "name", "namespace", "app" @@ -1179,12 +1539,7 @@ "title": "Nameoverride" }, "streams": { - "allOf": [ - { - "$ref": "#/definitions/StreamsConfig" - } - ], - "description": "Streams Bootstrap streams section" + "$ref": "#/definitions/StreamsConfig" } }, "required": [ @@ -1194,7 +1549,118 @@ "type": "object" }, "StreamsConfig": { - "description": "Streams Bootstrap streams section\n\n:param input_topics: Input topics, defaults to []\n:param input_pattern: Input pattern, defaults to None\n:param extra_input_topics: Extra input topics, defaults to {}\n:param extra_input_patterns: Extra input patterns, defaults to {}\n:param extra_output_topics: Extra output topics, defaults to {}\n:param output_topic: Output topic, defaults to None\n:param error_topic: Error topic, defaults to None\n:param config: Configuration, defaults to {}", + "additionalProperties": true, + "description": "Streams Bootstrap streams section", + "properties": { + "brokers": { + "description": "Brokers", + "title": "Brokers", + "type": "string" + }, + "config": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Configuration", + "title": "Config", + "type": "object" + }, + "errorTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + 
"description": "Error topic", + "title": "Errortopic" + }, + "extraInputPatterns": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra input patterns", + "title": "Extrainputpatterns", + "type": "object" + }, + "extraInputTopics": { + "additionalProperties": { + "items": { + "type": "string" + }, + "type": "array" + }, + "default": {}, + "description": "Extra input topics", + "title": "Extrainputtopics", + "type": "object" + }, + "extraOutputTopics": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra output topics", + "title": "Extraoutputtopics", + "type": "object" + }, + "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Input pattern", + "title": "Inputpattern" + }, + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, + "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Output topic", + "title": "Outputtopic" + }, + "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "URL of the schema registry", + "title": "Schemaregistryurl" + } + }, + "required": [ + "brokers" + ], "title": "StreamsConfig", "type": "object" }, @@ -1325,6 +1791,26 @@ }, "title": "TopicConfig", "type": "object" + }, + "TopicNameConfig": { + "additionalProperties": false, + "description": "Configures topic names.", + "properties": { + "default_error_topic_name": { + "default": "${pipeline_name}-${component_name}-error", + "description": "Configures the value for the variable ${error_topic_name}", + "title": "Default Error Topic Name", + "type": "string" + }, + "default_output_topic_name": { + "default": "${pipeline_name}-${component_name}", + "description": "Configures the value for the variable ${output_topic_name}", + "title": "Default Output Topic Name", + "type": "string" + } + }, + "title": "TopicNameConfig", + "type": "object" } }, "items": { diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index e6af5ac0d..b964abe48 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -46,12 +46,12 @@ class BaseDefaultsComponent(DescConfigModel): ignored_types=(cached_property, cached_classproperty), ) - enrich: bool = Field( + enrich: Annotated[bool, WithJsonSchema({})] = Field( default=False, description=describe_attr("enrich", __doc__), exclude=True, ) - config: PipelineConfig = Field( + config: Annotated[PipelineConfig, WithJsonSchema({})] = Field( default=..., description=describe_attr("config", __doc__), exclude=True, @@ -61,7 +61,7 @@ class BaseDefaultsComponent(DescConfigModel): description=describe_attr("handlers", __doc__), exclude=True, ) - validate_: bool = Field( + validate_: Annotated[bool, WithJsonSchema({})] = Field( validation_alias=AliasChoices("validate", "validate_"), default=True, description=describe_attr("validate", __doc__), diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index d392b20f2..7200b70aa 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -101,7 +101,7 @@ def gen_pipeline_schema( # model_config=BaseConfig, # class_validators=None, ) - components_moded = tuple([(component, 
"serialization") for component in components]) + components_moded = tuple([(component, "validation") for component in components]) schema = models_json_schema( components_moded, From 320ad3194e2704c4b02ff855e98c2f35449fe7bb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 9 Oct 2023 21:44:51 +0300 Subject: [PATCH 38/96] fix: schema WIP --- docs/docs/schema/pipeline.json | 384 ------------------ .../component_handlers/kafka_connect/model.py | 3 +- .../base_defaults_component.py | 8 +- .../streams_bootstrap/streams/model.py | 26 +- kpops/utils/gen_schema.py | 2 +- 5 files changed, 20 insertions(+), 403 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 5357a853c..317a915b9 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -59,66 +59,6 @@ "title": "FromTopic", "type": "object" }, - "HelmConfig": { - "description": "Global Helm configuration", - "properties": { - "api_version": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Kubernetes API version used for Capabilities.APIVersions", - "title": "API version" - }, - "context": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Name of kubeconfig context (`--kube-context`)", - "examples": [ - "dev-storage" - ], - "title": "Context" - }, - "debug": { - "default": false, - "description": "Run Helm in Debug mode", - "title": "Debug", - "type": "boolean" - } - }, - "title": "HelmConfig", - "type": "object" - }, - "HelmDiffConfig": { - "properties": { - "ignore": { - "description": "Set of keys that should not be checked.", - "examples": [ - "- name\n- imageTag" - ], - "items": { - "type": "string" - }, - "title": "Ignore", - "type": "array", - "uniqueItems": true - } - }, - "title": "HelmDiffConfig", - "type": "object" - }, "HelmRepoConfig": { "description": "Helm repository configuration", "properties": { @@ -176,14 +116,6 @@ ], "description": "Application-specific settings" }, - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "anyOf": [ { @@ -197,10 +129,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -248,11 +176,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" - }, "version": { "anyOf": [ { @@ -268,8 +191,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -321,14 +242,6 @@ ], "description": "Application-specific settings" }, - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "anyOf": [ { @@ -342,10 +255,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", 
"title": "Name", @@ -398,11 +307,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" - }, "version": { "anyOf": [ { @@ -418,8 +322,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -437,10 +339,6 @@ "connector.class": { "title": "Connector.Class", "type": "string" - }, - "name": { - "default": null, - "title": "Name" } }, "required": [ @@ -461,14 +359,6 @@ ], "description": "Application-specific settings" }, - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "anyOf": [ { @@ -482,10 +372,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -538,11 +424,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" - }, "version": { "anyOf": [ { @@ -558,8 +439,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -579,14 +458,6 @@ ], "description": "Application-specific settings" }, - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "anyOf": [ { @@ -600,10 +471,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -669,11 +536,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" - }, "version": { "anyOf": [ { @@ -689,8 +551,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -739,14 +599,6 @@ ], "description": "Application-specific settings" }, - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "anyOf": [ { @@ -760,10 +612,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -800,11 +648,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" - }, "version": { "anyOf": [ { @@ -820,8 +663,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app", @@ -850,14 +691,6 @@ "additionalProperties": true, "description": "Base class for all 
components", "properties": { - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "anyOf": [ { @@ -871,10 +704,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -897,169 +726,14 @@ ], "default": null, "description": "Topic(s) into which the component will write output" - }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" } }, "required": [ - "config", - "handlers", "name" ], "title": "PipelineComponent", "type": "object" }, - "PipelineConfig": { - "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", - "properties": { - "KPOPS_connect_host": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Address of Kafka Connect.", - "examples": [ - "http://localhost:8083" - ], - "title": "Kpops Connect Host" - }, - "KPOPS_rest_proxy_host": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Address of the Kafka REST Proxy.", - "examples": [ - "http://localhost:8082" - ], - "title": "Kpops Rest Proxy Host" - }, - "brokers": { - "description": "The comma separated Kafka brokers address.", - "examples": [ - "broker1:9092,broker2:9092,broker3:9092" - ], - "title": "Brokers", - "type": "string" - }, - "create_namespace": { - "default": false, - "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", - "title": "Create Namespace", - "type": "boolean" - }, - "defaults_filename_prefix": { - "default": "defaults", - "description": "The name of the defaults file and the prefix of the defaults environment file.", - "title": "Defaults Filename Prefix", - "type": "string" - }, - "defaults_path": { - "default": ".", - "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", - "examples": [ - "defaults", - "." - ], - "format": "path", - "title": "Defaults Path", - "type": "string" - }, - "environment": { - "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", - "examples": [ - "development", - "production" - ], - "title": "Environment", - "type": "string" - }, - "helm_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmConfig" - } - ], - "default": { - "api_version": null, - "context": null, - "debug": false - }, - "description": "Global flags for Helm." - }, - "helm_diff_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmDiffConfig" - } - ], - "default": { - "ignore": [] - }, - "description": "Configure Helm Diff." 
- }, - "retain_clean_jobs": { - "default": false, - "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", - "title": "Retain Clean Jobs", - "type": "boolean" - }, - "schema_registry_url": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Address of the Schema Registry.", - "examples": [ - "http://localhost:8081" - ], - "title": "Schema Registry Url" - }, - "timeout": { - "default": 300, - "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", - "title": "Timeout", - "type": "integer" - }, - "topic_name_config": { - "allOf": [ - { - "$ref": "#/definitions/TopicNameConfig" - } - ], - "default": { - "default_error_topic_name": "${pipeline_name}-${component_name}-error", - "default_output_topic_name": "${pipeline_name}-${component_name}" - }, - "description": "Configure the topic name variables you can use in the pipeline definition." - } - }, - "required": [ - "environment", - "brokers" - ], - "title": "PipelineConfig", - "type": "object" - }, "ProducerApp": { "additionalProperties": true, "description": "Producer component\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", @@ -1072,24 +746,12 @@ ], "description": "Application-specific settings" }, - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "default": null, "description": "Producer doesn't support FromSection", "title": "From", "type": "null" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -1137,11 +799,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" - }, "version": { "anyOf": [ { @@ -1157,8 +814,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -1328,14 +983,6 @@ ], "description": "Application-specific settings" }, - "config": { - "description": "Pipeline configuration to be accessed by this component" - }, - "enrich": { - "default": false, - "description": "Whether to enrich component with defaults", - "title": "Enrich" - }, "from": { "anyOf": [ { @@ -1349,10 +996,6 @@ "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, - "handlers": { - "description": "Component handlers to be accessed by this component", - "title": "Handlers" - }, "name": { "description": "Component name", "title": "Name", @@ -1400,11 +1043,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "validate": { - "default": true, - "description": "Whether to run custom validation on the component", - "title": "Validate" - }, "version": { "anyOf": [ { @@ -1420,8 +1058,6 @@ } }, "required": [ - "config", - "handlers", "name", "namespace", "app" @@ -1791,26 +1427,6 @@ }, "title": "TopicConfig", "type": "object" - }, - "TopicNameConfig": { - "additionalProperties": false, - "description": "Configures topic names.", - "properties": { - "default_error_topic_name": { - "default": "${pipeline_name}-${component_name}-error", - 
"description": "Configures the value for the variable ${error_topic_name}", - "title": "Default Error Topic Name", - "type": "string" - }, - "default_output_topic_name": { - "default": "${pipeline_name}-${component_name}", - "description": "Configures the value for the variable ${output_topic_name}", - "title": "Default Output Topic Name", - "type": "string" - } - }, - "title": "TopicNameConfig", - "type": "object" } }, "items": { diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 9467fabc9..0ebe6a914 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -17,8 +17,9 @@ class KafkaConnectorConfig(DescConfigModel): """Settings specific to Kafka Connectors""" connector_class: str - name: Annotated[str | None, WithJsonSchema({})] = Field( + name: str | None = Field( default=None, + exclude=True, ) model_config = ConfigDict( extra="allow", diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index b964abe48..15ca0bb28 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -46,22 +46,22 @@ class BaseDefaultsComponent(DescConfigModel): ignored_types=(cached_property, cached_classproperty), ) - enrich: Annotated[bool, WithJsonSchema({})] = Field( + enrich: bool = Field( default=False, description=describe_attr("enrich", __doc__), exclude=True, ) - config: Annotated[PipelineConfig, WithJsonSchema({})] = Field( + config: PipelineConfig = Field( default=..., description=describe_attr("config", __doc__), exclude=True, ) - handlers: Annotated[ComponentHandlers, WithJsonSchema({})] = Field( + handlers: ComponentHandlers = Field( default=..., description=describe_attr("handlers", __doc__), exclude=True, ) - validate_: Annotated[bool, WithJsonSchema({})] = Field( + validate_: bool = Field( validation_alias=AliasChoices("validate", "validate_"), default=True, description=describe_attr("validate", __doc__), diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 61a2a48b0..56327aed7 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -71,19 +71,19 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: self.extra_input_topics.get(role, []) + topics ) - @model_serializer(mode="wrap", when_used="always") - def serialize_model(self, handler) -> dict[str, Any]: - result = handler(self) - # if dict(result.items()).get("extraInputTopics"): - # breakpoint() - extra_fields = set() - if self.model_extra is not None: - extra_fields = set(self.model_extra.keys()) - fields = extra_fields.union(self.model_fields_set) - filtered_result_extra_set = { - k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) - } - return filtered_result_extra_set + # @model_serializer(mode="wrap", when_used="always") + # def serialize_model(self, handler) -> dict[str, Any]: + # result = handler(self) + # # if dict(result.items()).get("extraInputTopics"): + # # breakpoint() + # extra_fields = set() + # if self.model_extra is not None: + # extra_fields = set(self.model_extra.keys()) + # fields = extra_fields.union(self.model_fields_set) + # filtered_result_extra_set = { + # k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) + # } + # return 
filtered_result_extra_set class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 7200b70aa..d392b20f2 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -101,7 +101,7 @@ def gen_pipeline_schema( # model_config=BaseConfig, # class_validators=None, ) - components_moded = tuple([(component, "validation") for component in components]) + components_moded = tuple([(component, "serialization") for component in components]) schema = models_json_schema( components_moded, From 98b103ea25215d5d0419682e891f9a02a2632c4c Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 9 Oct 2023 22:25:00 +0300 Subject: [PATCH 39/96] fix: misc errors from merge --- hooks/gen_docs/gen_docs_env_vars.py | 34 ++++++++----------- .../base_components/kafka_connector.py | 10 +++--- poetry.lock | 2 +- tests/components/test_kafka_sink_connector.py | 4 +-- tests/utils/resources/nested_base_settings.py | 5 +-- 5 files changed, 26 insertions(+), 29 deletions(-) diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index ac88b82b6..6ac7b2adc 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -8,8 +8,8 @@ from textwrap import fill from typing import Any -from pydantic import BaseSettings -from pydantic.fields import ModelField +from pydantic_settings import BaseSettings +from pydantic.fields import FieldInfo from pytablewriter import MarkdownTableWriter from typer.models import ArgumentInfo, OptionInfo @@ -254,33 +254,29 @@ def fill_csv_pipeline_config(target: Path) -> None: :param target: The path to the `.csv` file. Note that it must already contain the column names """ - for field in collect_fields(PipelineConfig): - field_info = PipelineConfig.Config.get_field_info(field.name) + for field_name, field_value in collect_fields(PipelineConfig): field_description: str = ( - field.field_info.description + field_value.description or "No description available, please refer to the pipeline config documentation." ) - field_default = field.field_info.default - if config_env_var := field_info.get( - "env", - ) or field.field_info.extra.get("env"): - csv_append_env_var( - target, - config_env_var, - field_default, - field_description, - field.name, - ) + field_default = field_value.default + csv_append_env_var( + target, + field_value.serialization_alias or field_name, + field_default, + field_description, + field_name, + ) -def collect_fields(settings: type[BaseSettings]) -> Iterator[ModelField]: +def collect_fields(settings: type[BaseSettings]) -> Iterator[FieldInfo]: """Collect and yield all fields in a settings class. 
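
For context on the ModelField -> FieldInfo change above: pydantic v2 keeps per-field metadata in Model.model_fields, a plain dict of name -> FieldInfo. A self-contained sketch of the recursive collection pattern, assuming pydantic>=2 and pydantic-settings>=2 (class names below are illustrative):

from collections.abc import Iterator

from pydantic import Field
from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings


class NestedSettings(BaseSettings):
    attr: str = Field("hello")


class ParentSettings(BaseSettings):
    nested_field: NestedSettings = Field(default_factory=NestedSettings)
    timeout: int = Field(300, description="timeout in seconds")


def walk_fields(settings: type[BaseSettings]) -> Iterator[tuple[str, FieldInfo]]:
    # model_fields is the v2 replacement for __fields__/ModelField;
    # recurse into nested settings classes before yielding the field itself
    for name, info in settings.model_fields.items():
        annotation = info.annotation
        if isinstance(annotation, type) and issubclass(annotation, BaseSettings):
            yield from walk_fields(annotation)
        yield name, info


for name, info in walk_fields(ParentSettings):
    print(name, info.default, info.description)

Optional annotations such as NestedSettings | None are not plain classes, which is why a later revision of this hook guards the issubclass check with typing.get_args.
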
:param model: settings class :yield: all settings including nested ones in settings classes """ - for field in settings.__fields__.values(): - if issubclass(field_type := field.type_, BaseSettings): + for field in settings.model_fields.values(): + if field.annotation and issubclass(field_type := field.annotation, BaseSettings): yield from collect_fields(field_type) yield field diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 43a7c246d..a03fe7dc0 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -73,7 +73,7 @@ class KafkaConnector(PipelineComponent, ABC): default_factory=dict, description=describe_attr("resetter_values", __doc__), ) - _connector_type: KafkaConnectorType = Field() + connector_type: KafkaConnectorType = Field() @field_validator("app") @classmethod def connector_config_should_have_component_name( @@ -190,7 +190,7 @@ def _run_connect_resetter( log.info( magentaify( - f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}" + f"Connector Cleanup: deploy Connect {self.connector_type.value} resetter for {self.full_name}" ) ) @@ -246,7 +246,7 @@ def _get_kafka_connect_resetter_values( brokers=self.config.brokers, **kwargs, ), - connector_type=self._connector_type.value, + connector_type=self.connector_type.value, name_override=self.full_name, ).model_dump(), **self.resetter_values, @@ -278,7 +278,7 @@ class KafkaSourceConnector(KafkaConnector): description=describe_attr("offset_topic", __doc__), ) - _connector_type = KafkaConnectorType.SOURCE + connector_type: KafkaConnectorType = KafkaConnectorType.SOURCE @override def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: @@ -323,7 +323,7 @@ def __run_kafka_connect_resetter(self, dry_run: bool) -> None: class KafkaSinkConnector(KafkaConnector): """Kafka sink connector model.""" - _connector_type = KafkaConnectorType.SINK + connector_type: KafkaConnectorType = KafkaConnectorType.SINK @override def add_input_topics(self, topics: list[str]) -> None: diff --git a/poetry.lock b/poetry.lock index a12566d30..f0470161a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1879,4 +1879,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "6bacec8095fc2f4e9d2beea0b278b07977727783e97f044bed449317ad5cbd97" +content-hash = "42e29da5d5585c2df77080c1625e627e65b6e7b02276a33c1ad7ec535e5eb77b" diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index e8ed7aa22..5d77ab85b 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -73,7 +73,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics": topic_name} + **{**connector_config.model_dump(), "topics": topic_name} ), namespace="test-namespace", ) @@ -85,7 +85,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics.regex": topic_pattern} + **{**connector_config.model_dump(), "topics.regex": topic_pattern} ), namespace="test-namespace", ) diff --git a/tests/utils/resources/nested_base_settings.py b/tests/utils/resources/nested_base_settings.py index f7f92358a..97e755e71 100644 --- a/tests/utils/resources/nested_base_settings.py +++ 
b/tests/utils/resources/nested_base_settings.py @@ -1,4 +1,5 @@ -from pydantic import BaseSettings, Field +from pydantic import Field +from pydantic_settings import BaseSettings class NestedSettings(BaseSettings): @@ -10,5 +11,5 @@ class ParentSettings(BaseSettings): nested_field: NestedSettings = Field(...) field_with_env_defined: str = Field( default=..., - env="FIELD_WITH_ENV_DEFINED", + alias="FIELD_WITH_ENV_DEFINED", ) From 7c81a82b065ebcb8b86ad8c6d200b4d4434e07d2 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 10 Oct 2023 11:07:55 +0300 Subject: [PATCH 40/96] fix: migration errors --- .../dependencies/kpops_structure.yaml | 3 + docs/docs/schema/pipeline.json | 254 +++++++----------- hooks/gen_docs/gen_docs_components.py | 4 +- hooks/gen_docs/gen_docs_env_vars.py | 16 +- kpops/cli/pipeline_config.py | 10 +- .../component_handlers/helm_wrapper/model.py | 6 +- .../kafka_connect/connect_wrapper.py | 3 +- .../component_handlers/kafka_connect/model.py | 10 +- .../base_defaults_component.py | 2 - kpops/components/base_components/kafka_app.py | 2 +- .../base_components/kafka_connector.py | 14 +- .../base_components/kubernetes_app.py | 2 +- .../base_components/models/from_section.py | 9 +- .../base_components/models/to_section.py | 9 +- .../base_components/pipeline_component.py | 3 +- .../streams_bootstrap/streams/model.py | 28 +- kpops/utils/gen_schema.py | 17 +- tests/utils/test_doc_gen.py | 4 +- 18 files changed, 165 insertions(+), 231 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 21af5971c..5566d7ba3 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -18,6 +18,7 @@ kpops_components_fields: - repo_config - version - resetter_values + - connector_type kafka-sink-connector: - name - prefix @@ -28,6 +29,7 @@ kpops_components_fields: - repo_config - version - resetter_values + - connector_type kafka-source-connector: - name - prefix @@ -38,6 +40,7 @@ kpops_components_fields: - repo_config - version - resetter_values + - connector_type - offset_topic kubernetes-app: - name diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index f7ec539c5..4653f57c5 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -122,7 +122,16 @@ "title": "KafkaConnectorConfig", "type": "object" }, + "KafkaConnectorType": { + "enum": [ + "sink", + "source" + ], + "title": "KafkaConnectorType", + "type": "string" + }, "KafkaSinkConnector": { + "additionalProperties": true, "description": "Kafka sink connector model.", "properties": { "app": { @@ -133,6 +142,14 @@ ], "description": "Application-specific settings" }, + "connector_type": { + "allOf": [ + { + "$ref": "#/definitions/KafkaConnectorType" + } + ], + "default": "sink" + }, "from": { "anyOf": [ { @@ -190,21 +207,23 @@ "anyOf": [ { "$ref": "#/definitions/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-sink-connector", - "description": "Kafka sink connector model.", - "enum": [ - "kafka-sink-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], 
"default": "1.0.4", "description": "Helm chart version", "title": "Version" @@ -219,6 +238,7 @@ "type": "object" }, "KafkaSourceConnector": { + "additionalProperties": true, "description": "Kafka source connector model.", "properties": { "app": { @@ -229,6 +249,14 @@ ], "description": "Application-specific settings" }, + "connector_type": { + "allOf": [ + { + "$ref": "#/definitions/KafkaConnectorType" + } + ], + "default": "source" + }, "from": { "anyOf": [ { @@ -299,21 +327,23 @@ "anyOf": [ { "$ref": "#/definitions/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-source-connector", - "description": "Kafka source connector model.", - "enum": [ - "kafka-source-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", "title": "Version" @@ -369,11 +399,15 @@ "type": "string" }, "repo_config": { - "allOf": [ + "anyOf": [ { "$ref": "#/definitions/HelmRepoConfig" + }, + { + "type": "null" } ], + "default": null, "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { @@ -411,6 +445,7 @@ "type": "object" }, "KubernetesAppConfig": { + "additionalProperties": true, "description": "Settings specific to Kubernetes Apps.", "properties": {}, "title": "KubernetesAppConfig", @@ -426,6 +461,7 @@ "type": "string" }, "ProducerApp": { + "additionalProperties": true, "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", "properties": { "app": { @@ -481,21 +517,23 @@ "anyOf": [ { "$ref": "#/definitions/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "producer-app", - "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", - "enum": [ - "producer-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", "title": "Version" @@ -510,6 +548,7 @@ "type": "object" }, "ProducerStreamsConfig": { + "additionalProperties": true, "description": "Kafka Streams settings specific to Producer.", "properties": { "brokers": { @@ -560,6 +599,7 @@ "type": "object" }, "ProducerValues": { + "additionalProperties": true, "description": "Settings specific to producers.", "properties": { "nameOverride": { @@ -658,6 +698,7 @@ "type": "object" }, "StreamsApp": { + "additionalProperties": true, "description": "StreamsApp component that configures a streams bootstrap app.", "properties": { "app": { @@ -720,21 +761,23 @@ "anyOf": [ { "$ref": "#/definitions/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "streams-app", - "description": "StreamsApp component that configures a streams bootstrap app.", - "enum": [ - "streams-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", "title": "Version" @@ -749,6 +792,7 @@ "type": "object" }, "StreamsAppAutoScaling": { + "additionalProperties": true, "description": "Kubernetes Event-driven Autoscaling config.", "properties": { "consumerGroup": { @@ -857,7 +901,12 @@ "title": "Nameoverride" }, "streams": { - "$ref": "#/definitions/StreamsConfig" + "allOf": [ + { + "$ref": "#/definitions/StreamsConfig" + } + ], + "description": "Streams Bootstrap streams section" } }, "required": [ @@ -867,121 +916,12 @@ "type": "object" }, "StreamsConfig": { - "description": "Streams Bootstrap streams section.", - "properties": { - "brokers": { - "description": "Brokers", - "title": "Brokers", - "type": "string" - }, - "config": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Configuration", - "title": "Config", - "type": "object" - }, - "errorTopic": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Error topic", - "title": "Errortopic" - }, - "extraInputPatterns": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Extra input patterns", - "title": "Extrainputpatterns", - "type": "object" - }, - "extraInputTopics": { - "additionalProperties": { - "items": { - "type": "string" - }, - "type": "array" - }, - "default": {}, - "description": "Extra input topics", - "title": "Extrainputtopics", - "type": "object" - }, - "extraOutputTopics": { - "additionalProperties": { - "type": "string" - }, - "default": {}, - "description": "Extra output topics", - "title": "Extraoutputtopics", - "type": "object" - }, - "inputPattern": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Input pattern", - "title": "Inputpattern" - }, - "inputTopics": { - "default": [], - "description": "Input topics", - "items": { - "type": "string" - }, - "title": "Inputtopics", - "type": "array" - }, - "outputTopic": { - 
"anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "Output topic", - "title": "Outputtopic" - }, - "schemaRegistryUrl": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "description": "URL of the schema registry", - "title": "Schemaregistryurl" - } - }, - "required": [ - "brokers" - ], + "description": "Streams Bootstrap streams section.\n\n:param input_topics: Input topics, defaults to []\n:param input_pattern: Input pattern, defaults to None\n:param extra_input_topics: Extra input topics, defaults to {}\n:param extra_input_patterns: Extra input patterns, defaults to {}\n:param extra_output_topics: Extra output topics, defaults to {}\n:param output_topic: Output topic, defaults to None\n:param error_topic: Error topic, defaults to None\n:param config: Configuration, defaults to {}", "title": "StreamsConfig", "type": "object" }, "ToSection": { + "additionalProperties": false, "description": "Holds multiple output topics.", "properties": { "models": { diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 3fffd8c7b..ebd7353f2 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -44,8 +44,8 @@ KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name - for field_name, model in component.__fields__.items() # pyright: ignore[reportGeneralTypeIssues] - if not model.exclude + for field_name, field_info in component.model_fields.items() # pyright: ignore[reportGeneralTypeIssues] + if not field_info.exclude ] for component in KPOPS_COMPONENTS } diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 6ac7b2adc..b4d22fff9 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -6,10 +6,10 @@ from dataclasses import dataclass from pathlib import Path from textwrap import fill -from typing import Any +from typing import Any, get_args +from pydantic.fields import FieldInfo from pydantic_settings import BaseSettings -from pydantic.fields import FieldInfo from pytablewriter import MarkdownTableWriter from typer.models import ArgumentInfo, OptionInfo @@ -269,16 +269,18 @@ def fill_csv_pipeline_config(target: Path) -> None: ) -def collect_fields(settings: type[BaseSettings]) -> Iterator[FieldInfo]: +def collect_fields(settings: type[BaseSettings]) -> Iterator[tuple[str, FieldInfo]]: """Collect and yield all fields in a settings class. 
:param model: settings class :yield: all settings including nested ones in settings classes """ - for field in settings.model_fields.values(): - if field.annotation and issubclass(field_type := field.annotation, BaseSettings): - yield from collect_fields(field_type) - yield field + for field_name, field_value in settings.model_fields.items(): + if field_value.annotation: + for field_type in get_args(field_value.annotation): + if field_type and issubclass(field_type, BaseSettings): + yield from collect_fields(field_type) + yield field_name, field_value def fill_csv_cli(target: Path) -> None: diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index ad66dfb27..61538c1ce 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -1,7 +1,6 @@ from __future__ import annotations from pathlib import Path -from typing import TYPE_CHECKING, Any from pydantic import AliasChoices, Field from pydantic_settings import BaseSettings, PydanticBaseSettingsSource @@ -9,11 +8,6 @@ from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig -if TYPE_CHECKING: - from collections.abc import Callable - - from pydantic.env_settings import SettingsSourceCallable - ENV_PREFIX = "KPOPS_" @@ -34,7 +28,7 @@ class PipelineConfig(BaseSettings): """Pipeline configuration unrelated to the components.""" defaults_path: Path = Field( - default=Path("."), + default=Path(), examples=["defaults", "."], description="The path to the folder containing the defaults.yaml file and the environment defaults files. " "Paths can either be absolute or relative to `config.yaml`", @@ -121,9 +115,9 @@ def settings_customise_sources( file_secret_settings: PydanticBaseSettingsSource, ): return ( + env_settings, init_settings, YamlConfigSettingsSource(settings_cls), dotenv_settings, - env_settings, file_secret_settings, ) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index 66100d7ef..0a155bb0d 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -20,7 +20,7 @@ class HelmDiffConfig(BaseModel): class RepoAuthFlags(DescConfigModel): - """Authorisation-related flags for `helm repo` + """Authorisation-related flags for `helm repo`. :param username: Username, defaults to None :param password: Password, defaults to None @@ -62,7 +62,7 @@ def to_command(self) -> list[str]: class HelmRepoConfig(DescConfigModel): - """Helm repository configuration + """Helm repository configuration. :param repository_name: Name of the Helm repository :param url: URL to the Helm repository @@ -79,7 +79,7 @@ class HelmRepoConfig(DescConfigModel): class HelmConfig(DescConfigModel): - """Global Helm configuration + """Global Helm configuration. :param context: Name of kubeconfig context (`--kube-context`) :param debug: Run Helm in Debug mode diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index aba8b2fc1..61e9def54 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -62,8 +62,7 @@ def create_connector( raise KafkaConnectError(response) def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: - """ - Get information about the connector. + """Get information about the connector. 
API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) :param connector_name: Name of the created connector :return: Information about the connector. diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 9efeebdeb..c79dd8dc6 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -2,8 +2,7 @@ from typing import Any, Literal from pydantic import BaseModel, ConfigDict, Field, field_validator -from pydantic.json_schema import WithJsonSchema -from typing_extensions import Annotated, override +from typing_extensions import override from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot @@ -14,13 +13,10 @@ class KafkaConnectorType(str, Enum): class KafkaConnectorConfig(DescConfigModel): - """Settings specific to Kafka Connectors""" + """Settings specific to Kafka Connectors.""" connector_class: str - name: str | None = Field( - default=None, - exclude=True, - ) + name: str | None = Field(default=None) model_config = ConfigDict( extra="allow", alias_generator=to_dot, diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 17e780e58..e8007fd19 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -8,8 +8,6 @@ import typer from pydantic import AliasChoices, ConfigDict, Field -from pydantic.json_schema import WithJsonSchema -from typing_extensions import Annotated from kpops.cli.pipeline_config import PipelineConfig from kpops.component_handlers import ComponentHandlers diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index 9d277a4f7..e0430db80 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -22,7 +22,7 @@ class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): - """Kafka Streams config + """Kafka Streams config.
:param brokers: Brokers :param schema_registry_url: URL of the schema registry, defaults to None diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index a03fe7dc0..c70dccb58 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -73,7 +73,8 @@ class KafkaConnector(PipelineComponent, ABC): default_factory=dict, description=describe_attr("resetter_values", __doc__), ) - connector_type: KafkaConnectorType = Field() + _connector_type: KafkaConnectorType + @field_validator("app") @classmethod def connector_config_should_have_component_name( @@ -89,8 +90,7 @@ def connector_config_should_have_component_name( msg = "Connector name should be the same as component name" raise ValueError(msg) app["name"] = component_name - app = KafkaConnectorConfig(**app) - return app + return KafkaConnectorConfig(**app) @cached_property def helm(self) -> Helm: @@ -190,7 +190,7 @@ def _run_connect_resetter( log.info( magentaify( - f"Connector Cleanup: deploy Connect {self.connector_type.value} resetter for {self.full_name}" + f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}" ) ) @@ -246,7 +246,7 @@ def _get_kafka_connect_resetter_values( brokers=self.config.brokers, **kwargs, ), - connector_type=self.connector_type.value, + connector_type=self._connector_type.value, name_override=self.full_name, ).model_dump(), **self.resetter_values, @@ -278,7 +278,7 @@ class KafkaSourceConnector(KafkaConnector): description=describe_attr("offset_topic", __doc__), ) - connector_type: KafkaConnectorType = KafkaConnectorType.SOURCE + _connector_type: KafkaConnectorType = KafkaConnectorType.SOURCE @override def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: @@ -323,7 +323,7 @@ def __run_kafka_connect_resetter(self, dry_run: bool) -> None: class KafkaSinkConnector(KafkaConnector): """Kafka sink connector model.""" - connector_type: KafkaConnectorType = KafkaConnectorType.SINK + _connector_type: KafkaConnectorType = KafkaConnectorType.SINK @override def add_input_topics(self, topics: list[str]) -> None: diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index a6e1581cb..287cf0406 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -30,7 +30,7 @@ class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): - """Settings specific to Kubernetes Apps""" + """Settings specific to Kubernetes Apps.""" model_config = ConfigDict( extra="allow", diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 57e933f5d..bb82dc780 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -19,7 +19,7 @@ class InputTopicTypes(str, Enum): class FromTopic(DescConfigModel): - """Input topic + """Input topic. 
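
The connector_type -> _connector_type revert above leans on a pydantic v2 rule: attributes whose names start with an underscore become private attributes, not model fields, so they stay out of validation, serialization, and the generated schema. A minimal sketch, assuming pydantic>=2 (illustrative model, not the real component):

from enum import Enum

from pydantic import BaseModel


class KafkaConnectorType(str, Enum):
    SINK = "sink"
    SOURCE = "source"


class Connector(BaseModel):
    name: str
    # private attribute: available on instances, absent from the schema
    _connector_type: KafkaConnectorType = KafkaConnectorType.SINK


print(Connector.model_json_schema()["properties"].keys())  # dict_keys(['name'])
print(Connector(name="c")._connector_type)  # KafkaConnectorType.SINK
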
:param type: Topic type, defaults to None :param role: Custom identifier belonging to a topic; @@ -39,9 +39,10 @@ class FromTopic(DescConfigModel): @model_validator(mode="after") @classmethod def extra_topic_role(cls, values: Any) -> Any: - """Ensure that cls.role is used correctly, assign type if needed""" + """Ensure that cls.role is used correctly, assign type if needed.""" if values.type == InputTopicTypes.INPUT and values.role: - raise ValueError("Define role only if `type` is `pattern` or `None`") + msg = "Define role only if `type` is `pattern` or `None`" + raise ValueError(msg) return values @@ -49,7 +50,7 @@ def extra_topic_role(cls, values: Any) -> Any: class FromSection(DescConfigModel): - """Holds multiple input topics + """Holds multiple input topics. :param topics: Input topics :param components: Components to read from diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 5bd0e9e03..743caf2a2 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -19,7 +19,7 @@ class OutputTopicTypes(str, Enum): class TopicConfig(DescConfigModel): - """Configure an output topic + """Configure an output topic. :param type: Topic type :param key_schema: Key schema class name @@ -66,14 +66,15 @@ class TopicConfig(DescConfigModel): @model_validator(mode="after") def extra_topic_role(cls, values: Any) -> Any: - """Ensure that cls.role is used correctly, assign type if needed""" + """Ensure that cls.role is used correctly, assign type if needed.""" if values.type and values.role: - raise ValueError("Define `role` only if `type` is undefined") + msg = "Define `role` only if `type` is undefined" + raise ValueError(msg) return values class ToSection(DescConfigModel): - """Holds multiple output topics + """Holds multiple output topics. 
:param topics: Output topics :param models: Data models diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index 8c8a7b292..7be212300 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -1,8 +1,9 @@ from __future__ import annotations -from pydantic import AliasChoices, ConfigDict, Field from abc import ABC +from pydantic import AliasChoices, ConfigDict, Field + from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, ) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index bc5ced4e6..06c6bd8b2 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,4 +1,3 @@ -from collections.abc import Mapping, Set from typing import Any from pydantic import ConfigDict, Field, model_serializer @@ -72,23 +71,22 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: self.extra_input_topics.get(role, []) + topics ) - # @model_serializer(mode="wrap", when_used="always") - # def serialize_model(self, handler) -> dict[str, Any]: - # result = handler(self) - # # if dict(result.items()).get("extraInputTopics"): - # # breakpoint() - # extra_fields = set() - # if self.model_extra is not None: - # extra_fields = set(self.model_extra.keys()) - # fields = extra_fields.union(self.model_fields_set) - # filtered_result_extra_set = { - # k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) - # } - # return filtered_result_extra_set + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler) -> dict[str, Any]: + result = handler(self) + # if dict(result.items()).get("extraInputTopics"): + # breakpoint() + extra_fields = set() + if self.model_extra is not None: + extra_fields = set(self.model_extra.keys()) + fields = extra_fields.union(self.model_fields_set) + return { + k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) + } class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): - """Kubernetes Event-driven Autoscaling config + """Kubernetes Event-driven Autoscaling config. :param enabled: Whether to enable auto-scaling using KEDA., defaults to False :param consumer_group: Name of the consumer group used for checking the diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 6587d76ab..49ecdd4d5 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,14 +1,12 @@ -import json import inspect +import json import logging from abc import ABC -from collections.abc import Sequence from enum import Enum -from typing import Annotated, Any, Literal, Union +from typing import Literal -from pydantic import Field, TypeAdapter +from pydantic import Field from pydantic.json_schema import model_json_schema, models_json_schema -from pydantic_core import to_json from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import _find_classes @@ -47,7 +45,7 @@ def _add_components( components_module: str, components: tuple[type[PipelineComponent], ...] | None = None, ) -> tuple[type[PipelineComponent], ...]: - """Add components to a components tuple + """Add components to a components tuple. 
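
Since gen_pipeline_schema builds its output through models_json_schema, a short sketch of the (model, mode) pairs it consumes may help, assuming pydantic>=2 (the Topic model is illustrative): "validation" renders validation aliases and input requirements, while "serialization" mirrors what model_dump emits, which is the distinction the series keeps toggling components_moded between.

import json

from pydantic import BaseModel
from pydantic.json_schema import models_json_schema


class Topic(BaseModel):
    name: str
    partitions: int = 1


# models_json_schema returns a mapping of per-model $refs plus the
# combined schema with shared $defs
_, schema = models_json_schema([(Topic, "validation")], title="pipeline schema")
print(json.dumps(schema, indent=2))
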
If an empty tuple is provided or it is not provided at all, the components types from the given module are 'tupled' @@ -84,14 +82,15 @@ def gen_pipeline_schema( log.warning("No components are provided, no schema is generated.") return # Add stock components if enabled - components: tuple[type[PipelineComponent], ...] = tuple() + components: tuple[type[PipelineComponent], ...] = () if include_stock_components: components = _add_components("kpops.components") # Add custom components if provided if components_module: components = _add_components(components_module, components) if not components: - raise RuntimeError("No valid components found.") + msg = "No valid components found." + raise RuntimeError(msg) # re-assign component type as Literal to work as discriminator for component in components: @@ -153,6 +152,6 @@ def gen_pipeline_schema( def gen_config_schema() -> None: - """Generate a json schema from the model of pipeline config""" + """Generate a json schema from the model of pipeline config.""" schema = model_json_schema(PipelineConfig) print(json.dumps(schema, indent=4, sort_keys=True)) diff --git a/tests/utils/test_doc_gen.py b/tests/utils/test_doc_gen.py index d234bd79d..184115e1b 100644 --- a/tests/utils/test_doc_gen.py +++ b/tests/utils/test_doc_gen.py @@ -22,7 +22,9 @@ def test_collect_fields(self): Ellipsis, Ellipsis, ] - actual = [field.field_info.default for field in collect_fields(ParentSettings)] + actual = [ + field_value.default for _, field_value in collect_fields(ParentSettings) + ] assert actual == expected @pytest.mark.parametrize( From adde6ca94a8a55ca36e5a1285becc357f864f990 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 10 Oct 2023 11:15:20 +0300 Subject: [PATCH 41/96] fix: serialization WIP --- .../streams_bootstrap/streams/model.py | 47 +++++++++++++++++-- 1 file changed, 44 insertions(+), 3 deletions(-) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 06c6bd8b2..6b5982a5f 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,6 +1,6 @@ from typing import Any -from pydantic import ConfigDict, Field, model_serializer +from pydantic import ConfigDict, Field, SerializationInfo, model_serializer from pydantic.alias_generators import to_snake from kpops.components.base_components.base_defaults_component import deduplicate @@ -72,17 +72,58 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: ) @model_serializer(mode="wrap", when_used="always") - def serialize_model(self, handler) -> dict[str, Any]: + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + # class _SerInfoClone: + # def __init__( + # self, + # include, + # exclude, + # mode, + # by_alias, + # exclude_unset, + # exclude_default, + # exclude_none, + # round_trip, + # ): + # self.include = include + # self.exclude = exclude + # self.mode = mode + # self.by_alias = by_alias + # self.exclude_unset = exclude_unset + # self.exclude_default = exclude_default + # self.exclude_none = exclude_none + # self.round_trip = round_trip + + # info2: _SerInfoClone = _SerInfoClone( + # info.include, + # info.exclude, + # info.mode, + # info.by_alias, + # True, + # True, + # True, + # info.round_trip, + # ) + # breakpoint() + # return handler(self, info2) result = handler(self) # if dict(result.items()).get("extraInputTopics"): # breakpoint() extra_fields = set() if self.model_extra is not None: extra_fields = 
set(self.model_extra.keys()) + # fields = extra_fields.union(self.model_fields_set) fields = extra_fields.union(self.model_fields_set) - return { + if self.extra_input_topics: + fields.add("extra_input_topics") + if self.extra_input_patterns: + fields.add("extra_input_patterns") + if self.extra_output_topics: + fields.add("extra_output_topics") + filtered_result_extra_set = { k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) } + return filtered_result_extra_set class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): From b09633fdc8a0f246e7dc67ea5ebcd6050f298e7d Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 11 Oct 2023 15:13:31 +0300 Subject: [PATCH 42/96] misc fixes --- kpops/cli/pipeline_config.py | 8 ++- kpops/cli/settings_sources.py | 4 ++ .../streams_bootstrap/streams/model.py | 63 +++++-------------- tests/components/test_kafka_connector.py | 2 +- .../pipeline-with-env-defaults/defaults.yaml | 1 - tests/pipeline/test_pipeline.py | 3 +- 6 files changed, 27 insertions(+), 54 deletions(-) diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index 61538c1ce..468ca3bac 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -1,9 +1,10 @@ from __future__ import annotations from pathlib import Path +from typing_extensions import override from pydantic import AliasChoices, Field -from pydantic_settings import BaseSettings, PydanticBaseSettingsSource +from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig @@ -48,6 +49,7 @@ class PipelineConfig(BaseSettings): "broker1:9092,broker2:9092,broker3:9092", ], description="The comma separated Kafka brokers address.", + validation_alias=AliasChoices("brokers", f"{ENV_PREFIX}kafka_brokers"), ) defaults_filename_prefix: str = Field( default="defaults", @@ -105,6 +107,9 @@ class PipelineConfig(BaseSettings): description="Whether to retain clean up jobs in the cluster or uninstall them after completion.", ) + model_config = SettingsConfigDict(env_prefix='KPOPS_') + + @override @classmethod def settings_customise_sources( cls, @@ -114,6 +119,7 @@ def settings_customise_sources( dotenv_settings: PydanticBaseSettingsSource, file_secret_settings: PydanticBaseSettingsSource, ): + # breakpoint() return ( env_settings, init_settings, diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index d07f080ef..77cf699fe 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -1,5 +1,6 @@ from pathlib import Path from typing import Any +from typing_extensions import override from pydantic.fields import FieldInfo from pydantic_settings import PydanticBaseSettingsSource @@ -12,6 +13,7 @@ class YamlConfigSettingsSource(PydanticBaseSettingsSource): path_to_config = Path("config.yaml") + @override def get_field_value( self, field: FieldInfo, @@ -24,11 +26,13 @@ def get_field_value( return field_value, field_name, False return None, field_name, False + @override def prepare_field_value( self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool ) -> Any: return value + @override def __call__(self) -> dict[str, Any]: d: dict[str, Any] = {} diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 6b5982a5f..7cf1bd835 100644 --- 
a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,7 +1,8 @@ from typing import Any -from pydantic import ConfigDict, Field, SerializationInfo, model_serializer +from pydantic import ConfigDict, Field, SerializationInfo, SerializeAsAny, model_serializer from pydantic.alias_generators import to_snake +from pydantic_core import PydanticUndefined from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( @@ -73,58 +74,22 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: @model_serializer(mode="wrap", when_used="always") def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: - # class _SerInfoClone: - # def __init__( - # self, - # include, - # exclude, - # mode, - # by_alias, - # exclude_unset, - # exclude_default, - # exclude_none, - # round_trip, - # ): - # self.include = include - # self.exclude = exclude - # self.mode = mode - # self.by_alias = by_alias - # self.exclude_unset = exclude_unset - # self.exclude_default = exclude_default - # self.exclude_none = exclude_none - # self.round_trip = round_trip - - # info2: _SerInfoClone = _SerInfoClone( - # info.include, - # info.exclude, - # info.mode, - # info.by_alias, - # True, - # True, - # True, - # info.round_trip, - # ) - # breakpoint() - # return handler(self, info2) result = handler(self) # if dict(result.items()).get("extraInputTopics"): # breakpoint() - extra_fields = set() - if self.model_extra is not None: - extra_fields = set(self.model_extra.keys()) - # fields = extra_fields.union(self.model_fields_set) - fields = extra_fields.union(self.model_fields_set) - if self.extra_input_topics: - fields.add("extra_input_topics") - if self.extra_input_patterns: - fields.add("extra_input_patterns") - if self.extra_output_topics: - fields.add("extra_output_topics") - filtered_result_extra_set = { - k: v for k, v in result.items() if ((to_snake(k) in fields) or k in fields) + default_fields = { + field_name: field_info.default + for field_name, field_info + in self.model_fields.items() } - return filtered_result_extra_set - + filtered_result = { + k: v + for k, v + in result.items() + if (v != default_fields.get(k) and v is not None) + and (v != default_fields.get(to_snake(k)) and v is not None) + } + return filtered_result class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config. 
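[Editor note on the serializer above] In pydantic v2, a `mode="wrap"` model serializer receives a handler that produces the regular dump, which the method can then post-process — here dropping every value that is None or still equal to its field default, mimicking v1's `exclude_defaults`/`exclude_none` behavior. A minimal self-contained sketch of the same pattern, for reference only (the `ExampleStreams` model is illustrative and not part of kpops):

    from typing import Any

    from pydantic import BaseModel, model_serializer


    class ExampleStreams(BaseModel):
        brokers: str = ""
        output_topic: str | None = None
        replicas: int = 1

        @model_serializer(mode="wrap", when_used="always")
        def serialize_model(self, handler) -> dict[str, Any]:
            result = handler(self)  # the plain dump, before filtering
            defaults = {name: info.default for name, info in self.model_fields.items()}
            return {
                key: value
                for key, value in result.items()
                if value is not None and value != defaults.get(key)
            }


    # Unset and default-valued fields are omitted from the dump:
    assert ExampleStreams(brokers="broker:9092").model_dump() == {"brokers": "broker:9092"}

The kpops version additionally looks up `to_snake(k)` because its fields are dumped under camelCase aliases, so defaults must be matched against both spellings of each key.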
diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 2adf867da..155d9686a 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -7,7 +7,7 @@ from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig -from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig +from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig, KafkaConnectorType from kpops.components.base_components.kafka_connector import KafkaConnector DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml index a1243a3c8..cee15c9a5 100644 --- a/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml +++ b/tests/pipeline/resources/pipeline-with-env-defaults/defaults.yaml @@ -35,6 +35,5 @@ kafka-connector: key.ignore: "false" linger.ms: "5000" max.buffered.records: "20000" - name: "sink-connector" read.timeout.ms: "120000" tasks.max: "1" diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 433960e74..c49c09a72 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -460,8 +460,7 @@ def test_default_config(self, snapshot: SnapshotTest): def test_env_vars_precedence_over_config( self, - monkeypatch: pytest.MonkeyPatch, - snapshot: SnapshotTest, + monkeypatch: pytest.MonkeyPatch ): monkeypatch.setenv(name="KPOPS_KAFKA_BROKERS", value="env_broker") From f4ee855c2ce04409463ae80b4e663a0d2096a51a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 11 Oct 2023 15:22:32 +0300 Subject: [PATCH 43/96] test: remove leftover from playground --- tests/test_model_serialization.py | 110 ------------------------------ 1 file changed, 110 deletions(-) delete mode 100644 tests/test_model_serialization.py diff --git a/tests/test_model_serialization.py b/tests/test_model_serialization.py deleted file mode 100644 index 61e13bfeb..000000000 --- a/tests/test_model_serialization.py +++ /dev/null @@ -1,110 +0,0 @@ -from pathlib import Path -from unittest.mock import MagicMock -import pytest -import yaml -from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig -from kpops.component_handlers import ComponentHandlers -from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig -from kpops.components.streams_bootstrap.streams.model import StreamsConfig, StreamsAppConfig -from kpops.components.streams_bootstrap.streams.streams_app import StreamsApp - -@pytest.fixture() -def streams_config() -> StreamsConfig: - return StreamsConfig( - brokers="", - extra_input_patterns={ - "eip1k": "eip1v", - "eip2k": "eip2v", - }, - extra_input_topics={ - "eit1k": ["eit1v"], - "eit2k": ["eit2v"], - }, - ) - -@pytest.fixture() -def streams_app_config(streams_config: StreamsConfig) -> StreamsAppConfig: - return StreamsAppConfig(streams=streams_config) - -STREAMS_APP_NAME = "test-streams-app-with-long-name-0123456789abcdefghijklmnop" -STREAMS_APP_CLEAN_NAME = "test-streams-app-with-long-name-0123456789abcd-clean" -DEFAULTS_PATH: Path = Path(__file__).parent - -@pytest.fixture -def handlers() -> ComponentHandlers: - return ComponentHandlers( - schema_handler=MagicMock(), - connector_handler=MagicMock(), - topic_handler=MagicMock(), - ) - -@pytest.fixture -def config() -> 
PipelineConfig: - return PipelineConfig( - defaults_path=DEFAULTS_PATH, - environment="development", - topic_name_config=TopicNameConfig( - default_error_topic_name="${component_type}-error-topic", - default_output_topic_name="${component_type}-output-topic", - ), - helm_diff_config=HelmDiffConfig(), - ) - -@pytest.fixture() -def streams_app(streams_app_config: StreamsAppConfig, config: PipelineConfig, handlers: ComponentHandlers) -> StreamsApp: - return StreamsApp( - name=STREAMS_APP_NAME, - namespace="namespace", - config=config, - handlers=handlers, - app=streams_app_config, - ) - -def test_streams_config(streams_config: StreamsConfig): - assert streams_config.model_dump() == { - "brokers": "", - "extra_input_patterns": { - "eip1k": "eip1v", - "eip2k": "eip2v", - }, - "extra_input_topics": { - "eit1k": ["eit1v"], - "eit2k": ["eit2v"], - }, - } - -def test_streams_app_config(streams_app_config: StreamsAppConfig): - assert streams_app_config.model_dump() == { - "autoscaling": None, - "name_override": None, - "streams": { - "brokers": "", - "extra_input_patterns": { - "eip1k": "eip1v", - "eip2k": "eip2v", - }, - "extra_input_topics": { - "eit1k": ["eit1v"], - "eit2k": ["eit2v"], - }, - } - } - -def test_streams_app(streams_app): - assert streams_app.model_dump() == { - "app": { - "autoscaling": None, - "name_override": None, - "streams": { - "brokers": "", - "extra_input_patterns": { - "eip1k": "eip1v", - "eip2k": "eip2v", - }, - "extra_input_topics": { - "eit1k": ["eit1v"], - "eit2k": ["eit2v"], - }, - } - }, - } From e36f62226a43c5efda24607ef9e0365b203a9480 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 11 Oct 2023 15:39:17 +0300 Subject: [PATCH 44/96] test: found bug --- tests/pipeline/test_pipeline.py | 2 +- tests/utils/test_doc_gen.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index c49c09a72..712bccd7f 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -271,7 +271,7 @@ def test_no_user_defined_components(self, snapshot: SnapshotTest): enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") - def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest): + def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest): # INTERFERES WITH test_with_env_defaults """Parse Connector topics from previous component to section.""" result = runner.invoke( app, diff --git a/tests/utils/test_doc_gen.py b/tests/utils/test_doc_gen.py index 184115e1b..04044dc92 100644 --- a/tests/utils/test_doc_gen.py +++ b/tests/utils/test_doc_gen.py @@ -1,5 +1,6 @@ from pathlib import Path from typing import Any +from pydantic_core import PydanticUndefined import pytest @@ -18,9 +19,8 @@ class TestEnvDocGen: def test_collect_fields(self): expected: list[Any] = [ "not_nested_field", - "attr", - Ellipsis, - Ellipsis, + PydanticUndefined, + PydanticUndefined, ] actual = [ field_value.default for _, field_value in collect_fields(ParentSettings) From a4f508d7a543c1029bd83af7020e96e788c9af75 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 11 Oct 2023 21:34:54 +0300 Subject: [PATCH 45/96] fix: pydantic deprecation warnings --- kpops/components/base_components/kubernetes_app.py | 4 ++-- tests/cli/test_schema_generation.py | 2 +- .../kafka_connect/test_connect_wrapper.py | 8 ++++---- tests/pipeline/test_components/components.py | 2 +- tests/utils/test_dict_ops.py | 2 +- 5 files changed, 9 
insertions(+), 9 deletions(-) diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 287cf0406..6795fabf7 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -105,7 +105,7 @@ def helm_chart(self) -> str: @property def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands.""" - auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {} + auth_flags = self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {} return HelmFlags( **auth_flags, version=self.version, @@ -134,7 +134,7 @@ def template(self) -> None: @property def deploy_flags(self) -> HelmUpgradeInstallFlags: """Return flags for Helm upgrade install command.""" - return HelmUpgradeInstallFlags(**self.helm_flags.dict()) + return HelmUpgradeInstallFlags(**self.helm_flags.model_dump()) @override def deploy(self, dry_run: bool) -> None: diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index cbb855d14..d860a0b9c 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -26,7 +26,7 @@ # type is inherited from PipelineComponent class EmptyPipelineComponent(PipelineComponent): class Config: - anystr_strip_whitespace = True + str_strip_whitespace = True # abstract component inheriting from ABC should be excluded diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 8e60d92a7..3c200d5e2 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -79,7 +79,7 @@ def test_should_create_post_requests_for_given_connector_configuration( headers=HEADERS, json={ "name": "test-connector", - "config": KafkaConnectorConfig(**configs).dict(), + "config": KafkaConnectorConfig(**configs).model_dump(), }, ) @@ -249,7 +249,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc mock_put.assert_called_with( url=f"{HOST}/connectors/{connector_name}/config", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=KafkaConnectorConfig(**configs).dict(), + json=KafkaConnectorConfig(**configs).model_dump(), ) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") @@ -469,7 +469,7 @@ def test_should_create_correct_validate_connector_config_request( mock_put.assert_called_with( url=f"{HOST}/connector-plugins/FileStreamSinkConnector/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=connector_config.dict(), + json=connector_config.model_dump(), ) @patch("httpx.put") @@ -491,7 +491,7 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( mock_put.assert_called_with( url=f"{HOST}/connector-plugins/{connector_name}/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=KafkaConnectorConfig(**{"name": connector_name, **configs}).dict(), + json=KafkaConnectorConfig(**{"name": connector_name, **configs}).model_dump(), ) def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py index 86e2c8b8e..84698c0b4 100644 --- a/tests/pipeline/test_components/components.py +++ b/tests/pipeline/test_components/components.py @@ 
-71,7 +71,7 @@ def inflate(self) -> list[PipelineComponent]: f"{self.full_name}-" + "${component_name}" ): TopicConfig(type=OutputTopicTypes.OUTPUT) } - ).dict(), + ).model_dump(), ) inflate_steps.append(streams_app) diff --git a/tests/utils/test_dict_ops.py b/tests/utils/test_dict_ops.py index 1ea410770..224934d87 100644 --- a/tests/utils/test_dict_ops.py +++ b/tests/utils/test_dict_ops.py @@ -70,7 +70,7 @@ class SimpleModel(BaseModel): }, }, problems=99, - ).json() + ).model_dump_json() ) existing_substitution = { "key1": "Everything", From e236873d9d5a727aefb96160bbf51be983886a27 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 11 Oct 2023 22:07:33 +0300 Subject: [PATCH 46/96] chore: lint and format --- .../dependencies/kpops_structure.yaml | 3 -- docs/docs/schema/pipeline.json | 36 +++++++------------ kpops/cli/pipeline_config.py | 10 ++++-- kpops/cli/settings_sources.py | 4 +-- .../kafka_connect/connect_wrapper.py | 1 + .../base_components/kubernetes_app.py | 4 ++- .../streams_bootstrap/streams/model.py | 15 ++++---- kpops/utils/gen_schema.py | 6 ++-- .../kafka_connect/test_connect_wrapper.py | 4 ++- tests/components/test_kafka_connector.py | 2 +- tests/pipeline/test_pipeline.py | 9 +++-- tests/utils/test_doc_gen.py | 2 +- 12 files changed, 43 insertions(+), 53 deletions(-) diff --git a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml index 5566d7ba3..21af5971c 100644 --- a/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml +++ b/docs/docs/resources/pipeline-components/dependencies/kpops_structure.yaml @@ -18,7 +18,6 @@ kpops_components_fields: - repo_config - version - resetter_values - - connector_type kafka-sink-connector: - name - prefix @@ -29,7 +28,6 @@ kpops_components_fields: - repo_config - version - resetter_values - - connector_type kafka-source-connector: - name - prefix @@ -40,7 +38,6 @@ kpops_components_fields: - repo_config - version - resetter_values - - connector_type - offset_topic kubernetes-app: - name diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 4653f57c5..21904b96b 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -114,6 +114,18 @@ "connector.class": { "title": "Connector.Class", "type": "string" + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "title": "Name" } }, "required": [ @@ -122,14 +134,6 @@ "title": "KafkaConnectorConfig", "type": "object" }, - "KafkaConnectorType": { - "enum": [ - "sink", - "source" - ], - "title": "KafkaConnectorType", - "type": "string" - }, "KafkaSinkConnector": { "additionalProperties": true, "description": "Kafka sink connector model.", @@ -142,14 +146,6 @@ ], "description": "Application-specific settings" }, - "connector_type": { - "allOf": [ - { - "$ref": "#/definitions/KafkaConnectorType" - } - ], - "default": "sink" - }, "from": { "anyOf": [ { @@ -249,14 +245,6 @@ ], "description": "Application-specific settings" }, - "connector_type": { - "allOf": [ - { - "$ref": "#/definitions/KafkaConnectorType" - } - ], - "default": "source" - }, "from": { "anyOf": [ { diff --git a/kpops/cli/pipeline_config.py b/kpops/cli/pipeline_config.py index 468ca3bac..1205c2401 100644 --- a/kpops/cli/pipeline_config.py +++ b/kpops/cli/pipeline_config.py @@ -1,10 +1,14 @@ from __future__ import annotations from pathlib import Path -from typing_extensions import override from pydantic import 
AliasChoices, Field -from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict +from pydantic_settings import ( + BaseSettings, + PydanticBaseSettingsSource, + SettingsConfigDict, +) +from typing_extensions import override from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig @@ -107,7 +111,7 @@ class PipelineConfig(BaseSettings): description="Whether to retain clean up jobs in the cluster or uninstall them after completion.", ) - model_config = SettingsConfigDict(env_prefix='KPOPS_') + model_config = SettingsConfigDict(env_prefix="KPOPS_") @override @classmethod diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index 77cf699fe..d93304c58 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -1,9 +1,9 @@ from pathlib import Path from typing import Any -from typing_extensions import override from pydantic.fields import FieldInfo from pydantic_settings import PydanticBaseSettingsSource +from typing_extensions import override from kpops.utils.yaml_loading import load_yaml_file @@ -17,7 +17,7 @@ class YamlConfigSettingsSource(PydanticBaseSettingsSource): def get_field_value( self, field: FieldInfo, - field_name: str, # noqa: + field_name: str, ) -> tuple[Any, str, bool]: if self.path_to_config.exists() and isinstance( (file_content_yaml := load_yaml_file(self.path_to_config)), dict diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 61e9def54..643371eef 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -63,6 +63,7 @@ def create_connector( def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: """Get information about the connector. + API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) :param connector_name: Name of the created connector :return: Information about the connector. 
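[Editor note on source precedence] The order of sources returned by `settings_customise_sources` above is what determines precedence: pydantic-settings applies the returned sources left to right, with earlier sources winning, which is how `KPOPS_*` environment variables end up overriding values from `config.yaml`. A rough standalone sketch of a custom YAML source wired in with that ordering (names like `YamlSettingsSource` and `ExampleSettings` are illustrative only; assumes PyYAML is available):

    from pathlib import Path
    from typing import Any

    import yaml
    from pydantic.fields import FieldInfo
    from pydantic_settings import BaseSettings, PydanticBaseSettingsSource


    class YamlSettingsSource(PydanticBaseSettingsSource):
        path = Path("config.yaml")

        def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
            content: dict[str, Any] = {}
            if self.path.exists():
                content = yaml.safe_load(self.path.read_text()) or {}
            return content.get(field_name), field_name, False

        def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
            return value

        def __call__(self) -> dict[str, Any]:
            values: dict[str, Any] = {}
            for name, field in self.settings_cls.model_fields.items():
                value, _, _ = self.get_field_value(field, name)
                if value is not None:
                    values[name] = value
            return values


    class ExampleSettings(BaseSettings):
        timeout: int = 300

        @classmethod
        def settings_customise_sources(cls, settings_cls, init_settings, env_settings, dotenv_settings, file_secret_settings):
            # Leftmost wins: env vars beat init kwargs, which beat the YAML file.
            return env_settings, init_settings, YamlSettingsSource(settings_cls)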
diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 6795fabf7..7b6280ac7 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -105,7 +105,9 @@ def helm_chart(self) -> str: @property def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands.""" - auth_flags = self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {} + auth_flags = ( + self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {} + ) return HelmFlags( **auth_flags, version=self.version, diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 7cf1bd835..a6811702a 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,8 +1,7 @@ from typing import Any -from pydantic import ConfigDict, Field, SerializationInfo, SerializeAsAny, model_serializer +from pydantic import ConfigDict, Field, SerializationInfo, model_serializer from pydantic.alias_generators import to_snake -from pydantic_core import PydanticUndefined from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( @@ -79,17 +78,15 @@ def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: # breakpoint() default_fields = { field_name: field_info.default - for field_name, field_info - in self.model_fields.items() + for field_name, field_info in self.model_fields.items() } - filtered_result = { + return { k: v - for k, v - in result.items() + for k, v in result.items() if (v != default_fields.get(k) and v is not None) - and (v != default_fields.get(to_snake(k)) and v is not None) + and (v != default_fields.get(to_snake(k)) and v is not None) } - return filtered_result + class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config. 
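[Editor note on the method renames] The `.dict()` → `.model_dump()` swaps running through these commits follow pydantic's v1-to-v2 rename table: `.json()` becomes `.model_dump_json()`, `.copy()` becomes `.model_copy()`, and `parse_obj` becomes `model_validate`. The v1 spellings still work in v2 but raise deprecation warnings, which is what this series silences. A quick illustration (the `Flags` model is made up):

    from pydantic import BaseModel


    class Flags(BaseModel):
        version: str | None = None
        debug: bool = False


    flags = Flags(version="1.0.0")
    # v1 spelling, now deprecated: flags.dict(exclude_none=True)
    assert flags.model_dump(exclude_none=True) == {"version": "1.0.0", "debug": False}
    assert flags.model_dump_json() == '{"version":"1.0.0","debug":false}'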
diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 49ecdd4d5..ff67acf95 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -5,7 +5,7 @@ from enum import Enum from typing import Literal -from pydantic import Field +from pydantic.fields import FieldInfo from pydantic.json_schema import model_json_schema, models_json_schema from kpops.cli.pipeline_config import PipelineConfig @@ -94,9 +94,9 @@ def gen_pipeline_schema( # re-assign component type as Literal to work as discriminator for component in components: - component.model_fields["type"] = Field( + component.model_fields["type"] = FieldInfo( alias="type", - type_=Literal[component.type], # type: ignore + annotation=Literal[component.type], # type: ignore[reportGeneralTypeIssues] default=component.type, # final=True, title="Component type", diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 3c200d5e2..4b5d9facd 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -491,7 +491,9 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( mock_put.assert_called_with( url=f"{HOST}/connector-plugins/{connector_name}/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=KafkaConnectorConfig(**{"name": connector_name, **configs}).model_dump(), + json=KafkaConnectorConfig( + **{"name": connector_name, **configs} + ).model_dump(), ) def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): diff --git a/tests/components/test_kafka_connector.py b/tests/components/test_kafka_connector.py index 3bd19fe79..98771d4af 100644 --- a/tests/components/test_kafka_connector.py +++ b/tests/components/test_kafka_connector.py @@ -8,7 +8,7 @@ from kpops.cli.pipeline_config import PipelineConfig, TopicNameConfig from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.helm_wrapper.model import HelmDiffConfig -from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig, KafkaConnectorType +from kpops.component_handlers.kafka_connect.model import KafkaConnectorConfig from kpops.components.base_components.kafka_connector import KafkaConnector DEFAULTS_PATH = Path(__file__).parent / "resources" diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 712bccd7f..9a782dcd8 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -271,7 +271,9 @@ def test_no_user_defined_components(self, snapshot: SnapshotTest): enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") - def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest): # INTERFERES WITH test_with_env_defaults + def test_kafka_connect_sink_weave_from_topics( + self, snapshot: SnapshotTest + ): # INTERFERES WITH test_with_env_defaults """Parse Connector topics from previous component to section.""" result = runner.invoke( app, @@ -458,10 +460,7 @@ def test_default_config(self, snapshot: SnapshotTest): snapshot.assert_match(enriched_pipeline, "test-pipeline") - def test_env_vars_precedence_over_config( - self, - monkeypatch: pytest.MonkeyPatch - ): + def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch): monkeypatch.setenv(name="KPOPS_KAFKA_BROKERS", value="env_broker") result = runner.invoke( diff 
--git a/tests/utils/test_doc_gen.py b/tests/utils/test_doc_gen.py index 04044dc92..308723386 100644 --- a/tests/utils/test_doc_gen.py +++ b/tests/utils/test_doc_gen.py @@ -1,8 +1,8 @@ from pathlib import Path from typing import Any -from pydantic_core import PydanticUndefined import pytest +from pydantic_core import PydanticUndefined from hooks.gen_docs.gen_docs_env_vars import ( EnvVarAttrs, From a6b10e33f3589b5e96d1a6ead11d449110148fda Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 19 Oct 2023 18:36:46 +0300 Subject: [PATCH 47/96] refactor: Custom schema gen WIP --- kpops/utils/gen_schema.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index ff67acf95..6f1799124 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -6,7 +6,7 @@ from typing import Literal from pydantic.fields import FieldInfo -from pydantic.json_schema import model_json_schema, models_json_schema +from pydantic.json_schema import model_json_schema, models_json_schema, GenerateJsonSchema from kpops.cli.pipeline_config import PipelineConfig from kpops.cli.registry import _find_classes @@ -18,6 +18,9 @@ class SchemaScope(str, Enum): PIPELINE = "pipeline" CONFIG = "config" +class MultiComponentGenerateJsonSchema(GenerateJsonSchema): + ... + log = logging.getLogger("") From 542f8a18008d805ee23f1f002f111be3a54676fb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 24 Oct 2023 21:16:31 +0300 Subject: [PATCH 48/96] test: update snapshot of example --- tests/pipeline/snapshots/snap_test_example.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index 95d63ab70..406679c8b 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -23,12 +23,12 @@ 'replicaCount': 1, 'schedule': '0 12 * * *', 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, 'suspend': True }, @@ -74,12 +74,12 @@ 'replicaCount': 1, 'schedule': '0 12 * * *', 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, 'suspend': True }, @@ -129,14 +129,14 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-transaction-joiner-dead-letter-topic', 'inputTopics': [ 'bakdata-atm-fraud-detection-transaction-avro-producer-topic' ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 
'bakdata-atm-fraud-detection-transaction-joiner-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'name': 'transaction-joiner', @@ -191,14 +191,14 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-fraud-detector-dead-letter-topic', 'inputTopics': [ 'bakdata-atm-fraud-detection-transaction-joiner-topic' ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'name': 'fraud-detector', @@ -253,7 +253,7 @@ }, 'replicaCount': 1, 'streams': { - 'brokers': 'http://k8kafka-cp-kafka-headless.${NAMESPACE}.svc.cluster.local:9092', + 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'errorTopic': 'bakdata-atm-fraud-detection-account-linker-dead-letter-topic', 'extraInputTopics': { 'accounts': [ @@ -265,7 +265,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.${NAMESPACE}.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'from': { From db7abbfd25b816d9f182a8bf353c9e8d2a905a99 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 24 Oct 2023 21:21:10 +0300 Subject: [PATCH 49/96] test: update test pipeline snapshot --- .../pipeline/snapshots/snap_test_pipeline.py | 74 +++++++++---------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py index c2e339fbc..d1e6f1776 100644 --- a/tests/pipeline/snapshots/snap_test_pipeline.py +++ b/tests/pipeline/snapshots/snap_test_pipeline.py @@ -25,7 +25,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-custom-config-app1', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app1', @@ -67,7 +67,7 @@ 'resources-custom-config-app1' ], 'outputTopic': 'resources-custom-config-app2', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -120,7 +120,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'scheduled-producer', @@ -187,7 +187,7 @@ 'resources-pipeline-with-inflate-scheduled-producer' ], 'outputTopic': 'resources-pipeline-with-inflate-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -262,7 +262,7 @@ 'resources-pipeline-with-inflate-converter' ], 'outputTopic': 'resources-pipeline-with-inflate-should-inflate', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'should-inflate', @@ -359,7 +359,7 @@ 'kafka-sink-connector' ], 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app', - 'schemaRegistryUrl': 
'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'should-inflate-inflated-streams-app', @@ -413,7 +413,7 @@ 'example-topic' ], 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -506,7 +506,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-first-pipeline-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'scheduled-producer', @@ -573,7 +573,7 @@ 'resources-first-pipeline-scheduled-producer' ], 'outputTopic': 'resources-first-pipeline-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -648,7 +648,7 @@ 'resources-first-pipeline-converter' ], 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name', @@ -698,7 +698,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'out', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'account-producer', @@ -744,7 +744,7 @@ 'errorTopic': 'resources-no-input-topic-pipeline-app1-error', 'inputPattern': '.*', 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -804,7 +804,7 @@ 'inputTopics': [ 'example-output' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -863,7 +863,7 @@ 'example-topic' ], 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -926,7 +926,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-pipeline-with-envs-input-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'input-producer', @@ -993,7 +993,7 @@ 'resources-pipeline-with-envs-input-producer' ], 'outputTopic': 'resources-pipeline-with-envs-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -1068,7 +1068,7 @@ 'resources-pipeline-with-envs-converter' ], 'outputTopic': 'resources-pipeline-with-envs-filter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'filter', @@ -1127,7 +1127,7 @@ 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092', 'extraOutputTopics': { }, - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' }, 'suspend': True }, @@ -1157,7 +1157,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-read-from-component-producer1', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'producer1', @@ -1192,7 +1192,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-read-from-component-producer2', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'producer2', @@ -1247,7 +1247,7 @@ 
'resources-read-from-component-producer2' ], 'outputTopic': 'resources-read-from-component-inflate-step', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step', @@ -1344,7 +1344,7 @@ 'kafka-sink-connector' ], 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step-inflated-streams-app', @@ -1407,7 +1407,7 @@ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' ], 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step-without-prefix', @@ -1504,7 +1504,7 @@ 'kafka-sink-connector' ], 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'inflate-step-without-prefix-inflated-streams-app', @@ -1552,7 +1552,7 @@ 'resources-read-from-component-producer1' ], 'outputTopic': 'resources-read-from-component-consumer1', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1609,7 +1609,7 @@ 'resources-read-from-component-producer1', 'resources-read-from-component-consumer1' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1664,7 +1664,7 @@ 'resources-read-from-component-producer1', 'resources-read-from-component-producer2' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1718,7 +1718,7 @@ 'inputTopics': [ 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1769,7 +1769,7 @@ 'inputTopics': [ 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app' ], - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { @@ -1832,7 +1832,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-component-type-substitution-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'scheduled-producer', @@ -1899,7 +1899,7 @@ 'resources-component-type-substitution-scheduled-producer' ], 'outputTopic': 'resources-component-type-substitution-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -1981,7 +1981,7 @@ 'resources-component-type-substitution-converter' ], 'outputTopic': 'resources-component-type-substitution-filter-app', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'filter-app', @@ -2039,7 +2039,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'app1-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app1', @@ -2081,7 +2081,7 @@ 'app1-test-topic' ], 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -2136,7 +2136,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'app1-test-topic', - 
'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app1', @@ -2178,7 +2178,7 @@ 'app1-test-topic' ], 'outputTopic': 'app2-test-topic', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -2231,7 +2231,7 @@ 'example-topic' ], 'outputTopic': 'example-output', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'from': { From 78001a2f4cbdd2463a45c8fa542a6c48e19bc883 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 24 Oct 2023 21:23:37 +0300 Subject: [PATCH 50/96] fix: adapt to new AnyHttpUrl and pydantic v2 validation --- .../resources/variables/config_env_vars.env | 30 +++++++---- .../resources/variables/config_env_vars.md | 24 +++++---- docs/docs/schema/pipeline.json | 52 +++++++++++-------- .../kafka_connect/connect_wrapper.py | 10 ++-- .../schema_handler/schema_handler.py | 2 +- .../component_handlers/topic/proxy_wrapper.py | 14 ++--- kpops/config.py | 16 ++---- tests/cli/test_kpops_config.py | 16 +++--- .../topic/test_proxy_wrapper.py | 3 +- 9 files changed, 94 insertions(+), 73 deletions(-) diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index db6a641af..1e38d4d9d 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -13,19 +13,27 @@ defaults_path=. # The environment you want to generate and deploy the pipeline to. # Suffix your environment files with this value (e.g. # defaults_development.yaml for environment=development). -KPOPS_ENVIRONMENT # No default value, required +environment=PydanticUndefined # kafka_brokers # The comma separated Kafka brokers address. -KPOPS_KAFKA_BROKERS # No default value, required -# url -# Address of the Schema Registry. -KPOPS_SCHEMA_REGISTRY_URL=http://localhost:8081 -# url -# Address of the Kafka REST Proxy. -KPOPS_KAFKA_REST_URL=http://localhost:8082 -# url -# Address of Kafka Connect. -KPOPS_KAFKA_CONNECT_URL=http://localhost:8083 +kafka_brokers=PydanticUndefined +# defaults_filename_prefix +# The name of the defaults file and the prefix of the defaults +# environment file. +defaults_filename_prefix=defaults +# topic_name_config +# Configure the topic name variables you can use in the pipeline +# definition. +topic_name_config=default_output_topic_name='${pipeline_name}-${component_name}' default_error_topic_name='${pipeline_name}-${component_name}-error' +# schema_registry +# Configuration for Schema Registry. +schema_registry=enabled=False url=Url('http://localhost:8081/') +# kafka_rest +# Configuration for Kafka REST Proxy. +kafka_rest=url=Url('http://localhost:8082/') +# kafka_connect +# Configuration for Kafka Connect. +kafka_connect=url=Url('http://localhost:8083/') # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 0fac41eb1..9c1c704a9 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,11 +1,17 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. 
-| Name | Default Value | Required | Description | Setting name | -| ------------------------- | --------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------- | -| KPOPS_ENVIRONMENT | | True | The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development). | environment | -| KPOPS_KAFKA_BROKERS | | True | The comma separated Kafka brokers address. | kafka_brokers | -| KPOPS_SCHEMA_REGISTRY_URL | http://localhost:8081 | False | Address of the Schema Registry. | url | -| KPOPS_KAFKA_REST_URL | http://localhost:8082 | False | Address of the Kafka REST Proxy. | url | -| KPOPS_KAFKA_CONNECT_URL | http://localhost:8083 | False | Address of Kafka Connect. | url | -| KPOPS_TIMEOUT | 300 | False | The timeout in seconds that specifies when actions like deletion or deploy timeout. | timeout | -| KPOPS_RETAIN_CLEAN_JOBS | False | False | Whether to retain clean up jobs in the cluster or uninstall the, after completion. | retain_clean_jobs | +| Name | Default Value |Required| Description | Setting name | +|------------------------|----------------------------------------------------------------------------------------------------------------------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------| +|defaults_path |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml` |defaults_path | +|environment |PydanticUndefined |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | +|kafka_brokers |PydanticUndefined |False |The comma separated Kafka brokers address. |kafka_brokers | +|defaults_filename_prefix|defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix| +|topic_name_config |default_output_topic_name='${pipeline_name}-${component_name}' default_error_topic_name='${pipeline_name}-${component_name}-error'|False |Configure the topic name variables you can use in the pipeline definition. |topic_name_config | +|schema_registry |enabled=False url=Url('http://localhost:8081/') |False |Configuration for Schema Registry. |schema_registry | +|kafka_rest |url=Url('http://localhost:8082/') |False |Configuration for Kafka REST Proxy. |kafka_rest | +|kafka_connect |url=Url('http://localhost:8083/') |False |Configuration for Kafka Connect. |kafka_connect | +|timeout |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | +|create_namespace |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | +|helm_config |context=None debug=False api_version=None |False |Global flags for Helm. |helm_config | +|helm_diff_config |ignore=set() |False |Configure Helm Diff. |helm_diff_config | +|retain_clean_jobs |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 468cecae0..85c03a513 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -60,6 +60,7 @@ "type": "object" }, "HelmApp": { + "additionalProperties": true, "description": "Kubernetes app managed through Helm with an associated Helm chart.", "properties": { "app": { @@ -68,15 +69,18 @@ "$ref": "#/definitions/KubernetesAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { "$ref": "#/definitions/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -97,36 +101,41 @@ "type": "string" }, "repo_config": { - "allOf": [ + "anyOf": [ { "$ref": "#/definitions/HelmRepoConfig" + }, + { + "type": "null" } ], - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "default": null, + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ { "$ref": "#/definitions/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "helm-app", - "description": "Kubernetes app managed through Helm with an associated Helm chart.", - "enum": [ - "helm-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -424,7 +433,8 @@ "type": "object" }, "KubernetesAppConfig": { - "description": "Settings specific to Kubernetes apps.", + "additionalProperties": true, + "description": "Settings specific to Kubernetes Apps.", "properties": {}, "title": "KubernetesAppConfig", "type": "object" diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 1cc5a2f9a..4d92bad03 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -49,7 +49,7 @@ def create_connector( config_json = connector_config.model_dump() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self.url}/connectors", headers=HEADERS, json=connect_data + url=f"{self.url}connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -74,7 +74,7 @@ def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: msg = "Connector name not set" raise Exception(msg) response = httpx.get( - url=f"{self.url}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -105,7 +105,7 @@ def update_connector_config( connector_name = connector_config.name config_json = connector_config.model_dump() response = httpx.put( - url=f"{self.url}/connectors/{connector_name}/config", + 
url=f"{self.url}connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) @@ -136,7 +136,7 @@ def validate_connector_config( :return: List of all found errors """ response = httpx.put( - url=f"{self.url}/connector-plugins/{connector_config.class_name}/config/validate", + url=f"{self.url}connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, json=connector_config.model_dump(), ) @@ -166,7 +166,7 @@ def delete_connector(self, connector_name: str) -> None: :raises ConnectorNotFoundException: Connector not found """ response = httpx.delete( - url=f"{self.url}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index e4eba9931..fae2da0e7 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -30,7 +30,7 @@ def __init__( components_module: str | None, ) -> None: self.schema_registry_client = SchemaRegistryClient( - kpops_config.schema_registry.url + str(kpops_config.schema_registry.url) ) self.components_module = components_module diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 1b246d6f5..aa1db6283 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -46,7 +46,7 @@ def cluster_id(self) -> str: :raises KafkaRestProxyError: Kafka REST proxy error :return: The Kafka cluster ID. """ - response = httpx.get(url=f"{self._config.url}/v3/clusters") + response = httpx.get(url=f"{self._config.url!s}v3/clusters") if response.status_code == httpx.codes.OK: cluster_information = response.json() return cluster_information["data"][0]["cluster_id"] @@ -67,7 +67,7 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics", headers=HEADERS, json=topic_spec.model_dump(exclude_none=True), ) @@ -88,7 +88,7 @@ def delete_topic(self, topic_name: str) -> None: :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.delete( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.NO_CONTENT: @@ -109,7 +109,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: :return: Response of the get topic API. """ response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.OK: @@ -139,7 +139,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: :return: The topic configuration. 
""" response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", headers=HEADERS, ) @@ -169,7 +169,7 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": json_body}, ) @@ -189,7 +189,7 @@ def get_broker_config(self) -> BrokerConfigResponse: :return: The broker configuration. """ response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/brokers/-/configs", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/brokers/-/configs", headers=HEADERS, ) diff --git a/kpops/config.py b/kpops/config.py index ca66d0cfb..c6c5c18b6 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -1,20 +1,18 @@ from __future__ import annotations from pathlib import Path -from typing import override -from pydantic import AliasChoices, Field, AnyHttpUrl, parse_obj_as +from pydantic import AnyHttpUrl, Field, TypeAdapter from pydantic_settings import ( BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, ) +from typing_extensions import override from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object -from kpops.utils.yaml_loading import load_yaml_file - ENV_PREFIX = "KPOPS_" @@ -42,8 +40,7 @@ class SchemaRegistryConfig(BaseSettings): url: AnyHttpUrl = Field( # For validating URLs use parse_obj_as # https://github.com/pydantic/pydantic/issues/1106 - default=parse_obj_as(AnyHttpUrl, "http://localhost:8081"), - validation_alias=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8081"), description="Address of the Schema Registry.", ) @@ -52,8 +49,7 @@ class KafkaRestConfig(BaseSettings): """Configuration for Kafka REST Proxy.""" url: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), - validation_alias=f"{ENV_PREFIX}KAFKA_REST_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8082"), description="Address of the Kafka REST Proxy.", ) @@ -62,8 +58,7 @@ class KafkaConnectConfig(BaseSettings): """Configuration for Kafka Connect.""" url: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), - validation_alias=f"{ENV_PREFIX}KAFKA_CONNECT_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8083"), description="Address of Kafka Connect.", ) @@ -92,7 +87,6 @@ class KpopsConfig(BaseSettings): "broker1:9092,broker2:9092,broker3:9092", ], description="The comma separated Kafka brokers address.", - validation_alias=AliasChoices("brokers", f"{ENV_PREFIX}kafka_brokers"), ) defaults_filename_prefix: str = Field( default="defaults", diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index 33db1560a..717a67e46 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -1,7 +1,7 @@ from pathlib import Path import pytest -from pydantic import AnyHttpUrl, ValidationError, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter, ValidationError from kpops.config import ( KafkaConnectConfig, @@ -27,9 +27,9 @@ def test_kpops_config_with_default_values(): == 
"${pipeline_name}-${component_name}-error" ) assert default_config.schema_registry.enabled is False - assert default_config.schema_registry.url == "http://localhost:8081" - assert default_config.kafka_rest.url == "http://localhost:8082" - assert default_config.kafka_connect.url == "http://localhost:8083" + assert default_config.schema_registry.url == AnyHttpUrl("http://localhost:8081") + assert default_config.kafka_rest.url == AnyHttpUrl("http://localhost:8082") + assert default_config.kafka_connect.url == AnyHttpUrl("http://localhost:8083") assert default_config.timeout == 300 assert default_config.create_namespace is False assert default_config.helm_config.context is None @@ -45,7 +45,7 @@ def test_kpops_config_with_different_invalid_urls(): environment="development", kafka_brokers="http://broker:9092", kafka_connect=KafkaConnectConfig( - url=parse_obj_as(AnyHttpUrl, "invalid-host") + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") ), ) @@ -53,7 +53,9 @@ def test_kpops_config_with_different_invalid_urls(): KpopsConfig( environment="development", kafka_brokers="http://broker:9092", - kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "invalid-host")), + kafka_rest=KafkaRestConfig( + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") + ), ) with pytest.raises(ValidationError): @@ -62,6 +64,6 @@ def test_kpops_config_with_different_invalid_urls(): kafka_brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, - url=parse_obj_as(AnyHttpUrl, "invalid-host"), + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host"), ), ) diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index bbd87bc1e..3cee5f06b 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock, patch import pytest +from pydantic import AnyHttpUrl from pytest_httpx import HTTPXMock from pytest_mock import MockerFixture @@ -45,7 +46,7 @@ def _setup(self, httpx_mock: HTTPXMock): json=cluster_response, status_code=200, ) - assert self.proxy_wrapper.url == DEFAULT_HOST + assert self.proxy_wrapper.url == AnyHttpUrl(DEFAULT_HOST) assert self.proxy_wrapper.cluster_id == "cluster-1" @patch("httpx.post") From f80d6e8facfa60702f6a81600d551894b3a1244f Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 24 Oct 2023 21:28:31 +0300 Subject: [PATCH 51/96] fix: update deprecated code --- kpops/components/base_components/helm_app.py | 11 ++++++----- .../schema_handler/test_schema_handler.py | 6 +++--- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index f98abd648..d6b9eba71 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -79,7 +79,7 @@ def helm_chart(self) -> str: @property def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands.""" - auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {} + auth_flags = self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {} return HelmFlags( **auth_flags, version=self.version, @@ -90,7 +90,7 @@ def helm_flags(self) -> HelmFlags: def template_flags(self) -> HelmTemplateFlags: """Return flags for Helm template command.""" return HelmTemplateFlags( - **self.helm_flags.dict(), + **self.helm_flags.model_dump(), 
            api_version=self.config.helm_config.api_version,
        )
 
@@ -108,7 +108,7 @@ def template(self) -> None:
     @property
     def deploy_flags(self) -> HelmUpgradeInstallFlags:
         """Return flags for Helm upgrade install command."""
-        return HelmUpgradeInstallFlags(**self.helm_flags.dict())
+        return HelmUpgradeInstallFlags(**self.helm_flags.model_dump())
 
     @override
     def deploy(self, dry_run: bool) -> None:
@@ -139,7 +139,7 @@ def to_helm_values(self) -> dict:
 
         :returns: The values to be used by Helm
         """
-        return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True)
+        return self.app.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True)
 
     def print_helm_diff(self, stdout: str) -> None:
         """Print the diff of the last and current release of this component.
@@ -156,6 +156,7 @@ def print_helm_diff(self, stdout: str) -> None:
         new_release = Helm.load_manifest(stdout)
         self.helm_diff.log_helm_diff(log, current_release, new_release)
 
+    # TODO(Ivan Yordanov): replace with a function decorated with `@model_serializer`
     @override
     def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]:
         # HACK: workaround for Pydantic to exclude cached properties during model export
@@ -163,4 +164,4 @@ def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]:
             exclude = set()
         exclude.add("helm")
         exclude.add("helm_diff")
-        return super().dict(exclude=exclude, **kwargs)
+        return super().model_dump(exclude=exclude, **kwargs)
diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py
index 9f1fe143f..6f18e3b38 100644
--- a/tests/component_handlers/schema_handler/test_schema_handler.py
+++ b/tests/component_handlers/schema_handler/test_schema_handler.py
@@ -3,7 +3,7 @@
 from unittest.mock import MagicMock
 
 import pytest
-from pydantic import AnyHttpUrl, BaseModel, parse_obj_as
+from pydantic import AnyHttpUrl, BaseModel, TypeAdapter
 from pytest_mock import MockerFixture
 from schema_registry.client.schema import AvroSchema
 from schema_registry.client.utils import SchemaVersion
@@ -74,7 +74,7 @@ def kpops_config_with_sr_enabled() -> KpopsConfig:
         environment="development",
         kafka_brokers="broker:9092",
         schema_registry=SchemaRegistryConfig(
-            enabled=True, url=parse_obj_as(AnyHttpUrl, "http://mock:8081")
+            enabled=True, url=TypeAdapter(AnyHttpUrl).validate_python("http://mock:8081")
         ),
     )
 
@@ -87,7 +87,7 @@ def test_load_schema_handler(kpops_config_with_sr_enabled: KpopsConfig):
         SchemaHandler,
     )
 
-    config_disable = kpops_config_with_sr_enabled.copy()
+    config_disable = kpops_config_with_sr_enabled.model_copy()
     config_disable.schema_registry = SchemaRegistryConfig(enabled=False)
 
     assert (

From 0bd0395caec12aec1c318425a03a7dfc36de08eb Mon Sep 17 00:00:00 2001
From: Ivan Yordanov
Date: Tue, 24 Oct 2023 22:01:15 +0300
Subject: [PATCH 52/96] fix: json schema KafkaConnectorConfig

---
 kpops/component_handlers/kafka_connect/model.py | 14 +++++++++++---
 kpops/pipeline_generator/pipeline.py | 2 +-
 kpops/utils/pydantic.py | 11 ++++++-----
 3 files changed, 18 insertions(+), 9 deletions(-)

diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py
index c79dd8dc6..d40f2f1c5 100644
--- a/kpops/component_handlers/kafka_connect/model.py
+++ b/kpops/component_handlers/kafka_connect/model.py
@@ -4,7 +4,7 @@
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 from typing_extensions import override
 
-from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot
+from kpops.utils.pydantic import CamelCaseConfigModel, to_dot, DescConfigModel
 
 
 class KafkaConnectorType(str, Enum):
@@ -17,11 +17,17 @@ class KafkaConnectorConfig(DescConfigModel):
     connector_class: str
     name: str | None = Field(default=None)
 
+
+    @override
+    @staticmethod
+    def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None:
+        super(KafkaConnectorConfig, KafkaConnectorConfig).json_schema_extra(schema, model)
+        schema["additional_properties"] = {"type": "string"}
+
     model_config = ConfigDict(
         extra="allow",
         alias_generator=to_dot,
-        # TODO(sujuka99): combine with ``json_schema_extra`` of ``DescConfigModel``
-        json_schema_extra={"additional_properties": {"type": "string"}},
+        json_schema_extra=json_schema_extra,
     )
 
     @field_validator("connector_class")
@@ -35,6 +41,7 @@ def connector_class_must_contain_dot(cls, connector_class: str) -> str:
     def class_name(self) -> str:
         return self.connector_class.split(".")[-1]
 
+    # TODO(Ivan Yordanov): replace with a function decorated with `@model_serializer`
     @override
     def model_dump(self, **_) -> dict[str, Any]:
         return super().model_dump(by_alias=True, exclude_none=True)
@@ -81,6 +88,7 @@ class KafkaConnectResetterValues(CamelCaseConfigModel):
     config: KafkaConnectResetterConfig
     name_override: str
 
+    # TODO(Ivan Yordanov): replace with a function decorated with `@model_serializer`
     @override
     def model_dump(self, **_) -> dict[str, Any]:
         return super().model_dump(by_alias=True, exclude_none=True)
diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py
index c1b3488ae..f4676105c 100644
--- a/kpops/pipeline_generator/pipeline.py
+++ b/kpops/pipeline_generator/pipeline.py
@@ -279,7 +279,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict:
         config = self.config
         # Leftover variables that were previously introduced in the component by the substitution
         # functions, still hardcoded, because of their names.
- # TODO: Get rid of them + # TODO(Ivan Yordanov): Get rid of them substitution_hardcoded = { "error_topic_name": config.topic_name_config.default_error_topic_name, "output_topic_name": config.topic_name_config.default_output_topic_name, diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 1dc5063cd..97fcccdaa 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -21,10 +21,6 @@ def to_dot(s: str) -> str: return s.replace("_", ".") -def schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: - schema["description"] = describe_object(model.__doc__) - - class CamelCaseConfigModel(BaseModel): model_config = ConfigDict( alias_generator=to_camel, @@ -33,4 +29,9 @@ class CamelCaseConfigModel(BaseModel): class DescConfigModel(BaseModel): - model_config = ConfigDict(json_schema_extra=schema_extra) + + @staticmethod + def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: + schema["description"] = describe_object(model.__doc__) + + model_config = ConfigDict(json_schema_extra=json_schema_extra) From 5db5586ca96cea652526c409331dcbec386ebd5d Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 24 Oct 2023 22:11:00 +0300 Subject: [PATCH 53/96] chore: comment, update func name --- kpops/components/base_components/helm_app.py | 2 +- kpops/components/streams_bootstrap/streams/model.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index d6b9eba71..cbba13b15 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -158,7 +158,7 @@ def print_helm_diff(self, stdout: str) -> None: # TODO(Ivan Yordanov): replace with a function decorated with `@model_serializer` @override - def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]: + def model_dump(self, *, exclude=None, **kwargs) -> dict[str, Any]: # HACK: workaround for Pydantic to exclude cached properties during model export if exclude is None: exclude = set() diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index a6811702a..882a9faeb 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -71,6 +71,7 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: self.extra_input_topics.get(role, []) + topics ) + # TODO(Ivan Yordanov): Do it properly. 
Currently hacky and potentially unsafe @model_serializer(mode="wrap", when_used="always") def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: result = handler(self) From d2c0527a0127995964226e8986b3b96227b5b565 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 30 Oct 2023 16:42:45 +0200 Subject: [PATCH 54/96] chore: refactor, comment --- hooks/gen_docs/gen_docs_components.py | 2 +- hooks/gen_docs/gen_docs_env_vars.py | 2 +- kpops/cli/settings_sources.py | 2 +- kpops/component_handlers/kafka_connect/model.py | 6 ++++-- kpops/components/base_components/helm_app.py | 8 ++++++-- kpops/utils/pydantic.py | 1 - .../schema_handler/test_schema_handler.py | 3 ++- 7 files changed, 15 insertions(+), 9 deletions(-) diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index ebd7353f2..ecbc59bc9 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -44,7 +44,7 @@ KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name - for field_name, field_info in component.model_fields.items() # pyright: ignore[reportGeneralTypeIssues] + for field_name, field_info in component.model_fields.items() if not field_info.exclude ] for component in KPOPS_COMPONENTS diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 6339f8231..bf4a1ff7b 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -268,7 +268,7 @@ def fill_csv_pipeline_config(target: Path) -> None: field_name, ) - +# TODO(Ivan Yordanov): Should we yield both the nested fields and their parents? def collect_fields(settings: type[BaseSettings]) -> Iterator[tuple[str, FieldInfo]]: """Collect and yield all fields in a settings class. diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index d93304c58..0d8f0d5f8 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -7,7 +7,7 @@ from kpops.utils.yaml_loading import load_yaml_file - +# TODO(Ivan Yordanov): Test! 
 class YamlConfigSettingsSource(PydanticBaseSettingsSource):
     """Loads variables from a YAML file at the project's root."""
 
diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py
index d40f2f1c5..a885c514e 100644
--- a/kpops/component_handlers/kafka_connect/model.py
+++ b/kpops/component_handlers/kafka_connect/model.py
@@ -4,7 +4,7 @@
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 from typing_extensions import override
 
-from kpops.utils.pydantic import CamelCaseConfigModel, to_dot, DescConfigModel
+from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot
 
 
 class KafkaConnectorType(str, Enum):
@@ -21,7 +21,9 @@ class KafkaConnectorConfig(DescConfigModel):
     @override
     @staticmethod
     def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None:
-        super(KafkaConnectorConfig, KafkaConnectorConfig).json_schema_extra(schema, model)
+        super(KafkaConnectorConfig, KafkaConnectorConfig).json_schema_extra(
+            schema, model
+        )
         schema["additional_properties"] = {"type": "string"}
 
     model_config = ConfigDict(
diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py
index cbba13b15..a7b3acc95 100644
--- a/kpops/components/base_components/helm_app.py
+++ b/kpops/components/base_components/helm_app.py
@@ -79,7 +79,9 @@ def helm_chart(self) -> str:
     @property
     def helm_flags(self) -> HelmFlags:
         """Return shared flags for Helm commands."""
-        auth_flags = self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {}
+        auth_flags = (
+            self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {}
+        )
         return HelmFlags(
             **auth_flags,
             version=self.version,
@@ -139,7 +141,9 @@ def to_helm_values(self) -> dict:
 
         :returns: The values to be used by Helm
         """
-        return self.app.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True)
+        return self.app.model_dump(
+            by_alias=True, exclude_none=True, exclude_defaults=True
+        )
 
     def print_helm_diff(self, stdout: str) -> None:
         """Print the diff of the last and current release of this component.
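
The deprecated calls replaced in the patches above all follow the same
Pydantic v1 -> v2 rename pattern. A minimal sketch of the correspondence,
assuming a toy `Example` model rather than any real KPOps class:

    # Pydantic v1 -> v2 renames, illustrated on a stand-in model.
    from pydantic import AnyHttpUrl, BaseModel, TypeAdapter

    class Example(BaseModel):
        name: str = "app"
        url: AnyHttpUrl | None = None

    example = Example()
    # v1: example.dict(by_alias=True, exclude_none=True)
    print(example.model_dump(by_alias=True, exclude_none=True))  # {'name': 'app'}
    # v1: example.copy()
    clone = example.model_copy()
    # v1: parse_obj_as(AnyHttpUrl, "http://localhost:8081")
    url = TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8081")

The `TypeAdapter(...).validate_python(...)` form replaces `parse_obj_as`,
which v2 deprecates, and is why it appears in every default URL above.
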
diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 97fcccdaa..321ddc545 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -29,7 +29,6 @@ class CamelCaseConfigModel(BaseModel): class DescConfigModel(BaseModel): - @staticmethod def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: schema["description"] = describe_object(model.__doc__) diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 6f18e3b38..00718f3bd 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -74,7 +74,8 @@ def kpops_config_with_sr_enabled() -> KpopsConfig: environment="development", kafka_brokers="broker:9092", schema_registry=SchemaRegistryConfig( - enabled=True, url=TypeAdapter(AnyHttpUrl).validate_python("http://mock:8081") + enabled=True, + url=TypeAdapter(AnyHttpUrl).validate_python("http://mock:8081"), ), ) From 18defbb613b870a81be0bdeaeacd8d3c64977b5c Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 19:11:40 +0100 Subject: [PATCH 55/96] WIP schema generation --- .../base_defaults_component.py | 3 +- kpops/utils/gen_schema.py | 117 +++++++++++------- 2 files changed, 75 insertions(+), 45 deletions(-) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 3cb31f692..b4077df6e 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -9,6 +9,7 @@ import typer from pydantic import AliasChoices, ConfigDict, Field +from pydantic.json_schema import SkipJsonSchema from kpops.component_handlers import ComponentHandlers from kpops.config import KpopsConfig @@ -55,7 +56,7 @@ class BaseDefaultsComponent(DescConfigModel, ABC): description=describe_attr("config", __doc__), exclude=True, ) - handlers: ComponentHandlers = Field( + handlers: SkipJsonSchema[ComponentHandlers] = Field( default=..., description=describe_attr("handlers", __doc__), exclude=True, diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 601732961..4a95a26a0 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -2,20 +2,17 @@ import json import logging from abc import ABC +from collections.abc import Sequence from enum import Enum -from typing import Literal +from typing import Annotated, Literal, Union +from pydantic import BaseModel, Field from pydantic.fields import FieldInfo -from pydantic.json_schema import ( - GenerateJsonSchema, - model_json_schema, - models_json_schema, -) +from pydantic.json_schema import GenerateJsonSchema, model_json_schema from kpops.cli.registry import _find_classes -from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.components import PipelineComponent from kpops.config import KpopsConfig -from kpops.utils.docstring import describe_object class SchemaScope(str, Enum): @@ -90,6 +87,7 @@ def gen_pipeline_schema( log.warning("No components are provided, no schema is generated.") return # Add stock components if enabled + # components: tuple[type[PipelineComponent], ...] = (PipelineComponent,KubernetesApp,) components: tuple[type[PipelineComponent], ...] 
= () if include_stock_components: components = _add_components("kpops.components") @@ -103,49 +101,80 @@ def gen_pipeline_schema( # re-assign component type as Literal to work as discriminator for component in components: component.model_fields["type"] = FieldInfo( - alias="type", annotation=Literal[component.type], # type: ignore[reportGeneralTypeIssues] default=component.type, # final=True, - title="Component type", - description=describe_object(component.__doc__), + # title="Component type", + # description=describe_object(component.__doc__), # model_config=BaseConfig, # class_validators=None, ) - components_moded = tuple([(component, "serialization") for component in components]) + extra_schema = { + "type": "model-field", + "schema": { + "type": "literal", + "expected": [component.type], + "metadata": { + "pydantic.internal.needs_apply_discriminated_union": False + }, + }, + "metadata": { + "pydantic_js_functions": [], + "pydantic_js_annotation_functions": [], + }, + } + if "schema" not in component.__pydantic_core_schema__["schema"]: + component.__pydantic_core_schema__["schema"]["fields"][ + "type" + ] = extra_schema + else: + component.__pydantic_core_schema__["schema"]["schema"]["fields"][ + "type" + ] = extra_schema + + PipelineComponents = Union[components] # type: ignore[valid-type] + AnnotatedPipelineComponents = Annotated[ + PipelineComponents, Field(discriminator="type") + ] + + class PipelineSchema(BaseModel): + components: Sequence[AnnotatedPipelineComponents] + + schema = PipelineSchema.model_json_schema() + + # info, schema = models_json_schema( + # components_moded, + # title="KPOps pipeline schema", + # by_alias=True, + # # ref_template="#/definitions/{model}", + # ) + print(json.dumps(schema, indent=4, sort_keys=True)) - schema = models_json_schema( - components_moded, - title="KPOps pipeline schema", - by_alias=True, - ref_template="#/definitions/{model}", - ) - # breakpoint() - stripped_schema_first_item = {k[0]: v for k, v in schema[0].items()} - schema_first_item_adapted = { - "discriminator": { - "mapping": {}, - "propertyName": "type", - }, - "oneOf": [], - } - mapping = {} - one_of = [] - for k, v in stripped_schema_first_item.items(): - mapping[k.type] = v["$ref"] - one_of.append(v) - schema_first_item_adapted["discriminator"]["mapping"] = mapping - schema_first_item_adapted["oneOf"] = one_of - complete_schema = schema[1].copy() - complete_schema["items"] = schema_first_item_adapted - complete_schema["type"] = "array" - print( - json.dumps( - complete_schema, - indent=4, - sort_keys=True, - ) - ) + # stripped_schema_first_item = {k[0]: v for k, v in schema[0].items()} + # schema_first_item_adapted = { + # "discriminator": { + # "mapping": {}, + # "propertyName": "type", + # }, + # "oneOf": [], + # } + # mapping = {} + # one_of = [] + # for k, v in stripped_schema_first_item.items(): + # mapping[k.type] = v["$ref"] + # one_of.append(v) + # schema_first_item_adapted["discriminator"]["mapping"] = mapping + # schema_first_item_adapted["oneOf"] = one_of + # complete_schema = schema[1].copy() + # complete_schema["items"] = schema_first_item_adapted + # complete_schema["type"] = "array" + # print( + # json.dumps( + # complete_schema, + # indent=4, + # sort_keys=True, + # ) + # ) # Create a type union that will hold the union of all component types # PipelineComponents = Union[components] # type: ignore[valid-type] From 3d821e2c6aec0acd8991c7a7101750a2c4fbdbe7 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 19:15:44 +0100 Subject: [PATCH 56/96] Use 
`pydantic.RootModel` for pipeline components union --- kpops/utils/gen_schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 4a95a26a0..9c9fc6dc8 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -6,7 +6,7 @@ from enum import Enum from typing import Annotated, Literal, Union -from pydantic import BaseModel, Field +from pydantic import Field, RootModel from pydantic.fields import FieldInfo from pydantic.json_schema import GenerateJsonSchema, model_json_schema @@ -137,8 +137,8 @@ def gen_pipeline_schema( PipelineComponents, Field(discriminator="type") ] - class PipelineSchema(BaseModel): - components: Sequence[AnnotatedPipelineComponents] + class PipelineSchema(RootModel): + root: Sequence[AnnotatedPipelineComponents] schema = PipelineSchema.model_json_schema() From e19c4de4558c50734c7e5bdbcf204966c3f85070 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 19:21:43 +0100 Subject: [PATCH 57/96] Cleanup schema generator --- kpops/utils/gen_schema.py | 53 ++------------------------------------- 1 file changed, 2 insertions(+), 51 deletions(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 9c9fc6dc8..badf0bf5f 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -103,11 +103,6 @@ def gen_pipeline_schema( component.model_fields["type"] = FieldInfo( annotation=Literal[component.type], # type: ignore[reportGeneralTypeIssues] default=component.type, - # final=True, - # title="Component type", - # description=describe_object(component.__doc__), - # model_config=BaseConfig, - # class_validators=None, ) extra_schema = { "type": "model-field", @@ -138,55 +133,11 @@ def gen_pipeline_schema( ] class PipelineSchema(RootModel): - root: Sequence[AnnotatedPipelineComponents] + root: Sequence[AnnotatedPipelineComponents] # type: ignore - schema = PipelineSchema.model_json_schema() - - # info, schema = models_json_schema( - # components_moded, - # title="KPOps pipeline schema", - # by_alias=True, - # # ref_template="#/definitions/{model}", - # ) + schema = PipelineSchema.model_json_schema(by_alias=True) print(json.dumps(schema, indent=4, sort_keys=True)) - # stripped_schema_first_item = {k[0]: v for k, v in schema[0].items()} - # schema_first_item_adapted = { - # "discriminator": { - # "mapping": {}, - # "propertyName": "type", - # }, - # "oneOf": [], - # } - # mapping = {} - # one_of = [] - # for k, v in stripped_schema_first_item.items(): - # mapping[k.type] = v["$ref"] - # one_of.append(v) - # schema_first_item_adapted["discriminator"]["mapping"] = mapping - # schema_first_item_adapted["oneOf"] = one_of - # complete_schema = schema[1].copy() - # complete_schema["items"] = schema_first_item_adapted - # complete_schema["type"] = "array" - # print( - # json.dumps( - # complete_schema, - # indent=4, - # sort_keys=True, - # ) - # ) - - # Create a type union that will hold the union of all component types - # PipelineComponents = Union[components] # type: ignore[valid-type] - # AnnotatedPipelineComponents = Annotated[ - # PipelineComponents, Field(discriminator="type") - # ] - # DumpablePipelineComponents = TypeAdapter(AnnotatedPipelineComponents) - - # schema = to_json(AnnotatedPipelineComponents) - - # print(schema) - def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config.""" From b731b6cc081cf901d06cb2a217bc75a17dd4af12 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 19:39:46 +0100 
Subject: [PATCH 58/96] Cleanup generated schema --- docs/docs/schema/pipeline.json | 216 ++++++++++++++---- .../base_defaults_component.py | 6 +- kpops/utils/gen_schema.py | 20 +- 3 files changed, 192 insertions(+), 50 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 85c03a513..c2ce2844c 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -6,7 +6,7 @@ "properties": { "components": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Components to read from", @@ -15,7 +15,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Input topics", @@ -46,7 +46,7 @@ "type": { "anyOf": [ { - "$ref": "#/definitions/InputTopicTypes" + "$ref": "#/$defs/InputTopicTypes" }, { "type": "null" @@ -66,7 +66,7 @@ "app": { "allOf": [ { - "$ref": "#/definitions/KubernetesAppConfig" + "$ref": "#/$defs/KubernetesAppConfig" } ], "description": "Application-specific settings" @@ -74,7 +74,7 @@ "from": { "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" }, { "type": "null" @@ -103,7 +103,7 @@ "repo_config": { "anyOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" }, { "type": "null" @@ -115,7 +115,7 @@ "to": { "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" }, { "type": "null" @@ -124,6 +124,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "helm-app", + "default": "helm-app", + "title": "Type" + }, "version": { "anyOf": [ { @@ -152,7 +157,7 @@ "repo_auth_flags": { "allOf": [ { - "$ref": "#/definitions/RepoAuthFlags" + "$ref": "#/$defs/RepoAuthFlags" } ], "default": { @@ -228,7 +233,7 @@ "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], "description": "Application-specific settings" @@ -236,7 +241,7 @@ "from": { "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" }, { "type": "null" @@ -265,7 +270,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -289,7 +294,7 @@ "to": { "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" }, { "type": "null" @@ -298,6 +303,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "kafka-sink-connector", + "default": "kafka-sink-connector", + "title": "Type" + }, "version": { "anyOf": [ { @@ -327,7 +337,7 @@ "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], "description": "Application-specific settings" @@ -335,7 +345,7 @@ "from": { "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" }, { "type": "null" @@ -377,7 +387,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -401,7 +411,7 @@ "to": { "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" }, { "type": "null" @@ -410,6 +420,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "kafka-source-connector", + "default": "kafka-source-connector", + "title": "Type" + }, "version": { "anyOf": [ { @@ -455,7 
+470,7 @@ "app": { "allOf": [ { - "$ref": "#/definitions/ProducerValues" + "$ref": "#/$defs/ProducerValues" } ], "description": "Application-specific settings" @@ -485,7 +500,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -504,7 +519,7 @@ "to": { "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" }, { "type": "null" @@ -513,6 +528,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "producer-app", + "default": "producer-app", + "title": "Type" + }, "version": { "anyOf": [ { @@ -606,7 +626,7 @@ "streams": { "allOf": [ { - "$ref": "#/definitions/ProducerStreamsConfig" + "$ref": "#/$defs/ProducerStreamsConfig" } ], "description": "Kafka Streams settings" @@ -692,7 +712,7 @@ "app": { "allOf": [ { - "$ref": "#/definitions/StreamsAppConfig" + "$ref": "#/$defs/StreamsAppConfig" } ], "description": "Application-specific settings" @@ -700,7 +720,7 @@ "from": { "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" }, { "type": "null" @@ -729,7 +749,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -748,7 +768,7 @@ "to": { "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" }, { "type": "null" @@ -757,6 +777,11 @@ "default": null, "description": "Topic(s) into which the component will write output" }, + "type": { + "const": "streams-app", + "default": "streams-app", + "title": "Type" + }, "version": { "anyOf": [ { @@ -866,7 +891,7 @@ "autoscaling": { "anyOf": [ { - "$ref": "#/definitions/StreamsAppAutoScaling" + "$ref": "#/$defs/StreamsAppAutoScaling" }, { "type": "null" @@ -891,7 +916,7 @@ "streams": { "allOf": [ { - "$ref": "#/definitions/StreamsConfig" + "$ref": "#/$defs/StreamsConfig" } ], "description": "Streams Bootstrap streams section" @@ -904,7 +929,118 @@ "type": "object" }, "StreamsConfig": { - "description": "Streams Bootstrap streams section.\n\n:param input_topics: Input topics, defaults to []\n:param input_pattern: Input pattern, defaults to None\n:param extra_input_topics: Extra input topics, defaults to {}\n:param extra_input_patterns: Extra input patterns, defaults to {}\n:param extra_output_topics: Extra output topics, defaults to {}\n:param output_topic: Output topic, defaults to None\n:param error_topic: Error topic, defaults to None\n:param config: Configuration, defaults to {}", + "additionalProperties": true, + "description": "Streams Bootstrap streams section.", + "properties": { + "brokers": { + "description": "Brokers", + "title": "Brokers", + "type": "string" + }, + "config": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Configuration", + "title": "Config", + "type": "object" + }, + "errorTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Error topic", + "title": "Errortopic" + }, + "extraInputPatterns": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Extra input patterns", + "title": "Extrainputpatterns", + "type": "object" + }, + "extraInputTopics": { + "additionalProperties": { + "items": { + "type": "string" + }, + "type": "array" + }, + "default": {}, + "description": "Extra input topics", + "title": "Extrainputtopics", + "type": "object" + }, + "extraOutputTopics": { + "additionalProperties": { + "type": 
"string" + }, + "default": {}, + "description": "Extra output topics", + "title": "Extraoutputtopics", + "type": "object" + }, + "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Input pattern", + "title": "Inputpattern" + }, + "inputTopics": { + "default": [], + "description": "Input topics", + "items": { + "type": "string" + }, + "title": "Inputtopics", + "type": "array" + }, + "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "Output topic", + "title": "Outputtopic" + }, + "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, + "description": "URL of the schema registry", + "title": "Schemaregistryurl" + } + }, + "required": [ + "brokers" + ], "title": "StreamsConfig", "type": "object" }, @@ -923,7 +1059,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/TopicConfig" + "$ref": "#/$defs/TopicConfig" }, "default": {}, "description": "Output topics", @@ -1009,7 +1145,7 @@ "type": { "anyOf": [ { - "$ref": "#/definitions/OutputTopicTypes" + "$ref": "#/$defs/OutputTopicTypes" }, { "type": "null" @@ -1040,32 +1176,32 @@ "items": { "discriminator": { "mapping": { - "helm-app": "#/definitions/HelmApp", - "kafka-sink-connector": "#/definitions/KafkaSinkConnector", - "kafka-source-connector": "#/definitions/KafkaSourceConnector", - "producer-app": "#/definitions/ProducerApp", - "streams-app": "#/definitions/StreamsApp" + "helm-app": "#/$defs/HelmApp", + "kafka-sink-connector": "#/$defs/KafkaSinkConnector", + "kafka-source-connector": "#/$defs/KafkaSourceConnector", + "producer-app": "#/$defs/ProducerApp", + "streams-app": "#/$defs/StreamsApp" }, "propertyName": "type" }, "oneOf": [ { - "$ref": "#/definitions/HelmApp" + "$ref": "#/$defs/HelmApp" }, { - "$ref": "#/definitions/KafkaSinkConnector" + "$ref": "#/$defs/KafkaSinkConnector" }, { - "$ref": "#/definitions/KafkaSourceConnector" + "$ref": "#/$defs/KafkaSourceConnector" }, { - "$ref": "#/definitions/ProducerApp" + "$ref": "#/$defs/ProducerApp" }, { - "$ref": "#/definitions/StreamsApp" + "$ref": "#/$defs/StreamsApp" } ] }, - "title": "KPOps pipeline schema", + "title": "PipelineSchema", "type": "array" } diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index b4077df6e..293d17dcc 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -46,12 +46,12 @@ class BaseDefaultsComponent(DescConfigModel, ABC): ignored_types=(cached_property, cached_classproperty), ) - enrich: bool = Field( + enrich: SkipJsonSchema[bool] = Field( default=False, description=describe_attr("enrich", __doc__), exclude=True, ) - config: KpopsConfig = Field( + config: SkipJsonSchema[KpopsConfig] = Field( default=..., description=describe_attr("config", __doc__), exclude=True, @@ -61,7 +61,7 @@ class BaseDefaultsComponent(DescConfigModel, ABC): description=describe_attr("handlers", __doc__), exclude=True, ) - validate_: bool = Field( + validate_: SkipJsonSchema[bool] = Field( validation_alias=AliasChoices("validate", "validate_"), default=True, description=describe_attr("validate", __doc__), diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index badf0bf5f..f32baf3ee 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -8,7 +8,7 @@ from pydantic 
import Field, RootModel from pydantic.fields import FieldInfo -from pydantic.json_schema import GenerateJsonSchema, model_json_schema +from pydantic.json_schema import GenerateJsonSchema, SkipJsonSchema, model_json_schema from kpops.cli.registry import _find_classes from kpops.components import PipelineComponent @@ -101,18 +101,24 @@ def gen_pipeline_schema( # re-assign component type as Literal to work as discriminator for component in components: component.model_fields["type"] = FieldInfo( - annotation=Literal[component.type], # type: ignore[reportGeneralTypeIssues] + annotation=SkipJsonSchema[Literal[component.type]], # type: ignore[reportGeneralTypeIssues] default=component.type, ) extra_schema = { "type": "model-field", "schema": { - "type": "literal", - "expected": [component.type], - "metadata": { - "pydantic.internal.needs_apply_discriminated_union": False + "type": "default", + "schema": { + "type": "literal", + "expected": [component.type], + "metadata": { + "pydantic.internal.needs_apply_discriminated_union": False, + "pydantic_js_annotation_functions": [], + }, }, + "default": component.type, }, + "serialization_exclude": True, "metadata": { "pydantic_js_functions": [], "pydantic_js_annotation_functions": [], @@ -133,7 +139,7 @@ def gen_pipeline_schema( ] class PipelineSchema(RootModel): - root: Sequence[AnnotatedPipelineComponents] # type: ignore + root: Sequence[AnnotatedPipelineComponents] # pyright:ignore[reportGeneralTypeIssues] schema = PipelineSchema.model_json_schema(by_alias=True) print(json.dumps(schema, indent=4, sort_keys=True)) From 0c803c4982b652182f887ee889a221e05f9d4f52 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 19:58:05 +0100 Subject: [PATCH 59/96] Cleanup generated schema: hide discriminator field --- docs/docs/schema/pipeline.json | 25 ------------------------- kpops/utils/gen_schema.py | 7 +++++-- 2 files changed, 5 insertions(+), 27 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index c2ce2844c..644f51431 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -124,11 +124,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "type": { - "const": "helm-app", - "default": "helm-app", - "title": "Type" - }, "version": { "anyOf": [ { @@ -303,11 +298,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "type": { - "const": "kafka-sink-connector", - "default": "kafka-sink-connector", - "title": "Type" - }, "version": { "anyOf": [ { @@ -420,11 +410,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "type": { - "const": "kafka-source-connector", - "default": "kafka-source-connector", - "title": "Type" - }, "version": { "anyOf": [ { @@ -528,11 +513,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "type": { - "const": "producer-app", - "default": "producer-app", - "title": "Type" - }, "version": { "anyOf": [ { @@ -777,11 +757,6 @@ "default": null, "description": "Topic(s) into which the component will write output" }, - "type": { - "const": "streams-app", - "default": "streams-app", - "title": "Type" - }, "version": { "anyOf": [ { diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index f32baf3ee..8d29b4bcc 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -101,8 +101,9 @@ def gen_pipeline_schema( # re-assign component type as Literal to work as discriminator 
for component in components: component.model_fields["type"] = FieldInfo( - annotation=SkipJsonSchema[Literal[component.type]], # type: ignore[reportGeneralTypeIssues] + annotation=Literal[component.type], # type: ignore[reportGeneralTypeIssues] default=component.type, + exclude=True, ) extra_schema = { "type": "model-field", @@ -113,7 +114,9 @@ def gen_pipeline_schema( "expected": [component.type], "metadata": { "pydantic.internal.needs_apply_discriminated_union": False, - "pydantic_js_annotation_functions": [], + "pydantic_js_annotation_functions": [ + SkipJsonSchema().__get_pydantic_json_schema__ # pyright:ignore[reportGeneralTypeIssues] + ], }, }, "default": component.type, From dac09bce6c5ae110e9af4bea3ab659e940f3b042 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 20:01:37 +0100 Subject: [PATCH 60/96] Update test snapshot --- .../snapshots/snap_test_schema_generation.py | 260 +++++++++++------- 1 file changed, 158 insertions(+), 102 deletions(-) diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index 2dd92b512..f8f75d870 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -8,16 +8,21 @@ snapshots = Snapshot() snapshots['TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation'] = '''{ - "definitions": { + "$defs": { "EmptyPipelineComponent": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -33,21 +38,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "empty-pipeline-component", - "enum": [ - "empty-pipeline-component" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -62,7 +62,7 @@ "properties": { "components": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Components to read from", @@ -71,7 +71,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Input topics", @@ -87,16 +87,28 @@ "description": "Input topic.", "properties": { "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/InputTopicTypes" + }, { - "$ref": "#/definitions/InputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type" } }, @@ -122,14 +134,19 @@ "type": "string" }, "SubPipelineComponent": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which 
the component will read input", "title": "From" }, @@ -145,21 +162,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component", - "enum": [ - "sub-pipeline-component" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -169,14 +181,19 @@ "type": "object" }, "SubPipelineComponentCorrect": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -192,21 +209,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-correct", - "enum": [ - "sub-pipeline-component-correct" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -216,6 +228,7 @@ "type": "object" }, "SubPipelineComponentCorrectDocstr": { + "additionalProperties": true, "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma", "properties": { "example_attr": { @@ -224,11 +237,15 @@ "type": "string" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -244,22 +261,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-correct-docstr", - "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. 
The description extraction terminates at the correct place, deletes 1 trailing coma", - "enum": [ - "sub-pipeline-component-correct-docstr" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -270,14 +281,19 @@ "type": "object" }, "SubPipelineComponentNoSchemaTypeNoType": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -293,21 +309,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-no-schema-type-no-type", - "enum": [ - "sub-pipeline-component-no-schema-type-no-type" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -317,6 +328,7 @@ "type": "object" }, "ToSection": { + "additionalProperties": false, "description": "Holds multiple output topics.", "properties": { "models": { @@ -330,7 +342,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/TopicConfig" + "$ref": "#/$defs/TopicConfig" }, "default": {}, "description": "Output topics", @@ -362,38 +374,82 @@ "type": "object" }, "key_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Key schema class name", - "title": "Key schema", - "type": "string" + "title": "Key schema" }, "partitions_count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Number of partitions into which the topic is divided", - "title": "Partitions count", - "type": "integer" + "title": "Partitions count" }, "replication_factor": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Replication factor of the topic", - "title": "Replication factor", - "type": "integer" + "title": "Replication factor" }, "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/OutputTopicTypes" + }, { - "$ref": "#/definitions/OutputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type", "title": "Topic type" }, "value_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Value schema class name", - "title": "Value schema", - "type": "string" + "title": "Value schema" } }, "title": "TopicConfig", @@ -403,33 +459,33 @@ "items": { "discriminator": { "mapping": { - "empty-pipeline-component": "#/definitions/EmptyPipelineComponent", - "sub-pipeline-component": "#/definitions/SubPipelineComponent", - "sub-pipeline-component-correct": "#/definitions/SubPipelineComponentCorrect", - "sub-pipeline-component-correct-docstr": "#/definitions/SubPipelineComponentCorrectDocstr", - 
"sub-pipeline-component-no-schema-type-no-type": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + "empty-pipeline-component": "#/$defs/EmptyPipelineComponent", + "sub-pipeline-component": "#/$defs/SubPipelineComponent", + "sub-pipeline-component-correct": "#/$defs/SubPipelineComponentCorrect", + "sub-pipeline-component-correct-docstr": "#/$defs/SubPipelineComponentCorrectDocstr", + "sub-pipeline-component-no-schema-type-no-type": "#/$defs/SubPipelineComponentNoSchemaTypeNoType" }, "propertyName": "type" }, "oneOf": [ { - "$ref": "#/definitions/EmptyPipelineComponent" + "$ref": "#/$defs/EmptyPipelineComponent" }, { - "$ref": "#/definitions/SubPipelineComponent" + "$ref": "#/$defs/SubPipelineComponent" }, { - "$ref": "#/definitions/SubPipelineComponentCorrect" + "$ref": "#/$defs/SubPipelineComponentCorrect" }, { - "$ref": "#/definitions/SubPipelineComponentCorrectDocstr" + "$ref": "#/$defs/SubPipelineComponentCorrectDocstr" }, { - "$ref": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + "$ref": "#/$defs/SubPipelineComponentNoSchemaTypeNoType" } ] }, - "title": "KPOps pipeline schema", + "title": "PipelineSchema", "type": "array" } ''' From 566deba9380f4bdc5bc26e33d5c112bce8e39b15 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 20:32:59 +0100 Subject: [PATCH 61/96] Refactor schema generation --- kpops/cli/settings_sources.py | 1 + kpops/utils/gen_schema.py | 60 ++++++++++++++++------------------- 2 files changed, 29 insertions(+), 32 deletions(-) diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index 0d8f0d5f8..f3713cc8d 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -7,6 +7,7 @@ from kpops.utils.yaml_loading import load_yaml_file + # TODO(Ivan Yordanov): Test! class YamlConfigSettingsSource(PydanticBaseSettingsSource): """Loads variables from a YAML file at the project's root.""" diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 8d29b4bcc..7f28b4fea 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -9,6 +9,12 @@ from pydantic import Field, RootModel from pydantic.fields import FieldInfo from pydantic.json_schema import GenerateJsonSchema, SkipJsonSchema, model_json_schema +from pydantic_core.core_schema import ( + DefinitionsSchema, + LiteralSchema, + ModelField, + ModelFieldsSchema, +) from kpops.cli.registry import _find_classes from kpops.components import PipelineComponent @@ -87,7 +93,6 @@ def gen_pipeline_schema( log.warning("No components are provided, no schema is generated.") return # Add stock components if enabled - # components: tuple[type[PipelineComponent], ...] = (PipelineComponent,KubernetesApp,) components: tuple[type[PipelineComponent], ...] 
= () if include_stock_components: components = _add_components("kpops.components") @@ -101,40 +106,29 @@ def gen_pipeline_schema( # re-assign component type as Literal to work as discriminator for component in components: component.model_fields["type"] = FieldInfo( - annotation=Literal[component.type], # type: ignore[reportGeneralTypeIssues] + annotation=Literal[component.type], # type:ignore[valid-type] default=component.type, exclude=True, ) - extra_schema = { - "type": "model-field", - "schema": { - "type": "default", - "schema": { - "type": "literal", - "expected": [component.type], - "metadata": { - "pydantic.internal.needs_apply_discriminated_union": False, - "pydantic_js_annotation_functions": [ - SkipJsonSchema().__get_pydantic_json_schema__ # pyright:ignore[reportGeneralTypeIssues] - ], - }, + core_schema: DefinitionsSchema = ( + component.__pydantic_core_schema__ + ) # pyright:ignore[reportGeneralTypeIssues] + model_schema: ModelFieldsSchema = core_schema["schema"][ + "schema" + ] # pyright:ignore[reportGeneralTypeIssues,reportTypedDictNotRequiredAccess] + model_schema["fields"]["type"] = ModelField( + type="model-field", + schema=LiteralSchema( + type="literal", + expected=[component.type], + metadata={ + "pydantic.internal.needs_apply_discriminated_union": False, + "pydantic_js_annotation_functions": [ + SkipJsonSchema().__get_pydantic_json_schema__ # pyright:ignore[reportGeneralTypeIssues] + ], }, - "default": component.type, - }, - "serialization_exclude": True, - "metadata": { - "pydantic_js_functions": [], - "pydantic_js_annotation_functions": [], - }, - } - if "schema" not in component.__pydantic_core_schema__["schema"]: - component.__pydantic_core_schema__["schema"]["fields"][ - "type" - ] = extra_schema - else: - component.__pydantic_core_schema__["schema"]["schema"]["fields"][ - "type" - ] = extra_schema + ), + ) PipelineComponents = Union[components] # type: ignore[valid-type] AnnotatedPipelineComponents = Annotated[ @@ -142,7 +136,9 @@ def gen_pipeline_schema( ] class PipelineSchema(RootModel): - root: Sequence[AnnotatedPipelineComponents] # pyright:ignore[reportGeneralTypeIssues] + root: Sequence[ + AnnotatedPipelineComponents # pyright:ignore[reportGeneralTypeIssues] + ] schema = PipelineSchema.model_json_schema(by_alias=True) print(json.dumps(schema, indent=4, sort_keys=True)) From f0791fcd8a5bc2906ec9fc787de13524cd643dbf Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 20:36:39 +0100 Subject: [PATCH 62/96] Fix format --- hooks/gen_docs/gen_docs_env_vars.py | 1 + 1 file changed, 1 insertion(+) diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index bf4a1ff7b..cf85539a9 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -268,6 +268,7 @@ def fill_csv_pipeline_config(target: Path) -> None: field_name, ) + # TODO(Ivan Yordanov): Should we yield both the nested fields and their parents? def collect_fields(settings: type[BaseSettings]) -> Iterator[tuple[str, FieldInfo]]: """Collect and yield all fields in a settings class. 
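
The schema generation reworked in the patches above hinges on Pydantic v2
discriminated unions wrapped in a `RootModel`. A minimal, self-contained
sketch of that pattern, with two toy components standing in for the real
KPOps ones:

    from collections.abc import Sequence
    from typing import Annotated, Literal, Union

    from pydantic import BaseModel, Field, RootModel

    class ProducerApp(BaseModel):
        type: Literal["producer-app"] = "producer-app"

    class StreamsApp(BaseModel):
        type: Literal["streams-app"] = "streams-app"

    # "type" is the discriminator, so the generated schema is a "oneOf"
    # with a discriminator mapping over the definitions in "$defs".
    PipelineComponents = Union[ProducerApp, StreamsApp]
    AnnotatedPipelineComponents = Annotated[
        PipelineComponents, Field(discriminator="type")
    ]

    class PipelineSchema(RootModel):
        root: Sequence[AnnotatedPipelineComponents]

    print(PipelineSchema.model_json_schema(by_alias=True))

Each component's Literal `type` default is what lets the union dispatch
without a wrapper key, which is why the patches re-assign `type` as a
Literal on every component class before generating the schema.
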
From 6ad04c8248094108f3143cdeddd456c79c4ccebe Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 30 Oct 2023 20:40:11 +0100 Subject: [PATCH 63/96] Fix Pyright diagnostics --- kpops/components/base_components/kafka_connector.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index a2130db4f..b4ff7e79b 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -5,7 +5,7 @@ from functools import cached_property from typing import Any, NoReturn -from pydantic import Field, ValidationInfo, field_validator +from pydantic import Field, PrivateAttr, ValidationInfo, field_validator from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -46,7 +46,6 @@ class KafkaConnector(PipelineComponent, ABC): :param version: Helm chart version, defaults to "1.0.4" :param resetter_values: Overriding Kafka Connect Resetter Helm values. E.g. to override the Image Tag etc., defaults to dict - :param _connector_type: Defines the type of the connector (Source or Sink) """ namespace: str = Field( @@ -71,7 +70,7 @@ class KafkaConnector(PipelineComponent, ABC): default_factory=dict, description=describe_attr("resetter_values", __doc__), ) - _connector_type: KafkaConnectorType + _connector_type: KafkaConnectorType = PrivateAttr() @field_validator("app") @classmethod @@ -276,7 +275,7 @@ class KafkaSourceConnector(KafkaConnector): description=describe_attr("offset_topic", __doc__), ) - _connector_type: KafkaConnectorType = KafkaConnectorType.SOURCE + _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SOURCE) @override def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: @@ -321,7 +320,7 @@ def __run_kafka_connect_resetter(self, dry_run: bool) -> None: class KafkaSinkConnector(KafkaConnector): """Kafka sink connector model.""" - _connector_type: KafkaConnectorType = KafkaConnectorType.SINK + _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SINK) @override def add_input_topics(self, topics: list[str]) -> None: From b8cf538900638d3c78a7fd3886dc7a3a3da0ca82 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 31 Oct 2023 14:13:33 +0200 Subject: [PATCH 64/96] chore: add comment --- kpops/components/base_components/models/to_section.py | 1 + kpops/config.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 743caf2a2..d6066da46 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -64,6 +64,7 @@ class TopicConfig(DescConfigModel): populate_by_name=True, ) + # TODO(Ivan Yordanov): check if `values` is needed, refine type annotation @model_validator(mode="after") def extra_topic_role(cls, values: Any) -> Any: """Ensure that cls.role is used correctly, assign type if needed.""" diff --git a/kpops/config.py b/kpops/config.py index c6c5c18b6..51334820e 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -141,7 +141,6 @@ def settings_customise_sources( dotenv_settings: PydanticBaseSettingsSource, file_secret_settings: PydanticBaseSettingsSource, ): - # breakpoint() return ( env_settings, init_settings, From 9788ab49ab5b0a5f6b9d214916e485ddc0bfe77a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 2 Nov 
2023 22:43:38 +0200 Subject: [PATCH 65/96] feat: Introduce functions that mimic the ``model_dump`` capabilities --- kpops/utils/pydantic.py | 66 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 65 insertions(+), 1 deletion(-) diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 321ddc545..1cf89d4e9 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -1,7 +1,8 @@ from typing import Any import humps -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict, Field +from pydantic.alias_generators import to_snake from kpops.utils.docstring import describe_object @@ -21,6 +22,69 @@ def to_dot(s: str) -> str: return s.replace("_", ".") +def by_alias(field_name: str, model: BaseModel) -> str: + """Return the field alias if it exists, else the field name. + + :param field_name: Name of the field to get the alias of + :param model: Model that owns the field + """ + return model.model_fields.get(field_name, Field()).alias or field_name + + +def exclude_by_value( + dumped_model: dict[str, Any], *excluded_values: Any +) -> dict[str, Any]: + """Strip all key-value pairs with certain values. + + :param dumped_model: Dumped model + :param excluded_values: Excluded field values + :return: Dumped model without excluded fields + """ + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_value not in excluded_values + } + + +def exclude_by_name( + dumped_model: dict[str, Any], *excluded_fields: str +) -> dict[str, Any]: + """Strip all key-value pairs with certain field names. + + :param dumped_model: Dumped model + :param excluded_fields: Excluded field names + :return: Dumped model without excluded fields + """ + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_name not in excluded_fields + } + + +def exclude_defaults(model: BaseModel, dumped_model: dict[str, Any]) -> dict[str, Any]: + """Strip all key-value pairs with default values.
+ + :param model: Model + :param dumped_model: Dumped model + :return: Dumped model without defaults + """ + default_fields = { + field_name: field_info.default + for field_name, field_info in model.model_fields.items() + } + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_value + not in ( + default_fields.get(field_name), + default_fields.get(to_snake(field_name)), + ) + } + + class CamelCaseConfigModel(BaseModel): model_config = ConfigDict( alias_generator=to_camel, From d8337ca24ffe51cb0955bb31128e473bb3451a48 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 2 Nov 2023 22:44:44 +0200 Subject: [PATCH 66/96] refactor: add serializers --- .../component_handlers/kafka_connect/model.py | 30 ++++++++++++++----- kpops/components/base_components/helm_app.py | 17 +++++------ .../streams_bootstrap/streams/model.py | 27 +++++++---------- 3 files changed, 41 insertions(+), 33 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index a885c514e..bd6b2659d 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -1,10 +1,23 @@ from enum import Enum from typing import Any, Literal -from pydantic import BaseModel, ConfigDict, Field, field_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + SerializationInfo, + field_validator, + model_serializer, +) from typing_extensions import override -from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel, to_dot +from kpops.utils.pydantic import ( + CamelCaseConfigModel, + DescConfigModel, + by_alias, + exclude_by_value, + to_dot, +) class KafkaConnectorType(str, Enum): @@ -43,10 +56,11 @@ def connector_class_must_contain_dot(cls, connector_class: str) -> str: def class_name(self) -> str: return self.connector_class.split(".")[-1] - # TODO(Ivan Yordanov): replace with a function decorated with `@model_serializer` - @override - def model_dump(self, **_) -> dict[str, Any]: - return super().model_dump(by_alias=True, exclude_none=True) + # TODO(Ivan Yordanov): Do it properly. Currently hacky and potentially unsafe + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + result = exclude_by_value(handler(self), None) + return {by_alias(name, self): value for name, value in result.items()} class ConnectorTask(BaseModel): @@ -90,7 +104,9 @@ class KafkaConnectResetterValues(CamelCaseConfigModel): config: KafkaConnectResetterConfig name_override: str - # TODO(Ivan Yordanov): replace with a function decorated with `@model_serializer` + # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer` + # BEWARE! 
All default values are enforced, hard to replicate without + # access to ``model_dump`` @override def model_dump(self, **_) -> dict[str, Any]: return super().model_dump(by_alias=True, exclude_none=True) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index a7b3acc95..4f73b05f5 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -4,7 +4,7 @@ from functools import cached_property from typing import Any -from pydantic import Field +from pydantic import Field, SerializationInfo, model_serializer from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -19,6 +19,7 @@ from kpops.components.base_components.kubernetes_app import KubernetesApp from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr +from kpops.utils.pydantic import exclude_by_name log = logging.getLogger("HelmApp") @@ -160,12 +161,8 @@ def print_helm_diff(self, stdout: str) -> None: new_release = Helm.load_manifest(stdout) self.helm_diff.log_helm_diff(log, current_release, new_release) - # TODO(Ivan Yordanov): replace with a function decorated with `@model_serializer` - @override - def model_dump(self, *, exclude=None, **kwargs) -> dict[str, Any]: - # HACK: workaround for Pydantic to exclude cached properties during model export - if exclude is None: - exclude = set() - exclude.add("helm") - exclude.add("helm_diff") - return super().model_dump(exclude=exclude, **kwargs) + # HACK: workaround for Pydantic to exclude cached properties during model export + # TODO(Ivan Yordanov): Do it properly. Currently hacky and potentially unsafe + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + return exclude_by_name(handler(self), "helm", "helm_diff") diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 882a9faeb..cb987fb46 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,7 +1,7 @@ +from collections.abc import Callable from typing import Any from pydantic import ConfigDict, Field, SerializationInfo, model_serializer -from pydantic.alias_generators import to_snake from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( @@ -9,7 +9,12 @@ KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel +from kpops.utils.pydantic import ( + CamelCaseConfigModel, + DescConfigModel, + exclude_by_value, + exclude_defaults, +) class StreamsConfig(KafkaStreamsConfig): @@ -73,20 +78,10 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: # TODO(Ivan Yordanov): Do it properly. 
Currently hacky and potentially unsafe @model_serializer(mode="wrap", when_used="always") - def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: - result = handler(self) - # if dict(result.items()).get("extraInputTopics"): - # breakpoint() - default_fields = { - field_name: field_info.default - for field_name, field_info in self.model_fields.items() - } - return { - k: v - for k, v in result.items() - if (v != default_fields.get(k) and v is not None) - and (v != default_fields.get(to_snake(k)) and v is not None) - } + def serialize_model( + self, handler: Callable, info: SerializationInfo + ) -> dict[str, Any]: + return exclude_defaults(self, exclude_by_value(handler(self))) class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): From 268d2c736ee66b7c81260bfdca9acf315e397335 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 2 Nov 2023 23:01:11 +0200 Subject: [PATCH 67/96] refactor: get rid of unneeded validator input --- kpops/components/base_components/models/from_section.py | 7 +++---- kpops/components/base_components/models/to_section.py | 7 +++---- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index bb82dc780..5f1dae193 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -37,13 +37,12 @@ class FromTopic(DescConfigModel): ) @model_validator(mode="after") - @classmethod - def extra_topic_role(cls, values: Any) -> Any: + def extra_topic_role(self) -> Any: """Ensure that cls.role is used correctly, assign type if needed.""" - if values.type == InputTopicTypes.INPUT and values.role: + if self.type == InputTopicTypes.INPUT and self.role: msg = "Define role only if `type` is `pattern` or `None`" raise ValueError(msg) - return values + return self ComponentName = NewType("ComponentName", str) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index d6066da46..56da461c8 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -64,14 +64,13 @@ class TopicConfig(DescConfigModel): populate_by_name=True, ) - # TODO(Ivan Yordanov): check if `values` is needed, refine type annotation @model_validator(mode="after") - def extra_topic_role(cls, values: Any) -> Any: + def extra_topic_role(self) -> Any: """Ensure that cls.role is used correctly, assign type if needed.""" - if values.type and values.role: + if self.type and self.role: msg = "Define `role` only if `type` is undefined" raise ValueError(msg) - return values + return self class ToSection(DescConfigModel): From eddd4f812e2dedfb6a715b6afb79acb9722c7593 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 2 Nov 2023 23:22:12 +0200 Subject: [PATCH 68/96] feat: enable nested env vars --- kpops/cli/main.py | 4 +++- kpops/config.py | 6 +----- tests/pipeline/test_pipeline.py | 24 ++++++++++++++++++++++++ 3 files changed, 28 insertions(+), 6 deletions(-) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index c798f5013..be16cbd32 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -19,7 +19,7 @@ from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper -from kpops.config import ENV_PREFIX, 
KpopsConfig +from kpops.config import KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema @@ -28,6 +28,8 @@ from kpops.components.base_components import PipelineComponent +ENV_PREFIX = KpopsConfig.model_config.get("env_prefix") + LOG_DIVIDER = "#" * 100 app = dtyper.Typer(pretty_exceptions_enable=False) diff --git a/kpops/config.py b/kpops/config.py index 51334820e..6eb631004 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -14,8 +14,6 @@ from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object -ENV_PREFIX = "KPOPS_" - class TopicNameConfig(BaseSettings): """Configure the topic name variables you can use in the pipeline definition.""" @@ -38,8 +36,6 @@ class SchemaRegistryConfig(BaseSettings): description="Whether the Schema Registry handler should be initialized.", ) url: AnyHttpUrl = Field( - # For validating URLs use parse_obj_as - # https://github.com/pydantic/pydantic/issues/1106 default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8081"), description="Address of the Schema Registry.", ) @@ -129,7 +125,7 @@ class KpopsConfig(BaseSettings): description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.", ) - model_config = SettingsConfigDict(env_prefix="KPOPS_") + model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__") @override @classmethod diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 2fea47858..5d6c69832 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -483,6 +483,30 @@ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch): == "env_broker" ) + def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch): + monkeypatch.setenv( + name="KPOPS_SCHEMA_REGISTRY__URL", value="http://somename:1234" + ) + + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "custom-config/pipeline.yaml"), + "--config", + str(RESOURCE_PATH / "custom-config/config.yaml"), + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert ( + enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"] + == "http://somename:1234/" + ) + def test_model_serialization(self, snapshot: SnapshotTest): """Test model serialization of component containing pathlib.Path attribute.""" result = runner.invoke( From be30a0f6fc5d001ec600fa82ab00726dd6782f8a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 2 Nov 2023 23:26:36 +0200 Subject: [PATCH 69/96] chore: remove todo, yaml source seems tested already --- kpops/cli/settings_sources.py | 1 - 1 file changed, 1 deletion(-) diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py index f3713cc8d..d93304c58 100644 --- a/kpops/cli/settings_sources.py +++ b/kpops/cli/settings_sources.py @@ -8,7 +8,6 @@ from kpops.utils.yaml_loading import load_yaml_file -# TODO(Ivan Yordanov): Test! 
class YamlConfigSettingsSource(PydanticBaseSettingsSource): """Loads variables from a YAML file at the project's root.""" From c3cf11b0fb341e528ff176f5f2e090158b3140c3 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 2 Nov 2023 23:29:16 +0200 Subject: [PATCH 70/96] style: fix todo messages --- kpops/component_handlers/kafka_connect/model.py | 2 +- kpops/components/base_components/helm_app.py | 2 +- kpops/components/streams_bootstrap/streams/model.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index bd6b2659d..c25a19cb1 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -56,7 +56,7 @@ def connector_class_must_contain_dot(cls, connector_class: str) -> str: def class_name(self) -> str: return self.connector_class.split(".")[-1] - # TODO(Ivan Yordanov): Do it properly. Currently hacky and potentially unsafe + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution @model_serializer(mode="wrap", when_used="always") def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: result = exclude_by_value(handler(self), None) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index 4f73b05f5..5d70bacfd 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -162,7 +162,7 @@ def print_helm_diff(self, stdout: str) -> None: self.helm_diff.log_helm_diff(log, current_release, new_release) # HACK: workaround for Pydantic to exclude cached properties during model export - # TODO(Ivan Yordanov): Do it properly. Currently hacky and potentially unsafe + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution @model_serializer(mode="wrap", when_used="always") def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: return exclude_by_name(handler(self), "helm", "helm_diff") diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index cb987fb46..4e3162cbc 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -76,7 +76,7 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: self.extra_input_topics.get(role, []) + topics ) - # TODO(Ivan Yordanov): Do it properly. Currently hacky and potentially unsafe + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. 
Find cleaner solution @model_serializer(mode="wrap", when_used="always") def serialize_model( self, handler: Callable, info: SerializationInfo From 242986c3b235fd4c9bfde54ddd752e56d2aac1a6 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Fri, 3 Nov 2023 00:04:03 +0200 Subject: [PATCH 71/96] chore: comment --- hooks/gen_docs/gen_docs_cli_usage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hooks/gen_docs/gen_docs_cli_usage.py b/hooks/gen_docs/gen_docs_cli_usage.py index 469274745..0ee00269c 100644 --- a/hooks/gen_docs/gen_docs_cli_usage.py +++ b/hooks/gen_docs/gen_docs_cli_usage.py @@ -7,7 +7,7 @@ PATH_KPOPS_MAIN = PATH_ROOT / "kpops/cli/main.py" PATH_CLI_COMMANDS_DOC = PATH_ROOT / "docs/docs/user/references/cli-commands.md" -# TODO(@sujuka99): try to use typer_cli.main.docs here instead +# TODO(Ivan Yordanov): try to use typer_cli.main.docs here instead # https://github.com/bakdata/kpops/issues/297 if __name__ == "__main__": From d079008fa860a1ff629df4abde9d0348c45b8185 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Fri, 3 Nov 2023 10:57:13 +0200 Subject: [PATCH 72/96] feat: Add dotenv support --- kpops/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/config.py b/kpops/config.py index 6eb631004..359fbf831 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -125,7 +125,7 @@ class KpopsConfig(BaseSettings): description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.", ) - model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__") + model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__", env_file=".env") @override @classmethod From 94b17f5a2d5f0208022318bc09604d37f6b7f6e4 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Fri, 3 Nov 2023 11:47:47 +0200 Subject: [PATCH 73/96] feat: Allow custom paths to multiple dotenv files, test --- hooks/gen_docs/gen_docs_env_vars.py | 2 +- kpops/cli/main.py | 33 ++++++++++++++++----- tests/pipeline/resources/dotenv/.env | 3 ++ tests/pipeline/resources/dotenv/config.yaml | 12 ++++++++ tests/pipeline/resources/dotenv/custom.env | 3 ++ tests/pipeline/test_pipeline.py | 24 +++++++++++++++ 6 files changed, 68 insertions(+), 9 deletions(-) create mode 100644 tests/pipeline/resources/dotenv/.env create mode 100644 tests/pipeline/resources/dotenv/config.yaml create mode 100644 tests/pipeline/resources/dotenv/custom.env diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index cf85539a9..ddf0d6c81 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -269,7 +269,7 @@ def fill_csv_pipeline_config(target: Path) -> None: ) -# TODO(Ivan Yordanov): Should we yield both the nested fields and their parents? +# TODO(Ivan Yordanov): Separate complex fields into their "leaves" def collect_fields(settings: type[BaseSettings]) -> Iterator[tuple[str, FieldInfo]]: """Collect and yield all fields in a settings class. diff --git a/kpops/cli/main.py b/kpops/cli/main.py index be16cbd32..55a3c37f0 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -34,6 +34,18 @@ app = dtyper.Typer(pretty_exceptions_enable=False) +DOTENV_PATH_OPTION: list[Path] = typer.Option( + default=[Path(".env")], + exists=True, + dir_okay=False, + file_okay=True, + envvar=f"{ENV_PREFIX}DOTENV_PATH", + help=( + "Path to dotenvfile. Multiple files can be provided. " + "The files will be loaded in order, with each file overriding the previous one." 
+ ), +) + BASE_DIR_PATH_OPTION: Path = typer.Option( default=Path(), exists=True, @@ -197,14 +209,14 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_kpops_config( - config: Path, defaults: Optional[Path], verbose: bool + config: Path, defaults: Optional[Path], verbose: bool, dotenv: list[Path] ) -> KpopsConfig: setup_logging_level(verbose) YamlConfigSettingsSource.path_to_config = config if defaults: - kpops_config = KpopsConfig(defaults_path=defaults) + kpops_config = KpopsConfig(defaults_path=defaults, _env_file=dotenv) else: - kpops_config = KpopsConfig() + kpops_config = KpopsConfig(_env_file=dotenv) kpops_config.defaults_path = config.parent / kpops_config.defaults_path return kpops_config @@ -246,6 +258,7 @@ def generate( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: list[Path] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, template: bool = typer.Option(False, help="Run Helm template"), @@ -253,7 +266,7 @@ def generate( filter_type: FilterType = FILTER_TYPE, verbose: bool = VERBOSE_OPTION, ) -> Pipeline: - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -281,6 +294,7 @@ def deploy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: list[Path] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -288,7 +302,7 @@ def deploy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -306,6 +320,7 @@ def destroy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: list[Path] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -313,7 +328,7 @@ def destroy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -330,6 +345,7 @@ def reset( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: list[Path] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -337,7 +353,7 @@ def reset( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -355,6 +371,7 @@ def clean( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] 
= COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: list[Path] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -362,7 +379,7 @@ def clean( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) diff --git a/tests/pipeline/resources/dotenv/.env b/tests/pipeline/resources/dotenv/.env new file mode 100644 index 000000000..9829b1218 --- /dev/null +++ b/tests/pipeline/resources/dotenv/.env @@ -0,0 +1,3 @@ +KPOPS_environment="default" +KPOPS_schema_registry__enabled="true" +KPOPS_schema_registry__url="http://localhost:8081" diff --git a/tests/pipeline/resources/dotenv/config.yaml b/tests/pipeline/resources/dotenv/config.yaml new file mode 100644 index 000000000..196b583f5 --- /dev/null +++ b/tests/pipeline/resources/dotenv/config.yaml @@ -0,0 +1,12 @@ +# environment: development +defaults_path: ../defaults.yaml +topic_name_config: + default_error_topic_name: "${component_name}-dead-letter-topic" + default_output_topic_name: "${component_name}-test-topic" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_connect: + url: "http://localhost:8083" +kafka_rest: + url: "http://localhost:8082" +helm_config: + api_version: "2.1.1" diff --git a/tests/pipeline/resources/dotenv/custom.env b/tests/pipeline/resources/dotenv/custom.env new file mode 100644 index 000000000..3e5371e98 --- /dev/null +++ b/tests/pipeline/resources/dotenv/custom.env @@ -0,0 +1,3 @@ +KPOPS_environment="custom" +KPOPS_schema_registry__enabled="false" +KPOPS_schema_registry__url="http://notlocalhost:8081" diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 5d6c69832..2254ff955 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -527,6 +527,30 @@ def test_model_serialization(self, snapshot: SnapshotTest): enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") + def test_dotenv_support(self): + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str(RESOURCE_PATH / "custom-config/pipeline.yaml"), + "--defaults", + str(RESOURCE_PATH), + "--config", + str(RESOURCE_PATH / "dotenv/config.yaml"), + "--dotenv", + str(RESOURCE_PATH / "dotenv/.env"), + "--dotenv", + str(RESOURCE_PATH / "dotenv/custom.env") + ], + catch_exceptions=False, + ) + assert result.exit_code == 0 + + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert enriched_pipeline["components"][1]["app"]["streams"]["schemaRegistryUrl"] == "http://notlocalhost:8081/" + def test_short_topic_definition(self): result = runner.invoke( app, From ca4425408029efd8a1a8110c56e79a437131b6c5 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Fri, 3 Nov 2023 11:54:21 +0200 Subject: [PATCH 74/96] docs: update dotenv information --- .../user/core-concepts/variables/environment_variables.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/docs/user/core-concepts/variables/environment_variables.md b/docs/docs/user/core-concepts/variables/environment_variables.md index 2a57aabea..35ca235d7 100644 --- a/docs/docs/user/core-concepts/variables/environment_variables.md +++ 
b/docs/docs/user/core-concepts/variables/environment_variables.md @@ -6,10 +6,7 @@ Environment variables can be set by using the [export](https://www.unix.com/man- !!! tip "dotenv files" - Support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20), - but not implemented in KPOps yet. One of the possible ways to still - use one and export the contents manually is with the following command: `#!sh export $(xargs < .env)`. - This would work in `bash` suppose there are no spaces inside the values. + KPOps currently supports `.env` files only for variables related to the [config](../config.md). Full support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20). One of the possible ways to use one and export the contents manually is with the following command: `#!sh export $(xargs < .env)`. This would work in `bash`, provided there are no spaces inside the values. From 85e3085987fdd99006c5cbb1d5014ee1b6184f2f Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Fri, 3 Nov 2023 12:01:40 +0200 Subject: [PATCH 75/96] tests: pass --- .../docs/resources/variables/cli_env_vars.env | 3 +++ docs/docs/resources/variables/cli_env_vars.md | 15 +++++------ docs/docs/user/references/cli-commands.md | 5 ++++ kpops/cli/main.py | 25 +++++++++++-------- kpops/config.py | 2 +- tests/pipeline/test_pipeline.py | 7 ++++-- 6 files changed, 37 insertions(+), 20 deletions(-) diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env index dec1d8b3a..e14fd46c6 100644 --- a/docs/docs/resources/variables/cli_env_vars.env +++ b/docs/docs/resources/variables/cli_env_vars.env @@ -12,6 +12,9 @@ KPOPS_PIPELINE_BASE_DIR=. KPOPS_CONFIG_PATH=config.yaml # Path to defaults folder KPOPS_DEFAULT_PATH # No default value, not required +# Path to dotenvfile. Multiple files can be provided. The files will +# be loaded in order, with each file overriding the previous one. +KPOPS_DOTENV_PATH # No default value, not required # Path to YAML with pipeline definition KPOPS_PIPELINE_PATH # No default value, required # Comma separated list of steps to apply the command on diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md index 763cb936e..ff56a5966 100644 --- a/docs/docs/resources/variables/cli_env_vars.md +++ b/docs/docs/resources/variables/cli_env_vars.md @@ -1,9 +1,10 @@ These variables are a lower priority alternative to the commands' flags. If a variable is set, the corresponding flag does not have to be specified in commands. Variables marked as required can instead be set as flags. -| Name |Default Value|Required| Description | -|-----------------------|-------------|--------|----------------------------------------------------------------------| -|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory)| -|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | -|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | -|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | -|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | +| Name |Default Value|Required| Description | +|-----------------------|-------------|--------|------------------------------------------------------------------------------------------------------------------------------------| +|KPOPS_PIPELINE_BASE_DIR|. 
|False |Base directory to the pipelines (default is current working directory) | +|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | +|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | +|KPOPS_DOTENV_PATH | |False |Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one.| +|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | +|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index 100f05c4a..2db2b0243 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -40,6 +40,7 @@ $ kpops clean [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -66,6 +67,7 @@ $ kpops deploy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -92,6 +94,7 @@ $ kpops destroy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -118,6 +121,7 @@ $ kpops generate [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. 
[env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--template / --no-template`: Run Helm template [default: no-template] @@ -144,6 +148,7 @@ $ kpops reset [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 55a3c37f0..f8c250d7f 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -34,8 +34,8 @@ app = dtyper.Typer(pretty_exceptions_enable=False) -DOTENV_PATH_OPTION: list[Path] = typer.Option( - default=[Path(".env")], +DOTENV_PATH_OPTION: Optional[list[Path]] = typer.Option( + default=None, exists=True, dir_okay=False, file_okay=True, @@ -209,14 +209,19 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_kpops_config( - config: Path, defaults: Optional[Path], verbose: bool, dotenv: list[Path] + config: Path, defaults: Optional[Path], verbose: bool, dotenv: Optional[list[Path]] ) -> KpopsConfig: setup_logging_level(verbose) YamlConfigSettingsSource.path_to_config = config if defaults: - kpops_config = KpopsConfig(defaults_path=defaults, _env_file=dotenv) + kpops_config = KpopsConfig( + defaults_path=defaults, + _env_file=dotenv, # pyright: ignore [reportGeneralTypeIssues] + ) else: - kpops_config = KpopsConfig(_env_file=dotenv) + kpops_config = KpopsConfig( + _env_file=dotenv # pyright: ignore [reportGeneralTypeIssues] + ) kpops_config.defaults_path = config.parent / kpops_config.defaults_path return kpops_config @@ -258,7 +263,7 @@ def generate( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, - dotenv: list[Path] = DOTENV_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, template: bool = typer.Option(False, help="Run Helm template"), @@ -294,7 +299,7 @@ def deploy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, - dotenv: list[Path] = DOTENV_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -320,7 +325,7 @@ def destroy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, - dotenv: list[Path] = DOTENV_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -345,7 +350,7 @@ def reset( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, 
pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, - dotenv: list[Path] = DOTENV_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -371,7 +376,7 @@ def clean( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, - dotenv: list[Path] = DOTENV_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, diff --git a/kpops/config.py b/kpops/config.py index 359fbf831..6eb631004 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -125,7 +125,7 @@ class KpopsConfig(BaseSettings): description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.", ) - model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__", env_file=".env") + model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__") @override @classmethod diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 2254ff955..9893ed697 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -542,14 +542,17 @@ def test_dotenv_support(self): "--dotenv", str(RESOURCE_PATH / "dotenv/.env"), "--dotenv", - str(RESOURCE_PATH / "dotenv/custom.env") + str(RESOURCE_PATH / "dotenv/custom.env"), ], catch_exceptions=False, ) assert result.exit_code == 0 enriched_pipeline: dict = yaml.safe_load(result.stdout) - assert enriched_pipeline["components"][1]["app"]["streams"]["schemaRegistryUrl"] == "http://notlocalhost:8081/" + assert ( + enriched_pipeline["components"][1]["app"]["streams"]["schemaRegistryUrl"] + == "http://notlocalhost:8081/" + ) def test_short_topic_definition(self): result = runner.invoke( From 96d88804e1f94f87105d8f80147546a031a2ba4a Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Tue, 31 Oct 2023 12:32:32 +0100 Subject: [PATCH 76/96] Add missing HelmApp docs --- .../user/core-concepts/components/helm-app.md | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 docs/docs/user/core-concepts/components/helm-app.md diff --git a/docs/docs/user/core-concepts/components/helm-app.md b/docs/docs/user/core-concepts/components/helm-app.md new file mode 100644 index 000000000..4a7af609b --- /dev/null +++ b/docs/docs/user/core-concepts/components/helm-app.md @@ -0,0 +1,37 @@ +# HelmApp + +### Usage + +Can be used to deploy any app in Kubernetes using Helm, for example, a REST service that serves Kafka data. + +### Configuration + + + +??? example "`pipeline.yaml`" + + ```yaml + --8<-- + ./docs/resources/pipeline-components/helm-app.yaml + --8<-- + ``` + + + +### Operations + +#### deploy + +Deploy using Helm. + +#### destroy + +Uninstall Helm release. + +#### reset + +Do nothing. + +#### clean + +Do nothing. 
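A note on the serializer pattern running through patches 65-70 above: a Pydantic v2 wrap-mode `@model_serializer` first calls the default `handler` to obtain the plain dump, then post-processes the resulting dict (stripping `None` values, untouched defaults, or cached properties). The sketch below is a minimal, self-contained illustration of that technique, assuming Python 3.10+ and Pydantic v2; the `Example` model and its fields are invented for demonstration and are not part of the kpops codebase.

```python
from typing import Any

from pydantic import BaseModel, SerializationInfo, model_serializer


class Example(BaseModel):
    name: str
    retries: int = 3
    comment: str | None = None

    # Wrap-mode serializer: `handler(self)` yields the default dump,
    # which is then stripped of None values and unchanged defaults.
    @model_serializer(mode="wrap", when_used="always")
    def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]:
        result: dict[str, Any] = handler(self)
        defaults = {
            field_name: field_info.default
            for field_name, field_info in self.model_fields.items()
        }
        return {
            key: value
            for key, value in result.items()
            if value is not None and value != defaults.get(key)
        }


print(Example(name="demo").model_dump())             # {'name': 'demo'}
print(Example(name="demo", retries=5).model_dump())  # {'name': 'demo', 'retries': 5}
```

This mirrors how `exclude_by_value` and `exclude_defaults` from `kpops/utils/pydantic.py` are composed inside the components' `serialize_model` methods, replacing the earlier `model_dump` overrides that could not influence nested serialization.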
From f4bb546d9a0697b18005de7e1b226f3a9c8c3c19 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 8 Nov 2023 13:32:36 +0200 Subject: [PATCH 77/96] misc: feedback and enable config schema gen --- .../docs/resources/variables/cli_env_vars.env | 2 +- docs/docs/resources/variables/cli_env_vars.md | 16 +- docs/docs/schema/config.json | 341 +++++++----------- docs/docs/user/references/cli-commands.md | 10 +- hooks/gen_schema.py | 2 +- kpops/cli/main.py | 17 +- kpops/config.py | 4 +- 7 files changed, 160 insertions(+), 232 deletions(-) diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env index e14fd46c6..dc44ac3a6 100644 --- a/docs/docs/resources/variables/cli_env_vars.env +++ b/docs/docs/resources/variables/cli_env_vars.env @@ -12,7 +12,7 @@ KPOPS_PIPELINE_BASE_DIR=. KPOPS_CONFIG_PATH=config.yaml # Path to defaults folder KPOPS_DEFAULT_PATH # No default value, not required -# Path to dotenvfile. Multiple files can be provided. The files will +# Path to dotenv file. Multiple files can be provided. The files will # be loaded in order, with each file overriding the previous one. KPOPS_DOTENV_PATH # No default value, not required # Path to YAML with pipeline definition diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md index ff56a5966..ed0880bee 100644 --- a/docs/docs/resources/variables/cli_env_vars.md +++ b/docs/docs/resources/variables/cli_env_vars.md @@ -1,10 +1,10 @@ These variables are a lower priority alternative to the commands' flags. If a variable is set, the corresponding flag does not have to be specified in commands. Variables marked as required can instead be set as flags. -| Name |Default Value|Required| Description | -|-----------------------|-------------|--------|----------------------------------------------------------------------------------------------------------------------------------| -|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory) | -|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | -|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | -|KPOPS_DOTENV_PATH | |False |Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one.| -|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | -|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | +| Name |Default Value|Required| Description | +|-----------------------|-------------|--------|-----------------------------------------------------------------------------------------------------------------------------------| +|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory) | +|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | +|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | +|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. 
The files will be loaded in order, with each file overriding the previous one.| +|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | +|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 0c4223828..09a848235 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -1,6 +1,5 @@ { - "$ref": "#/definitions/KpopsConfig", - "definitions": { + "$defs": { "HelmConfig": { "description": "Global Helm configuration.", "properties": { @@ -66,14 +65,9 @@ "description": "Configuration for Kafka Connect.", "properties": { "url": { - "default": "http://localhost:8083", + "default": "http://localhost:8083/", "description": "Address of Kafka Connect.", - "env": "KPOPS_KAFKA_CONNECT_URL", - "env_names": [ - "kpops_kafka_connect_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", "type": "string" @@ -87,14 +81,9 @@ "description": "Configuration for Kafka REST Proxy.", "properties": { "url": { - "default": "http://localhost:8082", + "default": "http://localhost:8082/", "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS_KAFKA_REST_URL", - "env_names": [ - "kpops_kafka_rest_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", "type": "string" @@ -103,181 +92,6 @@ "title": "KafkaRestConfig", "type": "object" }, - "KpopsConfig": { - "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", - "properties": { - "create_namespace": { - "default": false, - "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", - "env_names": [ - "kpops_create_namespace" - ], - "title": "Create Namespace", - "type": "boolean" - }, - "defaults_filename_prefix": { - "default": "defaults", - "description": "The name of the defaults file and the prefix of the defaults environment file.", - "env_names": [ - "kpops_defaults_filename_prefix" - ], - "title": "Defaults Filename Prefix", - "type": "string" - }, - "defaults_path": { - "default": ".", - "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", - "env_names": [ - "kpops_defaults_path" - ], - "example": "defaults", - "format": "path", - "title": "Defaults Path", - "type": "string" - }, - "environment": { - "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. 
defaults_development.yaml for environment=development).", - "env": "KPOPS_ENVIRONMENT", - "env_names": [ - "kpops_environment" - ], - "example": "development", - "title": "Environment", - "type": "string" - }, - "helm_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmConfig" - } - ], - "default": { - "api_version": null, - "context": null, - "debug": false - }, - "description": "Global flags for Helm.", - "env_names": [ - "kpops_helm_config" - ], - "title": "Helm Config" - }, - "helm_diff_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmDiffConfig" - } - ], - "default": { - "ignore": [] - }, - "description": "Configure Helm Diff.", - "env_names": [ - "kpops_helm_diff_config" - ], - "title": "Helm Diff Config" - }, - "kafka_brokers": { - "description": "The comma separated Kafka brokers address.", - "env": "KPOPS_KAFKA_BROKERS", - "env_names": [ - "kpops_kafka_brokers" - ], - "example": "broker1:9092,broker2:9092,broker3:9092", - "title": "Kafka Brokers", - "type": "string" - }, - "kafka_connect": { - "allOf": [ - { - "$ref": "#/definitions/KafkaConnectConfig" - } - ], - "default": { - "url": "http://localhost:8083" - }, - "description": "Configuration for Kafka Connect.", - "env_names": [ - "kpops_kafka_connect" - ], - "title": "Kafka Connect" - }, - "kafka_rest": { - "allOf": [ - { - "$ref": "#/definitions/KafkaRestConfig" - } - ], - "default": { - "url": "http://localhost:8082" - }, - "description": "Configuration for Kafka REST Proxy.", - "env_names": [ - "kpops_kafka_rest" - ], - "title": "Kafka Rest" - }, - "retain_clean_jobs": { - "default": false, - "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", - "env": "KPOPS_RETAIN_CLEAN_JOBS", - "env_names": [ - "kpops_retain_clean_jobs" - ], - "title": "Retain Clean Jobs", - "type": "boolean" - }, - "schema_registry": { - "allOf": [ - { - "$ref": "#/definitions/SchemaRegistryConfig" - } - ], - "default": { - "enabled": false, - "url": "http://localhost:8081" - }, - "description": "Configuration for Schema Registry.", - "env_names": [ - "kpops_schema_registry" - ], - "title": "Schema Registry" - }, - "timeout": { - "default": 300, - "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", - "env": "KPOPS_TIMEOUT", - "env_names": [ - "kpops_timeout" - ], - "title": "Timeout", - "type": "integer" - }, - "topic_name_config": { - "allOf": [ - { - "$ref": "#/definitions/TopicNameConfig" - } - ], - "default": { - "default_error_topic_name": "${pipeline_name}-${component_name}-error", - "default_output_topic_name": "${pipeline_name}-${component_name}" - }, - "description": "Configure the topic name variables you can use in the pipeline definition.", - "env_names": [ - "kpops_topic_name_config" - ], - "title": "Topic Name Config" - } - }, - "required": [ - "environment", - "kafka_brokers" - ], - "title": "KpopsConfig", - "type": "object" - }, "SchemaRegistryConfig": { "additionalProperties": false, "description": "Configuration for Schema Registry.", @@ -285,21 +99,13 @@ "enabled": { "default": false, "description": "Whether the Schema Registry handler should be initialized.", - "env_names": [ - "enabled" - ], "title": "Enabled", "type": "boolean" }, "url": { - "default": "http://localhost:8081", + "default": "http://localhost:8081/", "description": "Address of the Schema Registry.", - "env": "KPOPS_SCHEMA_REGISTRY_URL", - "env_names": [ - "kpops_schema_registry_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", 
"type": "string" @@ -315,18 +121,12 @@ "default_error_topic_name": { "default": "${pipeline_name}-${component_name}-error", "description": "Configures the value for the variable ${error_topic_name}", - "env_names": [ - "default_error_topic_name" - ], "title": "Default Error Topic Name", "type": "string" }, "default_output_topic_name": { "default": "${pipeline_name}-${component_name}", "description": "Configures the value for the variable ${output_topic_name}", - "env_names": [ - "default_output_topic_name" - ], "title": "Default Output Topic Name", "type": "string" } @@ -335,5 +135,136 @@ "type": "object" } }, - "title": "KPOps config schema" + "additionalProperties": false, + "description": "Pipeline configuration unrelated to the components.", + "properties": { + "create_namespace": { + "default": false, + "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", + "title": "Create Namespace", + "type": "boolean" + }, + "defaults_filename_prefix": { + "default": "defaults", + "description": "The name of the defaults file and the prefix of the defaults environment file.", + "title": "Defaults Filename Prefix", + "type": "string" + }, + "defaults_path": { + "default": ".", + "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", + "examples": [ + "defaults", + "." + ], + "format": "path", + "title": "Defaults Path", + "type": "string" + }, + "environment": { + "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", + "examples": [ + "development", + "production" + ], + "title": "Environment", + "type": "string" + }, + "helm_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmConfig" + } + ], + "default": { + "api_version": null, + "context": null, + "debug": false + }, + "description": "Global flags for Helm." + }, + "helm_diff_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmDiffConfig" + } + ], + "default": { + "ignore": [] + }, + "description": "Configure Helm Diff." + }, + "kafka_brokers": { + "description": "The comma separated Kafka brokers address.", + "examples": [ + "broker1:9092,broker2:9092,broker3:9092" + ], + "title": "Kafka Brokers", + "type": "string" + }, + "kafka_connect": { + "allOf": [ + { + "$ref": "#/$defs/KafkaConnectConfig" + } + ], + "default": { + "url": "http://localhost:8083/" + }, + "description": "Configuration for Kafka Connect." + }, + "kafka_rest": { + "allOf": [ + { + "$ref": "#/$defs/KafkaRestConfig" + } + ], + "default": { + "url": "http://localhost:8082/" + }, + "description": "Configuration for Kafka REST Proxy." + }, + "retain_clean_jobs": { + "default": false, + "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", + "title": "Retain Clean Jobs", + "type": "boolean" + }, + "schema_registry": { + "allOf": [ + { + "$ref": "#/$defs/SchemaRegistryConfig" + } + ], + "default": { + "enabled": false, + "url": "http://localhost:8081/" + }, + "description": "Configuration for Schema Registry." 
+ }, + "timeout": { + "default": 300, + "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", + "title": "Timeout", + "type": "integer" + }, + "topic_name_config": { + "allOf": [ + { + "$ref": "#/$defs/TopicNameConfig" + } + ], + "default": { + "default_error_topic_name": "${pipeline_name}-${component_name}-error", + "default_output_topic_name": "${pipeline_name}-${component_name}" + }, + "description": "Configure the topic name variables you can use in the pipeline definition." + } + }, + "required": [ + "environment", + "kafka_brokers" + ], + "title": "KpopsConfig", + "type": "object" } diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index 2db2b0243..cb9b2ff5b 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -40,7 +40,7 @@ $ kpops clean [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] -* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -67,7 +67,7 @@ $ kpops deploy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] -* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -94,7 +94,7 @@ $ kpops destroy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] -* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. 
[env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -121,7 +121,7 @@ $ kpops generate [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] -* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--template / --no-template`: Run Helm template [default: no-template] @@ -148,7 +148,7 @@ $ kpops reset [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] -* `--dotenv FILE`: Path to dotenvfile. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] diff --git a/hooks/gen_schema.py b/hooks/gen_schema.py index 88d70b7fd..7d6b99f2e 100644 --- a/hooks/gen_schema.py +++ b/hooks/gen_schema.py @@ -27,4 +27,4 @@ def gen_schema(scope: SchemaScope): if __name__ == "__main__": gen_schema(SchemaScope.PIPELINE) - # gen_schema(SchemaScope.CONFIG) + gen_schema(SchemaScope.CONFIG) diff --git a/kpops/cli/main.py b/kpops/cli/main.py index f8c250d7f..78d270c62 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -19,7 +19,7 @@ from kpops.component_handlers.schema_handler.schema_handler import SchemaHandler from kpops.component_handlers.topic.handler import TopicHandler from kpops.component_handlers.topic.proxy_wrapper import ProxyWrapper -from kpops.config import KpopsConfig +from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema @@ -28,8 +28,6 @@ from kpops.components.base_components import PipelineComponent -ENV_PREFIX = KpopsConfig.model_config.get("env_prefix") - LOG_DIVIDER = "#" * 100 app = dtyper.Typer(pretty_exceptions_enable=False) @@ -41,7 +39,7 @@ file_okay=True, envvar=f"{ENV_PREFIX}DOTENV_PATH", help=( - "Path to dotenvfile. Multiple files can be provided. " + "Path to dotenv file. Multiple files can be provided. " "The files will be loaded in order, with each file overriding the previous one." 
), ) @@ -213,15 +211,12 @@ def create_kpops_config( ) -> KpopsConfig: setup_logging_level(verbose) YamlConfigSettingsSource.path_to_config = config + kpops_config = KpopsConfig( + _env_file=dotenv # pyright: ignore[reportGeneralTypeIssues] + ) if defaults: - kpops_config = KpopsConfig( - defaults_path=defaults, - _env_file=dotenv, # pyright: ignore [reportGeneralTypeIssues] - ) + kpops_config.defaults_path = defaults else: - kpops_config = KpopsConfig( - _env_file=dotenv # pyright: ignore [reportGeneralTypeIssues] - ) kpops_config.defaults_path = config.parent / kpops_config.defaults_path return kpops_config diff --git a/kpops/config.py b/kpops/config.py index 6eb631004..c2e8ac212 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -14,6 +14,8 @@ from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object +ENV_PREFIX = "KPOPS_" + class TopicNameConfig(BaseSettings): """Configure the topic name variables you can use in the pipeline definition.""" @@ -125,7 +127,7 @@ class KpopsConfig(BaseSettings): description="Whether to retain clean up jobs in the cluster or uninstall them after completion.", ) - model_config = SettingsConfigDict(env_prefix="KPOPS_", env_nested_delimiter="__") + model_config = SettingsConfigDict(env_prefix=ENV_PREFIX, env_nested_delimiter="__") @override @classmethod From fd96130ec070297d1a1934c2e1c001e7ae3aaa8f Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 8 Nov 2023 16:09:30 +0200 Subject: [PATCH 78/96] refactor: move yaml settings source to pydantic module --- kpops/cli/main.py | 2 +- kpops/cli/settings_sources.py | 53 ----------------------------------- kpops/config.py | 2 +- kpops/utils/pydantic.py | 50 +++++++++++++++++++++++++++++ 4 files changed, 52 insertions(+), 55 deletions(-) delete mode 100644 kpops/cli/settings_sources.py diff --git a/kpops/cli/main.py b/kpops/cli/main.py index 78d270c62..d4fea2f1a 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -11,7 +11,6 @@ from kpops import __version__ from kpops.cli.custom_formatter import CustomFormatter from kpops.cli.registry import Registry -from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers import ComponentHandlers from kpops.component_handlers.kafka_connect.kafka_connect_handler import ( KafkaConnectHandler, @@ -22,6 +21,7 @@ from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema +from kpops.utils.pydantic import YamlConfigSettingsSource if TYPE_CHECKING: from collections.abc import Iterator diff --git a/kpops/cli/settings_sources.py b/kpops/cli/settings_sources.py deleted file mode 100644 index d93304c58..000000000 --- a/kpops/cli/settings_sources.py +++ /dev/null @@ -1,53 +0,0 @@ -from pathlib import Path -from typing import Any - -from pydantic.fields import FieldInfo -from pydantic_settings import PydanticBaseSettingsSource -from typing_extensions import override - -from kpops.utils.yaml_loading import load_yaml_file - - -class YamlConfigSettingsSource(PydanticBaseSettingsSource): - """Loads variables from a YAML file at the project's root.""" - - path_to_config = Path("config.yaml") - - @override - def get_field_value( - self, - field: FieldInfo, - field_name: str, - ) -> tuple[Any, str, bool]: - if self.path_to_config.exists() and isinstance( - (file_content_yaml := load_yaml_file(self.path_to_config)), dict - ): - field_value = 
file_content_yaml.get(field_name) - return field_value, field_name, False - return None, field_name, False - - @override - def prepare_field_value( - self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool - ) -> Any: - return value - - @override - def __call__(self) -> dict[str, Any]: - d: dict[str, Any] = {} - - for field_name, field in self.settings_cls.model_fields.items(): - field_value, field_key, value_is_complex = self.get_field_value( - field, - field_name, - ) - field_value = self.prepare_field_value( - field_name, - field, - field_value, - value_is_complex, - ) - if field_value is not None: - d[field_key] = field_value - - return d diff --git a/kpops/config.py b/kpops/config.py index c2e8ac212..850418d21 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -10,9 +10,9 @@ ) from typing_extensions import override -from kpops.cli.settings_sources import YamlConfigSettingsSource from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object +from kpops.utils.pydantic import YamlConfigSettingsSource ENV_PREFIX = "KPOPS_" diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 1cf89d4e9..0981cd926 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -1,10 +1,15 @@ +from pathlib import Path from typing import Any import humps from pydantic import BaseModel, ConfigDict, Field from pydantic.alias_generators import to_snake +from pydantic.fields import FieldInfo +from pydantic_settings import PydanticBaseSettingsSource +from typing_extensions import override from kpops.utils.docstring import describe_object +from kpops.utils.yaml_loading import load_yaml_file def to_camel(s: str) -> str: @@ -98,3 +103,48 @@ def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: schema["description"] = describe_object(model.__doc__) model_config = ConfigDict(json_schema_extra=json_schema_extra) + + +class YamlConfigSettingsSource(PydanticBaseSettingsSource): + """Loads variables from a YAML file at the project's root.""" + + path_to_config = Path("config.yaml") + + @override + def get_field_value( + self, + field: FieldInfo, + field_name: str, + ) -> tuple[Any, str, bool]: + if self.path_to_config.exists() and isinstance( + (file_content_yaml := load_yaml_file(self.path_to_config)), dict + ): + field_value = file_content_yaml.get(field_name) + return field_value, field_name, False + return None, field_name, False + + @override + def prepare_field_value( + self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool + ) -> Any: + return value + + @override + def __call__(self) -> dict[str, Any]: + d: dict[str, Any] = {} + + for field_name, field in self.settings_cls.model_fields.items(): + field_value, field_key, value_is_complex = self.get_field_value( + field, + field_name, + ) + field_value = self.prepare_field_value( + field_name, + field, + field_value, + value_is_complex, + ) + if field_value is not None: + d[field_key] = field_value + + return d From f91ab02e9bdd104993a17015ae217056bc82eba0 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 8 Nov 2023 16:18:24 +0200 Subject: [PATCH 79/96] refactor: `by_alias` rotate parameters --- docs/docs/schema/pipeline.json | 2 +- kpops/component_handlers/kafka_connect/model.py | 2 +- kpops/utils/pydantic.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 644f51431..3fdef503f 100644 --- 
a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -434,7 +434,7 @@ }, "KubernetesAppConfig": { "additionalProperties": true, - "description": "Settings specific to Kubernetes Apps.", + "description": "Settings specific to Kubernetes apps.", "properties": {}, "title": "KubernetesAppConfig", "type": "object" diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index c25a19cb1..f6f6e5baf 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -60,7 +60,7 @@ def class_name(self) -> str: @model_serializer(mode="wrap", when_used="always") def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: result = exclude_by_value(handler(self), None) - return {by_alias(name, self): value for name, value in result.items()} + return {by_alias(self, name): value for name, value in result.items()} class ConnectorTask(BaseModel): diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 0981cd926..b1efaf38b 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -27,7 +27,7 @@ def to_dot(s: str) -> str: return s.replace("_", ".") -def by_alias(field_name: str, model: BaseModel) -> str: +def by_alias(model: BaseModel, field_name: str) -> str: """Return field alias if exists else field name. :param field_name: Name of the field to get alias of From 3bd9bd896eae3114a14c7e37d3bd3c6b4828a6ab Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 8 Nov 2023 16:18:41 +0200 Subject: [PATCH 80/96] style: lowercase apps --- kpops/components/base_components/kubernetes_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 797db7742..cae474cee 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -19,7 +19,7 @@ class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): - """Settings specific to Kubernetes Apps.""" + """Settings specific to Kubernetes apps.""" model_config = ConfigDict( extra="allow", From 9206cb0fbb8a1dbad475d2ba2c831d59b7e52083 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 8 Nov 2023 16:19:22 +0200 Subject: [PATCH 81/96] fix: add missing arg in model serializer --- kpops/components/streams_bootstrap/streams/model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 4e3162cbc..34c6915fc 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -81,7 +81,7 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: def serialize_model( self, handler: Callable, info: SerializationInfo ) -> dict[str, Any]: - return exclude_defaults(self, exclude_by_value(handler(self))) + return exclude_defaults(self, exclude_by_value(handler(self), None)) class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): From 356adb9fb9e3e9c1a94d35e15f86f245a381d7cb Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 8 Nov 2023 16:29:55 +0200 Subject: [PATCH 82/96] refactor: Add generic type to pydantic module --- kpops/utils/pydantic.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index b1efaf38b..3ac64d82d 100644 --- 
a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -6,7 +6,7 @@ from pydantic.alias_generators import to_snake from pydantic.fields import FieldInfo from pydantic_settings import PydanticBaseSettingsSource -from typing_extensions import override +from typing_extensions import TypeVar, override from kpops.utils.docstring import describe_object from kpops.utils.yaml_loading import load_yaml_file @@ -36,9 +36,12 @@ def by_alias(model: BaseModel, field_name: str) -> str: return model.model_fields.get(field_name, Field()).alias or field_name +_V = TypeVar("_V") + + def exclude_by_value( - dumped_model: dict[str, Any], *excluded_values: Any -) -> dict[str, Any]: + dumped_model: dict[str, _V], *excluded_values: Any +) -> dict[str, _V]: """Strip all key-value pairs with certain values. :param dumped_model: Dumped model @@ -53,8 +56,8 @@ def exclude_by_value( def exclude_by_name( - dumped_model: dict[str, Any], *excluded_fields: str -) -> dict[str, Any]: + dumped_model: dict[str, _V], *excluded_fields: str +) -> dict[str, _V]: """Strip all key-value pairs with certain field names. :param dumped_model: Dumped model @@ -68,7 +71,7 @@ def exclude_by_name( } -def exclude_defaults(model: BaseModel, dumped_model: dict[str, Any]) -> dict[str, str]: +def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> dict[str, _V]: """Strip all key-value pairs with default values. :param model: Model From 4a46d3af0976f3c813a4431206d2fff49f851315 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Wed, 8 Nov 2023 16:31:40 +0200 Subject: [PATCH 83/96] chore: remove old comment --- tests/pipeline/test_pipeline.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 9893ed697..21f9b30fd 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -272,9 +272,7 @@ def test_no_user_defined_components(self, snapshot: SnapshotTest): enriched_pipeline: dict = yaml.safe_load(result.stdout) snapshot.assert_match(enriched_pipeline, "test-pipeline") - def test_kafka_connect_sink_weave_from_topics( - self, snapshot: SnapshotTest - ): # INTERFERES WITH test_with_env_defaults + def test_kafka_connect_sink_weave_from_topics(self, snapshot: SnapshotTest): """Parse Connector topics from previous component to section.""" result = runner.invoke( app, From dc2f3d763d6b2b7ccd5e7c4be86bbd4107e50c35 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 9 Nov 2023 14:22:29 +0200 Subject: [PATCH 84/96] fix: Allow any type of data under `app.streams.config` --- docs/docs/schema/pipeline.json | 3 --- .../streams_bootstrap/streams/model.py | 2 +- .../defaults.yaml | 24 +++++++++++++++++++ .../pipeline.yaml | 5 ++++ tests/pipeline/test_pipeline.py | 19 +++++++++++++++ 5 files changed, 49 insertions(+), 4 deletions(-) create mode 100644 tests/pipeline/resources/temp-any-values-in-app-streams-config/defaults.yaml create mode 100644 tests/pipeline/resources/temp-any-values-in-app-streams-config/pipeline.yaml diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 3fdef503f..6d8d87cc0 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -913,9 +913,6 @@ "type": "string" }, "config": { - "additionalProperties": { - "type": "string" - }, "default": {}, "description": "Configuration", "title": "Config", diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index 34c6915fc..2c8b952ce 100644 --- 
a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -51,7 +51,7 @@ class StreamsConfig(KafkaStreamsConfig): error_topic: str | None = Field( default=None, description=describe_attr("error_topic", __doc__) ) - config: dict[str, str] = Field( + config: dict[str, Any] = Field( default={}, description=describe_attr("config", __doc__) ) diff --git a/tests/pipeline/resources/temp-any-values-in-app-streams-config/defaults.yaml b/tests/pipeline/resources/temp-any-values-in-app-streams-config/defaults.yaml new file mode 100644 index 000000000..ff4864602 --- /dev/null +++ b/tests/pipeline/resources/temp-any-values-in-app-streams-config/defaults.yaml @@ -0,0 +1,24 @@ +kubernetes-app: + name: "${component_type}" + namespace: example-namespace + +kafka-app: + app: + streams: + brokers: "${kafka_brokers}" + schema_registry_url: "${schema_registry_url}" + version: "2.4.2" + +streams-app: # inherits from kafka-app + app: + streams: + config: + large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator + to: + topics: + ${error_topic_name}: + type: error + value_schema: com.bakdata.kafka.DeadLetter + partitions_count: 1 + configs: + cleanup.policy: compact,delete diff --git a/tests/pipeline/resources/temp-any-values-in-app-streams-config/pipeline.yaml b/tests/pipeline/resources/temp-any-values-in-app-streams-config/pipeline.yaml new file mode 100644 index 000000000..db3f0e50e --- /dev/null +++ b/tests/pipeline/resources/temp-any-values-in-app-streams-config/pipeline.yaml @@ -0,0 +1,5 @@ +- type: streams-app + app: + streams: + config: + max.poll.records: 100 diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 21f9b30fd..2b5284a43 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -634,3 +634,22 @@ def test_validate_unique_step_names(self): ], catch_exceptions=False, ) + + def test_temp_any_values_in_app_streams_config(self): + result = runner.invoke( + app, + [ + "generate", + "--pipeline-base-dir", + str(PIPELINE_BASE_DIR_PATH), + str( + RESOURCE_PATH + / "temp-any-values-in-app-streams-config/pipeline.yaml" + ), + "--defaults", + str(RESOURCE_PATH / "temp-any-values-in-app-streams-config"), + ], + catch_exceptions=False, + ) + + assert result.exit_code == 0 From 683986e1c8dd8868942126f609433624135fdf35 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 9 Nov 2023 15:19:07 +0200 Subject: [PATCH 85/96] test: add temp test for name trimming --- .../defaults.yaml | 1 - .../pipeline.yaml | 1 + tests/pipeline/test_pipeline.py | 15 ++++++++------- 3 files changed, 9 insertions(+), 8 deletions(-) rename tests/pipeline/resources/{temp-any-values-in-app-streams-config => temp-trim-release-name}/defaults.yaml (95%) rename tests/pipeline/resources/{temp-any-values-in-app-streams-config => temp-trim-release-name}/pipeline.yaml (57%) diff --git a/tests/pipeline/resources/temp-any-values-in-app-streams-config/defaults.yaml b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml similarity index 95% rename from tests/pipeline/resources/temp-any-values-in-app-streams-config/defaults.yaml rename to tests/pipeline/resources/temp-trim-release-name/defaults.yaml index ff4864602..55754eba1 100644 --- a/tests/pipeline/resources/temp-any-values-in-app-streams-config/defaults.yaml +++ b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml @@ -1,5 +1,4 @@ kubernetes-app: - name: "${component_type}" namespace: example-namespace kafka-app: diff --git 
a/tests/pipeline/resources/temp-any-values-in-app-streams-config/pipeline.yaml b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml similarity index 57% rename from tests/pipeline/resources/temp-any-values-in-app-streams-config/pipeline.yaml rename to tests/pipeline/resources/temp-trim-release-name/pipeline.yaml index db3f0e50e..d61d6c9ba 100644 --- a/tests/pipeline/resources/temp-any-values-in-app-streams-config/pipeline.yaml +++ b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml @@ -1,4 +1,5 @@ - type: streams-app + name: in-order-to-have-len-fifty-two-name-should-end--here app: streams: config: diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py index 2b5284a43..894d88c8d 100644 --- a/tests/pipeline/test_pipeline.py +++ b/tests/pipeline/test_pipeline.py @@ -635,21 +635,22 @@ def test_validate_unique_step_names(self): catch_exceptions=False, ) - def test_temp_any_values_in_app_streams_config(self): + def test_temp_trim_release_name(self): result = runner.invoke( app, [ "generate", "--pipeline-base-dir", str(PIPELINE_BASE_DIR_PATH), - str( - RESOURCE_PATH - / "temp-any-values-in-app-streams-config/pipeline.yaml" - ), + str(RESOURCE_PATH / "temp-trim-release-name/pipeline.yaml"), "--defaults", - str(RESOURCE_PATH / "temp-any-values-in-app-streams-config"), + str(RESOURCE_PATH / "temp-trim-release-name"), ], catch_exceptions=False, ) - assert result.exit_code == 0 + enriched_pipeline: dict = yaml.safe_load(result.stdout) + assert ( + enriched_pipeline["components"][0]["name"] + == "in-order-to-have-len-fifty-two-name-should-end--here" + ) From d6ea12afda11117cb77ad7988d4ab05bf6a7f633 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Mon, 27 Nov 2023 16:59:06 +0200 Subject: [PATCH 86/96] chore: update dependencies --- poetry.lock | 42 +++++++++++++++++++++--------------------- pyproject.toml | 4 ++-- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/poetry.lock b/poetry.lock index eed1ac6d4..c72c5c9c3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. [[package]] name = "aiofiles" @@ -1543,28 +1543,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.1.1" -description = "An extremely fast Python linter, written in Rust." +version = "0.1.6" +description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.1-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b7cdc893aef23ccc14c54bd79a8109a82a2c527e11d030b62201d86f6c2b81c5"}, - {file = "ruff-0.1.1-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:620d4b34302538dbd8bbbe8fdb8e8f98d72d29bd47e972e2b59ce6c1e8862257"}, - {file = "ruff-0.1.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a909d3930afdbc2e9fd893b0034479e90e7981791879aab50ce3d9f55205bd6"}, - {file = "ruff-0.1.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3305d1cb4eb8ff6d3e63a48d1659d20aab43b49fe987b3ca4900528342367145"}, - {file = "ruff-0.1.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c34ae501d0ec71acf19ee5d4d889e379863dcc4b796bf8ce2934a9357dc31db7"}, - {file = "ruff-0.1.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6aa7e63c3852cf8fe62698aef31e563e97143a4b801b57f920012d0e07049a8d"}, - {file = "ruff-0.1.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d68367d1379a6b47e61bc9de144a47bcdb1aad7903bbf256e4c3d31f11a87ae"}, - {file = "ruff-0.1.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc11955f6ce3398d2afe81ad7e49d0ebf0a581d8bcb27b8c300281737735e3a3"}, - {file = "ruff-0.1.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd8eead88ea83a250499074e2a8e9d80975f0b324b1e2e679e4594da318c25"}, - {file = "ruff-0.1.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f4780e2bb52f3863a565ec3f699319d3493b83ff95ebbb4993e59c62aaf6e75e"}, - {file = "ruff-0.1.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8f5b24daddf35b6c207619301170cae5d2699955829cda77b6ce1e5fc69340df"}, - {file = "ruff-0.1.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d3f9ac658ba29e07b95c80fa742b059a55aefffa8b1e078bc3c08768bdd4b11a"}, - {file = "ruff-0.1.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3521bf910104bf781e6753282282acc145cbe3eff79a1ce6b920404cd756075a"}, - {file = "ruff-0.1.1-py3-none-win32.whl", hash = "sha256:ba3208543ab91d3e4032db2652dcb6c22a25787b85b8dc3aeff084afdc612e5c"}, - {file = "ruff-0.1.1-py3-none-win_amd64.whl", hash = "sha256:3ff3006c97d9dc396b87fb46bb65818e614ad0181f059322df82bbfe6944e264"}, - {file = "ruff-0.1.1-py3-none-win_arm64.whl", hash = "sha256:e140bd717c49164c8feb4f65c644046fe929c46f42493672853e3213d7bdbce2"}, - {file = "ruff-0.1.1.tar.gz", hash = "sha256:c90461ae4abec261609e5ea436de4a4b5f2822921cf04c16d2cc9327182dbbcc"}, + {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"}, + {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"}, + {file = 
"ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"}, + {file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"}, + {file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"}, + {file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"}, + {file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"}, ] [[package]] @@ -1927,4 +1927,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "ebe06644e4bdad0cf848bff96088d06fc876e619089a9068f6988ab1fedcb91a" +content-hash = "44be274788ef313f3e35e296e42be1632675ce0ab9bee8c3c4db9d040480c50d" diff --git a/pyproject.toml b/pyproject.toml index 7cc42f07a..b68aa03f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ kpops = "kpops.cli.main:app" [tool.poetry.dependencies] python = "^3.10" -pydantic = { extras = ["dotenv"], version = "^2.4.2" } +pydantic = { extras = ["dotenv"], version = "^2.5.2" } pydantic-settings = "^2.0.3" rich = "^12.4.4" PyYAML = "^6.0" @@ -46,7 +46,7 @@ pytest-mock = "^3.10.0" pytest-timeout = "^2.1.0" snapshottest = "^0.6.0" pre-commit = "^2.19.0" -ruff = "^0.1.1" +ruff = "^0.1.6" black = "^23.7.0" typer-cli = "^0.0.13" pyright = "^1.1.314" From db3701c6369dee4fb39f2ca38a96a03099142511 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 30 Nov 2023 15:12:49 +0200 Subject: [PATCH 87/96] chore: update lock file --- poetry.lock | 223 ++++++++++++++++++++++++++-------------------------- 1 file changed, 111 insertions(+), 112 deletions(-) diff --git a/poetry.lock b/poetry.lock index c72c5c9c3..c23686e83 100644 --- a/poetry.lock +++ b/poetry.lock @@ -883,18 +883,18 @@ virtualenv = ">=20.0.8" [[package]] name = "pydantic" -version = "2.4.2" +version = "2.5.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, - {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = 
"sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.10.1" +pydantic-core = "2.14.5" typing-extensions = ">=4.6.1" [package.extras] @@ -902,117 +902,116 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.10.1" +version = "2.14.5" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, - {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, - {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, - {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, - {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, - {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, - {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, - {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, - {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, - {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, - {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, - {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, - {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, - {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, - {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, - {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, - {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, - {file = 
"pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, - {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, - {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, - {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, - {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, - {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, - {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, - {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, - {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, - {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, - {file = 
"pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, - {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, - {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, - {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, - {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, - {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, - {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, - {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, - {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, - {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = 
"sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = 
"pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = 
"pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = 
"pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, ] [package.dependencies] @@ -1927,4 +1926,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "44be274788ef313f3e35e296e42be1632675ce0ab9bee8c3c4db9d040480c50d" +content-hash = "c96ce7b59724952b565b8035f6bb7c098c13aa09c79107732dd9338e3af3118d" From 9ec57577d417fe019660c97b3f209c1a1bc6a4f6 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Thu, 7 Dec 2023 14:58:32 +0100 Subject: [PATCH 88/96] Revert optional connector name this fixes the broken validator making sure the required field is set --- docs/docs/schema/pipeline.json | 15 ++++----------- .../kafka_connect/connect_wrapper.py | 5 +---- kpops/component_handlers/kafka_connect/model.py | 3 +-- 
.../components/base_components/kafka_connector.py | 4 ++-- 4 files changed, 8 insertions(+), 19 deletions(-) diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 6d8d87cc0..71ec74366 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -203,20 +203,13 @@ "type": "string" }, "name": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": null, - "title": "Name" + "title": "Name", + "type": "string" } }, "required": [ - "connector.class" + "connector.class", + "name" ], "title": "KafkaConnectorConfig", "type": "object" diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 4d92bad03..06f21eff2 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -63,16 +63,13 @@ def create_connector( self.create_connector(connector_config) raise KafkaConnectError(response) - def get_connector(self, connector_name: str | None) -> KafkaConnectResponse: + def get_connector(self, connector_name: str) -> KafkaConnectResponse: """Get information about the connector. API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) :param connector_name: Name of the created connector :return: Information about the connector. """ - if connector_name is None: - msg = "Connector name not set" - raise Exception(msg) response = httpx.get( url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index f6f6e5baf..1cc887d98 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -4,7 +4,6 @@ from pydantic import ( BaseModel, ConfigDict, - Field, SerializationInfo, field_validator, model_serializer, ) @@ -29,7 +28,7 @@ class KafkaConnectorConfig(DescConfigModel): """Settings specific to Kafka Connectors.""" connector_class: str - name: str | None = Field(default=None) + name: str # TODO: hide from schema @override @staticmethod diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index b4ff7e79b..1a0625860 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -72,7 +72,7 @@ class KafkaConnector(PipelineComponent, ABC): ) _connector_type: KafkaConnectorType = PrivateAttr() - @field_validator("app") + @field_validator("app", mode="before") @classmethod def connector_config_should_have_component_name( cls, @@ -81,7 +81,7 @@ def connector_config_should_have_component_name( ) -> Any: if isinstance(app, KafkaConnectorConfig): app = app.model_dump() - component_name = info.data["prefix"] + info.data["name"] + component_name: str = info.data["prefix"] + info.data["name"] connector_name: str | None = app.get("name") if connector_name is not None and connector_name != component_name: msg = f"Connector name '{connector_name}' should be the same as component name '{component_name}'" From 1192c2b0a349679a97ee3ab75a5c79680de20260 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 7 Dec 2023 16:04:31 +0200 Subject: [PATCH 89/96] refactor: hide connector name from schema --- docs/docs/schema/pipeline.json | 7 +------ kpops/component_handlers/kafka_connect/model.py | 3 ++- 2 files changed, 3 insertions(+), 7 deletions(-)
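Note on patches 88 and 89: together they make the connector `name` required for validation while hiding it from the generated JSON schema. The snippet below is a minimal, self-contained sketch of that behavior; `ConnectorConfig` is a hypothetical stand-in for the real `KafkaConnectorConfig`, which additionally configures field aliases such as `connector.class`.

    from pydantic import BaseModel, ValidationError
    from pydantic.json_schema import SkipJsonSchema

    class ConnectorConfig(BaseModel):
        connector_class: str
        # SkipJsonSchema omits the field from the emitted JSON schema,
        # but it remains mandatory at validation time.
        name: SkipJsonSchema[str]

    # "name" does not appear in the published schema ...
    assert "name" not in ConnectorConfig.model_json_schema()["properties"]

    # ... yet omitting it still fails validation.
    try:
        ConnectorConfig(connector_class="com.example.MySinkConnector")
    except ValidationError as exc:
        print(exc)  # 1 validation error: name - Field required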
diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 71ec74366..0882ccfa5 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -201,15 +201,10 @@ "connector.class": { "title": "Connector.Class", "type": "string" - }, - "name": { - "title": "Name", - "type": "string" } }, "required": [ - "connector.class", - "name" + "connector.class" ], "title": "KafkaConnectorConfig", "type": "object" diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index 1cc887d98..a7ec45af9 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -8,6 +8,7 @@ field_validator, model_serializer, ) +from pydantic.json_schema import SkipJsonSchema from typing_extensions import override from kpops.utils.pydantic import ( @@ -28,7 +29,7 @@ class KafkaConnectorConfig(DescConfigModel): """Settings specific to Kafka Connectors.""" connector_class: str - name: str # TODO: hide from schema + name: SkipJsonSchema[str] @override @staticmethod From 10d7b542030f70b96d5e76571163a581dbb510bd Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Thu, 7 Dec 2023 16:04:38 +0200 Subject: [PATCH 90/96] chore: bump ruff version --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index c23686e83..cca6dfeab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1542,28 +1542,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.1.6" +version = "0.1.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"}, - {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"}, - {file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"}, - {file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"}, - {file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"}, - {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"}, - {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"}, - {file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"}, - {file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"}, - {file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"}, - {file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"}, - {file = 
"ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"}, - {file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"}, - {file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"}, - {file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"}, - {file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"}, - {file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"}, + {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7f80496854fdc65b6659c271d2c26e90d4d401e6a4a31908e7e334fab4645aac"}, + {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1ea109bdb23c2a4413f397ebd8ac32cb498bee234d4191ae1a310af760e5d287"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0c2de9dd9daf5e07624c24add25c3a490dbf74b0e9bca4145c632457b3b42a"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:69a4bed13bc1d5dabf3902522b5a2aadfebe28226c6269694283c3b0cecb45fd"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de02ca331f2143195a712983a57137c5ec0f10acc4aa81f7c1f86519e52b92a1"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45b38c3f8788a65e6a2cab02e0f7adfa88872696839d9882c13b7e2f35d64c5f"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c64cb67b2025b1ac6d58e5ffca8f7b3f7fd921f35e78198411237e4f0db8e73"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dcc6bb2f4df59cb5b4b40ff14be7d57012179d69c6565c1da0d1f013d29951b"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2bb4bb6bbe921f6b4f5b6fdd8d8468c940731cb9406f274ae8c5ed7a78c478"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:276a89bcb149b3d8c1b11d91aa81898fe698900ed553a08129b38d9d6570e717"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:90c958fe950735041f1c80d21b42184f1072cc3975d05e736e8d66fc377119ea"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b05e3b123f93bb4146a761b7a7d57af8cb7384ccb2502d29d736eaade0db519"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:290ecab680dce94affebefe0bbca2322a6277e83d4f29234627e0f8f6b4fa9ce"}, + {file = "ruff-0.1.7-py3-none-win32.whl", hash = "sha256:416dfd0bd45d1a2baa3b1b07b1b9758e7d993c256d3e51dc6e03a5e7901c7d80"}, + {file = "ruff-0.1.7-py3-none-win_amd64.whl", hash = "sha256:4af95fd1d3b001fc41325064336db36e3d27d2004cdb6d21fd617d45a172dd96"}, + {file = "ruff-0.1.7-py3-none-win_arm64.whl", hash = "sha256:0683b7bfbb95e6df3c7c04fe9d78f631f8e8ba4868dfc932d43d690698057e2e"}, + {file = "ruff-0.1.7.tar.gz", hash = "sha256:dffd699d07abf54833e5f6cc50b85a6ff043715da8788c4a79bcd4ab4734d306"}, ] [[package]] @@ -1926,4 +1926,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "c96ce7b59724952b565b8035f6bb7c098c13aa09c79107732dd9338e3af3118d" +content-hash = 
"ab89e863a47e28758e885ea10a7c88f7829fa88ecc503817a8bbf7c7bc5d5e81" diff --git a/pyproject.toml b/pyproject.toml index b68aa03f1..7986021e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ pytest-mock = "^3.10.0" pytest-timeout = "^2.1.0" snapshottest = "^0.6.0" pre-commit = "^2.19.0" -ruff = "^0.1.6" +ruff = "^0.1.7" black = "^23.7.0" typer-cli = "^0.0.13" pyright = "^1.1.314" From 4c8c75e355351efc297f8053cfc2f224f7deb085 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Thu, 7 Dec 2023 18:58:26 +0100 Subject: [PATCH 91/96] Change Kafka topic configs value type to `Any` --- kpops/component_handlers/topic/model.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index a6c69cba7..5c0cf024d 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -1,4 +1,5 @@ from enum import Enum +from typing import Any from pydantic import BaseModel, ConfigDict @@ -7,7 +8,7 @@ class TopicSpec(BaseModel): topic_name: str partitions_count: int | None = None replication_factor: int | None = None - configs: list[dict[str, str]] | None = None + configs: list[dict[str, Any]] | None = None class TopicResponse(BaseModel): From 82e025f568ee7884333ab2aa80177b25886b1856 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 11 Dec 2023 09:10:46 +0100 Subject: [PATCH 92/96] Trigger CI From 94d3f9a2019ecb8281d1b34e8560d0f032219f9c Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 11 Dec 2023 10:12:15 +0100 Subject: [PATCH 93/96] Format --- kpops/utils/gen_schema.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index 7f28b4fea..18ac3c5a4 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -110,12 +110,8 @@ def gen_pipeline_schema( default=component.type, exclude=True, ) - core_schema: DefinitionsSchema = ( - component.__pydantic_core_schema__ - ) # pyright:ignore[reportGeneralTypeIssues] - model_schema: ModelFieldsSchema = core_schema["schema"][ - "schema" - ] # pyright:ignore[reportGeneralTypeIssues,reportTypedDictNotRequiredAccess] + core_schema: DefinitionsSchema = component.__pydantic_core_schema__ # pyright:ignore[reportGeneralTypeIssues] + model_schema: ModelFieldsSchema = core_schema["schema"]["schema"] # pyright:ignore[reportGeneralTypeIssues,reportTypedDictNotRequiredAccess] model_schema["fields"]["type"] = ModelField( type="model-field", schema=LiteralSchema( From b82dd58615eccfc302c5e5eb1770d8e712e7c240 Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Mon, 11 Dec 2023 10:17:43 +0100 Subject: [PATCH 94/96] Fix docs index indentation --- docs/mkdocs.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index cc3882eef..c6ef09c16 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -83,7 +83,7 @@ nav: KPOps Documentation: index.md - User Guide: - What is KPOps: user/what-is-kpops.md - - Changelog: user/changelog.md + - Changelog: user/changelog.md - Getting Started: - Setup: user/getting-started/setup.md - Quick start: user/getting-started/quick-start.md @@ -115,7 +115,7 @@ nav: - CI integration: - GitHub Actions: user/references/ci-integration/github-actions.md - Developer Guide: - - Getting Started: developer/getting-started.md - - Contributing: developer/contributing.md - - Code base: - - Auto generation: developer/auto-generation.md + - Getting Started: 
developer/getting-started.md + - Contributing: developer/contributing.md + - Code base: + - Auto generation: developer/auto-generation.md From 8ddb8d8626c2b72e25353fba87b4ca853dd2d619 Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 12 Dec 2023 11:37:05 +0200 Subject: [PATCH 95/96] Fix documentation generation (#384) closes #366 --- .../resources/variables/config_env_vars.env | 63 +++++++++++-------- .../resources/variables/config_env_vars.md | 34 +++++----- hooks/gen_docs/gen_docs_env_vars.py | 63 ++++++++++++++----- kpops/utils/dict_ops.py | 13 +++- tests/utils/test_doc_gen.py | 14 ----- 5 files changed, 112 insertions(+), 75 deletions(-) diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 1e38d4d9d..2b99d2172 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -8,47 +8,58 @@ # The path to the folder containing the defaults.yaml file and the # environment defaults files. Paths can either be absolute or relative # to `config.yaml` -defaults_path=. +KPOPS_DEFAULTS_PATH=. # environment # The environment you want to generate and deploy the pipeline to. # Suffix your environment files with this value (e.g. # defaults_development.yaml for environment=development). -environment=PydanticUndefined +KPOPS_ENVIRONMENT # No default value, required # kafka_brokers # The comma separated Kafka brokers address. -kafka_brokers=PydanticUndefined +KPOPS_KAFKA_BROKERS # No default value, required # defaults_filename_prefix # The name of the defaults file and the prefix of the defaults # environment file. -defaults_filename_prefix=defaults -# topic_name_config -# Configure the topic name variables you can use in the pipeline -# definition. -topic_name_config=default_output_topic_name='${pipeline_name}-${component_name}' default_error_topic_name='${pipeline_name}-${component_name}-error' -# schema_registry -# Configuration for Schema Registry. -schema_registry=enabled=False url=Url('http://localhost:8081/') -# kafka_rest -# Configuration for Kafka REST Proxy. -kafka_rest=url=Url('http://localhost:8082/') -# kafka_connect -# Configuration for Kafka Connect. -kafka_connect=url=Url('http://localhost:8083/') +KPOPS_DEFAULTS_FILENAME_PREFIX=defaults +# topic_name_config.default_output_topic_name +# Configures the value for the variable ${output_topic_name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline_name}-${component_name} +# topic_name_config.default_error_topic_name +# Configures the value for the variable ${error_topic_name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline_name}-${component_name}-error +# schema_registry.enabled +# Whether the Schema Registry handler should be initialized. +KPOPS_SCHEMA_REGISTRY__ENABLED=False +# schema_registry.url +# Address of the Schema Registry. +KPOPS_SCHEMA_REGISTRY__URL=http://localhost:8081/ +# kafka_rest.url +# Address of the Kafka REST Proxy. +KPOPS_KAFKA_REST__URL=http://localhost:8082/ +# kafka_connect.url +# Address of Kafka Connect. +KPOPS_KAFKA_CONNECT__URL=http://localhost:8083/ # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. -timeout=300 +KPOPS_TIMEOUT=300 # create_namespace # Flag for `helm upgrade --install`. Create the release namespace if # not present. -create_namespace=False -# helm_config -# Global flags for Helm. -helm_config=context=None debug=False api_version=None -# helm_diff_config -# Configure Helm Diff. 
-helm_diff_config=ignore=set() +KPOPS_CREATE_NAMESPACE=False +# helm_config.context +# Name of kubeconfig context (`--kube-context`) +KPOPS_HELM_CONFIG__CONTEXT # No default value, not required +# helm_config.debug +# Run Helm in Debug mode +KPOPS_HELM_CONFIG__DEBUG=False +# helm_config.api_version +# Kubernetes API version used for Capabilities.APIVersions +KPOPS_HELM_CONFIG__API_VERSION # No default value, not required +# helm_diff_config.ignore +# Set of keys that should not be checked. +KPOPS_HELM_DIFF_CONFIG__IGNORE # No default value, required # retain_clean_jobs # Whether to retain clean up jobs in the cluster or uninstall the, # after completion. -retain_clean_jobs=False +KPOPS_RETAIN_CLEAN_JOBS=False diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 9c1c704a9..f81eb8f56 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,17 +1,21 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name | Default Value |Required| Description | Setting name | -|------------------------|----------------------------------------------------------------------------------------------------------------------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------| -|defaults_path |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml` |defaults_path | -|environment |PydanticUndefined |False |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|kafka_brokers |PydanticUndefined |False |The comma separated Kafka brokers address. |kafka_brokers | -|defaults_filename_prefix|defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix| -|topic_name_config |default_output_topic_name='${pipeline_name}-${component_name}' default_error_topic_name='${pipeline_name}-${component_name}-error'|False |Configure the topic name variables you can use in the pipeline definition. |topic_name_config | -|schema_registry |enabled=False url=Url('http://localhost:8081/') |False |Configuration for Schema Registry. |schema_registry | -|kafka_rest |url=Url('http://localhost:8082/') |False |Configuration for Kafka REST Proxy. |kafka_rest | -|kafka_connect |url=Url('http://localhost:8083/') |False |Configuration for Kafka Connect. |kafka_connect | -|timeout |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | -|create_namespace |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | -|helm_config |context=None debug=False api_version=None |False |Global flags for Helm. |helm_config | -|helm_diff_config |ignore=set() |False |Configure Helm Diff. |helm_diff_config | -|retain_clean_jobs |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | +| Name | Default Value |Required| Description | Setting name | +|--------------------------------------------------|----------------------------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------| +|KPOPS_DEFAULTS_PATH |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml` |defaults_path | +|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | +|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | +|KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix | +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component_name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component_name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | +|KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. |schema_registry.enabled | +|KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url | +|KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url | +|KPOPS_KAFKA_CONNECT__URL |http://localhost:8083/ |False |Address of Kafka Connect. |kafka_connect.url | +|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | +|KPOPS_CREATE_NAMESPACE |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | +|KPOPS_HELM_CONFIG__CONTEXT | |False |Name of kubeconfig context (`--kube-context`) |helm_config.context | +|KPOPS_HELM_CONFIG__DEBUG |False |False |Run Helm in Debug mode |helm_config.debug | +|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for Capabilities.APIVersions |helm_config.api_version | +|KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore | +|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 2638bd8a6..30a7e15bf 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -2,17 +2,20 @@ import csv import shutil -from collections.abc import Callable, Iterator +from collections.abc import Callable +from contextlib import suppress from dataclasses import dataclass from pathlib import Path from textwrap import fill -from typing import Any, get_args +from typing import Any -from pydantic.fields import FieldInfo -from pydantic_settings import BaseSettings +from pydantic import BaseModel +from pydantic_core import PydanticUndefined from pytablewriter import MarkdownTableWriter from typer.models import ArgumentInfo, OptionInfo +from kpops.utils.dict_ops import generate_substitution + try: from typing import Self except ImportError: @@ -127,7 +130,7 @@ def csv_append_env_var( width=68, ) required = False - if default_value == Ellipsis: + if default_value in [Ellipsis, PydanticUndefined]: required = True default_value = "" elif default_value is None: @@ -254,7 +257,13 @@ def fill_csv_pipeline_config(target: Path) -> None: :param target: The path to the `.csv` file. Note that it must already contain the column names """ - for field_name, field_value in collect_fields(KpopsConfig): + for (field_name, field_value), env_var_name in zip( + generate_substitution(collect_fields(KpopsConfig), separator=".").items(), + generate_substitution(collect_fields(KpopsConfig), separator="__").keys(), + strict=True, + ): + with suppress(KeyError): # In case the prefix is ever removed from KpopsConfig + env_var_name = KpopsConfig.model_config["env_prefix"] + env_var_name field_description: str = ( field_value.description or "No description available, please refer to the pipeline config documentation." @@ -262,26 +271,46 @@ def fill_csv_pipeline_config(target: Path) -> None: field_default = field_value.default csv_append_env_var( target, - field_value.serialization_alias or field_name, + env_var_name.upper(), field_default, field_description, field_name, ) -# TODO(Ivan Yordanov): Separate complex fields into their "leaves" -def collect_fields(settings: type[BaseSettings]) -> Iterator[tuple[str, FieldInfo]]: - """Collect and yield all fields in a settings class. +def collect_fields(model: type[BaseModel]) -> dict[str, Any]: + """Collect and return a ``dict`` of all fields in a settings class. :param model: settings class - :yield: all settings including nested ones in settings classes + :return: ``dict`` of all fields in a settings class """ - for field_name, field_value in settings.model_fields.items(): - if field_value.annotation: - for field_type in get_args(field_value.annotation): - if field_type and issubclass(field_type, BaseSettings): - yield from collect_fields(field_type) - yield field_name, field_value + + def patched_issubclass_of_basemodel(cls): + """Pydantic breaks issubclass. 
+ + ``issubclass(set[str], set) # True`` + ``issubclass(BaseSettings, BaseModel) # True`` + ``issubclass(set[str], BaseModel) # raises exception`` + + :param cls: class to check + :return: Whether cls is subclass of ``BaseModel`` + """ + try: + return issubclass(cls, BaseModel) + except TypeError as e: + if str(e) == "issubclass() arg 1 must be a class": + return False + raise + + seen_fields = {} + for field_name, field_value in model.model_fields.items(): + if field_value.annotation and patched_issubclass_of_basemodel( + field_value.annotation + ): + seen_fields[field_name] = collect_fields(field_value.annotation) + else: + seen_fields[field_name] = field_value + return seen_fields def fill_csv_cli(target: Path) -> None: diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index 14cc849e3..210bbd87a 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -66,7 +66,7 @@ def flatten_mapping( if prefix: key = prefix + separator + key if isinstance(value, Mapping): - nested_mapping = flatten_mapping(value, key) + nested_mapping = flatten_mapping(value, key, separator) top = update_nested_pair(top, nested_mapping) else: top[key] = value @@ -77,7 +77,8 @@ def generate_substitution( input: dict, prefix: str | None = None, existing_substitution: dict | None = None, -) -> dict: + separator: str | None = None, +) -> dict[Any, Any]: """Generate a complete substitution dict from a given dict. Finds all attributes that belong to a model and expands them to create @@ -88,4 +89,10 @@ def generate_substitution( :param substitution: existing substitution to include :returns: Substitution dict of all variables related to the model. """ - return update_nested(existing_substitution or {}, flatten_mapping(input, prefix)) + if separator is None: + return update_nested( + existing_substitution or {}, flatten_mapping(input, prefix) + ) + return update_nested( + existing_substitution or {}, flatten_mapping(input, prefix, separator) + ) diff --git a/tests/utils/test_doc_gen.py b/tests/utils/test_doc_gen.py index 308723386..5ad065f2c 100644 --- a/tests/utils/test_doc_gen.py +++ b/tests/utils/test_doc_gen.py @@ -2,31 +2,17 @@ from typing import Any import pytest -from pydantic_core import PydanticUndefined from hooks.gen_docs.gen_docs_env_vars import ( EnvVarAttrs, append_csv_to_dotenv_file, - collect_fields, csv_append_env_var, write_csv_to_md_file, write_title_to_dotenv_file, ) -from tests.utils.resources.nested_base_settings import ParentSettings class TestEnvDocGen: - def test_collect_fields(self): - expected: list[Any] = [ - "not_nested_field", - PydanticUndefined, - PydanticUndefined, - ] - actual = [ - field_value.default for _, field_value in collect_fields(ParentSettings) - ] - assert actual == expected - @pytest.mark.parametrize( ("var_name", "default_value", "description", "extra_args", "expected_outcome"), [ From 11f7f05bc294de44f9084b8b64d54ea53cc7943a Mon Sep 17 00:00:00 2001 From: Ivan Yordanov Date: Tue, 12 Dec 2023 12:47:07 +0200 Subject: [PATCH 96/96] refactor: narrow down type hints --- kpops/components/base_components/kafka_connector.py | 4 ++-- kpops/utils/dict_ops.py | 9 ++++++--- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 1a0625860..7af2c5ae4 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -3,7 +3,7 @@ import logging from abc import ABC from functools import 
cached_property -from typing import Any, NoReturn +from typing import NoReturn from pydantic import Field, PrivateAttr, ValidationInfo, field_validator from typing_extensions import override @@ -78,7 +78,7 @@ def connector_config_should_have_component_name( cls, app: KafkaConnectorConfig | dict[str, str], info: ValidationInfo, - ) -> Any: + ) -> KafkaConnectorConfig: if isinstance(app, KafkaConnectorConfig): app = app.model_dump() component_name: str = info.data["prefix"] + info.data["name"] diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index 210bbd87a..c53cc383d 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -1,5 +1,5 @@ from collections.abc import Mapping -from typing import Any +from typing import Any, TypeVar def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict: @@ -73,12 +73,15 @@ def flatten_mapping( return top +_V = TypeVar("_V") + + def generate_substitution( - input: dict, + input: dict[str, _V], prefix: str | None = None, existing_substitution: dict | None = None, separator: str | None = None, -) -> dict[Any, Any]: +) -> dict[str, _V]: """Generate a complete substitution dict from a given dict. Finds all attributes that belong to a model and expands them to create