diff --git a/docs/docs/resources/variables/cli_env_vars.env b/docs/docs/resources/variables/cli_env_vars.env index dec1d8b3a..dc44ac3a6 100644 --- a/docs/docs/resources/variables/cli_env_vars.env +++ b/docs/docs/resources/variables/cli_env_vars.env @@ -12,6 +12,9 @@ KPOPS_PIPELINE_BASE_DIR=. KPOPS_CONFIG_PATH=config.yaml # Path to defaults folder KPOPS_DEFAULT_PATH # No default value, not required +# Path to dotenv file. Multiple files can be provided. The files will +# be loaded in order, with each file overriding the previous one. +KPOPS_DOTENV_PATH # No default value, not required # Path to YAML with pipeline definition KPOPS_PIPELINE_PATH # No default value, required # Comma separated list of steps to apply the command on diff --git a/docs/docs/resources/variables/cli_env_vars.md b/docs/docs/resources/variables/cli_env_vars.md index 763cb936e..ed0880bee 100644 --- a/docs/docs/resources/variables/cli_env_vars.md +++ b/docs/docs/resources/variables/cli_env_vars.md @@ -1,9 +1,10 @@ These variables are a lower priority alternative to the commands' flags. If a variable is set, the corresponding flag does not have to be specified in commands. Variables marked as required can instead be set as flags. -| Name |Default Value|Required| Description | -|-----------------------|-------------|--------|----------------------------------------------------------------------| -|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory)| -|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | -|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | -|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | -|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | +| Name |Default Value|Required| Description | +|-----------------------|-------------|--------|-----------------------------------------------------------------------------------------------------------------------------------| +|KPOPS_PIPELINE_BASE_DIR|. |False |Base directory to the pipelines (default is current working directory) | +|KPOPS_CONFIG_PATH |config.yaml |False |Path to the config.yaml file | +|KPOPS_DEFAULT_PATH | |False |Path to defaults folder | +|KPOPS_DOTENV_PATH | |False |Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one.| +|KPOPS_PIPELINE_PATH | |True |Path to YAML with pipeline definition | +|KPOPS_PIPELINE_STEPS | |False |Comma separated list of steps to apply the command on | diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 00bef6a4c..2b99d2172 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -4,6 +4,11 @@ # alternative to the settings in `config.yaml`. Variables marked as # required can instead be set in the pipeline config. # +# defaults_path +# The path to the folder containing the defaults.yaml file and the +# environment defaults files. Paths can either be absolute or relative +# to `config.yaml` +KPOPS_DEFAULTS_PATH=. # environment # The environment you want to generate and deploy the pipeline to. # Suffix your environment files with this value (e.g. @@ -12,19 +17,48 @@ KPOPS_ENVIRONMENT # No default value, required # kafka_brokers # The comma separated Kafka brokers address. 
KPOPS_KAFKA_BROKERS # No default value, required -# url +# defaults_filename_prefix +# The name of the defaults file and the prefix of the defaults +# environment file. +KPOPS_DEFAULTS_FILENAME_PREFIX=defaults +# topic_name_config.default_output_topic_name +# Configures the value for the variable ${output_topic_name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME=${pipeline_name}-${component_name} +# topic_name_config.default_error_topic_name +# Configures the value for the variable ${error_topic_name} +KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME=${pipeline_name}-${component_name}-error +# schema_registry.enabled +# Whether the Schema Registry handler should be initialized. +KPOPS_SCHEMA_REGISTRY__ENABLED=False +# schema_registry.url # Address of the Schema Registry. -KPOPS_SCHEMA_REGISTRY_URL=http://localhost:8081 -# url +KPOPS_SCHEMA_REGISTRY__URL=http://localhost:8081/ +# kafka_rest.url # Address of the Kafka REST Proxy. -KPOPS_KAFKA_REST_URL=http://localhost:8082 -# url +KPOPS_KAFKA_REST__URL=http://localhost:8082/ +# kafka_connect.url # Address of Kafka Connect. -KPOPS_KAFKA_CONNECT_URL=http://localhost:8083 +KPOPS_KAFKA_CONNECT__URL=http://localhost:8083/ # timeout # The timeout in seconds that specifies when actions like deletion or # deploy timeout. KPOPS_TIMEOUT=300 +# create_namespace +# Flag for `helm upgrade --install`. Create the release namespace if +# not present. +KPOPS_CREATE_NAMESPACE=False +# helm_config.context +# Name of kubeconfig context (`--kube-context`) +KPOPS_HELM_CONFIG__CONTEXT # No default value, not required +# helm_config.debug +# Run Helm in Debug mode +KPOPS_HELM_CONFIG__DEBUG=False +# helm_config.api_version +# Kubernetes API version used for Capabilities.APIVersions +KPOPS_HELM_CONFIG__API_VERSION # No default value, not required +# helm_diff_config.ignore +# Set of keys that should not be checked. +KPOPS_HELM_DIFF_CONFIG__IGNORE # No default value, required # retain_clean_jobs # Whether to retain clean up jobs in the cluster or uninstall the, # after completion. diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index 2419de11d..f81eb8f56 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -1,11 +1,21 @@ These variables are a lower priority alternative to the settings in `config.yaml`. Variables marked as required can instead be set in the pipeline config. -| Name | Default Value |Required| Description | Setting name | -|-------------------------|---------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| -|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | -|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | -|KPOPS_SCHEMA_REGISTRY_URL|http://localhost:8081|False |Address of the Schema Registry. |url | -|KPOPS_KAFKA_REST_URL |http://localhost:8082|False |Address of the Kafka REST Proxy. |url | -|KPOPS_KAFKA_CONNECT_URL |http://localhost:8083|False |Address of Kafka Connect. |url | -|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. 
|timeout | -|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. |retain_clean_jobs| +| Name | Default Value |Required| Description | Setting name | +|--------------------------------------------------|----------------------------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------| +|KPOPS_DEFAULTS_PATH |. |False |The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml` |defaults_path | +|KPOPS_ENVIRONMENT | |True |The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).|environment | +|KPOPS_KAFKA_BROKERS | |True |The comma separated Kafka brokers address. |kafka_brokers | +|KPOPS_DEFAULTS_FILENAME_PREFIX |defaults |False |The name of the defaults file and the prefix of the defaults environment file. |defaults_filename_prefix | +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_OUTPUT_TOPIC_NAME|${pipeline_name}-${component_name} |False |Configures the value for the variable ${output_topic_name} |topic_name_config.default_output_topic_name| +|KPOPS_TOPIC_NAME_CONFIG__DEFAULT_ERROR_TOPIC_NAME |${pipeline_name}-${component_name}-error|False |Configures the value for the variable ${error_topic_name} |topic_name_config.default_error_topic_name | +|KPOPS_SCHEMA_REGISTRY__ENABLED |False |False |Whether the Schema Registry handler should be initialized. |schema_registry.enabled | +|KPOPS_SCHEMA_REGISTRY__URL |http://localhost:8081/ |False |Address of the Schema Registry. |schema_registry.url | +|KPOPS_KAFKA_REST__URL |http://localhost:8082/ |False |Address of the Kafka REST Proxy. |kafka_rest.url | +|KPOPS_KAFKA_CONNECT__URL |http://localhost:8083/ |False |Address of Kafka Connect. |kafka_connect.url | +|KPOPS_TIMEOUT |300 |False |The timeout in seconds that specifies when actions like deletion or deploy timeout. |timeout | +|KPOPS_CREATE_NAMESPACE |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | +|KPOPS_HELM_CONFIG__CONTEXT | |False |Name of kubeconfig context (`--kube-context`) |helm_config.context | +|KPOPS_HELM_CONFIG__DEBUG |False |False |Run Helm in Debug mode |helm_config.debug | +|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for Capabilities.APIVersions |helm_config.api_version | +|KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore | +|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 391a0f2b5..09a848235 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -1,19 +1,36 @@ { - "$ref": "#/definitions/KpopsConfig", - "definitions": { + "$defs": { "HelmConfig": { "description": "Global Helm configuration.", "properties": { "api_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Kubernetes API version used for Capabilities.APIVersions", - "title": "API version", - "type": "string" + "title": "API version" }, "context": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Name of kubeconfig context (`--kube-context`)", - "example": "dev-storage", - "title": "Context", - "type": "string" + "examples": [ + "dev-storage" + ], + "title": "Context" }, "debug": { "default": false, @@ -29,7 +46,9 @@ "properties": { "ignore": { "description": "Set of keys that should not be checked.", - "example": "- name\n- imageTag", + "examples": [ + "- name\n- imageTag" + ], "items": { "type": "string" }, @@ -46,14 +65,9 @@ "description": "Configuration for Kafka Connect.", "properties": { "url": { - "default": "http://localhost:8083", + "default": "http://localhost:8083/", "description": "Address of Kafka Connect.", - "env": "KPOPS_KAFKA_CONNECT_URL", - "env_names": [ - "kpops_kafka_connect_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", "type": "string" @@ -67,14 +81,9 @@ "description": "Configuration for Kafka REST Proxy.", "properties": { "url": { - "default": "http://localhost:8082", + "default": "http://localhost:8082/", "description": "Address of the Kafka REST Proxy.", - "env": "KPOPS_KAFKA_REST_URL", - "env_names": [ - "kpops_kafka_rest_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", "type": "string" @@ -83,181 +92,6 @@ "title": "KafkaRestConfig", "type": "object" }, - "KpopsConfig": { - "additionalProperties": false, - "description": "Pipeline configuration unrelated to the components.", - "properties": { - "create_namespace": { - "default": false, - "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", - "env_names": [ - "kpops_create_namespace" - ], - "title": "Create Namespace", - "type": "boolean" - }, - "defaults_filename_prefix": { - "default": "defaults", - "description": "The name of the defaults file and the prefix of the defaults environment file.", - "env_names": [ - "kpops_defaults_filename_prefix" - ], - "title": "Defaults Filename Prefix", - "type": "string" - }, - "defaults_path": { - "default": ".", - "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", - "env_names": [ - "kpops_defaults_path" - ], - "example": "defaults", - "format": "path", - "title": "Defaults Path", - "type": "string" - }, - "environment": { - "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. 
defaults_development.yaml for environment=development).", - "env": "KPOPS_ENVIRONMENT", - "env_names": [ - "kpops_environment" - ], - "example": "development", - "title": "Environment", - "type": "string" - }, - "helm_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmConfig" - } - ], - "default": { - "api_version": null, - "context": null, - "debug": false - }, - "description": "Global flags for Helm.", - "env_names": [ - "kpops_helm_config" - ], - "title": "Helm Config" - }, - "helm_diff_config": { - "allOf": [ - { - "$ref": "#/definitions/HelmDiffConfig" - } - ], - "default": { - "ignore": [] - }, - "description": "Configure Helm Diff.", - "env_names": [ - "kpops_helm_diff_config" - ], - "title": "Helm Diff Config" - }, - "kafka_brokers": { - "description": "The comma separated Kafka brokers address.", - "env": "KPOPS_KAFKA_BROKERS", - "env_names": [ - "kpops_kafka_brokers" - ], - "example": "broker1:9092,broker2:9092,broker3:9092", - "title": "Kafka Brokers", - "type": "string" - }, - "kafka_connect": { - "allOf": [ - { - "$ref": "#/definitions/KafkaConnectConfig" - } - ], - "default": { - "url": "http://localhost:8083" - }, - "description": "Configuration for Kafka Connect.", - "env_names": [ - "kpops_kafka_connect" - ], - "title": "Kafka Connect" - }, - "kafka_rest": { - "allOf": [ - { - "$ref": "#/definitions/KafkaRestConfig" - } - ], - "default": { - "url": "http://localhost:8082" - }, - "description": "Configuration for Kafka REST Proxy.", - "env_names": [ - "kpops_kafka_rest" - ], - "title": "Kafka Rest" - }, - "retain_clean_jobs": { - "default": false, - "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", - "env": "KPOPS_RETAIN_CLEAN_JOBS", - "env_names": [ - "kpops_retain_clean_jobs" - ], - "title": "Retain Clean Jobs", - "type": "boolean" - }, - "schema_registry": { - "allOf": [ - { - "$ref": "#/definitions/SchemaRegistryConfig" - } - ], - "default": { - "enabled": false, - "url": "http://localhost:8081" - }, - "description": "Configuration for Schema Registry.", - "env_names": [ - "kpops_schema_registry" - ], - "title": "Schema Registry" - }, - "timeout": { - "default": 300, - "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", - "env": "KPOPS_TIMEOUT", - "env_names": [ - "kpops_timeout" - ], - "title": "Timeout", - "type": "integer" - }, - "topic_name_config": { - "allOf": [ - { - "$ref": "#/definitions/TopicNameConfig" - } - ], - "default": { - "default_error_topic_name": "${pipeline_name}-${component_name}-error", - "default_output_topic_name": "${pipeline_name}-${component_name}" - }, - "description": "Configure the topic name variables you can use in the pipeline definition.", - "env_names": [ - "kpops_topic_name_config" - ], - "title": "Topic Name Config" - } - }, - "required": [ - "environment", - "kafka_brokers" - ], - "title": "KpopsConfig", - "type": "object" - }, "SchemaRegistryConfig": { "additionalProperties": false, "description": "Configuration for Schema Registry.", @@ -265,21 +99,13 @@ "enabled": { "default": false, "description": "Whether the Schema Registry handler should be initialized.", - "env_names": [ - "enabled" - ], "title": "Enabled", "type": "boolean" }, "url": { - "default": "http://localhost:8081", + "default": "http://localhost:8081/", "description": "Address of the Schema Registry.", - "env": "KPOPS_SCHEMA_REGISTRY_URL", - "env_names": [ - "kpops_schema_registry_url" - ], "format": "uri", - "maxLength": 65536, "minLength": 1, "title": "Url", 
"type": "string" @@ -295,18 +121,12 @@ "default_error_topic_name": { "default": "${pipeline_name}-${component_name}-error", "description": "Configures the value for the variable ${error_topic_name}", - "env_names": [ - "default_error_topic_name" - ], "title": "Default Error Topic Name", "type": "string" }, "default_output_topic_name": { "default": "${pipeline_name}-${component_name}", "description": "Configures the value for the variable ${output_topic_name}", - "env_names": [ - "default_output_topic_name" - ], "title": "Default Output Topic Name", "type": "string" } @@ -315,5 +135,136 @@ "type": "object" } }, - "title": "KPOps config schema" + "additionalProperties": false, + "description": "Pipeline configuration unrelated to the components.", + "properties": { + "create_namespace": { + "default": false, + "description": "Flag for `helm upgrade --install`. Create the release namespace if not present.", + "title": "Create Namespace", + "type": "boolean" + }, + "defaults_filename_prefix": { + "default": "defaults", + "description": "The name of the defaults file and the prefix of the defaults environment file.", + "title": "Defaults Filename Prefix", + "type": "string" + }, + "defaults_path": { + "default": ".", + "description": "The path to the folder containing the defaults.yaml file and the environment defaults files. Paths can either be absolute or relative to `config.yaml`", + "examples": [ + "defaults", + "." + ], + "format": "path", + "title": "Defaults Path", + "type": "string" + }, + "environment": { + "description": "The environment you want to generate and deploy the pipeline to. Suffix your environment files with this value (e.g. defaults_development.yaml for environment=development).", + "examples": [ + "development", + "production" + ], + "title": "Environment", + "type": "string" + }, + "helm_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmConfig" + } + ], + "default": { + "api_version": null, + "context": null, + "debug": false + }, + "description": "Global flags for Helm." + }, + "helm_diff_config": { + "allOf": [ + { + "$ref": "#/$defs/HelmDiffConfig" + } + ], + "default": { + "ignore": [] + }, + "description": "Configure Helm Diff." + }, + "kafka_brokers": { + "description": "The comma separated Kafka brokers address.", + "examples": [ + "broker1:9092,broker2:9092,broker3:9092" + ], + "title": "Kafka Brokers", + "type": "string" + }, + "kafka_connect": { + "allOf": [ + { + "$ref": "#/$defs/KafkaConnectConfig" + } + ], + "default": { + "url": "http://localhost:8083/" + }, + "description": "Configuration for Kafka Connect." + }, + "kafka_rest": { + "allOf": [ + { + "$ref": "#/$defs/KafkaRestConfig" + } + ], + "default": { + "url": "http://localhost:8082/" + }, + "description": "Configuration for Kafka REST Proxy." + }, + "retain_clean_jobs": { + "default": false, + "description": "Whether to retain clean up jobs in the cluster or uninstall the, after completion.", + "title": "Retain Clean Jobs", + "type": "boolean" + }, + "schema_registry": { + "allOf": [ + { + "$ref": "#/$defs/SchemaRegistryConfig" + } + ], + "default": { + "enabled": false, + "url": "http://localhost:8081/" + }, + "description": "Configuration for Schema Registry." 
+ }, + "timeout": { + "default": 300, + "description": "The timeout in seconds that specifies when actions like deletion or deploy timeout.", + "title": "Timeout", + "type": "integer" + }, + "topic_name_config": { + "allOf": [ + { + "$ref": "#/$defs/TopicNameConfig" + } + ], + "default": { + "default_error_topic_name": "${pipeline_name}-${component_name}-error", + "default_output_topic_name": "${pipeline_name}-${component_name}" + }, + "description": "Configure the topic name variables you can use in the pipeline definition." + } + }, + "required": [ + "environment", + "kafka_brokers" + ], + "title": "KpopsConfig", + "type": "object" } diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 2fe9aeeac..0882ccfa5 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -1,12 +1,12 @@ { - "definitions": { + "$defs": { "FromSection": { "additionalProperties": false, "description": "Holds multiple input topics.", "properties": { "components": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Components to read from", @@ -15,7 +15,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Input topics", @@ -31,16 +31,28 @@ "description": "Input topic.", "properties": { "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/InputTopicTypes" + "$ref": "#/$defs/InputTopicTypes" + }, + { + "type": "null" } ], + "default": null, "description": "Topic type" } }, @@ -48,23 +60,27 @@ "type": "object" }, "HelmApp": { + "additionalProperties": true, "description": "Kubernetes app managed through Helm with an associated Helm chart.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KubernetesAppConfig" + "$ref": "#/$defs/KubernetesAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -85,36 +101,41 @@ "type": "string" }, "repo_config": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/HelmRepoConfig" + }, { - "$ref": "#/definitions/HelmRepoConfig" + "type": "null" } ], - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "default": null, + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "helm-app", - "description": "Kubernetes app managed through Helm with an associated Helm chart.", - "enum": [ - "helm-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + 
"type": "null" + } + ], + "default": null, "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -131,7 +152,7 @@ "repo_auth_flags": { "allOf": [ { - "$ref": "#/definitions/RepoAuthFlags" + "$ref": "#/$defs/RepoAuthFlags" } ], "default": { @@ -141,8 +162,7 @@ "password": null, "username": null }, - "description": "Authorisation-related flags", - "title": "Repo Auth Flags" + "description": "Authorisation-related flags" }, "repository_name": { "description": "Name of the Helm repository", @@ -172,7 +192,8 @@ "type": "string" }, "KafkaConnectorConfig": { - "additionalProperties": { + "additionalProperties": true, + "additional_properties": { "type": "string" }, "description": "Settings specific to Kafka Connectors.", @@ -189,23 +210,27 @@ "type": "object" }, "KafkaSinkConnector": { + "additionalProperties": true, "description": "Kafka sink connector model.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -228,7 +253,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -242,8 +267,7 @@ "repository_name": "bakdata-kafka-connect-resetter", "url": "https://bakdata.github.io/kafka-connect-resetter/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "resetter_values": { "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", @@ -251,28 +275,29 @@ "type": "object" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-sink-connector", - "description": "Kafka sink connector model.", - "enum": [ - "kafka-sink-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -284,23 +309,27 @@ "type": "object" }, "KafkaSourceConnector": { + "additionalProperties": true, "description": "Kafka source connector model.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/KafkaConnectorConfig" + "$ref": "#/$defs/KafkaConnectorConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -315,9 +344,17 @@ "type": "string" }, "offset_topic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running", - "title": "Offset Topic", - "type": "string" + "title": "Offset Topic" }, "prefix": { "default": "${pipeline_name}-", @@ -328,7 +365,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -342,8 +379,7 @@ "repository_name": "bakdata-kafka-connect-resetter", "url": "https://bakdata.github.io/kafka-connect-resetter/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "resetter_values": { "description": "Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc.", @@ -351,28 +387,29 @@ "type": "object" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "kafka-source-connector", - "description": "Kafka source connector model.", - "enum": [ - "kafka-source-connector" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "1.0.4", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -384,6 +421,7 @@ "type": "object" }, "KubernetesAppConfig": { + "additionalProperties": true, "description": "Settings specific to Kubernetes apps.", "properties": {}, "title": "KubernetesAppConfig", @@ -399,18 +437,19 @@ "type": "string" }, "ProducerApp": { + "additionalProperties": true, "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. Note that the producer does not support error topics.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/ProducerValues" + "$ref": "#/$defs/ProducerValues" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { + "default": null, "description": "Producer doesn't support FromSection", "title": "From", "type": "null" @@ -434,7 +473,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -448,32 +487,32 @@ "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "producer-app", - "description": "Producer component.\nThis producer holds configuration to use as values for the streams bootstrap producer helm chart. 
Note that the producer does not support error topics.", - "enum": [ - "producer-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -485,6 +524,7 @@ "type": "object" }, "ProducerStreamsConfig": { + "additionalProperties": true, "description": "Kafka Streams settings specific to Producer.", "properties": { "brokers": { @@ -502,14 +542,30 @@ "type": "object" }, "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Output topic", - "title": "Outputtopic", - "type": "string" + "title": "Outputtopic" }, "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "URL of the schema registry", - "title": "Schemaregistryurl", - "type": "string" + "title": "Schemaregistryurl" } }, "required": [ @@ -519,21 +575,29 @@ "type": "object" }, "ProducerValues": { + "additionalProperties": true, "description": "Settings specific to producers.", "properties": { "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Override name with this value", - "title": "Nameoverride", - "type": "string" + "title": "Nameoverride" }, "streams": { "allOf": [ { - "$ref": "#/definitions/ProducerStreamsConfig" + "$ref": "#/$defs/ProducerStreamsConfig" } ], - "description": "Kafka Streams settings", - "title": "Streams" + "description": "Kafka Streams settings" } }, "required": [ @@ -546,16 +610,32 @@ "description": "Authorisation-related flags for `helm repo`.", "properties": { "ca_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Path to CA bundle file to verify certificates of HTTPS-enabled servers", - "format": "path", - "title": "Ca File", - "type": "string" + "title": "Ca File" }, "cert_file": { + "anyOf": [ + { + "format": "path", + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Path to SSL certificate file to identify HTTPS client", - "format": "path", - "title": "Cert File", - "type": "string" + "title": "Cert File" }, "insecure_skip_tls_verify": { "default": false, @@ -564,37 +644,57 @@ "type": "boolean" }, "password": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Password", - "title": "Password", - "type": "string" + "title": "Password" }, "username": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Username", - "title": "Username", - "type": "string" + "title": "Username" } }, "title": "RepoAuthFlags", "type": "object" }, "StreamsApp": { + "additionalProperties": true, "description": "StreamsApp component that configures a streams bootstrap app.", "properties": { "app": { "allOf": [ { - "$ref": "#/definitions/StreamsAppConfig" + "$ref": "#/$defs/StreamsAppConfig" } ], - "description": "Application-specific settings", - "title": "App" + "description": "Application-specific settings" }, "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": 
"Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -617,7 +717,7 @@ "repo_config": { "allOf": [ { - "$ref": "#/definitions/HelmRepoConfig" + "$ref": "#/$defs/HelmRepoConfig" } ], "default": { @@ -631,32 +731,32 @@ "repository_name": "bakdata-streams-bootstrap", "url": "https://bakdata.github.io/streams-bootstrap/" }, - "description": "Configuration of the Helm chart repo to be used for deploying the component", - "title": "Repo Config" + "description": "Configuration of the Helm chart repo to be used for deploying the component" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "streams-app", - "description": "StreamsApp component that configures a streams bootstrap app.", - "enum": [ - "streams-app" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" }, "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "default": "2.9.0", "description": "Helm chart version", - "title": "Version", - "type": "string" + "title": "Version" } }, "required": [ @@ -668,6 +768,7 @@ "type": "object" }, "StreamsAppAutoScaling": { + "additionalProperties": true, "description": "Kubernetes Event-driven Autoscaling config.", "properties": { "consumerGroup": { @@ -683,13 +784,22 @@ }, "enabled": { "default": false, + "description": "", "title": "Enabled", "type": "boolean" }, "idleReplicas": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "If this property is set, KEDA will scale the resource down to this number of replicas. 
https://keda.sh/docs/2.9/concepts/scaling-deployments/#idlereplicacount", - "title": "Idle replica count", - "type": "integer" + "title": "Idle replica count" }, "lagThreshold": { "description": "Average target value to trigger scaling actions.", @@ -738,30 +848,41 @@ "type": "object" }, "StreamsAppConfig": { + "additionalProperties": true, "description": "StreamsBoostrap app configurations.\nThe attributes correspond to keys and values that are used as values for the streams bootstrap helm chart.", "properties": { "autoscaling": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/StreamsAppAutoScaling" + }, { - "$ref": "#/definitions/StreamsAppAutoScaling" + "type": "null" } ], - "description": "Kubernetes Event-driven Autoscaling config", - "title": "Autoscaling" + "default": null, + "description": "Kubernetes Event-driven Autoscaling config" }, "nameOverride": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Override name with this value", - "title": "Nameoverride", - "type": "string" + "title": "Nameoverride" }, "streams": { "allOf": [ { - "$ref": "#/definitions/StreamsConfig" + "$ref": "#/$defs/StreamsConfig" } ], - "description": "Streams Bootstrap streams section", - "title": "Streams" + "description": "Streams Bootstrap streams section" } }, "required": [ @@ -771,6 +892,7 @@ "type": "object" }, "StreamsConfig": { + "additionalProperties": true, "description": "Streams Bootstrap streams section.", "properties": { "brokers": { @@ -779,18 +901,23 @@ "type": "string" }, "config": { - "additionalProperties": { - "type": "string" - }, "default": {}, "description": "Configuration", "title": "Config", "type": "object" }, "errorTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Error topic", - "title": "Errortopic", - "type": "string" + "title": "Errortopic" }, "extraInputPatterns": { "additionalProperties": { @@ -823,9 +950,17 @@ "type": "object" }, "inputPattern": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Input pattern", - "title": "Inputpattern", - "type": "string" + "title": "Inputpattern" }, "inputTopics": { "default": [], @@ -837,14 +972,30 @@ "type": "array" }, "outputTopic": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Output topic", - "title": "Outputtopic", - "type": "string" + "title": "Outputtopic" }, "schemaRegistryUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "URL of the schema registry", - "title": "Schemaregistryurl", - "type": "string" + "title": "Schemaregistryurl" } }, "required": [ @@ -854,6 +1005,7 @@ "type": "object" }, "ToSection": { + "additionalProperties": false, "description": "Holds multiple output topics.", "properties": { "models": { @@ -867,7 +1019,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/TopicConfig" + "$ref": "#/$defs/TopicConfig" }, "default": {}, "description": "Output topics", @@ -899,38 +1051,82 @@ "type": "object" }, "key_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Key schema class name", - "title": "Key schema", - "type": "string" + "title": "Key schema" }, "partitions_count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Number of partitions into which the topic is divided", 
- "title": "Partitions count", - "type": "integer" + "title": "Partitions count" }, "replication_factor": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Replication factor of the topic", - "title": "Replication factor", - "type": "integer" + "title": "Replication factor" }, "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/OutputTopicTypes" + }, { - "$ref": "#/definitions/OutputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type", "title": "Topic type" }, "value_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Value schema class name", - "title": "Value schema", - "type": "string" + "title": "Value schema" } }, "title": "TopicConfig", @@ -940,32 +1136,32 @@ "items": { "discriminator": { "mapping": { - "helm-app": "#/definitions/HelmApp", - "kafka-sink-connector": "#/definitions/KafkaSinkConnector", - "kafka-source-connector": "#/definitions/KafkaSourceConnector", - "producer-app": "#/definitions/ProducerApp", - "streams-app": "#/definitions/StreamsApp" + "helm-app": "#/$defs/HelmApp", + "kafka-sink-connector": "#/$defs/KafkaSinkConnector", + "kafka-source-connector": "#/$defs/KafkaSourceConnector", + "producer-app": "#/$defs/ProducerApp", + "streams-app": "#/$defs/StreamsApp" }, "propertyName": "type" }, "oneOf": [ { - "$ref": "#/definitions/HelmApp" + "$ref": "#/$defs/HelmApp" }, { - "$ref": "#/definitions/KafkaSinkConnector" + "$ref": "#/$defs/KafkaSinkConnector" }, { - "$ref": "#/definitions/KafkaSourceConnector" + "$ref": "#/$defs/KafkaSourceConnector" }, { - "$ref": "#/definitions/ProducerApp" + "$ref": "#/$defs/ProducerApp" }, { - "$ref": "#/definitions/StreamsApp" + "$ref": "#/$defs/StreamsApp" } ] }, - "title": "KPOps pipeline schema", + "title": "PipelineSchema", "type": "array" } diff --git a/docs/docs/user/core-concepts/variables/environment_variables.md b/docs/docs/user/core-concepts/variables/environment_variables.md index 2a57aabea..35ca235d7 100644 --- a/docs/docs/user/core-concepts/variables/environment_variables.md +++ b/docs/docs/user/core-concepts/variables/environment_variables.md @@ -6,10 +6,7 @@ Environment variables can be set by using the [export](https://www.unix.com/man- !!! tip "dotenv files" - Support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20), - but not implemented in KPOps yet. One of the possible ways to still - use one and export the contents manually is with the following command: `#!sh export $(xargs < .env)`. - This would work in `bash` suppose there are no spaces inside the values. + KPOps currently supports `.env` files only for variables related to the [config](../config.md). Full support for `.env` files is on the [roadmap](https://github.com/bakdata/kpops/issues/20). One of the possible ways to use one and export the contents manually is with the following command: `#!sh export $(xargs < .env)`. This would work in `bash` suppose there are no spaces inside the values. 
diff --git a/docs/docs/user/references/cli-commands.md b/docs/docs/user/references/cli-commands.md index 100f05c4a..cb9b2ff5b 100644 --- a/docs/docs/user/references/cli-commands.md +++ b/docs/docs/user/references/cli-commands.md @@ -40,6 +40,7 @@ $ kpops clean [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -66,6 +67,7 @@ $ kpops deploy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -92,6 +94,7 @@ $ kpops destroy [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] @@ -118,6 +121,7 @@ $ kpops generate [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. [env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--template / --no-template`: Run Helm template [default: no-template] @@ -144,6 +148,7 @@ $ kpops reset [OPTIONS] PIPELINE_PATH [COMPONENTS_MODULE] **Options**: * `--pipeline-base-dir DIRECTORY`: Base directory to the pipelines (default is current working directory) [env var: KPOPS_PIPELINE_BASE_DIR; default: .] +* `--dotenv FILE`: Path to dotenv file. Multiple files can be provided. The files will be loaded in order, with each file overriding the previous one. 
[env var: KPOPS_DOTENV_PATH] * `--defaults DIRECTORY`: Path to defaults folder [env var: KPOPS_DEFAULT_PATH] * `--config FILE`: Path to the config.yaml file [env var: KPOPS_CONFIG_PATH; default: config.yaml] * `--steps TEXT`: Comma separated list of steps to apply the command on [env var: KPOPS_PIPELINE_STEPS] diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index c288a66b9..c6ef09c16 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -82,39 +82,40 @@ nav: - Home: KPOps Documentation: index.md - User Guide: - - What is KPOps: user/what-is-kpops.md - - Changelog: user/changelog.md - - Getting Started: - - Setup: user/getting-started/setup.md - - Quick start: user/getting-started/quick-start.md - - Teardown: user/getting-started/teardown.md - - Examples: - - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md - - Core Concepts: - - Components: - - Overview: user/core-concepts/components/overview.md - - KubernetesApp: user/core-concepts/components/kubernetes-app.md - - KafkaApp: user/core-concepts/components/kafka-app.md - - StreamsApp: user/core-concepts/components/streams-app.md - - ProducerApp: user/core-concepts/components/producer-app.md - - KafkaConnector: user/core-concepts/components/kafka-connector.md - - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md - - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md - - Config: user/core-concepts/config.md - - Defaults: user/core-concepts/defaults.md - - Variables: - - Environment variables: user/core-concepts/variables/environment_variables.md - - Substitution: user/core-concepts/variables/substitution.md - - References: - - Migration guide: - - Migrate from v1 to v2: user/migration-guide/v1-v2.md - - Migrate from v2 to v3: user/migration-guide/v2-v3.md - - CLI usage: user/references/cli-commands.md - - Editor integration: user/references/editor-integration.md - - CI integration: - - GitHub Actions: user/references/ci-integration/github-actions.md + - What is KPOps: user/what-is-kpops.md + - Changelog: user/changelog.md + - Getting Started: + - Setup: user/getting-started/setup.md + - Quick start: user/getting-started/quick-start.md + - Teardown: user/getting-started/teardown.md + - Examples: + - ATM Fraud detection pipeline: user/examples/atm-fraud-pipeline.md + - Core Concepts: + - Components: + - Overview: user/core-concepts/components/overview.md + - KubernetesApp: user/core-concepts/components/kubernetes-app.md + - HelmApp: user/core-concepts/components/helm-app.md + - KafkaApp: user/core-concepts/components/kafka-app.md + - StreamsApp: user/core-concepts/components/streams-app.md + - ProducerApp: user/core-concepts/components/producer-app.md + - KafkaConnector: user/core-concepts/components/kafka-connector.md + - KafkaSinkConnector: user/core-concepts/components/kafka-sink-connector.md + - KafkaSourceConnector: user/core-concepts/components/kafka-source-connector.md + - Config: user/core-concepts/config.md + - Defaults: user/core-concepts/defaults.md + - Variables: + - Environment variables: user/core-concepts/variables/environment_variables.md + - Substitution: user/core-concepts/variables/substitution.md + - References: + - Migration guide: + - Migrate from v1 to v2: user/migration-guide/v1-v2.md + - Migrate from v2 to v3: user/migration-guide/v2-v3.md + - CLI usage: user/references/cli-commands.md + - Editor integration: user/references/editor-integration.md + - CI integration: + - GitHub Actions: user/references/ci-integration/github-actions.md - Developer Guide: - 
- Getting Started: developer/getting-started.md - - Contributing: developer/contributing.md - - Code base: - - Auto generation: developer/auto-generation.md + - Getting Started: developer/getting-started.md + - Contributing: developer/contributing.md + - Code base: + - Auto generation: developer/auto-generation.md diff --git a/hooks/gen_docs/gen_docs_cli_usage.py b/hooks/gen_docs/gen_docs_cli_usage.py index 25f7ecd8c..84476e69f 100644 --- a/hooks/gen_docs/gen_docs_cli_usage.py +++ b/hooks/gen_docs/gen_docs_cli_usage.py @@ -7,7 +7,7 @@ PATH_KPOPS_MAIN = ROOT / "kpops/cli/main.py" PATH_CLI_COMMANDS_DOC = ROOT / "docs/docs/user/references/cli-commands.md" -# TODO(@sujuka99): try to use typer_cli.main.docs here instead +# TODO(Ivan Yordanov): try to use typer_cli.main.docs here instead # https://github.com/bakdata/kpops/issues/297 if __name__ == "__main__": diff --git a/hooks/gen_docs/gen_docs_components.py b/hooks/gen_docs/gen_docs_components.py index 6fb78f767..203294c05 100644 --- a/hooks/gen_docs/gen_docs_components.py +++ b/hooks/gen_docs/gen_docs_components.py @@ -40,11 +40,12 @@ ).type for component in KPOPS_COMPONENTS } + KPOPS_COMPONENTS_SECTIONS = { component.type: [ field_name - for field_name, model in component.__fields__.items() - if not model.field_info.exclude + for field_name, field_info in component.model_fields.items() + if not field_info.exclude ] for component in KPOPS_COMPONENTS } diff --git a/hooks/gen_docs/gen_docs_env_vars.py b/hooks/gen_docs/gen_docs_env_vars.py index 3a2bd5587..30a7e15bf 100644 --- a/hooks/gen_docs/gen_docs_env_vars.py +++ b/hooks/gen_docs/gen_docs_env_vars.py @@ -2,17 +2,20 @@ import csv import shutil -from collections.abc import Callable, Iterator +from collections.abc import Callable +from contextlib import suppress from dataclasses import dataclass from pathlib import Path from textwrap import fill from typing import Any -from pydantic import BaseSettings -from pydantic.fields import ModelField +from pydantic import BaseModel +from pydantic_core import PydanticUndefined from pytablewriter import MarkdownTableWriter from typer.models import ArgumentInfo, OptionInfo +from kpops.utils.dict_ops import generate_substitution + try: from typing import Self except ImportError: @@ -127,7 +130,7 @@ def csv_append_env_var( width=68, ) required = False - if default_value == Ellipsis: + if default_value in [Ellipsis, PydanticUndefined]: required = True default_value = "" elif default_value is None: @@ -254,35 +257,60 @@ def fill_csv_pipeline_config(target: Path) -> None: :param target: The path to the `.csv` file. Note that it must already contain the column names """ - for field in collect_fields(KpopsConfig): - field_info = KpopsConfig.Config.get_field_info(field.name) + for (field_name, field_value), env_var_name in zip( + generate_substitution(collect_fields(KpopsConfig), separator=".").items(), + generate_substitution(collect_fields(KpopsConfig), separator="__").keys(), + strict=True, + ): + with suppress(KeyError): # In case the prefix is ever removed from KpopsConfig + env_var_name = KpopsConfig.model_config["env_prefix"] + env_var_name field_description: str = ( - field.field_info.description + field_value.description or "No description available, please refer to the pipeline config documentation." 
) - field_default = field.field_info.default - if config_env_var := field_info.get( - "env", - ) or field.field_info.extra.get("env"): - csv_append_env_var( - target, - config_env_var, - field_default, - field_description, - field.name, - ) + field_default = field_value.default + csv_append_env_var( + target, + env_var_name.upper(), + field_default, + field_description, + field_name, + ) -def collect_fields(settings: type[BaseSettings]) -> Iterator[ModelField]: - """Collect and yield all fields in a settings class. +def collect_fields(model: type[BaseModel]) -> dict[str, Any]: + """Collect and return a ``dict`` of all fields in a settings class. :param model: settings class - :yield: all settings including nested ones in settings classes + :return: ``dict`` of all fields in a settings class """ - for field in settings.__fields__.values(): - if issubclass(field_type := field.type_, BaseSettings): - yield from collect_fields(field_type) - yield field + + def patched_issubclass_of_basemodel(cls): + """Pydantic breaks issubclass. + + ``issubclass(set[str], set) # True`` + ``issubclass(BaseSettings, BaseModel) # True`` + ``issubclass(set[str], BaseModel) # raises exception`` + + :param cls: class to check + :return: Whether cls is subclass of ``BaseModel`` + """ + try: + return issubclass(cls, BaseModel) + except TypeError as e: + if str(e) == "issubclass() arg 1 must be a class": + return False + raise + + seen_fields = {} + for field_name, field_value in model.model_fields.items(): + if field_value.annotation and patched_issubclass_of_basemodel( + field_value.annotation + ): + seen_fields[field_name] = collect_fields(field_value.annotation) + else: + seen_fields[field_name] = field_value + return seen_fields def fill_csv_cli(target: Path) -> None: diff --git a/kpops/cli/main.py b/kpops/cli/main.py index e4331113e..0aa0d5f67 100644 --- a/kpops/cli/main.py +++ b/kpops/cli/main.py @@ -21,6 +21,7 @@ from kpops.config import ENV_PREFIX, KpopsConfig from kpops.pipeline_generator.pipeline import Pipeline from kpops.utils.gen_schema import SchemaScope, gen_config_schema, gen_pipeline_schema +from kpops.utils.pydantic import YamlConfigSettingsSource if TYPE_CHECKING: from collections.abc import Iterator @@ -31,6 +32,18 @@ app = dtyper.Typer(pretty_exceptions_enable=False) +DOTENV_PATH_OPTION: Optional[list[Path]] = typer.Option( + default=None, + exists=True, + dir_okay=False, + file_okay=True, + envvar=f"{ENV_PREFIX}DOTENV_PATH", + help=( + "Path to dotenv file. Multiple files can be provided. " + "The files will be loaded in order, with each file overriding the previous one." 
+ ), +) + BASE_DIR_PATH_OPTION: Path = typer.Option( default=Path(), exists=True, @@ -194,14 +207,16 @@ def log_action(action: str, pipeline_component: PipelineComponent): def create_kpops_config( - config: Path, defaults: Optional[Path], verbose: bool + config: Path, defaults: Optional[Path], verbose: bool, dotenv: Optional[list[Path]] ) -> KpopsConfig: setup_logging_level(verbose) - KpopsConfig.Config.config_path = config + YamlConfigSettingsSource.path_to_config = config + kpops_config = KpopsConfig( + _env_file=dotenv # pyright: ignore[reportGeneralTypeIssues] + ) if defaults: - kpops_config = KpopsConfig(defaults_path=defaults) + kpops_config.defaults_path = defaults else: - kpops_config = KpopsConfig() kpops_config.defaults_path = config.parent / kpops_config.defaults_path return kpops_config @@ -243,6 +258,7 @@ def generate( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, template: bool = typer.Option(False, help="Run Helm template"), @@ -250,7 +266,7 @@ def generate( filter_type: FilterType = FILTER_TYPE, verbose: bool = VERBOSE_OPTION, ) -> Pipeline: - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -276,6 +292,7 @@ def deploy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -283,7 +300,7 @@ def deploy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -299,6 +316,7 @@ def destroy( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -306,7 +324,7 @@ def destroy( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) @@ -321,6 +339,7 @@ def reset( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -328,7 +347,7 @@ def reset( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, 
kpops_config ) @@ -344,6 +363,7 @@ def clean( pipeline_path: Path = PIPELINE_PATH_ARG, components_module: Optional[str] = COMPONENTS_MODULES, pipeline_base_dir: Path = BASE_DIR_PATH_OPTION, + dotenv: Optional[list[Path]] = DOTENV_PATH_OPTION, defaults: Optional[Path] = DEFAULT_PATH_OPTION, config: Path = CONFIG_PATH_OPTION, steps: Optional[str] = PIPELINE_STEPS, @@ -351,7 +371,7 @@ def clean( dry_run: bool = DRY_RUN, verbose: bool = VERBOSE_OPTION, ): - kpops_config = create_kpops_config(config, defaults, verbose) + kpops_config = create_kpops_config(config, defaults, verbose, dotenv) pipeline = setup_pipeline( pipeline_base_dir, pipeline_path, components_module, kpops_config ) diff --git a/kpops/component_handlers/helm_wrapper/model.py b/kpops/component_handlers/helm_wrapper/model.py index af21abb3f..0a155bb0d 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -3,23 +3,23 @@ from pathlib import Path import yaml -from pydantic import BaseConfig, BaseModel, Extra, Field +from pydantic import BaseModel, ConfigDict, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.exception import ParseError from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class HelmDiffConfig(BaseModel): ignore: set[str] = Field( default_factory=set, description="Set of keys that should not be checked.", - example="- name\n- imageTag", + examples=["- name\n- imageTag"], ) -class RepoAuthFlags(BaseModel): +class RepoAuthFlags(DescConfigModel): """Authorisation-related flags for `helm repo`. :param username: Username, defaults to None @@ -46,9 +46,6 @@ class RepoAuthFlags(BaseModel): default=False, description=describe_attr("insecure_skip_tls_verify", __doc__) ) - class Config(DescConfig): - pass - def to_command(self) -> list[str]: command: list[str] = [] if self.username: @@ -64,7 +61,7 @@ def to_command(self) -> list[str]: return command -class HelmRepoConfig(BaseModel): +class HelmRepoConfig(DescConfigModel): """Helm repository configuration. :param repository_name: Name of the Helm repository @@ -80,11 +77,8 @@ class HelmRepoConfig(BaseModel): default=RepoAuthFlags(), description=describe_attr("repo_auth_flags", __doc__) ) - class Config(DescConfig): - pass - -class HelmConfig(BaseModel): +class HelmConfig(DescConfigModel): """Global Helm configuration. 
:param context: Name of kubeconfig context (`--kube-context`) @@ -95,7 +89,7 @@ class HelmConfig(BaseModel): context: str | None = Field( default=None, description=describe_attr("context", __doc__), - example="dev-storage", + examples=["dev-storage"], ) debug: bool = Field( default=False, @@ -107,9 +101,6 @@ description=describe_attr("api_version", __doc__), ) - class Config(DescConfig): - pass - class HelmFlags(RepoAuthFlags): set_file: dict[str, Path] = Field(default_factory=dict) @@ -120,8 +111,9 @@ wait: bool = True wait_for_jobs: bool = False - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) @override def to_command(self) -> list[str]: diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 07bbf38e6..06f21eff2 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -46,10 +46,10 @@ def create_connector( :param connector_config: The config of the connector :return: The current connector info if successful. """ - config_json = connector_config.dict() + config_json = connector_config.model_dump() connect_data = {"name": connector_config.name, "config": config_json} response = httpx.post( - url=f"{self.url}/connectors", headers=HEADERS, json=connect_data + url=f"{self.url}connectors", headers=HEADERS, json=connect_data ) if response.status_code == httpx.codes.CREATED: log.info(f"Connector {connector_config.name} created.") @@ -66,14 +66,12 @@ def create_connector( def get_connector(self, connector_name: str) -> KafkaConnectResponse: """Get information about the connector. - API Reference: - https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) - + API Reference: https://docs.confluent.io/platform/current/connect/references/restapi.html#get--connectors-(string-name) :param connector_name: Name of the created connector :return: Information about the connector. """ response = httpx.get( - url=f"{self.url}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.OK: log.info(f"Connector {connector_name} exists.") @@ -102,9 +100,9 @@ def update_connector_config( :return: Information about the connector after the change has been made.
""" connector_name = connector_config.name - config_json = connector_config.dict() + config_json = connector_config.model_dump() response = httpx.put( - url=f"{self.url}/connectors/{connector_name}/config", + url=f"{self.url}connectors/{connector_name}/config", headers=HEADERS, json=config_json, ) @@ -135,9 +133,9 @@ def validate_connector_config( :return: List of all found errors """ response = httpx.put( - url=f"{self.url}/connector-plugins/{connector_config.class_name}/config/validate", + url=f"{self.url}connector-plugins/{connector_config.class_name}/config/validate", headers=HEADERS, - json=connector_config.dict(), + json=connector_config.model_dump(), ) if response.status_code == httpx.codes.OK: @@ -165,7 +163,7 @@ def delete_connector(self, connector_name: str) -> None: :raises ConnectorNotFoundException: Connector not found """ response = httpx.delete( - url=f"{self.url}/connectors/{connector_name}", headers=HEADERS + url=f"{self.url}connectors/{connector_name}", headers=HEADERS ) if response.status_code == httpx.codes.NO_CONTENT: log.info(f"Connector {connector_name} deleted.") diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 744662796..fb644dd7a 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -97,14 +97,14 @@ def __dry_run_connector_creation( connector = self._connect_wrapper.get_connector(connector_name) log.info(f"Connector Creation: connector {connector_name} already exists.") - if diff := render_diff(connector.config, connector_config.dict()): + if diff := render_diff(connector.config, connector_config.model_dump()): log.info(f"Updating config:\n{diff}") - log.debug(connector_config.dict()) + log.debug(connector_config.model_dump()) log.debug(f"PUT /connectors/{connector_name}/config HTTP/1.1") log.debug(f"HOST: {self._connect_wrapper.url}") except ConnectorNotFoundException: - diff = render_diff({}, connector_config.dict()) + diff = render_diff({}, connector_config.model_dump()) log.info( f"Connector Creation: connector {connector_name} does not exist. 
Creating connector with config:\n{diff}" ) diff --git a/kpops/component_handlers/kafka_connect/model.py b/kpops/component_handlers/kafka_connect/model.py index e83e33e5d..a7ec45af9 100644 --- a/kpops/component_handlers/kafka_connect/model.py +++ b/kpops/component_handlers/kafka_connect/model.py @@ -1,10 +1,23 @@ from enum import Enum from typing import Any, Literal -from pydantic import BaseConfig, BaseModel, Extra, Field, validator +from pydantic import ( + BaseModel, + ConfigDict, + SerializationInfo, + field_validator, + model_serializer, +) +from pydantic.json_schema import SkipJsonSchema from typing_extensions import override -from kpops.utils.pydantic import CamelCaseConfig, DescConfig, to_dot +from kpops.utils.pydantic import ( + CamelCaseConfigModel, + DescConfigModel, + by_alias, + exclude_by_value, + to_dot, +) class KafkaConnectorType(str, Enum): @@ -12,23 +25,27 @@ class KafkaConnectorType(str, Enum): SOURCE = "source" -class KafkaConnectorConfig(BaseModel): +class KafkaConnectorConfig(DescConfigModel): """Settings specific to Kafka Connectors.""" connector_class: str - name: str = Field(default=..., hidden_from_schema=True) + name: SkipJsonSchema[str] - class Config(DescConfig): - extra = Extra.allow - alias_generator = to_dot - - @override - @classmethod - def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None: - super().schema_extra(schema, model) - schema["additionalProperties"] = {"type": "string"} - - @validator("connector_class") + @override + @staticmethod + def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: + super(KafkaConnectorConfig, KafkaConnectorConfig).json_schema_extra( + schema, model + ) + schema["additional_properties"] = {"type": "string"} + + model_config = ConfigDict( + extra="allow", + alias_generator=to_dot, + json_schema_extra=json_schema_extra, + ) + + @field_validator("connector_class") def connector_class_must_contain_dot(cls, connector_class: str) -> str: if "." not in connector_class: msg = f"Invalid connector class {connector_class}" @@ -39,9 +56,11 @@ def connector_class_must_contain_dot(cls, connector_class: str) -> str: def class_name(self) -> str: return self.connector_class.split(".")[-1] - @override - def dict(self, **_) -> dict[str, Any]: - return super().dict(by_alias=True, exclude_none=True) + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. 
Find cleaner solution + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + result = exclude_by_value(handler(self), None) + return {by_alias(self, name): value for name, value in result.items()} class ConnectorTask(BaseModel): @@ -53,10 +72,9 @@ class KafkaConnectResponse(BaseModel): name: str config: dict[str, str] tasks: list[ConnectorTask] - type: str | None + type: str | None = None - class Config(BaseConfig): - extra = Extra.forbid + model_config = ConfigDict(extra="forbid") class KafkaConnectConfigError(BaseModel): @@ -74,24 +92,21 @@ class KafkaConnectConfigErrorResponse(BaseModel): configs: list[KafkaConnectConfigDescription] -class KafkaConnectResetterConfig(BaseModel): +class KafkaConnectResetterConfig(CamelCaseConfigModel): brokers: str connector: str delete_consumer_group: bool | None = None offset_topic: str | None = None - class Config(CamelCaseConfig): - pass - -class KafkaConnectResetterValues(BaseModel): +class KafkaConnectResetterValues(CamelCaseConfigModel): connector_type: Literal["source", "sink"] config: KafkaConnectResetterConfig name_override: str - class Config(CamelCaseConfig): - pass - + # TODO(Ivan Yordanov): Replace with a function decorated with `@model_serializer` + # BEWARE! All default values are enforced, hard to replicate without + # access to ``model_dump`` @override - def dict(self, **_) -> dict[str, Any]: - return super().dict(by_alias=True, exclude_none=True) + def model_dump(self, **_) -> dict[str, Any]: + return super().model_dump(by_alias=True, exclude_none=True) diff --git a/kpops/component_handlers/schema_handler/schema_handler.py b/kpops/component_handlers/schema_handler/schema_handler.py index e4eba9931..fae2da0e7 100644 --- a/kpops/component_handlers/schema_handler/schema_handler.py +++ b/kpops/component_handlers/schema_handler/schema_handler.py @@ -30,7 +30,7 @@ def __init__( components_module: str | None, ) -> None: self.schema_registry_client = SchemaRegistryClient( - kpops_config.schema_registry.url + str(kpops_config.schema_registry.url) ) self.components_module = components_module diff --git a/kpops/component_handlers/topic/handler.py b/kpops/component_handlers/topic/handler.py index 8f6b198d3..9a08e5512 100644 --- a/kpops/component_handlers/topic/handler.py +++ b/kpops/component_handlers/topic/handler.py @@ -131,7 +131,7 @@ def __dry_run_topic_creation( log.debug(f"POST /clusters/{self.proxy_wrapper.cluster_id}/topics HTTP/1.1") log.debug(f"Host: {self.proxy_wrapper.url}") log.debug(HEADERS) - log.debug(topic_spec.dict()) + log.debug(topic_spec.model_dump()) @staticmethod def __check_partition_count( @@ -203,7 +203,7 @@ def __prepare_body(cls, topic_name: str, topic_config: TopicConfig) -> TopicSpec :param topic_config: The topic config :return: Topic specification """ - topic_spec_json: dict = topic_config.dict( + topic_spec_json: dict = topic_config.model_dump( include={ "partitions_count": True, "replication_factor": True, diff --git a/kpops/component_handlers/topic/model.py b/kpops/component_handlers/topic/model.py index b58445f81..5c0cf024d 100644 --- a/kpops/component_handlers/topic/model.py +++ b/kpops/component_handlers/topic/model.py @@ -1,13 +1,14 @@ from enum import Enum +from typing import Any -from pydantic import BaseConfig, BaseModel, Extra +from pydantic import BaseModel, ConfigDict class TopicSpec(BaseModel): topic_name: str - partitions_count: int | None - replication_factor: int | None - configs: list[dict[str, str]] | None + 
partitions_count: int | None = None + replication_factor: int | None = None + configs: list[dict[str, Any]] | None = None class TopicResponse(BaseModel): @@ -43,8 +44,9 @@ class KafkaTopicConfigSynonyms(BaseModel): value: str source: KafkaTopicConfigSource - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaTopicConfig(BaseModel): @@ -53,15 +55,17 @@ class KafkaTopicConfig(BaseModel): value: str name: str - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class TopicConfigResponse(BaseModel): data: list[KafkaTopicConfig] - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaBrokerConfigSource(str, Enum): @@ -75,8 +79,9 @@ class KafkaBrokerConfigSynonyms(BaseModel): value: str | None source: KafkaBrokerConfigSource - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaBrokerConfig(BaseModel): @@ -85,12 +90,14 @@ class KafkaBrokerConfig(BaseModel): value: str | None name: str - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class BrokerConfigResponse(BaseModel): data: list[KafkaBrokerConfig] - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index a80205506..aa1db6283 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -46,7 +46,7 @@ def cluster_id(self) -> str: :raises KafkaRestProxyError: Kafka REST proxy error :return: The Kafka cluster ID. """ - response = httpx.get(url=f"{self._config.url}/v3/clusters") + response = httpx.get(url=f"{self._config.url!s}v3/clusters") if response.status_code == httpx.codes.OK: cluster_information = response.json() return cluster_information["data"][0]["cluster_id"] @@ -67,9 +67,9 @@ def create_topic(self, topic_spec: TopicSpec) -> None: :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics", headers=HEADERS, - json=topic_spec.dict(exclude_none=True), + json=topic_spec.model_dump(exclude_none=True), ) if response.status_code == httpx.codes.CREATED: log.info(f"Topic {topic_spec.topic_name} created.") @@ -88,7 +88,7 @@ def delete_topic(self, topic_name: str) -> None: :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.delete( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.NO_CONTENT: @@ -109,7 +109,7 @@ def get_topic(self, topic_name: str) -> TopicResponse: :return: Response of the get topic API. """ response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}", headers=HEADERS, ) if response.status_code == httpx.codes.OK: @@ -139,7 +139,7 @@ def get_topic_config(self, topic_name: str) -> TopicConfigResponse: :return: The topic configuration. 
""" response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs", headers=HEADERS, ) @@ -169,7 +169,7 @@ def batch_alter_topic_config(self, topic_name: str, json_body: list[dict]) -> No :raises KafkaRestProxyError: Kafka REST proxy error """ response = httpx.post( - url=f"{self.url}/v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/topics/{topic_name}/configs:alter", headers=HEADERS, json={"data": json_body}, ) @@ -189,7 +189,7 @@ def get_broker_config(self) -> BrokerConfigResponse: :return: The broker configuration. """ response = httpx.get( - url=f"{self.url}/v3/clusters/{self.cluster_id}/brokers/-/configs", + url=f"{self.url!s}v3/clusters/{self.cluster_id}/brokers/-/configs", headers=HEADERS, ) diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index 73bf54c7c..293d17dcc 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -8,7 +8,8 @@ from typing import TypeVar import typer -from pydantic import BaseModel, Field +from pydantic import AliasChoices, ConfigDict, Field +from pydantic.json_schema import SkipJsonSchema from kpops.component_handlers import ComponentHandlers from kpops.config import KpopsConfig @@ -16,7 +17,7 @@ from kpops.utils.dict_ops import update_nested from kpops.utils.docstring import describe_attr from kpops.utils.environment import ENV -from kpops.utils.pydantic import DescConfig, to_dash +from kpops.utils.pydantic import DescConfigModel, to_dash from kpops.utils.yaml_loading import load_yaml_file try: @@ -27,7 +28,7 @@ log = logging.getLogger("BaseDefaultsComponent") -class BaseDefaultsComponent(BaseModel, ABC): +class BaseDefaultsComponent(DescConfigModel, ABC): """Base for all components, handles defaults. 
Component defaults are usually provided in a yaml file called @@ -40,36 +41,33 @@ class BaseDefaultsComponent(BaseModel, ABC): :param validate: Whether to run custom validation on the component, defaults to True """ - enrich: bool = Field( + model_config = ConfigDict( + arbitrary_types_allowed=True, + ignored_types=(cached_property, cached_classproperty), + ) + + enrich: SkipJsonSchema[bool] = Field( default=False, description=describe_attr("enrich", __doc__), exclude=True, - hidden_from_schema=True, ) - config: KpopsConfig = Field( + config: SkipJsonSchema[KpopsConfig] = Field( default=..., description=describe_attr("config", __doc__), exclude=True, - hidden_from_schema=True, ) - handlers: ComponentHandlers = Field( + handlers: SkipJsonSchema[ComponentHandlers] = Field( default=..., description=describe_attr("handlers", __doc__), exclude=True, - hidden_from_schema=True, ) - validate_: bool = Field( - alias="validate", + validate_: SkipJsonSchema[bool] = Field( + validation_alias=AliasChoices("validate", "validate_"), default=True, description=describe_attr("validate", __doc__), exclude=True, - hidden_from_schema=True, ) - class Config(DescConfig): - arbitrary_types_allowed = True - keep_untouched = (cached_property, cached_classproperty) - def __init__(self, **kwargs) -> None: if kwargs.get("enrich", True): kwargs = self.extend_with_defaults(**kwargs) diff --git a/kpops/components/base_components/helm_app.py b/kpops/components/base_components/helm_app.py index f98abd648..5d70bacfd 100644 --- a/kpops/components/base_components/helm_app.py +++ b/kpops/components/base_components/helm_app.py @@ -4,7 +4,7 @@ from functools import cached_property from typing import Any -from pydantic import Field +from pydantic import Field, SerializationInfo, model_serializer from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -19,6 +19,7 @@ from kpops.components.base_components.kubernetes_app import KubernetesApp from kpops.utils.colorify import magentaify from kpops.utils.docstring import describe_attr +from kpops.utils.pydantic import exclude_by_name log = logging.getLogger("HelmApp") @@ -79,7 +80,9 @@ def helm_chart(self) -> str: @property def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands.""" - auth_flags = self.repo_config.repo_auth_flags.dict() if self.repo_config else {} + auth_flags = ( + self.repo_config.repo_auth_flags.model_dump() if self.repo_config else {} + ) return HelmFlags( **auth_flags, version=self.version, @@ -90,7 +93,7 @@ def helm_flags(self) -> HelmFlags: def template_flags(self) -> HelmTemplateFlags: """Return flags for Helm template command.""" return HelmTemplateFlags( - **self.helm_flags.dict(), + **self.helm_flags.model_dump(), api_version=self.config.helm_config.api_version, ) @@ -108,7 +111,7 @@ def template(self) -> None: @property def deploy_flags(self) -> HelmUpgradeInstallFlags: """Return flags for Helm upgrade install command.""" - return HelmUpgradeInstallFlags(**self.helm_flags.dict()) + return HelmUpgradeInstallFlags(**self.helm_flags.model_dump()) @override def deploy(self, dry_run: bool) -> None: @@ -139,7 +142,9 @@ def to_helm_values(self) -> dict: :returns: The values to be used by Helm """ - return self.app.dict(by_alias=True, exclude_none=True, exclude_defaults=True) + return self.app.model_dump( + by_alias=True, exclude_none=True, exclude_defaults=True + ) def print_helm_diff(self, stdout: str) -> None: """Print the diff of the last and current release of this component.
@@ -156,11 +161,8 @@ def print_helm_diff(self, stdout: str) -> None: new_release = Helm.load_manifest(stdout) self.helm_diff.log_helm_diff(log, current_release, new_release) - @override - def dict(self, *, exclude=None, **kwargs) -> dict[str, Any]: - # HACK: workaround for Pydantic to exclude cached properties during model export - if exclude is None: - exclude = set() - exclude.add("helm") - exclude.add("helm_diff") - return super().dict(exclude=exclude, **kwargs) + # HACK: workaround for Pydantic to exclude cached properties during model export + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution + @model_serializer(mode="wrap", when_used="always") + def serialize_model(self, handler, info: SerializationInfo) -> dict[str, Any]: + return exclude_by_name(handler(self), "helm", "helm_diff") diff --git a/kpops/components/base_components/kafka_app.py b/kpops/components/base_components/kafka_app.py index cf8e5f4ef..b62e54bab 100644 --- a/kpops/components/base_components/kafka_app.py +++ b/kpops/components/base_components/kafka_app.py @@ -3,7 +3,7 @@ import logging from abc import ABC -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, Field from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import ( @@ -14,12 +14,12 @@ from kpops.components.base_components.helm_app import HelmApp from kpops.components.base_components.kubernetes_app import KubernetesAppConfig from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel log = logging.getLogger("KafkaApp") -class KafkaStreamsConfig(BaseModel): +class KafkaStreamsConfig(CamelCaseConfigModel, DescConfigModel): """Kafka Streams config. :param brokers: Brokers @@ -31,8 +31,9 @@ class KafkaStreamsConfig(BaseModel): default=None, description=describe_attr("schema_registry_url", __doc__) ) - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KafkaAppConfig(KubernetesAppConfig): diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index 8f0163025..7af2c5ae4 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -3,9 +3,9 @@ import logging from abc import ABC from functools import cached_property -from typing import Any, NoReturn +from typing import NoReturn -from pydantic import Field, validator +from pydantic import Field, PrivateAttr, ValidationInfo, field_validator from typing_extensions import override from kpops.component_handlers.helm_wrapper.dry_run_handler import DryRunHandler @@ -46,7 +46,6 @@ class KafkaConnector(PipelineComponent, ABC): :param version: Helm chart version, defaults to "1.0.4" :param resetter_values: Overriding Kafka Connect Resetter Helm values. E.g. 
to override the Image Tag etc., defaults to dict - :param _connector_type: Defines the type of the connector (Source or Sink) """ namespace: str = Field( @@ -71,24 +70,24 @@ class KafkaConnector(PipelineComponent, ABC): default_factory=dict, description=describe_attr("resetter_values", __doc__), ) + _connector_type: KafkaConnectorType = PrivateAttr() - _connector_type: KafkaConnectorType = Field(default=..., hidden_from_schema=True) - - @validator("app", pre=True) + @field_validator("app", mode="before") + @classmethod def connector_config_should_have_component_name( cls, app: KafkaConnectorConfig | dict[str, str], - values: dict[str, Any], - ) -> dict[str, str]: + info: ValidationInfo, + ) -> KafkaConnectorConfig: if isinstance(app, KafkaConnectorConfig): - app = app.dict() - component_name = values["prefix"] + values["name"] + app = app.model_dump() + component_name: str = info.data["prefix"] + info.data["name"] connector_name: str | None = app.get("name") if connector_name is not None and connector_name != component_name: msg = f"Connector name '{connector_name}' should be the same as component name '{component_name}'" raise ValueError(msg) app["name"] = component_name - return app + return KafkaConnectorConfig(**app) @cached_property def helm(self) -> Helm: @@ -121,7 +120,7 @@ def dry_run_handler(self) -> DryRunHandler: def helm_flags(self) -> HelmFlags: """Return shared flags for Helm commands.""" return HelmFlags( - **self.repo_config.repo_auth_flags.dict(), + **self.repo_config.repo_auth_flags.model_dump(), version=self.version, create_namespace=self.config.create_namespace, ) @@ -130,7 +129,7 @@ def helm_flags(self) -> HelmFlags: def template_flags(self) -> HelmTemplateFlags: """Return flags for Helm template command.""" return HelmTemplateFlags( - **self.helm_flags.dict(), + **self.helm_flags.model_dump(), api_version=self.config.helm_config.api_version, ) @@ -246,7 +245,7 @@ def _get_kafka_connect_resetter_values( ), connector_type=self._connector_type.value, name_override=self.full_name, - ).dict(), + ).model_dump(), **self.resetter_values, } @@ -276,7 +275,7 @@ class KafkaSourceConnector(KafkaConnector): description=describe_attr("offset_topic", __doc__), ) - _connector_type = KafkaConnectorType.SOURCE + _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SOURCE) @override def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn: @@ -321,7 +320,7 @@ def __run_kafka_connect_resetter(self, dry_run: bool) -> None: class KafkaSinkConnector(KafkaConnector): """Kafka sink connector model.""" - _connector_type = KafkaConnectorType.SINK + _connector_type: KafkaConnectorType = PrivateAttr(KafkaConnectorType.SINK) @override def add_input_topics(self, topics: list[str]) -> None: diff --git a/kpops/components/base_components/kubernetes_app.py b/kpops/components/base_components/kubernetes_app.py index 4b4e24c1a..cae474cee 100644 --- a/kpops/components/base_components/kubernetes_app.py +++ b/kpops/components/base_components/kubernetes_app.py @@ -4,12 +4,12 @@ import re from abc import ABC -from pydantic import BaseModel, Extra, Field +from pydantic import ConfigDict, Field from typing_extensions import override from kpops.components.base_components.pipeline_component import PipelineComponent from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import CamelCaseConfigModel, DescConfigModel log = logging.getLogger("KubernetesApp") @@ -18,11 +18,12 @@ ) -class 
KubernetesAppConfig(BaseModel): +class KubernetesAppConfig(CamelCaseConfigModel, DescConfigModel): """Settings specific to Kubernetes apps.""" - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) class KubernetesApp(PipelineComponent, ABC): diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 153133639..5f1dae193 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -1,11 +1,11 @@ from enum import Enum from typing import Any, NewType -from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import ConfigDict, Field, model_validator from kpops.components.base_components.models import TopicName from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class InputTopicTypes(str, Enum): @@ -18,7 +18,7 @@ class InputTopicTypes(str, Enum): PATTERN = "pattern" -class FromTopic(BaseModel): +class FromTopic(DescConfigModel): """Input topic. :param type: Topic type, defaults to None @@ -31,23 +31,24 @@ class FromTopic(BaseModel): ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) - class Config(DescConfig): - extra = Extra.forbid - use_enum_values = True + model_config = ConfigDict( + extra="forbid", + use_enum_values=True, + ) - @root_validator - def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: + @model_validator(mode="after") + def extra_topic_role(self) -> Any: """Ensure that cls.role is used correctly, assign type if needed.""" - if values["type"] == InputTopicTypes.INPUT and values["role"]: + if self.type == InputTopicTypes.INPUT and self.role: msg = "Define role only if `type` is `pattern` or `None`" raise ValueError(msg) - return values + return self ComponentName = NewType("ComponentName", str) -class FromSection(BaseModel): +class FromSection(DescConfigModel): """Holds multiple input topics. :param topics: Input topics @@ -63,5 +64,6 @@ class FromSection(BaseModel): description=describe_attr("components", __doc__), ) - class Config(DescConfig): - extra = Extra.forbid + model_config = ConfigDict( + extra="forbid", + ) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 03f1d7141..56da461c8 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -1,11 +1,11 @@ from enum import Enum from typing import Any -from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import ConfigDict, Field, model_validator from kpops.components.base_components.models import ModelName, ModelVersion, TopicName from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig +from kpops.utils.pydantic import DescConfigModel class OutputTopicTypes(str, Enum): @@ -18,7 +18,7 @@ class OutputTopicTypes(str, Enum): ERROR = "error" -class TopicConfig(BaseModel): +class TopicConfig(DescConfigModel): """Configure an output topic. 
:param type: Topic type @@ -58,21 +58,22 @@ class TopicConfig(BaseModel): ) role: str | None = Field(default=None, description=describe_attr("role", __doc__)) - class Config(DescConfig): - extra = Extra.forbid - allow_population_by_field_name = True - use_enum_values = True + model_config = ConfigDict( + extra="forbid", + use_enum_values=True, + populate_by_name=True, + ) - @root_validator - def extra_topic_role(cls, values: dict[str, Any]) -> dict[str, Any]: + @model_validator(mode="after") + def extra_topic_role(self) -> Any: """Ensure that cls.role is used correctly, assign type if needed.""" - if values["type"] and values["role"]: + if self.type and self.role: msg = "Define `role` only if `type` is undefined" raise ValueError(msg) - return values + return self -class ToSection(BaseModel): +class ToSection(DescConfigModel): """Holds multiple output topics. :param topics: Output topics @@ -86,5 +87,6 @@ class ToSection(BaseModel): default={}, description=describe_attr("models", __doc__) ) - class Config(DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="forbid", + ) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index d05d4d4c1..7be212300 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -2,7 +2,7 @@ from abc import ABC -from pydantic import Extra, Field +from pydantic import AliasChoices, ConfigDict, Field from kpops.components.base_components.base_defaults_component import ( BaseDefaultsComponent, @@ -18,7 +18,6 @@ ToSection, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import DescConfig class PipelineComponent(BaseDefaultsComponent, ABC): @@ -41,7 +40,8 @@ class PipelineComponent(BaseDefaultsComponent, ABC): ) from_: FromSection | None = Field( default=None, - alias="from", + serialization_alias="from", + validation_alias=AliasChoices("from", "from_"), title="From", description=describe_attr("from_", __doc__), ) @@ -50,8 +50,9 @@ class PipelineComponent(BaseDefaultsComponent, ABC): description=describe_attr("to", __doc__), ) - class Config(DescConfig): - extra = Extra.allow + model_config = ConfigDict( + extra="allow", + ) def __init__(self, **kwargs) -> None: super().__init__(**kwargs) diff --git a/kpops/components/streams_bootstrap/producer/model.py b/kpops/components/streams_bootstrap/producer/model.py index 8af1a68c6..01bda1dbc 100644 --- a/kpops/components/streams_bootstrap/producer/model.py +++ b/kpops/components/streams_bootstrap/producer/model.py @@ -1,4 +1,4 @@ -from pydantic import BaseConfig, Extra, Field +from pydantic import ConfigDict, Field from kpops.components.base_components.kafka_app import ( KafkaAppConfig, @@ -32,5 +32,4 @@ class ProducerValues(KafkaAppConfig): default=..., description=describe_attr("streams", __doc__) ) - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict(extra="allow") diff --git a/kpops/components/streams_bootstrap/streams/model.py b/kpops/components/streams_bootstrap/streams/model.py index ca2db77ae..2c8b952ce 100644 --- a/kpops/components/streams_bootstrap/streams/model.py +++ b/kpops/components/streams_bootstrap/streams/model.py @@ -1,8 +1,7 @@ -from collections.abc import Mapping, Set +from collections.abc import Callable from typing import Any -from pydantic import BaseConfig, BaseModel, Extra, Field -from typing_extensions import override +from pydantic import ConfigDict, Field, SerializationInfo, 
model_serializer from kpops.components.base_components.base_defaults_component import deduplicate from kpops.components.base_components.kafka_app import ( @@ -10,7 +9,12 @@ KafkaStreamsConfig, ) from kpops.utils.docstring import describe_attr -from kpops.utils.pydantic import CamelCaseConfig, DescConfig +from kpops.utils.pydantic import ( + CamelCaseConfigModel, + DescConfigModel, + exclude_by_value, + exclude_defaults, +) class StreamsConfig(KafkaStreamsConfig): @@ -47,7 +51,7 @@ class StreamsConfig(KafkaStreamsConfig): error_topic: str | None = Field( default=None, description=describe_attr("error_topic", __doc__) ) - config: dict[str, str] = Field( + config: dict[str, Any] = Field( default={}, description=describe_attr("config", __doc__) ) @@ -72,40 +76,15 @@ def add_extra_input_topics(self, role: str, topics: list[str]) -> None: self.extra_input_topics.get(role, []) + topics ) - @override - def dict( - self, - *, - include: None | Set[int | str] | Mapping[int | str, Any] = None, - exclude: None | Set[int | str] | Mapping[int | str, Any] = None, - by_alias: bool = False, - skip_defaults: bool | None = None, - exclude_unset: bool = False, - **kwargs, - ) -> dict: - """Generate a dictionary representation of the model. - - Optionally, specify which fields to include or exclude. - - :param include: Fields to include - :param include: Fields to exclude - :param by_alias: Use the fields' aliases in the dictionary - :param skip_defaults: Whether to skip defaults - :param exclude_unset: Whether to exclude unset fields - """ - return super().dict( - include=include, - exclude=exclude, - by_alias=by_alias, - skip_defaults=skip_defaults, - exclude_unset=exclude_unset, - # The following lines are required only for the streams configs since we never not want to export defaults here, just fallback to helm default values - exclude_defaults=True, - exclude_none=True, - ) + # TODO(Ivan Yordanov): Currently hacky and potentially unsafe. Find cleaner solution + @model_serializer(mode="wrap", when_used="always") + def serialize_model( + self, handler: Callable, info: SerializationInfo + ) -> dict[str, Any]: + return exclude_defaults(self, exclude_by_value(handler(self), None)) -class StreamsAppAutoScaling(BaseModel): +class StreamsAppAutoScaling(CamelCaseConfigModel, DescConfigModel): """Kubernetes Event-driven Autoscaling config. 
:param enabled: Whether to enable auto-scaling using KEDA., defaults to False @@ -184,9 +163,7 @@ class StreamsAppAutoScaling(BaseModel): default=[], description=describe_attr("topics", __doc__), ) - - class Config(CamelCaseConfig, DescConfig): - extra = Extra.allow + model_config = ConfigDict(extra="allow") class StreamsAppConfig(KafkaAppConfig): @@ -206,6 +183,4 @@ class StreamsAppConfig(KafkaAppConfig): default=None, description=describe_attr("autoscaling", __doc__), ) - - class Config(BaseConfig): - extra = Extra.allow + model_config = ConfigDict(extra="allow") diff --git a/kpops/config.py b/kpops/config.py index 718568fb6..850418d21 100644 --- a/kpops/config.py +++ b/kpops/config.py @@ -1,15 +1,18 @@ from __future__ import annotations -from collections.abc import Callable from pathlib import Path -from typing import Any -from pydantic import AnyHttpUrl, BaseConfig, BaseSettings, Field, parse_obj_as -from pydantic.env_settings import SettingsSourceCallable +from pydantic import AnyHttpUrl, Field, TypeAdapter +from pydantic_settings import ( + BaseSettings, + PydanticBaseSettingsSource, + SettingsConfigDict, +) +from typing_extensions import override from kpops.component_handlers.helm_wrapper.model import HelmConfig, HelmDiffConfig from kpops.utils.docstring import describe_object -from kpops.utils.yaml_loading import load_yaml_file +from kpops.utils.pydantic import YamlConfigSettingsSource ENV_PREFIX = "KPOPS_" @@ -35,10 +38,7 @@ class SchemaRegistryConfig(BaseSettings): description="Whether the Schema Registry handler should be initialized.", ) url: AnyHttpUrl = Field( - # For validating URLs use parse_obj_as - # https://github.com/pydantic/pydantic/issues/1106 - default=parse_obj_as(AnyHttpUrl, "http://localhost:8081"), - env=f"{ENV_PREFIX}SCHEMA_REGISTRY_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8081"), description="Address of the Schema Registry.", ) @@ -47,8 +47,7 @@ class KafkaRestConfig(BaseSettings): """Configuration for Kafka REST Proxy.""" url: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://localhost:8082"), - env=f"{ENV_PREFIX}KAFKA_REST_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8082"), description="Address of the Kafka REST Proxy.", ) @@ -57,8 +56,7 @@ class KafkaConnectConfig(BaseSettings): """Configuration for Kafka Connect.""" url: AnyHttpUrl = Field( - default=parse_obj_as(AnyHttpUrl, "http://localhost:8083"), - env=f"{ENV_PREFIX}KAFKA_CONNECT_URL", + default=TypeAdapter(AnyHttpUrl).validate_python("http://localhost:8083"), description="Address of Kafka Connect.", ) @@ -68,22 +66,25 @@ class KpopsConfig(BaseSettings): defaults_path: Path = Field( default=Path(), - example="defaults", + examples=["defaults", "."], description="The path to the folder containing the defaults.yaml file and the environment defaults files. " "Paths can either be absolute or relative to `config.yaml`", ) environment: str = Field( default=..., - env=f"{ENV_PREFIX}ENVIRONMENT", - example="development", + examples=[ + "development", + "production", + ], description="The environment you want to generate and deploy the pipeline to. " "Suffix your environment files with this value (e.g. 
defaults_development.yaml for environment=development).", ) kafka_brokers: str = Field( default=..., - env=f"{ENV_PREFIX}KAFKA_BROKERS", + examples=[ + "broker1:9092,broker2:9092,broker3:9092", + ], description="The comma separated Kafka brokers address.", - example="broker1:9092,broker2:9092,broker3:9092", ) defaults_filename_prefix: str = Field( default="defaults", @@ -107,7 +108,6 @@ class KpopsConfig(BaseSettings): ) timeout: int = Field( default=300, - env=f"{ENV_PREFIX}TIMEOUT", description="The timeout in seconds that specifies when actions like deletion or deploy timeout.", ) create_namespace: bool = Field( @@ -124,38 +124,25 @@ class KpopsConfig(BaseSettings): ) retain_clean_jobs: bool = Field( default=False, - env=f"{ENV_PREFIX}RETAIN_CLEAN_JOBS", description="Whether to retain clean up jobs in the cluster or uninstall the, after completion.", ) - class Config(BaseConfig): - config_path = Path("config.yaml") - env_file = ".env" - env_file_encoding = "utf-8" - env_prefix = ENV_PREFIX - - @classmethod - def customise_sources( - cls, - init_settings: SettingsSourceCallable, - env_settings: SettingsSourceCallable, - file_secret_settings: SettingsSourceCallable, - ) -> tuple[ - SettingsSourceCallable | Callable[[KpopsConfig], dict[str, Any]], ... - ]: - return ( - env_settings, - init_settings, - yaml_config_settings_source, - file_secret_settings, - ) - - -def yaml_config_settings_source(settings: KpopsConfig) -> dict[str, Any]: - path_to_config = settings.Config.config_path - if path_to_config.exists(): - if isinstance(source := load_yaml_file(path_to_config), dict): - return source - err_msg = f"{path_to_config} must be a mapping." - raise TypeError(err_msg) - return {} + model_config = SettingsConfigDict(env_prefix=ENV_PREFIX, env_nested_delimiter="__") + + @override + @classmethod + def settings_customise_sources( + cls, + settings_cls: type[BaseSettings], + init_settings: PydanticBaseSettingsSource, + env_settings: PydanticBaseSettingsSource, + dotenv_settings: PydanticBaseSettingsSource, + file_secret_settings: PydanticBaseSettingsSource, + ): + return ( + env_settings, + init_settings, + YamlConfigSettingsSource(settings_cls), + dotenv_settings, + file_secret_settings, + ) diff --git a/kpops/pipeline_generator/pipeline.py b/kpops/pipeline_generator/pipeline.py index cdfe14fa4..f4676105c 100644 --- a/kpops/pipeline_generator/pipeline.py +++ b/kpops/pipeline_generator/pipeline.py @@ -7,7 +7,7 @@ from typing import TYPE_CHECKING import yaml -from pydantic import BaseModel +from pydantic import BaseModel, SerializeAsAny from rich.console import Console from rich.syntax import Syntax @@ -38,7 +38,7 @@ class ValidationError(Exception): class PipelineComponents(BaseModel): """Stores the pipeline components.""" - components: list[PipelineComponent] = [] + components: list[SerializeAsAny[PipelineComponent]] = [] @property def last(self) -> PipelineComponent: @@ -192,7 +192,6 @@ def apply_component( **component_data, ) component = self.enrich_component(component) - # inflate & enrich components for inflated_component in component.inflate(): # TODO: recursively enriched_component = self.enrich_component(inflated_component) @@ -230,8 +229,7 @@ def enrich_component( component.validate_ = True env_component_as_dict = update_nested_pair( self.env_components_index.get(component.name, {}), - # HACK: Pydantic .dict() doesn't create jsonable dict - json.loads(component.json(by_alias=True)), + component.model_dump(mode="json", by_alias=True), ) # HACK: make sure component type is set for inflated 
components, because property is not serialized by Pydantic env_component_as_dict["type"] = component.type @@ -266,9 +264,7 @@ def __iter__(self) -> Iterator[PipelineComponent]: def __str__(self) -> str: return yaml.dump( - json.loads( # HACK: serialize types on Pydantic model export, which are not serialized by .dict(); e.g. pathlib.Path - self.components.json(exclude_none=True, by_alias=True) - ) + self.components.model_dump(mode="json", by_alias=True, exclude_none=True) ) def __len__(self) -> int: @@ -283,7 +279,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: config = self.config # Leftover variables that were previously introduced in the component by the substitution # functions, still hardcoded, because of their names. - # TODO: Get rid of them + # TODO(Ivan Yordanov): Get rid of them substitution_hardcoded = { "error_topic_name": config.topic_name_config.default_error_topic_name, "output_topic_name": config.topic_name_config.default_output_topic_name, @@ -294,7 +290,7 @@ def substitute_in_component(self, component_as_dict: dict) -> dict: substitution_hardcoded, ) substitution = generate_substitution( - json.loads(config.json()), existing_substitution=component_substitution + config.model_dump(mode="json"), existing_substitution=component_substitution ) return json.loads( diff --git a/kpops/utils/dict_ops.py b/kpops/utils/dict_ops.py index 14cc849e3..c53cc383d 100644 --- a/kpops/utils/dict_ops.py +++ b/kpops/utils/dict_ops.py @@ -1,5 +1,5 @@ from collections.abc import Mapping -from typing import Any +from typing import Any, TypeVar def update_nested_pair(original_dict: dict, other_dict: Mapping) -> dict: @@ -66,18 +66,22 @@ def flatten_mapping( if prefix: key = prefix + separator + key if isinstance(value, Mapping): - nested_mapping = flatten_mapping(value, key) + nested_mapping = flatten_mapping(value, key, separator) top = update_nested_pair(top, nested_mapping) else: top[key] = value return top +_V = TypeVar("_V") + + def generate_substitution( - input: dict, + input: dict[str, _V], prefix: str | None = None, existing_substitution: dict | None = None, -) -> dict: + separator: str | None = None, +) -> dict[str, _V]: """Generate a complete substitution dict from a given dict. Finds all attributes that belong to a model and expands them to create @@ -88,4 +92,10 @@ def generate_substitution( :param substitution: existing substitution to include :returns: Substitution dict of all variables related to the model. 
""" - return update_nested(existing_substitution or {}, flatten_mapping(input, prefix)) + if separator is None: + return update_nested( + existing_substitution or {}, flatten_mapping(input, prefix) + ) + return update_nested( + existing_substitution or {}, flatten_mapping(input, prefix, separator) + ) diff --git a/kpops/utils/gen_schema.py b/kpops/utils/gen_schema.py index ac64da49a..18ac3c5a4 100644 --- a/kpops/utils/gen_schema.py +++ b/kpops/utils/gen_schema.py @@ -1,18 +1,24 @@ import inspect +import json import logging from abc import ABC from collections.abc import Sequence from enum import Enum -from typing import Annotated, Any, Literal, Union - -from pydantic import BaseConfig, Field, schema, schema_json_of -from pydantic.fields import FieldInfo, ModelField -from pydantic.schema import SkipField +from typing import Annotated, Literal, Union + +from pydantic import Field, RootModel +from pydantic.fields import FieldInfo +from pydantic.json_schema import GenerateJsonSchema, SkipJsonSchema, model_json_schema +from pydantic_core.core_schema import ( + DefinitionsSchema, + LiteralSchema, + ModelField, + ModelFieldsSchema, +) from kpops.cli.registry import _find_classes -from kpops.components.base_components.pipeline_component import PipelineComponent +from kpops.components import PipelineComponent from kpops.config import KpopsConfig -from kpops.utils.docstring import describe_object class SchemaScope(str, Enum): @@ -20,20 +26,10 @@ class SchemaScope(str, Enum): CONFIG = "config" -original_field_schema = schema.field_schema - - -# adapted from https://github.com/tiangolo/fastapi/issues/1378#issuecomment-764966955 -def field_schema(field: ModelField, **kwargs: Any) -> Any: - if field.field_info.extra.get("hidden_from_schema"): - msg = f"{field.name} field is being hidden" - raise SkipField(msg) - else: - return original_field_schema(field, **kwargs) +class MultiComponentGenerateJsonSchema(GenerateJsonSchema): + ... -schema.field_schema = field_schema - log = logging.getLogger("") @@ -57,8 +53,9 @@ def _is_valid_component( def _add_components( - components_module: str, components: tuple[type[PipelineComponent]] | None = None -) -> tuple[type[PipelineComponent]]: + components_module: str, + components: tuple[type[PipelineComponent], ...] | None = None, +) -> tuple[type[PipelineComponent], ...]: """Add components to a components tuple. If an empty tuple is provided or it is not provided at all, the components @@ -96,7 +93,7 @@ def gen_pipeline_schema( log.warning("No components are provided, no schema is generated.") return # Add stock components if enabled - components: tuple[type[PipelineComponent]] = tuple() # noqa: C408 + components: tuple[type[PipelineComponent], ...] = () if include_stock_components: components = _add_components("kpops.components") # Add custom components if provided @@ -105,42 +102,45 @@ def gen_pipeline_schema( if not components: msg = "No valid components found." 
raise RuntimeError(msg) - # Create a type union that will hold the union of all component types - PipelineComponents = Union[components] # type: ignore[valid-type] # re-assign component type as Literal to work as discriminator for component in components: - component.__fields__["type"] = ModelField( - name="type", - type_=Literal[component.type], # type: ignore[reportGeneralTypeIssues] - required=False, + component.model_fields["type"] = FieldInfo( + annotation=Literal[component.type], # type:ignore[valid-type] default=component.type, - final=True, - field_info=FieldInfo( - title="Component type", - description=describe_object(component.__doc__), + exclude=True, + ) + core_schema: DefinitionsSchema = component.__pydantic_core_schema__ # pyright:ignore[reportGeneralTypeIssues] + model_schema: ModelFieldsSchema = core_schema["schema"]["schema"] # pyright:ignore[reportGeneralTypeIssues,reportTypedDictNotRequiredAccess] + model_schema["fields"]["type"] = ModelField( + type="model-field", + schema=LiteralSchema( + type="literal", + expected=[component.type], + metadata={ + "pydantic.internal.needs_apply_discriminated_union": False, + "pydantic_js_annotation_functions": [ + SkipJsonSchema().__get_pydantic_json_schema__ # pyright:ignore[reportGeneralTypeIssues] + ], + }, ), - model_config=BaseConfig, - class_validators=None, ) + PipelineComponents = Union[components] # type: ignore[valid-type] AnnotatedPipelineComponents = Annotated[ PipelineComponents, Field(discriminator="type") ] - schema = schema_json_of( - Sequence[AnnotatedPipelineComponents], - title="KPOps pipeline schema", - by_alias=True, - indent=4, - sort_keys=True, - ) - print(schema) + class PipelineSchema(RootModel): + root: Sequence[ + AnnotatedPipelineComponents # pyright:ignore[reportGeneralTypeIssues] + ] + + schema = PipelineSchema.model_json_schema(by_alias=True) + print(json.dumps(schema, indent=4, sort_keys=True)) def gen_config_schema() -> None: """Generate a json schema from the model of pipeline config.""" - schema = schema_json_of( - KpopsConfig, title="KPOps config schema", indent=4, sort_keys=True - ) - print(schema) + schema = model_json_schema(KpopsConfig) + print(json.dumps(schema, indent=4, sort_keys=True)) diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index 2eb0fa641..3ac64d82d 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -1,9 +1,15 @@ +from pathlib import Path from typing import Any import humps -from pydantic import BaseConfig, BaseModel +from pydantic import BaseModel, ConfigDict, Field +from pydantic.alias_generators import to_snake +from pydantic.fields import FieldInfo +from pydantic_settings import PydanticBaseSettingsSource +from typing_extensions import TypeVar, override from kpops.utils.docstring import describe_object +from kpops.utils.yaml_loading import load_yaml_file def to_camel(s: str) -> str: @@ -21,12 +27,127 @@ def to_dot(s: str) -> str: return s.replace("_", ".") -class CamelCaseConfig(BaseConfig): - alias_generator = to_camel - allow_population_by_field_name = True +def by_alias(model: BaseModel, field_name: str) -> str: + """Return field alias if exists else field name. 
+ :param field_name: Name of the field to get alias of + :param model: Model that owns the field + """ + return model.model_fields.get(field_name, Field()).alias or field_name -class DescConfig(BaseConfig): - @classmethod - def schema_extra(cls, schema: dict[str, Any], model: type[BaseModel]) -> None: + +_V = TypeVar("_V") + + +def exclude_by_value( + dumped_model: dict[str, _V], *excluded_values: Any +) -> dict[str, _V]: + """Strip all key-value pairs with certain values. + + :param dumped_model: Dumped model + :param excluded_values: Excluded field values + :return: Dumped model without excluded fields + """ + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_value not in excluded_values + } + + +def exclude_by_name( + dumped_model: dict[str, _V], *excluded_fields: str +) -> dict[str, _V]: + """Strip all key-value pairs with certain field names. + + :param dumped_model: Dumped model + :param excluded_fields: Excluded field names + :return: Dumped model without excluded fields + """ + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_name not in excluded_fields + } + + +def exclude_defaults(model: BaseModel, dumped_model: dict[str, _V]) -> dict[str, _V]: + """Strip all key-value pairs with default values. + + :param model: Model + :param dumped_model: Dumped model + :return: Dumped model without defaults + """ + default_fields = { + field_name: field_info.default + for field_name, field_info in model.model_fields.items() + } + return { + field_name: field_value + for field_name, field_value in dumped_model.items() + if field_value + not in ( + default_fields.get(field_name), + default_fields.get(to_snake(field_name)), + ) + } + + +class CamelCaseConfigModel(BaseModel): + model_config = ConfigDict( + alias_generator=to_camel, + populate_by_name=True, + ) + + +class DescConfigModel(BaseModel): + @staticmethod + def json_schema_extra(schema: dict[str, Any], model: type[BaseModel]) -> None: schema["description"] = describe_object(model.__doc__) + + model_config = ConfigDict(json_schema_extra=json_schema_extra) + + +class YamlConfigSettingsSource(PydanticBaseSettingsSource): + """Loads variables from a YAML file at the project's root.""" + + path_to_config = Path("config.yaml") + + @override + def get_field_value( + self, + field: FieldInfo, + field_name: str, + ) -> tuple[Any, str, bool]: + if self.path_to_config.exists() and isinstance( + (file_content_yaml := load_yaml_file(self.path_to_config)), dict + ): + field_value = file_content_yaml.get(field_name) + return field_value, field_name, False + return None, field_name, False + + @override + def prepare_field_value( + self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool + ) -> Any: + return value + + @override + def __call__(self) -> dict[str, Any]: + d: dict[str, Any] = {} + + for field_name, field in self.settings_cls.model_fields.items(): + field_value, field_key, value_is_complex = self.get_field_value( + field, + field_name, + ) + field_value = self.prepare_field_value( + field_name, + field, + field_value, + value_is_complex, + ) + if field_value is not None: + d[field_key] = field_value + + return d diff --git a/poetry.lock b/poetry.lock index 05591986a..b9593a550 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -11,6 +11,17 @@ files = [ {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, ] +[[package]] +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.7" +files = [ + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, +] + [[package]] name = "anyio" version = "3.6.2" @@ -816,56 +827,154 @@ virtualenv = ">=20.0.8" [[package]] name = "pydantic" -version = "1.10.8" -description = "Data validation and settings management using python type hints" +version = "2.5.2" +description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = 
"pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [package.dependencies] -python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.14.5" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = 
["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.5" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = 
"pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + 
{file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.0.3" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_settings-2.0.3-py3-none-any.whl", hash = "sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625"}, + {file = "pydantic_settings-2.0.3.tar.gz", hash = 
"sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945"}, +] + +[package.dependencies] +pydantic = ">=2.0.1" +python-dotenv = ">=0.21.0" [[package]] name = "pygments" @@ -1377,28 +1486,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruff" -version = "0.1.3" -description = "An extremely fast Python linter, written in Rust." +version = "0.1.7" +description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.3-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b46d43d51f7061652eeadb426a9e3caa1e0002470229ab2fc19de8a7b0766901"}, - {file = "ruff-0.1.3-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b8afeb9abd26b4029c72adc9921b8363374f4e7edb78385ffaa80278313a15f9"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca3cf365bf32e9ba7e6db3f48a4d3e2c446cd19ebee04f05338bc3910114528b"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4874c165f96c14a00590dcc727a04dca0cfd110334c24b039458c06cf78a672e"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eec2dd31eed114e48ea42dbffc443e9b7221976554a504767ceaee3dd38edeb8"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dc3ec4edb3b73f21b4aa51337e16674c752f1d76a4a543af56d7d04e97769613"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e3de9ed2e39160800281848ff4670e1698037ca039bda7b9274f849258d26ce"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c595193881922cc0556a90f3af99b1c5681f0c552e7a2a189956141d8666fe8"}, - {file = "ruff-0.1.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f75e670d529aa2288cd00fc0e9b9287603d95e1536d7a7e0cafe00f75e0dd9d"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76dd49f6cd945d82d9d4a9a6622c54a994689d8d7b22fa1322983389b4892e20"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:918b454bc4f8874a616f0d725590277c42949431ceb303950e87fef7a7d94cb3"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d8859605e729cd5e53aa38275568dbbdb4fe882d2ea2714c5453b678dca83784"}, - {file = "ruff-0.1.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0b6c55f5ef8d9dd05b230bb6ab80bc4381ecb60ae56db0330f660ea240cb0d4a"}, - {file = "ruff-0.1.3-py3-none-win32.whl", hash = "sha256:3e7afcbdcfbe3399c34e0f6370c30f6e529193c731b885316c5a09c9e4317eef"}, - {file = "ruff-0.1.3-py3-none-win_amd64.whl", hash = "sha256:7a18df6638cec4a5bd75350639b2bb2a2366e01222825562c7346674bdceb7ea"}, - {file = "ruff-0.1.3-py3-none-win_arm64.whl", hash = "sha256:12fd53696c83a194a2db7f9a46337ce06445fb9aa7d25ea6f293cf75b21aca9f"}, - {file = "ruff-0.1.3.tar.gz", hash = "sha256:3ba6145369a151401d5db79f0a47d50e470384d0d89d0d6f7fab0b589ad07c34"}, + {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7f80496854fdc65b6659c271d2c26e90d4d401e6a4a31908e7e334fab4645aac"}, + {file = "ruff-0.1.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1ea109bdb23c2a4413f397ebd8ac32cb498bee234d4191ae1a310af760e5d287"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0c2de9dd9daf5e07624c24add25c3a490dbf74b0e9bca4145c632457b3b42a"}, + {file = 
"ruff-0.1.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:69a4bed13bc1d5dabf3902522b5a2aadfebe28226c6269694283c3b0cecb45fd"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de02ca331f2143195a712983a57137c5ec0f10acc4aa81f7c1f86519e52b92a1"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45b38c3f8788a65e6a2cab02e0f7adfa88872696839d9882c13b7e2f35d64c5f"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c64cb67b2025b1ac6d58e5ffca8f7b3f7fd921f35e78198411237e4f0db8e73"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dcc6bb2f4df59cb5b4b40ff14be7d57012179d69c6565c1da0d1f013d29951b"}, + {file = "ruff-0.1.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2bb4bb6bbe921f6b4f5b6fdd8d8468c940731cb9406f274ae8c5ed7a78c478"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:276a89bcb149b3d8c1b11d91aa81898fe698900ed553a08129b38d9d6570e717"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:90c958fe950735041f1c80d21b42184f1072cc3975d05e736e8d66fc377119ea"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b05e3b123f93bb4146a761b7a7d57af8cb7384ccb2502d29d736eaade0db519"}, + {file = "ruff-0.1.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:290ecab680dce94affebefe0bbca2322a6277e83d4f29234627e0f8f6b4fa9ce"}, + {file = "ruff-0.1.7-py3-none-win32.whl", hash = "sha256:416dfd0bd45d1a2baa3b1b07b1b9758e7d993c256d3e51dc6e03a5e7901c7d80"}, + {file = "ruff-0.1.7-py3-none-win_amd64.whl", hash = "sha256:4af95fd1d3b001fc41325064336db36e3d27d2004cdb6d21fd617d45a172dd96"}, + {file = "ruff-0.1.7-py3-none-win_arm64.whl", hash = "sha256:0683b7bfbb95e6df3c7c04fe9d78f631f8e8ba4868dfc932d43d690698057e2e"}, + {file = "ruff-0.1.7.tar.gz", hash = "sha256:dffd699d07abf54833e5f6cc50b85a6ff043715da8788c4a79bcd4ab4734d306"}, ] [[package]] @@ -1613,13 +1722,13 @@ typer = ">=0.4.0,<=0.7.0" [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] @@ -1761,4 +1870,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "cfa6447fe58b9cffa8247c4d2e09fa988bbb7484a2a728e432a63c05872e3bd8" +content-hash = "2dac8180567353aea454a8d6f9dc5f6fcddce9d6c6ec9026c23fe31627385635" diff --git a/pyproject.toml b/pyproject.toml index 8e749efe0..d4096b732 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,8 @@ kpops = "kpops.cli.main:app" [tool.poetry.dependencies] python = "^3.10" -pydantic = { extras = ["dotenv"], version = "^1.10.8" } +pydantic = { extras = ["dotenv"], version = "^2.5.2" } +pydantic-settings = "^2.0.3" rich = "^12.4.4" PyYAML = "^6.0" typer = { extras = ["all"], version = "^0.6.1" } @@ 
-45,7 +46,7 @@ pytest-mock = "^3.10.0" pytest-timeout = "^2.1.0" snapshottest = "^0.6.0" pre-commit = "^2.19.0" -ruff = "^0.1.3" +ruff = "^0.1.7" typer-cli = "^0.0.13" pyright = "^1.1.314" pytest-rerunfailures = "^11.1.2" diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index 2dd92b512..f8f75d870 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -8,16 +8,21 @@ snapshots = Snapshot() snapshots['TestGenSchema.test_gen_pipeline_schema_only_custom_module test-schema-generation'] = '''{ - "definitions": { + "$defs": { "EmptyPipelineComponent": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -33,21 +38,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "empty-pipeline-component", - "enum": [ - "empty-pipeline-component" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -62,7 +62,7 @@ "properties": { "components": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Components to read from", @@ -71,7 +71,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/FromTopic" + "$ref": "#/$defs/FromTopic" }, "default": {}, "description": "Input topics", @@ -87,16 +87,28 @@ "description": "Input topic.", "properties": { "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to a topic; define only if `type` is `pattern` or `None`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/InputTopicTypes" + }, { - "$ref": "#/definitions/InputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type" } }, @@ -122,14 +134,19 @@ "type": "string" }, "SubPipelineComponent": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -145,21 +162,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component", - "enum": [ - "sub-pipeline-component" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -169,14 +181,19 @@ "type": "object" }, "SubPipelineComponentCorrect": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ { - "$ref": 
"#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -192,21 +209,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-correct", - "enum": [ - "sub-pipeline-component-correct" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -216,6 +228,7 @@ "type": "object" }, "SubPipelineComponentCorrectDocstr": { + "additionalProperties": true, "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. The description extraction terminates at the correct place, deletes 1 trailing coma", "properties": { "example_attr": { @@ -224,11 +237,15 @@ "type": "string" }, "from": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/FromSection" + "$ref": "#/$defs/FromSection" + }, + { + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -244,22 +261,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/ToSection" + }, { - "$ref": "#/definitions/ToSection" + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-correct-docstr", - "description": "Newline before title is removed.\\nSummarry is correctly imported. All whitespaces are removed and replaced with a single space. 
The description extraction terminates at the correct place, deletes 1 trailing coma", - "enum": [ - "sub-pipeline-component-correct-docstr" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -270,14 +281,19 @@ "type": "object" }, "SubPipelineComponentNoSchemaTypeNoType": { + "additionalProperties": true, "description": "", "properties": { "from": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/FromSection" + }, { - "$ref": "#/definitions/FromSection" + "type": "null" } ], + "default": null, "description": "Topic(s) and/or components from which the component will read input", "title": "From" }, @@ -293,21 +309,16 @@ "type": "string" }, "to": { - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/ToSection" + "$ref": "#/$defs/ToSection" + }, + { + "type": "null" } ], - "description": "Topic(s) into which the component will write output", - "title": "To" - }, - "type": { - "default": "sub-pipeline-component-no-schema-type-no-type", - "enum": [ - "sub-pipeline-component-no-schema-type-no-type" - ], - "title": "Component type", - "type": "string" + "default": null, + "description": "Topic(s) into which the component will write output" } }, "required": [ @@ -317,6 +328,7 @@ "type": "object" }, "ToSection": { + "additionalProperties": false, "description": "Holds multiple output topics.", "properties": { "models": { @@ -330,7 +342,7 @@ }, "topics": { "additionalProperties": { - "$ref": "#/definitions/TopicConfig" + "$ref": "#/$defs/TopicConfig" }, "default": {}, "description": "Output topics", @@ -362,38 +374,82 @@ "type": "object" }, "key_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Key schema class name", - "title": "Key schema", - "type": "string" + "title": "Key schema" }, "partitions_count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Number of partitions into which the topic is divided", - "title": "Partitions count", - "type": "integer" + "title": "Partitions count" }, "replication_factor": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null, "description": "Replication factor of the topic", - "title": "Replication factor", - "type": "integer" + "title": "Replication factor" }, "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Custom identifier belonging to one or multiple topics, provide only if `type` is `extra`", - "title": "Role", - "type": "string" + "title": "Role" }, "type": { - "allOf": [ + "anyOf": [ + { + "$ref": "#/$defs/OutputTopicTypes" + }, { - "$ref": "#/definitions/OutputTopicTypes" + "type": "null" } ], + "default": null, "description": "Topic type", "title": "Topic type" }, "value_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null, "description": "Value schema class name", - "title": "Value schema", - "type": "string" + "title": "Value schema" } }, "title": "TopicConfig", @@ -403,33 +459,33 @@ "items": { "discriminator": { "mapping": { - "empty-pipeline-component": "#/definitions/EmptyPipelineComponent", - "sub-pipeline-component": "#/definitions/SubPipelineComponent", - "sub-pipeline-component-correct": "#/definitions/SubPipelineComponentCorrect", - "sub-pipeline-component-correct-docstr": "#/definitions/SubPipelineComponentCorrectDocstr", - 
"sub-pipeline-component-no-schema-type-no-type": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + "empty-pipeline-component": "#/$defs/EmptyPipelineComponent", + "sub-pipeline-component": "#/$defs/SubPipelineComponent", + "sub-pipeline-component-correct": "#/$defs/SubPipelineComponentCorrect", + "sub-pipeline-component-correct-docstr": "#/$defs/SubPipelineComponentCorrectDocstr", + "sub-pipeline-component-no-schema-type-no-type": "#/$defs/SubPipelineComponentNoSchemaTypeNoType" }, "propertyName": "type" }, "oneOf": [ { - "$ref": "#/definitions/EmptyPipelineComponent" + "$ref": "#/$defs/EmptyPipelineComponent" }, { - "$ref": "#/definitions/SubPipelineComponent" + "$ref": "#/$defs/SubPipelineComponent" }, { - "$ref": "#/definitions/SubPipelineComponentCorrect" + "$ref": "#/$defs/SubPipelineComponentCorrect" }, { - "$ref": "#/definitions/SubPipelineComponentCorrectDocstr" + "$ref": "#/$defs/SubPipelineComponentCorrectDocstr" }, { - "$ref": "#/definitions/SubPipelineComponentNoSchemaTypeNoType" + "$ref": "#/$defs/SubPipelineComponentNoSchemaTypeNoType" } ] }, - "title": "KPOps pipeline schema", + "title": "PipelineSchema", "type": "array" } ''' diff --git a/tests/cli/test_kpops_config.py b/tests/cli/test_kpops_config.py index 33db1560a..717a67e46 100644 --- a/tests/cli/test_kpops_config.py +++ b/tests/cli/test_kpops_config.py @@ -1,7 +1,7 @@ from pathlib import Path import pytest -from pydantic import AnyHttpUrl, ValidationError, parse_obj_as +from pydantic import AnyHttpUrl, TypeAdapter, ValidationError from kpops.config import ( KafkaConnectConfig, @@ -27,9 +27,9 @@ def test_kpops_config_with_default_values(): == "${pipeline_name}-${component_name}-error" ) assert default_config.schema_registry.enabled is False - assert default_config.schema_registry.url == "http://localhost:8081" - assert default_config.kafka_rest.url == "http://localhost:8082" - assert default_config.kafka_connect.url == "http://localhost:8083" + assert default_config.schema_registry.url == AnyHttpUrl("http://localhost:8081") + assert default_config.kafka_rest.url == AnyHttpUrl("http://localhost:8082") + assert default_config.kafka_connect.url == AnyHttpUrl("http://localhost:8083") assert default_config.timeout == 300 assert default_config.create_namespace is False assert default_config.helm_config.context is None @@ -45,7 +45,7 @@ def test_kpops_config_with_different_invalid_urls(): environment="development", kafka_brokers="http://broker:9092", kafka_connect=KafkaConnectConfig( - url=parse_obj_as(AnyHttpUrl, "invalid-host") + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") ), ) @@ -53,7 +53,9 @@ def test_kpops_config_with_different_invalid_urls(): KpopsConfig( environment="development", kafka_brokers="http://broker:9092", - kafka_rest=KafkaRestConfig(url=parse_obj_as(AnyHttpUrl, "invalid-host")), + kafka_rest=KafkaRestConfig( + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host") + ), ) with pytest.raises(ValidationError): @@ -62,6 +64,6 @@ def test_kpops_config_with_different_invalid_urls(): kafka_brokers="http://broker:9092", schema_registry=SchemaRegistryConfig( enabled=True, - url=parse_obj_as(AnyHttpUrl, "invalid-host"), + url=TypeAdapter(AnyHttpUrl).validate_python("invalid-host"), ), ) diff --git a/tests/cli/test_schema_generation.py b/tests/cli/test_schema_generation.py index cbb855d14..d860a0b9c 100644 --- a/tests/cli/test_schema_generation.py +++ b/tests/cli/test_schema_generation.py @@ -26,7 +26,7 @@ # type is inherited from PipelineComponent class 
EmptyPipelineComponent(PipelineComponent): class Config: - anystr_strip_whitespace = True + str_strip_whitespace = True # abstract component inheriting from ABC should be excluded diff --git a/tests/component_handlers/kafka_connect/test_connect_wrapper.py b/tests/component_handlers/kafka_connect/test_connect_wrapper.py index 159b33359..1c38a8b1a 100644 --- a/tests/component_handlers/kafka_connect/test_connect_wrapper.py +++ b/tests/component_handlers/kafka_connect/test_connect_wrapper.py @@ -65,7 +65,7 @@ def test_should_create_post_requests_for_given_connector_configuration( headers=HEADERS, json={ "name": "test-connector", - "config": KafkaConnectorConfig(**configs).dict(), + "config": KafkaConnectorConfig(**configs).model_dump(), }, ) @@ -235,7 +235,7 @@ def test_should_create_correct_update_connector_request(self, mock_put: MagicMoc mock_put.assert_called_with( url=f"{DEFAULT_HOST}/connectors/{connector_name}/config", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=KafkaConnectorConfig(**configs).dict(), + json=KafkaConnectorConfig(**configs).model_dump(), ) @patch("kpops.component_handlers.kafka_connect.connect_wrapper.log.info") @@ -455,7 +455,7 @@ def test_should_create_correct_validate_connector_config_request( mock_put.assert_called_with( url=f"{DEFAULT_HOST}/connector-plugins/FileStreamSinkConnector/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=connector_config.dict(), + json=connector_config.model_dump(), ) @patch("httpx.put") @@ -477,7 +477,9 @@ def test_should_create_correct_validate_connector_config_and_name_gets_added( mock_put.assert_called_with( url=f"{DEFAULT_HOST}/connector-plugins/{connector_name}/config/validate", headers={"Accept": "application/json", "Content-Type": "application/json"}, - json=KafkaConnectorConfig(**{"name": connector_name, **configs}).dict(), + json=KafkaConnectorConfig( + **{"name": connector_name, **configs} + ).model_dump(), ) def test_should_parse_validate_connector_config(self, httpx_mock: HTTPXMock): diff --git a/tests/component_handlers/schema_handler/test_schema_handler.py b/tests/component_handlers/schema_handler/test_schema_handler.py index 9f1fe143f..00718f3bd 100644 --- a/tests/component_handlers/schema_handler/test_schema_handler.py +++ b/tests/component_handlers/schema_handler/test_schema_handler.py @@ -3,7 +3,7 @@ from unittest.mock import MagicMock import pytest -from pydantic import AnyHttpUrl, BaseModel, parse_obj_as +from pydantic import AnyHttpUrl, BaseModel, TypeAdapter from pytest_mock import MockerFixture from schema_registry.client.schema import AvroSchema from schema_registry.client.utils import SchemaVersion @@ -74,7 +74,8 @@ def kpops_config_with_sr_enabled() -> KpopsConfig: environment="development", kafka_brokers="broker:9092", schema_registry=SchemaRegistryConfig( - enabled=True, url=parse_obj_as(AnyHttpUrl, "http://mock:8081") + enabled=True, + url=TypeAdapter(AnyHttpUrl).validate_python("http://mock:8081"), ), ) @@ -87,7 +88,7 @@ def test_load_schema_handler(kpops_config_with_sr_enabled: KpopsConfig): SchemaHandler, ) - config_disable = kpops_config_with_sr_enabled.copy() + config_disable = kpops_config_with_sr_enabled.model_copy() config_disable.schema_registry = SchemaRegistryConfig(enabled=False) assert ( diff --git a/tests/component_handlers/topic/test_proxy_wrapper.py b/tests/component_handlers/topic/test_proxy_wrapper.py index bbd87bc1e..3cee5f06b 100644 --- a/tests/component_handlers/topic/test_proxy_wrapper.py +++ 
b/tests/component_handlers/topic/test_proxy_wrapper.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock, patch import pytest +from pydantic import AnyHttpUrl from pytest_httpx import HTTPXMock from pytest_mock import MockerFixture @@ -45,7 +46,7 @@ def _setup(self, httpx_mock: HTTPXMock): json=cluster_response, status_code=200, ) - assert self.proxy_wrapper.url == DEFAULT_HOST + assert self.proxy_wrapper.url == AnyHttpUrl(DEFAULT_HOST) assert self.proxy_wrapper.cluster_id == "cluster-1" @patch("httpx.post") diff --git a/tests/components/test_kafka_sink_connector.py b/tests/components/test_kafka_sink_connector.py index 748eb39a9..25fa67498 100644 --- a/tests/components/test_kafka_sink_connector.py +++ b/tests/components/test_kafka_sink_connector.py @@ -73,7 +73,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics": topic_name} + **{**connector_config.model_dump(), "topics": topic_name} ), namespace="test-namespace", ) @@ -85,7 +85,7 @@ def test_connector_config_parsing( config=config, handlers=handlers, app=KafkaConnectorConfig( - **{**connector_config.dict(), "topics.regex": topic_pattern} + **{**connector_config.model_dump(), "topics.regex": topic_pattern} ), namespace="test-namespace", ) diff --git a/tests/defaults.yaml b/tests/defaults.yaml new file mode 100644 index 000000000..09fd863b3 --- /dev/null +++ b/tests/defaults.yaml @@ -0,0 +1,2 @@ +streams-app: + namespace: "namespace" diff --git a/tests/pipeline/resources/dotenv/.env b/tests/pipeline/resources/dotenv/.env new file mode 100644 index 000000000..9829b1218 --- /dev/null +++ b/tests/pipeline/resources/dotenv/.env @@ -0,0 +1,3 @@ +KPOPS_environment="default" +KPOPS_schema_registry__enabled="true" +KPOPS_schema_registry__url="http://localhost:8081" diff --git a/tests/pipeline/resources/dotenv/config.yaml b/tests/pipeline/resources/dotenv/config.yaml new file mode 100644 index 000000000..196b583f5 --- /dev/null +++ b/tests/pipeline/resources/dotenv/config.yaml @@ -0,0 +1,12 @@ +# environment: development +defaults_path: ../defaults.yaml +topic_name_config: + default_error_topic_name: "${component_name}-dead-letter-topic" + default_output_topic_name: "${component_name}-test-topic" +kafka_brokers: "http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092" +kafka_connect: + url: "http://localhost:8083" +kafka_rest: + url: "http://localhost:8082" +helm_config: + api_version: "2.1.1" diff --git a/tests/pipeline/resources/dotenv/custom.env b/tests/pipeline/resources/dotenv/custom.env new file mode 100644 index 000000000..3e5371e98 --- /dev/null +++ b/tests/pipeline/resources/dotenv/custom.env @@ -0,0 +1,3 @@ +KPOPS_environment="custom" +KPOPS_schema_registry__enabled="false" +KPOPS_schema_registry__url="http://notlocalhost:8081" diff --git a/tests/pipeline/resources/temp-trim-release-name/defaults.yaml b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml new file mode 100644 index 000000000..55754eba1 --- /dev/null +++ b/tests/pipeline/resources/temp-trim-release-name/defaults.yaml @@ -0,0 +1,23 @@ +kubernetes-app: + namespace: example-namespace + +kafka-app: + app: + streams: + brokers: "${kafka_brokers}" + schema_registry_url: "${schema_registry_url}" + version: "2.4.2" + +streams-app: # inherits from kafka-app + app: + streams: + config: + large.message.id.generator: com.bakdata.kafka.MurmurHashIdGenerator + to: + topics: + ${error_topic_name}: + type: error + value_schema: com.bakdata.kafka.DeadLetter + partitions_count: 1 + 
configs: + cleanup.policy: compact,delete diff --git a/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml new file mode 100644 index 000000000..d61d6c9ba --- /dev/null +++ b/tests/pipeline/resources/temp-trim-release-name/pipeline.yaml @@ -0,0 +1,6 @@ +- type: streams-app + name: in-order-to-have-len-fifty-two-name-should-end--here + app: + streams: + config: + max.poll.records: 100 diff --git a/tests/pipeline/snapshots/snap_test_example.py b/tests/pipeline/snapshots/snap_test_example.py index 2ef44b969..406679c8b 100644 --- a/tests/pipeline/snapshots/snap_test_example.py +++ b/tests/pipeline/snapshots/snap_test_example.py @@ -28,7 +28,7 @@ }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, 'suspend': True }, @@ -79,7 +79,7 @@ }, 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-avro-producer-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' }, 'suspend': True }, @@ -136,7 +136,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-transaction-joiner-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'name': 'transaction-joiner', @@ -198,7 +198,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-fraud-detector-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'name': 'fraud-detector', @@ -265,7 +265,7 @@ ], 'optimizeLeaveGroupBehavior': False, 'outputTopic': 'bakdata-atm-fraud-detection-account-linker-topic', - 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081' + 'schemaRegistryUrl': 'http://k8kafka-cp-schema-registry.kpops.svc.cluster.local:8081/' } }, 'from': { diff --git a/tests/pipeline/snapshots/snap_test_pipeline.py b/tests/pipeline/snapshots/snap_test_pipeline.py index c2e339fbc..d1e6f1776 100644 --- a/tests/pipeline/snapshots/snap_test_pipeline.py +++ b/tests/pipeline/snapshots/snap_test_pipeline.py @@ -25,7 +25,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-custom-config-app1', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app1', @@ -67,7 +67,7 @@ 'resources-custom-config-app1' ], 'outputTopic': 'resources-custom-config-app2', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'app2', @@ -120,7 +120,7 @@ 'extraOutputTopics': { }, 'outputTopic': 'resources-pipeline-with-inflate-scheduled-producer', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'scheduled-producer', @@ -187,7 +187,7 @@ 'resources-pipeline-with-inflate-scheduled-producer' ], 'outputTopic': 'resources-pipeline-with-inflate-converter', - 'schemaRegistryUrl': 'http://localhost:8081' + 'schemaRegistryUrl': 'http://localhost:8081/' } }, 'name': 'converter', @@ -262,7 +262,7 
@@
                     'resources-pipeline-with-inflate-converter'
                 ],
                 'outputTopic': 'resources-pipeline-with-inflate-should-inflate',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'should-inflate',
@@ -359,7 +359,7 @@
                     'kafka-sink-connector'
                 ],
                 'outputTopic': 'resources-pipeline-with-inflate-should-inflate-should-inflate-inflated-streams-app',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'should-inflate-inflated-streams-app',
@@ -413,7 +413,7 @@
                     'example-topic'
                 ],
                 'outputTopic': 'example-output',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -506,7 +506,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'resources-first-pipeline-scheduled-producer',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'scheduled-producer',
@@ -573,7 +573,7 @@
                     'resources-first-pipeline-scheduled-producer'
                 ],
                 'outputTopic': 'resources-first-pipeline-converter',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'converter',
@@ -648,7 +648,7 @@
                     'resources-first-pipeline-converter'
                 ],
                 'outputTopic': 'resources-first-pipeline-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name-a-long-name',
@@ -698,7 +698,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'out',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'account-producer',
@@ -744,7 +744,7 @@
                 'errorTopic': 'resources-no-input-topic-pipeline-app1-error',
                 'inputPattern': '.*',
                 'outputTopic': 'example-output',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -804,7 +804,7 @@
                 'inputTopics': [
                     'example-output'
                 ],
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'app2',
@@ -863,7 +863,7 @@
                     'example-topic'
                 ],
                 'outputTopic': 'example-output',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -926,7 +926,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'resources-pipeline-with-envs-input-producer',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'input-producer',
@@ -993,7 +993,7 @@
                     'resources-pipeline-with-envs-input-producer'
                 ],
                 'outputTopic': 'resources-pipeline-with-envs-converter',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'converter',
@@ -1068,7 +1068,7 @@
                     'resources-pipeline-with-envs-converter'
                 ],
                 'outputTopic': 'resources-pipeline-with-envs-filter',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'filter',
@@ -1127,7 +1127,7 @@
                 'brokers': 'http://k8kafka-cp-kafka-headless.kpops.svc.cluster.local:9092',
                 'extraOutputTopics': {
                 },
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             },
             'suspend': True
         },
@@ -1157,7 +1157,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'resources-read-from-component-producer1',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'producer1',
@@ -1192,7 +1192,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'resources-read-from-component-producer2',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'producer2',
@@ -1247,7 +1247,7 @@
                     'resources-read-from-component-producer2'
                 ],
                 'outputTopic': 'resources-read-from-component-inflate-step',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'inflate-step',
@@ -1344,7 +1344,7 @@
                     'kafka-sink-connector'
                 ],
                 'outputTopic': 'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'inflate-step-inflated-streams-app',
@@ -1407,7 +1407,7 @@
                     'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
                 ],
                 'outputTopic': 'resources-read-from-component-inflate-step-without-prefix',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'inflate-step-without-prefix',
@@ -1504,7 +1504,7 @@
                     'kafka-sink-connector'
                 ],
                 'outputTopic': 'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'inflate-step-without-prefix-inflated-streams-app',
@@ -1552,7 +1552,7 @@
                     'resources-read-from-component-producer1'
                 ],
                 'outputTopic': 'resources-read-from-component-consumer1',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -1609,7 +1609,7 @@
                     'resources-read-from-component-producer1',
                     'resources-read-from-component-consumer1'
                 ],
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -1664,7 +1664,7 @@
                     'resources-read-from-component-producer1',
                     'resources-read-from-component-producer2'
                 ],
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -1718,7 +1718,7 @@
                 'inputTopics': [
                     'resources-read-from-component-inflate-step-inflate-step-inflated-streams-app'
                 ],
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -1769,7 +1769,7 @@
                 'inputTopics': [
                     'inflate-step-without-prefix-inflate-step-without-prefix-inflated-streams-app'
                 ],
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
@@ -1832,7 +1832,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'resources-component-type-substitution-scheduled-producer',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'scheduled-producer',
@@ -1899,7 +1899,7 @@
                     'resources-component-type-substitution-scheduled-producer'
                 ],
                 'outputTopic': 'resources-component-type-substitution-converter',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'converter',
@@ -1981,7 +1981,7 @@
                     'resources-component-type-substitution-converter'
                 ],
                 'outputTopic': 'resources-component-type-substitution-filter-app',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'filter-app',
@@ -2039,7 +2039,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'app1-test-topic',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'app1',
@@ -2081,7 +2081,7 @@
                     'app1-test-topic'
                 ],
                 'outputTopic': 'app2-test-topic',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'app2',
@@ -2136,7 +2136,7 @@
                 'extraOutputTopics': {
                 },
                 'outputTopic': 'app1-test-topic',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'app1',
@@ -2178,7 +2178,7 @@
                     'app1-test-topic'
                 ],
                 'outputTopic': 'app2-test-topic',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'name': 'app2',
@@ -2231,7 +2231,7 @@
                     'example-topic'
                 ],
                 'outputTopic': 'example-output',
-                'schemaRegistryUrl': 'http://localhost:8081'
+                'schemaRegistryUrl': 'http://localhost:8081/'
             }
         },
         'from': {
diff --git a/tests/pipeline/test_components/components.py b/tests/pipeline/test_components/components.py
index 86e2c8b8e..84698c0b4 100644
--- a/tests/pipeline/test_components/components.py
+++ b/tests/pipeline/test_components/components.py
@@ -71,7 +71,7 @@ def inflate(self) -> list[PipelineComponent]:
                             f"{self.full_name}-" + "${component_name}"
                         ): TopicConfig(type=OutputTopicTypes.OUTPUT)
                     }
-                ).dict(),
+                ).model_dump(),
             )
             inflate_steps.append(streams_app)

diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py
index d09ae6795..894d88c8d 100644
--- a/tests/pipeline/test_pipeline.py
+++ b/tests/pipeline/test_pipeline.py
@@ -481,6 +481,30 @@ def test_env_vars_precedence_over_config(self, monkeypatch: pytest.MonkeyPatch):
             == "env_broker"
         )

+    def test_nested_config_env_vars(self, monkeypatch: pytest.MonkeyPatch):
+        monkeypatch.setenv(
+            name="KPOPS_SCHEMA_REGISTRY__URL", value="http://somename:1234"
+        )
+
+        result = runner.invoke(
+            app,
+            [
+                "generate",
+                "--pipeline-base-dir",
+                str(PIPELINE_BASE_DIR_PATH),
+                str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
+                "--config",
+                str(RESOURCE_PATH / "custom-config/config.yaml"),
+            ],
+            catch_exceptions=False,
+        )
+        assert result.exit_code == 0
+        enriched_pipeline: dict = yaml.safe_load(result.stdout)
+        assert (
+            enriched_pipeline["components"][0]["app"]["streams"]["schemaRegistryUrl"]
+            == "http://somename:1234/"
+        )
+
     def test_model_serialization(self, snapshot: SnapshotTest):
         """Test model serialization of component containing pathlib.Path attribute."""
         result = runner.invoke(
@@ -501,6 +525,33 @@ def test_model_serialization(self, snapshot: SnapshotTest):
         enriched_pipeline: dict = yaml.safe_load(result.stdout)
         snapshot.assert_match(enriched_pipeline, "test-pipeline")

+    def test_dotenv_support(self):
+        result = runner.invoke(
+            app,
+            [
+                "generate",
+                "--pipeline-base-dir",
+                str(PIPELINE_BASE_DIR_PATH),
+                str(RESOURCE_PATH / "custom-config/pipeline.yaml"),
+                "--defaults",
+                str(RESOURCE_PATH),
+                "--config",
+                str(RESOURCE_PATH / "dotenv/config.yaml"),
+                "--dotenv",
+                str(RESOURCE_PATH / "dotenv/.env"),
+                "--dotenv",
+                str(RESOURCE_PATH / "dotenv/custom.env"),
+            ],
+            catch_exceptions=False,
+        )
+        assert result.exit_code == 0
+
+        enriched_pipeline: dict = yaml.safe_load(result.stdout)
+        assert (
+            enriched_pipeline["components"][1]["app"]["streams"]["schemaRegistryUrl"]
+            == "http://notlocalhost:8081/"
+        )
+
     def test_short_topic_definition(self):
         result = runner.invoke(
@@ -583,3 +634,23 @@ def test_validate_unique_step_names(self):
             ],
             catch_exceptions=False,
         )
+
+    def test_temp_trim_release_name(self):
+        result = runner.invoke(
+            app,
+            [
+                "generate",
+                "--pipeline-base-dir",
+                str(PIPELINE_BASE_DIR_PATH),
+                str(RESOURCE_PATH / "temp-trim-release-name/pipeline.yaml"),
+                "--defaults",
+                str(RESOURCE_PATH / "temp-trim-release-name"),
+            ],
+            catch_exceptions=False,
+        )
+        assert result.exit_code == 0
+        enriched_pipeline: dict = yaml.safe_load(result.stdout)
+        assert (
+            enriched_pipeline["components"][0]["name"]
+            == "in-order-to-have-len-fifty-two-name-should-end--here"
+        )
diff --git a/tests/utils/resources/nested_base_settings.py b/tests/utils/resources/nested_base_settings.py
index f7f92358a..97e755e71 100644
--- a/tests/utils/resources/nested_base_settings.py
+++ b/tests/utils/resources/nested_base_settings.py
@@ -1,4 +1,5 @@
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings


 class NestedSettings(BaseSettings):
@@ -10,5 +11,5 @@ class ParentSettings(BaseSettings):
     nested_field: NestedSettings = Field(...)
     field_with_env_defined: str = Field(
         default=...,
-        env="FIELD_WITH_ENV_DEFINED",
+        alias="FIELD_WITH_ENV_DEFINED",
     )
diff --git a/tests/utils/test_dict_ops.py b/tests/utils/test_dict_ops.py
index 1ea410770..224934d87 100644
--- a/tests/utils/test_dict_ops.py
+++ b/tests/utils/test_dict_ops.py
@@ -70,7 +70,7 @@ class SimpleModel(BaseModel):
                 },
             },
             problems=99,
-        ).json()
+        ).model_dump_json()
     )
     existing_substitution = {
         "key1": "Everything",
diff --git a/tests/utils/test_doc_gen.py b/tests/utils/test_doc_gen.py
index d234bd79d..5ad065f2c 100644
--- a/tests/utils/test_doc_gen.py
+++ b/tests/utils/test_doc_gen.py
@@ -6,25 +6,13 @@
 from hooks.gen_docs.gen_docs_env_vars import (
     EnvVarAttrs,
     append_csv_to_dotenv_file,
-    collect_fields,
     csv_append_env_var,
     write_csv_to_md_file,
     write_title_to_dotenv_file,
 )
-from tests.utils.resources.nested_base_settings import ParentSettings


 class TestEnvDocGen:
-    def test_collect_fields(self):
-        expected: list[Any] = [
-            "not_nested_field",
-            "attr",
-            Ellipsis,
-            Ellipsis,
-        ]
-        actual = [field.field_info.default for field in collect_fields(ParentSettings)]
-        assert actual == expected
-
     @pytest.mark.parametrize(
         ("var_name", "default_value", "description", "extra_args", "expected_outcome"),
         [