From ca0f278953b6b8b8650b6d1857717c979c8582ab Mon Sep 17 00:00:00 2001 From: Salomon Popp Date: Thu, 1 Feb 2024 10:40:39 +0100 Subject: [PATCH] Improve Sphinx docs highlighting using RST markup (#443) --- docs/docs/resources/variables/config_env_vars.env | 2 +- docs/docs/resources/variables/config_env_vars.md | 2 +- docs/docs/schema/config.json | 2 +- docs/docs/schema/defaults.json | 6 +++--- docs/docs/schema/pipeline.json | 6 +++--- kpops/cli/exception.py | 2 +- kpops/component_handlers/helm_wrapper/helm.py | 2 +- kpops/component_handlers/helm_wrapper/model.py | 6 +++--- kpops/component_handlers/helm_wrapper/utils.py | 2 +- .../kafka_connect/connect_wrapper.py | 2 +- .../kafka_connect/kafka_connect_handler.py | 4 ++-- kpops/component_handlers/topic/proxy_wrapper.py | 2 +- .../base_components/base_defaults_component.py | 12 ++++++------ kpops/components/base_components/kafka_connector.py | 2 +- .../base_components/models/from_section.py | 5 +++-- .../components/base_components/models/to_section.py | 5 +++-- .../components/base_components/pipeline_component.py | 2 +- kpops/pipeline.py | 9 +++++---- kpops/utils/pydantic.py | 9 +++++---- kpops/utils/yaml.py | 4 ++-- tests/cli/snapshots/snap_test_schema_generation.py | 4 ++-- 21 files changed, 47 insertions(+), 43 deletions(-) diff --git a/docs/docs/resources/variables/config_env_vars.env b/docs/docs/resources/variables/config_env_vars.env index 42d2dead8..56b99f069 100644 --- a/docs/docs/resources/variables/config_env_vars.env +++ b/docs/docs/resources/variables/config_env_vars.env @@ -56,7 +56,7 @@ KPOPS_HELM_CONFIG__CONTEXT # No default value, not required # Run Helm in Debug mode KPOPS_HELM_CONFIG__DEBUG=False # helm_config.api_version -# Kubernetes API version used for Capabilities.APIVersions +# Kubernetes API version used for `Capabilities.APIVersions` KPOPS_HELM_CONFIG__API_VERSION # No default value, not required # helm_diff_config.ignore # Set of keys that should not be checked. diff --git a/docs/docs/resources/variables/config_env_vars.md b/docs/docs/resources/variables/config_env_vars.md index ef0a7726f..ad9a41e62 100644 --- a/docs/docs/resources/variables/config_env_vars.md +++ b/docs/docs/resources/variables/config_env_vars.md @@ -17,6 +17,6 @@ These variables are a lower priority alternative to the settings in `config.yaml |KPOPS_CREATE_NAMESPACE |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace | |KPOPS_HELM_CONFIG__CONTEXT | |False |Name of kubeconfig context (`--kube-context`) |helm_config.context | |KPOPS_HELM_CONFIG__DEBUG |False |False |Run Helm in Debug mode |helm_config.debug | -|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for Capabilities.APIVersions |helm_config.api_version | +|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for `Capabilities.APIVersions` |helm_config.api_version | |KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore | |KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall the, after completion. 
|retain_clean_jobs | diff --git a/docs/docs/schema/config.json b/docs/docs/schema/config.json index 98056fca0..e3b619096 100644 --- a/docs/docs/schema/config.json +++ b/docs/docs/schema/config.json @@ -13,7 +13,7 @@ } ], "default": null, - "description": "Kubernetes API version used for Capabilities.APIVersions", + "description": "Kubernetes API version used for `Capabilities.APIVersions`", "title": "API version" }, "context": { diff --git a/docs/docs/schema/defaults.json b/docs/docs/schema/defaults.json index aa5db63da..0ac5b0022 100644 --- a/docs/docs/schema/defaults.json +++ b/docs/docs/schema/defaults.json @@ -209,7 +209,7 @@ "type": "object" }, "InputTopicTypes": { - "description": "Input topic types.\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", + "description": "Input topic types.\n\n- INPUT: input topic\n- PATTERN: extra-topic-pattern or input-topic-pattern", "enum": [ "input", "pattern" @@ -522,7 +522,7 @@ } ], "default": null, - "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running", + "description": "`offset.storage.topic`, more info: https://kafka.apache.org/documentation/#connect_running", "title": "Offset Topic" }, "prefix": { @@ -676,7 +676,7 @@ "type": "object" }, "OutputTopicTypes": { - "description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)", + "description": "Types of output topic.\n\n- OUTPUT: output topic\n- ERROR: error topic", "enum": [ "output", "error" diff --git a/docs/docs/schema/pipeline.json b/docs/docs/schema/pipeline.json index 186863f62..5f3e9f6d9 100644 --- a/docs/docs/schema/pipeline.json +++ b/docs/docs/schema/pipeline.json @@ -209,7 +209,7 @@ "type": "object" }, "InputTopicTypes": { - "description": "Input topic types.\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", + "description": "Input topic types.\n\n- INPUT: input topic\n- PATTERN: extra-topic-pattern or input-topic-pattern", "enum": [ "input", "pattern" @@ -357,7 +357,7 @@ } ], "default": null, - "description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running", + "description": "`offset.storage.topic`, more info: https://kafka.apache.org/documentation/#connect_running", "title": "Offset Topic" }, "prefix": { @@ -413,7 +413,7 @@ "type": "object" }, "OutputTopicTypes": { - "description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)", + "description": "Types of output topic.\n\n- OUTPUT: output topic\n- ERROR: error topic", "enum": [ "output", "error" diff --git a/kpops/cli/exception.py b/kpops/cli/exception.py index d8f7466bc..e9b0a65de 100644 --- a/kpops/cli/exception.py +++ b/kpops/cli/exception.py @@ -1,2 +1,2 @@ class ClassNotFoundError(Exception): - """Similar to builtin ModuleNotFoundError; class doesn't exist inside module.""" + """Similar to builtin `ModuleNotFoundError`; class doesn't exist inside module.""" diff --git a/kpops/component_handlers/helm_wrapper/helm.py b/kpops/component_handlers/helm_wrapper/helm.py index 2f2c5dcf9..8616937c9 100644 --- a/kpops/component_handlers/helm_wrapper/helm.py +++ b/kpops/component_handlers/helm_wrapper/helm.py @@ -112,7 +112,7 @@ async def uninstall( release_name: str, dry_run: bool, ) -> str | None: - """Prepare and execute the helm uninstall command.""" + """Prepare and execute the `helm uninstall` command.""" command = [ "helm", "uninstall", diff --git a/kpops/component_handlers/helm_wrapper/model.py 
b/kpops/component_handlers/helm_wrapper/model.py index 8a635983f..8aa0a1374 100644 --- a/kpops/component_handlers/helm_wrapper/model.py +++ b/kpops/component_handlers/helm_wrapper/model.py @@ -83,7 +83,7 @@ class HelmConfig(DescConfigModel): :param context: Name of kubeconfig context (`--kube-context`) :param debug: Run Helm in Debug mode - :param api_version: Kubernetes API version used for Capabilities.APIVersions + :param api_version: Kubernetes API version used for `Capabilities.APIVersions` """ context: str | None = Field( @@ -168,8 +168,8 @@ def parse_source(source: str) -> str: """Parse source path from comment at the beginning of the YAML doc. :Example: - - # Source: chart/templates/serviceaccount.yaml + .. code-block:: yaml + # Source: chart/templates/serviceaccount.yaml """ if not source.startswith(HELM_SOURCE_PREFIX): msg = "Not a valid Helm template source" diff --git a/kpops/component_handlers/helm_wrapper/utils.py b/kpops/component_handlers/helm_wrapper/utils.py index 4b892996f..2a571abff 100644 --- a/kpops/component_handlers/helm_wrapper/utils.py +++ b/kpops/component_handlers/helm_wrapper/utils.py @@ -12,7 +12,7 @@ def create_helm_release_name(name: str, suffix: str = "") -> str: Helm has a limit of 53 characters for release names. If the name exceeds the character limit: - 1. trim the string and fetch the first RELEASE_NAME_MAX_LEN - len(suffix) characters. + 1. trim the string and fetch the first ``RELEASE_NAME_MAX_LEN - len(suffix)`` characters. 2. replace the last 6 characters with the SHA-1 encoded string (with "-") to avoid collision 3. append the suffix if given diff --git a/kpops/component_handlers/kafka_connect/connect_wrapper.py b/kpops/component_handlers/kafka_connect/connect_wrapper.py index 687075c2e..dba4130d5 100644 --- a/kpops/component_handlers/kafka_connect/connect_wrapper.py +++ b/kpops/component_handlers/kafka_connect/connect_wrapper.py @@ -98,7 +98,7 @@ async def update_connector_config( ) -> KafkaConnectResponse: """Create or update a connector. - Create a new connector using the given configuration,or update the + Create a new connector using the given configuration, or update the configuration for an existing connector. :param connector_config: Configuration parameters for the connector. :return: Information about the connector after the change has been made. diff --git a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py index 46ec61a68..d50a656d7 100644 --- a/kpops/component_handlers/kafka_connect/kafka_connect_handler.py +++ b/kpops/component_handlers/kafka_connect/kafka_connect_handler.py @@ -41,7 +41,7 @@ async def create_connector( If the connector exists the config of that connector gets updated. :param connector_config: The connector config. - :param dry_run: If the connector creation should be run in dry run mode. + :param dry_run: Whether the connector creation should be run in dry run mode. """ if dry_run: await self.__dry_run_connector_creation(connector_config) @@ -67,7 +67,7 @@ async def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None """Delete a connector resource from the cluster. :param connector_name: The connector name. - :param dry_run: If the connector deletion should be run in dry run mode. + :param dry_run: Whether the connector deletion should be run in dry run mode. 
""" if dry_run: await self.__dry_run_connector_deletion(connector_name) diff --git a/kpops/component_handlers/topic/proxy_wrapper.py b/kpops/component_handlers/topic/proxy_wrapper.py index 7bfc4a276..9451966fc 100644 --- a/kpops/component_handlers/topic/proxy_wrapper.py +++ b/kpops/component_handlers/topic/proxy_wrapper.py @@ -165,7 +165,7 @@ async def get_topic_config(self, topic_name: str) -> TopicConfigResponse: async def batch_alter_topic_config( self, topic_name: str, json_body: list[dict] ) -> None: - """Reset config of given config_name param to the default value on the kafka server. + """Reset config of given config_name param to the default value on the Kafka server. API Reference: https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics-topic_name-configs-alter diff --git a/kpops/components/base_components/base_defaults_component.py b/kpops/components/base_components/base_defaults_component.py index f60d8d931..94cf3360d 100644 --- a/kpops/components/base_components/base_defaults_component.py +++ b/kpops/components/base_components/base_defaults_component.py @@ -38,7 +38,7 @@ class BaseDefaultsComponent(DescConfigModel, ABC): """Base for all components, handles defaults. - Component defaults are usually provided in a yaml file called + Component defaults are usually provided in a YAML file called `defaults.yaml`. This class ensures that the defaults are read and assigned correctly to the component. @@ -164,15 +164,15 @@ def _validate_custom(self, **kwargs) -> None: def defaults_from_yaml(path: Path, key: str) -> dict: - """Read component-specific settings from a defaults yaml file and return @default if not found. + """Read component-specific settings from a ``defaults*.yaml`` file and return @default if not found. - :param path: Path to defaults yaml file + :param path: Path to ``defaults*.yaml`` file :param key: Component type - :returns: All defaults set for the given component in the provided yaml + :returns: All defaults set for the given component in the provided YAML :Example: - - kafka_app_defaults = defaults_from_yaml(Path("/path/to/defaults.yaml"), "kafka-app") + .. code-block:: python + kafka_app_defaults = defaults_from_yaml(Path("/path/to/defaults.yaml"), "kafka-app") """ content = load_yaml_file(path, substitution=ENV) if not isinstance(content, dict): diff --git a/kpops/components/base_components/kafka_connector.py b/kpops/components/base_components/kafka_connector.py index fc966d6ca..ba8695314 100644 --- a/kpops/components/base_components/kafka_connector.py +++ b/kpops/components/base_components/kafka_connector.py @@ -206,7 +206,7 @@ async def clean(self, dry_run: bool) -> None: class KafkaSourceConnector(KafkaConnector): """Kafka source connector model. - :param offset_topic: offset.storage.topic, + :param offset_topic: `offset.storage.topic`, more info: https://kafka.apache.org/documentation/#connect_running, defaults to None """ diff --git a/kpops/components/base_components/models/from_section.py b/kpops/components/base_components/models/from_section.py index 5f1dae193..3f2d5badd 100644 --- a/kpops/components/base_components/models/from_section.py +++ b/kpops/components/base_components/models/from_section.py @@ -11,7 +11,8 @@ class InputTopicTypes(str, Enum): """Input topic types. 
- INPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern) + - INPUT: input topic + - PATTERN: extra-topic-pattern or input-topic-pattern """ INPUT = "input" @@ -38,7 +39,7 @@ class FromTopic(DescConfigModel): @model_validator(mode="after") def extra_topic_role(self) -> Any: - """Ensure that cls.role is used correctly, assign type if needed.""" + """Ensure that `cls.role` is used correctly, assign type if needed.""" if self.type == InputTopicTypes.INPUT and self.role: msg = "Define role only if `type` is `pattern` or `None`" raise ValueError(msg) diff --git a/kpops/components/base_components/models/to_section.py b/kpops/components/base_components/models/to_section.py index 56da461c8..b1b750a7c 100644 --- a/kpops/components/base_components/models/to_section.py +++ b/kpops/components/base_components/models/to_section.py @@ -11,7 +11,8 @@ class OutputTopicTypes(str, Enum): """Types of output topic. - OUTPUT (output topic), ERROR (error topic) + - OUTPUT: output topic + - ERROR: error topic """ OUTPUT = "output" @@ -66,7 +67,7 @@ class TopicConfig(DescConfigModel): @model_validator(mode="after") def extra_topic_role(self) -> Any: - """Ensure that cls.role is used correctly, assign type if needed.""" + """Ensure that `cls.role` is used correctly, assign type if needed.""" if self.type and self.role: msg = "Define `role` only if `type` is undefined" raise ValueError(msg) diff --git a/kpops/components/base_components/pipeline_component.py b/kpops/components/base_components/pipeline_component.py index 07f66571c..44ef61394 100644 --- a/kpops/components/base_components/pipeline_component.py +++ b/kpops/components/base_components/pipeline_component.py @@ -175,7 +175,7 @@ def apply_from_inputs(self, name: str, topic: FromTopic) -> None: self.add_input_topics([name]) def set_output_topics(self) -> None: - """Put values of config.to into the producer config section of streams bootstrap. + """Put values of `to` section into the producer config section of streams bootstrap. Supports extra_output_topics (topics by role) or output_topics. """ diff --git a/kpops/pipeline.py b/kpops/pipeline.py index d997814b3..06de8d222 100644 --- a/kpops/pipeline.py +++ b/kpops/pipeline.py @@ -221,7 +221,7 @@ def parse( return self.pipeline def load_yaml(self, path: Path, environment: str | None) -> Pipeline: - """Load pipeline definition from yaml. + """Load pipeline definition from YAML file. The file is often named ``pipeline.yaml`` @@ -399,9 +399,10 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None: For example, for a given path ./data/v1/dev/pipeline.yaml the pipeline_name would be set to data-v1-dev. Then the sub environment variables are set: - pipeline.name_0 = data - pipeline.name_1 = v1 - pipeline.name_2 = dev + .. code-block:: python + pipeline.name_0 = data + pipeline.name_1 = v1 + pipeline.name_2 = dev :param base_dir: Base directory to the pipeline files :param path: Path to pipeline.yaml file diff --git a/kpops/utils/pydantic.py b/kpops/utils/pydantic.py index b47df8125..fb5d715d3 100644 --- a/kpops/utils/pydantic.py +++ b/kpops/utils/pydantic.py @@ -104,9 +104,10 @@ def issubclass_patched( ) -> bool: """Pydantic breaks ``issubclass``. - ``issubclass(set[str], set) # True`` - ``issubclass(BaseSettings, BaseModel) # True`` - ``issubclass(set[str], BaseModel) # raises exception`` + .. 
code-block:: python + issubclass(set[str], set) # True + issubclass(BaseSettings, BaseModel) # True + issubclass(set[str], BaseModel) # raises Exception :param cls: class to check :base: class(es) to check against, defaults to ``BaseModel`` @@ -161,7 +162,7 @@ def __init__(self, settings_cls) -> None: @staticmethod def load_config(file: Path) -> dict: - """Load yaml file if it exists. + """Load YAML file if it exists. :param file: Path to a ``config*.yaml`` :return: Dict containing the config or empty dict if file doesn't exist diff --git a/kpops/utils/yaml.py b/kpops/utils/yaml.py index be554cf6c..f0bf534bc 100644 --- a/kpops/utils/yaml.py +++ b/kpops/utils/yaml.py @@ -28,9 +28,9 @@ def load_yaml_file( def substitute(input: str, substitution: Mapping[str, Any] | None = None) -> str: - """Substitute $-placeholders in input using template string. + """Substitute `$`-placeholders in input using template string. - :param input: The raw input containing $-placeholders + :param input: The raw input containing `$`-placeholders :param substitution: The key-value mapping containing substitutions :return: Substituted input string """ diff --git a/tests/cli/snapshots/snap_test_schema_generation.py b/tests/cli/snapshots/snap_test_schema_generation.py index f23e77422..0fc422174 100644 --- a/tests/cli/snapshots/snap_test_schema_generation.py +++ b/tests/cli/snapshots/snap_test_schema_generation.py @@ -121,7 +121,7 @@ "type": "object" }, "InputTopicTypes": { - "description": "Input topic types.\\n\\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)", + "description": "Input topic types.\\n\\n- INPUT: input topic\\n- PATTERN: extra-topic-pattern or input-topic-pattern", "enum": [ "input", "pattern" @@ -130,7 +130,7 @@ "type": "string" }, "OutputTopicTypes": { - "description": "Types of output topic.\\n\\nOUTPUT (output topic), ERROR (error topic)", + "description": "Types of output topic.\\n\\n- OUTPUT: output topic\\n- ERROR: error topic", "enum": [ "output", "error"
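
The docstring edits above all follow the same RST pattern: identifiers such as ``ModuleNotFoundError`` or ``helm uninstall`` become inline literals, and multi-line snippets move under an ``:Example:`` label into an explicit ``.. code-block::`` directive so Sphinx highlights them. A minimal sketch of that style on a made-up helper (the function and its names are illustrative only, not part of kpops):

.. code-block:: python

    def namespace_manifest(name: str) -> str:
        """Build a minimal Kubernetes ``Namespace`` manifest.

        :Example:

        .. code-block:: yaml

            apiVersion: v1
            kind: Namespace
            metadata:
              name: my-pipeline

        :param name: Namespace name, inserted verbatim into ``metadata.name``
        :return: The manifest as a YAML string
        """
        return (
            "apiVersion: v1\n"
            "kind: Namespace\n"
            "metadata:\n"
            f"  name: {name}\n"
        )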
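``create_helm_release_name`` in ``helm_wrapper/utils.py`` now documents its truncation rule with the inline literal ``RELEASE_NAME_MAX_LEN - len(suffix)``. The three steps it describes can be sketched roughly as follows; the digest length and separator are assumptions for illustration, not the exact kpops implementation:

.. code-block:: python

    import hashlib

    RELEASE_NAME_MAX_LEN = 53  # Helm's limit on release name length

    def shorten_release_name(name: str, suffix: str = "") -> str:
        """Trim a release name so that name plus suffix fits Helm's 53-character limit."""
        if len(name) + len(suffix) <= RELEASE_NAME_MAX_LEN:
            return name + suffix
        # 1. keep only the first RELEASE_NAME_MAX_LEN - len(suffix) characters
        trimmed = name[: RELEASE_NAME_MAX_LEN - len(suffix)]
        # 2. replace the last 6 characters with "-" plus a short SHA-1 digest
        #    of the full name, so different long names stay distinguishable
        digest = hashlib.sha1(name.encode()).hexdigest()[:5]
        # 3. append the suffix if given
        return f"{trimmed[:-6]}-{digest}{suffix}"

    shorten_release_name("x" * 60, "-clean")  # 53 characters, ends with "-clean"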
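``set_pipeline_name_env_vars`` in ``pipeline.py`` now renders its example as a code block: ``./data/v1/dev/pipeline.yaml`` yields the pipeline name ``data-v1-dev`` plus one variable per path segment. A self-contained sketch of that derivation, returning a dict instead of setting environment variables (the exact variable names used by kpops are not reproduced here):

.. code-block:: python

    from pathlib import Path

    def pipeline_name_parts(base_dir: Path, path: Path) -> dict[str, str]:
        """Derive the pipeline name and its numbered parts from the pipeline.yaml path."""
        # drop the trailing "pipeline.yaml" and keep the directory segments
        parts = path.resolve().relative_to(base_dir.resolve()).parts[:-1]
        result = {"pipeline_name": "-".join(parts)}
        for i, part in enumerate(parts):
            result[f"pipeline_name_{i}"] = part
        return result

    pipeline_name_parts(Path("."), Path("./data/v1/dev/pipeline.yaml"))
    # {'pipeline_name': 'data-v1-dev', 'pipeline_name_0': 'data',
    #  'pipeline_name_1': 'v1', 'pipeline_name_2': 'dev'}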
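``substitute`` in ``kpops/utils/yaml.py`` keeps the signature shown in the diff and is documented as replacing ``$``-placeholders via a template string. With Python's ``string.Template`` that behaviour looks roughly like this (whether kpops tolerates unknown placeholders, as ``safe_substitute`` does, is an assumption):

.. code-block:: python

    from collections.abc import Mapping
    from string import Template
    from typing import Any

    def substitute(input: str, substitution: Mapping[str, Any] | None = None) -> str:
        """Replace ``$``-placeholders in ``input`` with values from ``substitution``."""
        if not substitution:
            return input
        # safe_substitute leaves unknown placeholders untouched instead of raising
        return Template(input).safe_substitute(**substitution)

    substitute("${pipeline_name}-error-topic", {"pipeline_name": "word-count"})
    # -> "word-count-error-topic"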