Skip to content

Commit

Permalink
Improve Sphinx docs highlighting using RST markup (#443)
Browse files Browse the repository at this point in the history
  • Loading branch information
disrupted authored Feb 1, 2024
1 parent 4a4ab33 commit ca0f278
Show file tree
Hide file tree
Showing 21 changed files with 47 additions and 43 deletions.
2 changes: 1 addition & 1 deletion docs/docs/resources/variables/config_env_vars.env
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ KPOPS_HELM_CONFIG__CONTEXT # No default value, not required
# Run Helm in Debug mode
KPOPS_HELM_CONFIG__DEBUG=False
# helm_config.api_version
# Kubernetes API version used for Capabilities.APIVersions
# Kubernetes API version used for `Capabilities.APIVersions`
KPOPS_HELM_CONFIG__API_VERSION # No default value, not required
# helm_diff_config.ignore
# Set of keys that should not be checked.
Expand Down
2 changes: 1 addition & 1 deletion docs/docs/resources/variables/config_env_vars.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,6 @@ These variables are a lower priority alternative to the settings in `config.yaml
|KPOPS_CREATE_NAMESPACE |False |False |Flag for `helm upgrade --install`. Create the release namespace if not present. |create_namespace |
|KPOPS_HELM_CONFIG__CONTEXT | |False |Name of kubeconfig context (`--kube-context`) |helm_config.context |
|KPOPS_HELM_CONFIG__DEBUG |False |False |Run Helm in Debug mode |helm_config.debug |
|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for Capabilities.APIVersions |helm_config.api_version |
|KPOPS_HELM_CONFIG__API_VERSION | |False |Kubernetes API version used for `Capabilities.APIVersions` |helm_config.api_version |
|KPOPS_HELM_DIFF_CONFIG__IGNORE | |True |Set of keys that should not be checked. |helm_diff_config.ignore |
|KPOPS_RETAIN_CLEAN_JOBS |False |False |Whether to retain clean up jobs in the cluster or uninstall them after completion. |retain_clean_jobs |
2 changes: 1 addition & 1 deletion docs/docs/schema/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
}
],
"default": null,
"description": "Kubernetes API version used for Capabilities.APIVersions",
"description": "Kubernetes API version used for `Capabilities.APIVersions`",
"title": "API version"
},
"context": {
Expand Down
6 changes: 3 additions & 3 deletions docs/docs/schema/defaults.json
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@
"type": "object"
},
"InputTopicTypes": {
"description": "Input topic types.\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)",
"description": "Input topic types.\n\n- INPUT: input topic\n- PATTERN: extra-topic-pattern or input-topic-pattern",
"enum": [
"input",
"pattern"
Expand Down Expand Up @@ -522,7 +522,7 @@
}
],
"default": null,
"description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running",
"description": "`offset.storage.topic`, more info: https://kafka.apache.org/documentation/#connect_running",
"title": "Offset Topic"
},
"prefix": {
Expand Down Expand Up @@ -676,7 +676,7 @@
"type": "object"
},
"OutputTopicTypes": {
"description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)",
"description": "Types of output topic.\n\n- OUTPUT: output topic\n- ERROR: error topic",
"enum": [
"output",
"error"
Expand Down
6 changes: 3 additions & 3 deletions docs/docs/schema/pipeline.json
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@
"type": "object"
},
"InputTopicTypes": {
"description": "Input topic types.\n\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)",
"description": "Input topic types.\n\n- INPUT: input topic\n- PATTERN: extra-topic-pattern or input-topic-pattern",
"enum": [
"input",
"pattern"
Expand Down Expand Up @@ -357,7 +357,7 @@
}
],
"default": null,
"description": "offset.storage.topic, more info: https://kafka.apache.org/documentation/#connect_running",
"description": "`offset.storage.topic`, more info: https://kafka.apache.org/documentation/#connect_running",
"title": "Offset Topic"
},
"prefix": {
Expand Down Expand Up @@ -413,7 +413,7 @@
"type": "object"
},
"OutputTopicTypes": {
"description": "Types of output topic.\n\nOUTPUT (output topic), ERROR (error topic)",
"description": "Types of output topic.\n\n- OUTPUT: output topic\n- ERROR: error topic",
"enum": [
"output",
"error"
Expand Down
2 changes: 1 addition & 1 deletion kpops/cli/exception.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
class ClassNotFoundError(Exception):
"""Similar to builtin ModuleNotFoundError; class doesn't exist inside module."""
"""Similar to builtin `ModuleNotFoundError`; class doesn't exist inside module."""
2 changes: 1 addition & 1 deletion kpops/component_handlers/helm_wrapper/helm.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ async def uninstall(
release_name: str,
dry_run: bool,
) -> str | None:
"""Prepare and execute the helm uninstall command."""
"""Prepare and execute the `helm uninstall` command."""
command = [
"helm",
"uninstall",
Expand Down
6 changes: 3 additions & 3 deletions kpops/component_handlers/helm_wrapper/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ class HelmConfig(DescConfigModel):
:param context: Name of kubeconfig context (`--kube-context`)
:param debug: Run Helm in Debug mode
:param api_version: Kubernetes API version used for Capabilities.APIVersions
:param api_version: Kubernetes API version used for `Capabilities.APIVersions`
"""

context: str | None = Field(
Expand Down Expand Up @@ -168,8 +168,8 @@ def parse_source(source: str) -> str:
"""Parse source path from comment at the beginning of the YAML doc.
:Example:
# Source: chart/templates/serviceaccount.yaml
.. code-block:: yaml
# Source: chart/templates/serviceaccount.yaml
"""
if not source.startswith(HELM_SOURCE_PREFIX):
msg = "Not a valid Helm template source"
Expand Down
2 changes: 1 addition & 1 deletion kpops/component_handlers/helm_wrapper/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ def create_helm_release_name(name: str, suffix: str = "") -> str:
Helm has a limit of 53 characters for release names.
If the name exceeds the character limit:
1. trim the string and fetch the first RELEASE_NAME_MAX_LEN - len(suffix) characters.
1. trim the string and fetch the first ``RELEASE_NAME_MAX_LEN - len(suffix)`` characters.
2. replace the last 6 characters with the SHA-1 encoded string (with "-") to avoid collision
3. append the suffix if given
Expand Down
2 changes: 1 addition & 1 deletion kpops/component_handlers/kafka_connect/connect_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ async def update_connector_config(
) -> KafkaConnectResponse:
"""Create or update a connector.
Create a new connector using the given configuration,or update the
Create a new connector using the given configuration, or update the
configuration for an existing connector.
:param connector_config: Configuration parameters for the connector.
:return: Information about the connector after the change has been made.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ async def create_connector(
If the connector exists the config of that connector gets updated.
:param connector_config: The connector config.
:param dry_run: If the connector creation should be run in dry run mode.
:param dry_run: Whether the connector creation should be run in dry run mode.
"""
if dry_run:
await self.__dry_run_connector_creation(connector_config)
Expand All @@ -67,7 +67,7 @@ async def destroy_connector(self, connector_name: str, *, dry_run: bool) -> None
"""Delete a connector resource from the cluster.
:param connector_name: The connector name.
:param dry_run: If the connector deletion should be run in dry run mode.
:param dry_run: Whether the connector deletion should be run in dry run mode.
"""
if dry_run:
await self.__dry_run_connector_deletion(connector_name)
Expand Down
2 changes: 1 addition & 1 deletion kpops/component_handlers/topic/proxy_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ async def get_topic_config(self, topic_name: str) -> TopicConfigResponse:
async def batch_alter_topic_config(
self, topic_name: str, json_body: list[dict]
) -> None:
"""Reset config of given config_name param to the default value on the kafka server.
"""Reset config of given config_name param to the default value on the Kafka server.
API Reference:
https://docs.confluent.io/platform/current/kafka-rest/api.html#post--clusters-cluster_id-topics-topic_name-configs-alter
Expand Down
12 changes: 6 additions & 6 deletions kpops/components/base_components/base_defaults_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
class BaseDefaultsComponent(DescConfigModel, ABC):
"""Base for all components, handles defaults.
Component defaults are usually provided in a yaml file called
Component defaults are usually provided in a YAML file called
`defaults.yaml`. This class ensures that the defaults are read and assigned
correctly to the component.
Expand Down Expand Up @@ -164,15 +164,15 @@ def _validate_custom(self, **kwargs) -> None:


def defaults_from_yaml(path: Path, key: str) -> dict:
"""Read component-specific settings from a defaults yaml file and return @default if not found.
"""Read component-specific settings from a ``defaults*.yaml`` file and return @default if not found.
:param path: Path to defaults yaml file
:param path: Path to ``defaults*.yaml`` file
:param key: Component type
:returns: All defaults set for the given component in the provided yaml
:returns: All defaults set for the given component in the provided YAML
:Example:
kafka_app_defaults = defaults_from_yaml(Path("/path/to/defaults.yaml"), "kafka-app")
.. code-block:: python
kafka_app_defaults = defaults_from_yaml(Path("/path/to/defaults.yaml"), "kafka-app")
"""
content = load_yaml_file(path, substitution=ENV)
if not isinstance(content, dict):
Expand Down
2 changes: 1 addition & 1 deletion kpops/components/base_components/kafka_connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ async def clean(self, dry_run: bool) -> None:
class KafkaSourceConnector(KafkaConnector):
"""Kafka source connector model.
:param offset_topic: offset.storage.topic,
:param offset_topic: `offset.storage.topic`,
more info: https://kafka.apache.org/documentation/#connect_running,
defaults to None
"""
Expand Down
5 changes: 3 additions & 2 deletions kpops/components/base_components/models/from_section.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@
class InputTopicTypes(str, Enum):
"""Input topic types.
INPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)
- INPUT: input topic
- PATTERN: extra-topic-pattern or input-topic-pattern
"""

INPUT = "input"
Expand All @@ -38,7 +39,7 @@ class FromTopic(DescConfigModel):

@model_validator(mode="after")
def extra_topic_role(self) -> Any:
"""Ensure that cls.role is used correctly, assign type if needed."""
"""Ensure that `cls.role` is used correctly, assign type if needed."""
if self.type == InputTopicTypes.INPUT and self.role:
msg = "Define role only if `type` is `pattern` or `None`"
raise ValueError(msg)
Expand Down
5 changes: 3 additions & 2 deletions kpops/components/base_components/models/to_section.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@
class OutputTopicTypes(str, Enum):
"""Types of output topic.
OUTPUT (output topic), ERROR (error topic)
- OUTPUT: output topic
- ERROR: error topic
"""

OUTPUT = "output"
Expand Down Expand Up @@ -66,7 +67,7 @@ class TopicConfig(DescConfigModel):

@model_validator(mode="after")
def extra_topic_role(self) -> Any:
"""Ensure that cls.role is used correctly, assign type if needed."""
"""Ensure that `cls.role` is used correctly, assign type if needed."""
if self.type and self.role:
msg = "Define `role` only if `type` is undefined"
raise ValueError(msg)
Expand Down
2 changes: 1 addition & 1 deletion kpops/components/base_components/pipeline_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ def apply_from_inputs(self, name: str, topic: FromTopic) -> None:
self.add_input_topics([name])

def set_output_topics(self) -> None:
"""Put values of config.to into the producer config section of streams bootstrap.
"""Put values of `to` section into the producer config section of streams bootstrap.
Supports extra_output_topics (topics by role) or output_topics.
"""
Expand Down
9 changes: 5 additions & 4 deletions kpops/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ def parse(
return self.pipeline

def load_yaml(self, path: Path, environment: str | None) -> Pipeline:
"""Load pipeline definition from yaml.
"""Load pipeline definition from YAML file.
The file is often named ``pipeline.yaml``
Expand Down Expand Up @@ -399,9 +399,10 @@ def set_pipeline_name_env_vars(base_dir: Path, path: Path) -> None:
For example, for a given path ./data/v1/dev/pipeline.yaml the pipeline_name would be
set to data-v1-dev. Then the sub environment variables are set:
pipeline.name_0 = data
pipeline.name_1 = v1
pipeline.name_2 = dev
.. code-block:: python
pipeline.name_0 = data
pipeline.name_1 = v1
pipeline.name_2 = dev
:param base_dir: Base directory to the pipeline files
:param path: Path to pipeline.yaml file
Expand Down
9 changes: 5 additions & 4 deletions kpops/utils/pydantic.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,9 +104,10 @@ def issubclass_patched(
) -> bool:
"""Pydantic breaks ``issubclass``.
``issubclass(set[str], set) # True``
``issubclass(BaseSettings, BaseModel) # True``
``issubclass(set[str], BaseModel) # raises exception``
.. code-block:: python
issubclass(set[str], set) # True
issubclass(BaseSettings, BaseModel) # True
issubclass(set[str], BaseModel) # raises Exception
:param cls: class to check
:base: class(es) to check against, defaults to ``BaseModel``
Expand Down Expand Up @@ -161,7 +162,7 @@ def __init__(self, settings_cls) -> None:

@staticmethod
def load_config(file: Path) -> dict:
"""Load yaml file if it exists.
"""Load YAML file if it exists.
:param file: Path to a ``config*.yaml``
:return: Dict containing the config or empty dict if file doesn't exist
Expand Down
4 changes: 2 additions & 2 deletions kpops/utils/yaml.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,9 @@ def load_yaml_file(


def substitute(input: str, substitution: Mapping[str, Any] | None = None) -> str:
"""Substitute $-placeholders in input using template string.
"""Substitute `$`-placeholders in input using template string.
:param input: The raw input containing $-placeholders
:param input: The raw input containing `$`-placeholders
:param substitution: The key-value mapping containing substitutions
:return: Substituted input string
"""
Expand Down
4 changes: 2 additions & 2 deletions tests/cli/snapshots/snap_test_schema_generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@
"type": "object"
},
"InputTopicTypes": {
"description": "Input topic types.\\n\\nINPUT (input topic), PATTERN (extra-topic-pattern or input-topic-pattern)",
"description": "Input topic types.\\n\\n- INPUT: input topic\\n- PATTERN: extra-topic-pattern or input-topic-pattern",
"enum": [
"input",
"pattern"
Expand All @@ -130,7 +130,7 @@
"type": "string"
},
"OutputTopicTypes": {
"description": "Types of output topic.\\n\\nOUTPUT (output topic), ERROR (error topic)",
"description": "Types of output topic.\\n\\n- OUTPUT: output topic\\n- ERROR: error topic",
"enum": [
"output",
"error"
Expand Down

0 comments on commit ca0f278

Please sign in to comment.