Commit

fix: misc errors from merge
sujuka99 committed Oct 9, 2023
1 parent 22300ad commit 98b103e
Showing 5 changed files with 26 additions and 29 deletions.
34 changes: 15 additions & 19 deletions hooks/gen_docs/gen_docs_env_vars.py
@@ -8,8 +8,8 @@
 from textwrap import fill
 from typing import Any

-from pydantic import BaseSettings
-from pydantic.fields import ModelField
+from pydantic_settings import BaseSettings
+from pydantic.fields import FieldInfo

 from pytablewriter import MarkdownTableWriter
 from typer.models import ArgumentInfo, OptionInfo

[CI check failure on line 12 of hooks/gen_docs/gen_docs_env_vars.py — GitHub Actions / Test (ubuntu-22.04, 3.10): trailing whitespace]
@@ -254,33 +254,29 @@ def fill_csv_pipeline_config(target: Path) -> None:
     :param target: The path to the `.csv` file. Note that it must already
         contain the column names
     """
-    for field in collect_fields(PipelineConfig):
-        field_info = PipelineConfig.Config.get_field_info(field.name)
+    for field_name, field_value in collect_fields(PipelineConfig):
         field_description: str = (
-            field.field_info.description
+            field_value.description
             or "No description available, please refer to the pipeline config documentation."
         )
-        field_default = field.field_info.default
-        if config_env_var := field_info.get(
-            "env",
-        ) or field.field_info.extra.get("env"):
-            csv_append_env_var(
-                target,
-                config_env_var,
-                field_default,
-                field_description,
-                field.name,
-            )
+        field_default = field_value.default
+        csv_append_env_var(
+            target,
+            field_value.serialization_alias or field_name,
+            field_default,
+            field_description,
+            field_name,
+        )


-def collect_fields(settings: type[BaseSettings]) -> Iterator[ModelField]:
+def collect_fields(settings: type[BaseSettings]) -> Iterator[FieldInfo]:
     """Collect and yield all fields in a settings class.
     :param model: settings class
     :yield: all settings including nested ones in settings classes
     """
-    for field in settings.__fields__.values():
-        if issubclass(field_type := field.type_, BaseSettings):
+    for field in settings.model_fields.values():
+        if field.annotation and issubclass(field_type := field.annotation, BaseSettings):
             yield from collect_fields(field_type)
         yield field

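Background on the hunk above (not part of the commit): pydantic v2 replaces ModelField with FieldInfo and exposes fields through model_fields instead of __fields__, which is why collect_fields changes. A rough, self-contained sketch of the same traversal, using invented classes (ExampleNested, ExampleSettings) rather than the kpops PipelineConfig:

# Rough sketch only; ExampleNested/ExampleSettings are invented and not part of kpops.
from collections.abc import Iterator

from pydantic import Field
from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings


class ExampleNested(BaseSettings):
    token: str = Field(default="", description="API token")


class ExampleSettings(BaseSettings):
    host: str = Field(default="localhost", serialization_alias="HOST")
    nested: ExampleNested = ExampleNested()


def iter_fields(settings: type[BaseSettings]) -> Iterator[tuple[str, FieldInfo]]:
    """Yield (name, FieldInfo) pairs, recursing into nested settings classes."""
    for name, info in settings.model_fields.items():
        annotation = info.annotation
        if isinstance(annotation, type) and issubclass(annotation, BaseSettings):
            yield from iter_fields(annotation)
        else:
            yield name, info


for name, info in iter_fields(ExampleSettings):
    print(name, info.serialization_alias or name, info.description)

Which alias attribute (serialization_alias, validation_alias, or alias) carries the environment-variable name depends on how the settings class is configured; using serialization_alias here simply mirrors the hunk above.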
10 changes: 5 additions & 5 deletions kpops/components/base_components/kafka_connector.py
@@ -73,7 +73,7 @@ class KafkaConnector(PipelineComponent, ABC):
         default_factory=dict,
         description=describe_attr("resetter_values", __doc__),
     )
-    _connector_type: KafkaConnectorType = Field()
+    connector_type: KafkaConnectorType = Field()
     @field_validator("app")
     @classmethod
     def connector_config_should_have_component_name(
@@ -190,7 +190,7 @@ def _run_connect_resetter(

         log.info(
             magentaify(
-                f"Connector Cleanup: deploy Connect {self._connector_type.value} resetter for {self.full_name}"
+                f"Connector Cleanup: deploy Connect {self.connector_type.value} resetter for {self.full_name}"
             )
         )

@@ -246,7 +246,7 @@ def _get_kafka_connect_resetter_values(
                 brokers=self.config.brokers,
                 **kwargs,
             ),
-            connector_type=self._connector_type.value,
+            connector_type=self.connector_type.value,
             name_override=self.full_name,
         ).model_dump(),
         **self.resetter_values,
@@ -278,7 +278,7 @@ class KafkaSourceConnector(KafkaConnector):
         description=describe_attr("offset_topic", __doc__),
     )

-    _connector_type = KafkaConnectorType.SOURCE
+    connector_type: KafkaConnectorType = KafkaConnectorType.SOURCE

     @override
     def apply_from_inputs(self, name: str, topic: FromTopic) -> NoReturn:
@@ -323,7 +323,7 @@ def __run_kafka_connect_resetter(self, dry_run: bool) -> None:
 class KafkaSinkConnector(KafkaConnector):
     """Kafka sink connector model."""

-    _connector_type = KafkaConnectorType.SINK
+    connector_type: KafkaConnectorType = KafkaConnectorType.SINK

     @override
     def add_input_topics(self, topics: list[str]) -> None:
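For context, pydantic v2 treats attribute names with a leading underscore as private attributes and rejects declaring them with Field(), which is presumably why _connector_type becomes the public field connector_type that each subclass overrides. A simplified sketch of that pattern, using stand-in classes rather than kpops' actual models:

# Simplified stand-ins for illustration; not the actual kpops class hierarchy.
from enum import Enum

from pydantic import BaseModel, Field


class KafkaConnectorType(str, Enum):
    SOURCE = "source"
    SINK = "sink"


class Connector(BaseModel):
    # A leading-underscore name would become a pydantic private attribute and
    # could not be declared via Field(); a public field can be overridden below.
    connector_type: KafkaConnectorType = Field()


class SourceConnector(Connector):
    connector_type: KafkaConnectorType = KafkaConnectorType.SOURCE


class SinkConnector(Connector):
    connector_type: KafkaConnectorType = KafkaConnectorType.SINK


print(SourceConnector().connector_type.value)  # source
print(SinkConnector().connector_type.value)    # sink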
2 changes: 1 addition & 1 deletion poetry.lock

Generated file; the diff is not rendered.

4 changes: 2 additions & 2 deletions tests/components/test_kafka_sink_connector.py
@@ -73,7 +73,7 @@ def test_connector_config_parsing(
         config=config,
         handlers=handlers,
         app=KafkaConnectorConfig(
-            **{**connector_config.dict(), "topics": topic_name}
+            **{**connector_config.model_dump(), "topics": topic_name}
         ),
         namespace="test-namespace",
     )
@@ -85,7 +85,7 @@ def test_connector_config_parsing(
         config=config,
         handlers=handlers,
         app=KafkaConnectorConfig(
-            **{**connector_config.dict(), "topics.regex": topic_pattern}
+            **{**connector_config.model_dump(), "topics.regex": topic_pattern}
         ),
         namespace="test-namespace",
     )
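The test updates reflect pydantic v2 renaming BaseModel.dict() to model_dump(); the merge-and-override idiom itself is unchanged. A small illustration with a made-up config model (not the real KafkaConnectorConfig):

# Made-up stand-in model; only the dict() -> model_dump() rename is the point here.
from pydantic import BaseModel


class SimpleConnectorConfig(BaseModel):
    connector_class: str = "com.example.SinkConnector"
    tasks_max: int = 1


base = SimpleConnectorConfig()
# pydantic v1: {**base.dict(), ...}  ->  pydantic v2: {**base.model_dump(), ...}
sink_config = {**base.model_dump(), "topics": "my-output-topic"}
print(sink_config)
# {'connector_class': 'com.example.SinkConnector', 'tasks_max': 1, 'topics': 'my-output-topic'}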
5 changes: 3 additions & 2 deletions tests/utils/resources/nested_base_settings.py
@@ -1,4 +1,5 @@
-from pydantic import BaseSettings, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings


 class NestedSettings(BaseSettings):
@@ -10,5 +11,5 @@ class ParentSettings(BaseSettings):
     nested_field: NestedSettings = Field(...)
     field_with_env_defined: str = Field(
         default=...,
-        env="FIELD_WITH_ENV_DEFINED",
+        alias="FIELD_WITH_ENV_DEFINED",
     )
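For background, pydantic-settings v2 drops the v1 per-field env= keyword; the environment variable a field reads from is expressed through an alias instead (the test resource above uses alias, while validation_alias is the variant shown here). A hedged sketch with invented names:

# Sketch with invented names (MySettings, APP_API_KEY); behavior assumes pydantic-settings v2 defaults.
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class MySettings(BaseSettings):
    # v1 style: api_key: str = Field(..., env="APP_API_KEY")
    # v2 style: the env var name is taken from the field's alias.
    api_key: str = Field(default="", validation_alias="APP_API_KEY")


os.environ["APP_API_KEY"] = "secret"
print(MySettings().api_key)  # secret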
