Improve model output
jochemvandooren committed May 29, 2024
Commit 4a414a4 (parent: ae5006f)
Showing 2 changed files with 4 additions and 4 deletions.
src/dbt_score/formatters/human_readable_formatter.py (3 additions, 2 deletions)
@@ -25,7 +25,9 @@ def model_evaluated(
         self, model: Model, results: ModelResultsType, score: Score
     ) -> None:
         """Callback when a model has been evaluated."""
-        print(f"Model {self.bold(model.name)}")
+        print(
+            f"{score.medal} {self.bold(model.name)} (score: {round(score.score, 1)!s})"
+        )
         for rule, result in results.items():
             if result is None:
                 print(f"{self.indent}{self.label_ok} {rule.source()}")
@@ -36,7 +38,6 @@ def model_evaluated(
                 )
             else:
                 print(f"{self.indent}{self.label_error} {rule.source()}: {result!s}")
-        print(f"Score: {self.bold(str(round(score.score, 1)))} {score.medal}")
         print()
 
     def project_evaluated(self, score: Score) -> None:
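For context, a minimal sketch of the header line the updated formatter now prints. The f-string mirrors the one added above; medal, name, and score below are hypothetical stand-in values (in the formatter they come from the Model and Score objects passed to model_evaluated), and bold() stands in for the formatter's self.bold helper.

# Minimal sketch, not the actual dbt_score code: stand-in values only.
def bold(text: str) -> str:
    # self.bold wraps text in ANSI bold escapes, as seen in the test expectation below.
    return f"\033[1m{text}\033[0m"

medal = "🥇"     # hypothetical stand-in for score.medal
name = "model1"  # hypothetical stand-in for model.name
score = 10.0     # hypothetical stand-in for score.score

# Same shape as the f-string added in this commit:
print(f"{medal} {bold(name)} (score: {round(score, 1)!s})")
# Prints: 🥇 model1 (score: 10.0), with "model1" rendered in bold

Compared with the previous output, the medal and rounded score now lead the model's block instead of trailing it on a separate "Score:" line.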
tests/formatters/test_human_readable_formatter.py (1 addition, 2 deletions)
@@ -26,11 +26,10 @@ def test_human_readable_formatter_model(
     stdout = capsys.readouterr().out
     assert (
         stdout
-        == """Model \x1B[1mmodel1\x1B[0m
+        == """🥇 \x1B[1mmodel1\x1B[0m (score: 10.0)
 \x1B[1;32mOK \x1B[0m tests.conftest.rule_severity_low
 \x1B[1;31mERR \x1B[0m tests.conftest.rule_severity_medium: Oh noes
 \x1B[1;33mWARN\x1B[0m (critical) tests.conftest.rule_severity_critical: Error
-Score: \x1B[1m10.0\x1B[0m 🥇
 """
     )
