Merge branch 'main' into use-returncode-for-loading-dbt-graph
tatiana authored Sep 26, 2023
2 parents 4588749 + 6a3ca35 commit d8dd05a
Showing 6 changed files with 92 additions and 58 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -43,14 +43,14 @@ repos:
       - id: remove-tabs
         exclude: ^docs/make.bat$|^docs/Makefile$|^dev/dags/dbt/jaffle_shop/seeds/raw_orders.csv$
   - repo: https://github.com/asottile/pyupgrade
-    rev: v3.10.1
+    rev: v3.13.0
     hooks:
       - id: pyupgrade
         args:
           - --py37-plus
           - --keep-runtime-typing
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.288
+    rev: v0.0.291
     hooks:
       - id: ruff
         args:
22 changes: 13 additions & 9 deletions cosmos/airflow/graph.py
@@ -83,7 +83,7 @@ def create_test_task_metadata(


 def create_task_metadata(
-    node: DbtNode, execution_mode: ExecutionMode, args: dict[str, Any], use_name_as_task_id_prefix: bool = True
+    node: DbtNode, execution_mode: ExecutionMode, args: dict[str, Any], use_task_group: bool = False
 ) -> TaskMetadata | None:
     """
     Create the metadata that will be used to instantiate the Airflow Task used to run the Dbt node.
@@ -106,9 +106,9 @@ def create_task_metadata(

     if hasattr(node.resource_type, "value") and node.resource_type in dbt_resource_to_class:
         if node.resource_type == DbtResourceType.MODEL:
-            if use_name_as_task_id_prefix:
-                task_id = f"{node.name}_run"
-            else:
+            task_id = f"{node.name}_run"
+
+            if use_task_group is True:
                 task_id = "run"
         else:
             task_id = f"{node.name}_{node.resource_type.value}"
@@ -167,14 +167,18 @@ def build_airflow_graph(
     # The exception are the test nodes, since it would be too slow to run test tasks individually.
     # If test_behaviour=="after_each", each model task will be bundled with a test task, using TaskGroup
     for node_id, node in nodes.items():
+        use_task_group = (
+            node.resource_type == DbtResourceType.MODEL
+            and test_behavior == TestBehavior.AFTER_EACH
+            and node.has_test is True
+        )
+
         task_meta = create_task_metadata(
-            node=node,
-            execution_mode=execution_mode,
-            args=task_args,
-            use_name_as_task_id_prefix=test_behavior != TestBehavior.AFTER_EACH,
+            node=node, execution_mode=execution_mode, args=task_args, use_task_group=use_task_group
         )
+
         if task_meta and node.resource_type != DbtResourceType.TEST:
-            if node.resource_type == DbtResourceType.MODEL and test_behavior == TestBehavior.AFTER_EACH:
+            if use_task_group is True:
                 with TaskGroup(dag=dag, group_id=node.name, parent_group=task_group) as model_task_group:
                     task = create_airflow_task(task_meta, dag, task_group=model_task_group)
                     test_meta = create_test_task_metadata(
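Net effect of this change: a model's run task keeps the ID `<model>_run` when it stands alone, and becomes a bare `run` task when Cosmos nests it inside a TaskGroup named after the model, which Airflow renders as `<model>.run`. Below is a minimal stand-alone sketch of that naming rule (illustration only, not the library code; `node_name` stands in for `DbtNode.name`):

    # Sketch of the task-ID rule above; the real logic lives in
    # create_task_metadata() in cosmos/airflow/graph.py.
    def resolve_model_task_id(node_name: str, use_task_group: bool = False) -> str:
        task_id = f"{node_name}_run"  # default: model name as prefix
        if use_task_group is True:
            # Inside a TaskGroup named after the model, the group already
            # supplies the prefix, so "run" renders as e.g. "child.run".
            task_id = "run"
        return task_id

    assert resolve_model_task_id("child") == "child_run"
    assert resolve_model_task_id("child", use_task_group=True) == "run"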
22 changes: 22 additions & 0 deletions cosmos/dbt/graph.py
@@ -50,6 +50,7 @@ class DbtNode:
     file_path: Path
     tags: list[str] = field(default_factory=lambda: [])
     config: dict[str, Any] = field(default_factory=lambda: {})
+    has_test: bool = False
 
 
 class DbtGraph:
@@ -264,6 +265,8 @@ def load_via_dbt_ls(self) -> None:
         self.nodes = nodes
         self.filtered_nodes = nodes
 
+        self.update_node_dependency()
+
         logger.info("Total nodes: %i", len(self.nodes))
         logger.info("Total filtered nodes: %i", len(self.nodes))

@@ -308,6 +311,8 @@ def load_via_custom_parser(self) -> None:
             project_dir=self.project.dir, nodes=nodes, select=self.select, exclude=self.exclude
         )
 
+        self.update_node_dependency()
+
         logger.info("Total nodes: %i", len(self.nodes))
         logger.info("Total filtered nodes: %i", len(self.nodes))

@@ -337,11 +342,28 @@ def load_from_dbt_manifest(self) -> None:
                 tags=node_dict["tags"],
                 config=node_dict["config"],
             )
+
             nodes[node.unique_id] = node
 
         self.nodes = nodes
         self.filtered_nodes = select_nodes(
             project_dir=self.project.dir, nodes=nodes, select=self.select, exclude=self.exclude
         )
+
+        self.update_node_dependency()
+
         logger.info("Total nodes: %i", len(self.nodes))
         logger.info("Total filtered nodes: %i", len(self.nodes))
+
+    def update_node_dependency(self) -> None:
+        """
+        Update the `has_test` property when a node has a dbt test.
+        Updates in-place:
+        * self.filtered_nodes
+        """
+        for _, node in self.filtered_nodes.items():
+            if node.resource_type == DbtResourceType.TEST:
+                for node_id in node.depends_on:
+                    if node_id in self.filtered_nodes:
+                        self.filtered_nodes[node_id].has_test = True
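To make the new `update_node_dependency()` pass concrete: every test node marks each node it depends on as tested. A small self-contained sketch (a simplified `Node` stand-in for `DbtNode` and hypothetical jaffle_shop-style IDs; the real method walks `self.filtered_nodes` and compares against the `DbtResourceType.TEST` enum):

    from __future__ import annotations

    from dataclasses import dataclass, field

    @dataclass
    class Node:  # simplified stand-in for cosmos.dbt.graph.DbtNode
        unique_id: str
        resource_type: str  # the real code uses the DbtResourceType enum
        depends_on: list[str] = field(default_factory=list)
        has_test: bool = False

    nodes = {
        "model.jaffle_shop.parent": Node("model.jaffle_shop.parent", "model"),
        "test.jaffle_shop.not_null_parent": Node(
            "test.jaffle_shop.not_null_parent",
            "test",
            depends_on=["model.jaffle_shop.parent"],
        ),
    }

    # Each test node flags its upstream nodes, so build_airflow_graph() can
    # later decide whether a model deserves a TaskGroup with a test task.
    for node in nodes.values():
        if node.resource_type == "test":
            for node_id in node.depends_on:
                if node_id in nodes:
                    nodes[node_id].has_test = True

    assert nodes["model.jaffle_shop.parent"].has_test is True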
38 changes: 14 additions & 24 deletions docs/getting_started/dbt-airflow-concepts.rst
@@ -10,28 +10,18 @@ differences, they also share similar concepts.
 This page aims to list some of these concepts and help those
 who may be new to Airflow or dbt and are considering using Cosmos.
 
 .. table::
    :align: left
    :widths: auto
 
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-   | Airflow naming | dbt naming   | Description                                                                     | Differences                                                                 | References                                                                           |
-   +================+==============+=================================================================================+=============================================================================+======================================================================================+
-   | DAG            | Workflow     | Pipeline (Direct Acyclic Graph) that contains a group of steps                  | Airflow expects upstream tasks to have passed to run downstream tasks.     | https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/dags.html        |
-   |                |              |                                                                                 | dbt can run a subset of tasks assuming upstream tasks were run.            | https://docs.getdbt.com/docs/introduction                                           |
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-   | Task           | Node         | Step within a pipeline (DAG or workflow)                                        | In dbt, these are usually transformations that run on a remote database.   | https://docs.getdbt.com/reference/node-selection/syntax                             |
-   |                |              |                                                                                 | In Airflow, steps can be anything, running locally in Airflow or remotely. | https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/tasks.html       |
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-   | Language       | Language     | Programming or declarative language used to define pipelines and steps.        | In dbt, users write SQL, YML and Python to define the steps of a pipeline. | https://docs.getdbt.com/docs/introduction#dbt-optimizes-your-workflow               |
-   |                |              |                                                                                 | Airflow expects steps and pipelines are written in Python.                 | https://airflow.apache.org/docs/apache-airflow/stable/public-airflow-interface.html |
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-   | Variables      | Variables    | Key-value configuration that can be used in steps and avoids hard-coded values |                                                                             | https://docs.getdbt.com/docs/build/project-variables                                |
-   |                |              |                                                                                 |                                                                             | https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/variables.html   |
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-   | Templating     | Macros       | Jinja templating used to access variables, configuration and reference steps   | dbt encourages using jinja templating for control structures (if and for). | https://docs.getdbt.com/docs/build/jinja-macros                                     |
-   |                |              |                                                                                 | Native in Airflow/Python, used to define variables, macros and filters.    | https://airflow.apache.org/docs/apache-airflow/stable/templates-ref.html            |
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-   | Connection     | Profile      | Configuration to connect to databases or other services                        |                                                                             | https://airflow.apache.org/docs/apache-airflow/stable/howto/connection.html         |
-   |                |              |                                                                                 |                                                                             | https://docs.getdbt.com/docs/core/connect-data-platform/connection-profiles         |
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-   | Providers      | Adapter      | Additional Python libraries that support specific databases or services        |                                                                             | https://airflow.apache.org/docs/apache-airflow-providers/                           |
-   |                |              |                                                                                 |                                                                             | https://docs.getdbt.com/guides/dbt-ecosystem/adapter-development/1-what-are-adapters|
-   +----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
+   =================================================================================================== ==================================================================================================== ==================================================================================== ======================================================================================================================================================
+   Airflow naming                                                                                      dbt naming                                                                                           Description                                                                          Differences
+   =================================================================================================== ==================================================================================================== ==================================================================================== ======================================================================================================================================================
+   `DAG <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/dags.html>`_              `Workflow <https://docs.getdbt.com/docs/introduction>`_                                             Pipeline (Directed Acyclic Graph) that contains a group of steps                    Airflow expects upstream tasks to have passed to run downstream tasks. dbt can run a subset of tasks assuming upstream tasks were run.
+   `Task <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/tasks.html>`_            `Node <https://docs.getdbt.com/reference/node-selection/syntax>`_                                   Step within a pipeline (DAG or workflow)                                            In dbt, these are usually transformations that run on a remote database. In Airflow, steps can be anything, running locally in Airflow or remotely.
+   `Language <https://airflow.apache.org/docs/apache-airflow/stable/public-airflow-interface.html>`_   `Language <https://docs.getdbt.com/docs/introduction#dbt-optimizes-your-workflow>`_                 Programming or declarative language used to define pipelines and steps.             In dbt, users write SQL, YML and Python to define the steps of a pipeline. Airflow expects steps and pipelines are written in Python.
+   `Variables <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/variables.html>`_   `Variables <https://docs.getdbt.com/docs/build/project-variables>`_                                 Key-value configuration that can be used in steps and avoids hard-coded values
+   `Templating <https://airflow.apache.org/docs/apache-airflow/stable/templates-ref.html>`_            `Macros <https://docs.getdbt.com/docs/build/jinja-macros>`_                                         Jinja templating used to access variables, configuration and reference steps        dbt encourages using jinja templating for control structures (if and for). Native in Airflow/Python, used to define variables, macros and filters.
+   `Connection <https://airflow.apache.org/docs/apache-airflow/stable/howto/connection.html>`_         `Profile <https://docs.getdbt.com/docs/core/connect-data-platform/connection-profiles>`_            Configuration to connect to databases or other services
+   `Providers <https://airflow.apache.org/docs/apache-airflow-providers/>`_                            `Adapter <https://docs.getdbt.com/guides/dbt-ecosystem/adapter-development/1-what-are-adapters>`_   Additional Python libraries that support specific databases or services
+   =================================================================================================== ==================================================================================================== ==================================================================================== ======================================================================================================================================================
35 changes: 12 additions & 23 deletions tests/airflow/test_graph.py
@@ -36,6 +36,7 @@
     file_path=SAMPLE_PROJ_PATH / "gen2/models/parent.sql",
     tags=["has_child"],
     config={"materialized": "view"},
+    has_test=True,
 )
 test_parent_node = DbtNode(
     name="test_parent", unique_id="test_parent", resource_type=DbtResourceType.TEST, depends_on=["parent"], file_path=""
@@ -49,15 +50,8 @@
tags=["nightly"],
config={"materialized": "table"},
)
test_child_node = DbtNode(
name="test_child",
unique_id="test_child",
resource_type=DbtResourceType.TEST,
depends_on=["child"],
file_path="",
)

sample_nodes_list = [parent_seed, parent_node, test_parent_node, child_node, test_child_node]
sample_nodes_list = [parent_seed, parent_node, test_parent_node, child_node]
sample_nodes = {node.unique_id: node for node in sample_nodes_list}


@@ -93,21 +87,18 @@ def test_build_airflow_graph_with_after_each():
"seed_parent_seed",
"parent.run",
"parent.test",
"child.run",
"child.test",
"child_run",
]

assert topological_sort == expected_sort
task_groups = dag.task_group_dict
assert len(task_groups) == 2
assert len(task_groups) == 1

assert task_groups["parent"].upstream_task_ids == {"seed_parent_seed"}
assert list(task_groups["parent"].children.keys()) == ["parent.run", "parent.test"]

assert task_groups["child"].upstream_task_ids == {"parent.test"}
assert list(task_groups["child"].children.keys()) == ["child.run", "child.test"]

assert len(dag.leaves) == 1
assert dag.leaves[0].task_id == "child.test"
assert dag.leaves[0].task_id == "child_run"


@pytest.mark.skipif(
@@ -231,7 +222,7 @@ def test_create_task_metadata_model(caplog):
     assert metadata.arguments == {"models": "my_model"}
 
 
-def test_create_task_metadata_model_use_name_as_task_id_prefix(caplog):
+def test_create_task_metadata_model_use_task_group(caplog):
     child_node = DbtNode(
         name="my_model",
         unique_id="my_folder.my_model",
@@ -241,14 +232,12 @@ def test_create_task_metadata_model_use_name_as_task_id_prefix(caplog):
         tags=[],
         config={},
     )
-    metadata = create_task_metadata(
-        child_node, execution_mode=ExecutionMode.LOCAL, args={}, use_name_as_task_id_prefix=False
-    )
+    metadata = create_task_metadata(child_node, execution_mode=ExecutionMode.LOCAL, args={}, use_task_group=True)
     assert metadata.id == "run"


@pytest.mark.parametrize("use_name_as_task_id_prefix", (None, True, False))
def test_create_task_metadata_seed(caplog, use_name_as_task_id_prefix):
@pytest.mark.parametrize("use_task_group", (None, True, False))
def test_create_task_metadata_seed(caplog, use_task_group):
     sample_node = DbtNode(
         name="my_seed",
         unique_id="my_folder.my_seed",
@@ -258,14 +247,14 @@ def test_create_task_metadata_seed(caplog, use_name_as_task_id_prefix):
         tags=[],
         config={},
     )
-    if use_name_as_task_id_prefix is None:
+    if use_task_group is None:
         metadata = create_task_metadata(sample_node, execution_mode=ExecutionMode.DOCKER, args={})
     else:
         metadata = create_task_metadata(
             sample_node,
             execution_mode=ExecutionMode.DOCKER,
             args={},
-            use_name_as_task_id_prefix=use_name_as_task_id_prefix,
+            use_task_group=use_task_group,
         )
     assert metadata.id == "my_seed_seed"
     assert metadata.operator_class == "cosmos.operators.docker.DbtSeedDockerOperator"
