diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7acebacc4..85a22f5f2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -43,14 +43,14 @@ repos:
     - id: remove-tabs
       exclude: ^docs/make.bat$|^docs/Makefile$|^dev/dags/dbt/jaffle_shop/seeds/raw_orders.csv$
   - repo: https://github.com/asottile/pyupgrade
-    rev: v3.10.1
+    rev: v3.13.0
     hooks:
       - id: pyupgrade
         args:
           - --py37-plus
           - --keep-runtime-typing
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.288
+    rev: v0.0.291
     hooks:
       - id: ruff
         args:
diff --git a/cosmos/airflow/graph.py b/cosmos/airflow/graph.py
index d03af5f1f..b1dcb8747 100644
--- a/cosmos/airflow/graph.py
+++ b/cosmos/airflow/graph.py
@@ -83,7 +83,7 @@ def create_test_task_metadata(


 def create_task_metadata(
-    node: DbtNode, execution_mode: ExecutionMode, args: dict[str, Any], use_name_as_task_id_prefix: bool = True
+    node: DbtNode, execution_mode: ExecutionMode, args: dict[str, Any], use_task_group: bool = False
 ) -> TaskMetadata | None:
     """
     Create the metadata that will be used to instantiate the Airflow Task used to run the Dbt node.
@@ -106,9 +106,9 @@ def create_task_metadata(

     if hasattr(node.resource_type, "value") and node.resource_type in dbt_resource_to_class:
         if node.resource_type == DbtResourceType.MODEL:
-            if use_name_as_task_id_prefix:
-                task_id = f"{node.name}_run"
-            else:
+            task_id = f"{node.name}_run"
+
+            if use_task_group:
                 task_id = "run"
         else:
             task_id = f"{node.name}_{node.resource_type.value}"
@@ -167,14 +167,18 @@ def build_airflow_graph(
     # The exception are the test nodes, since it would be too slow to run test tasks individually.
     # If test_behaviour=="after_each", each model task will be bundled with a test task, using TaskGroup
     for node_id, node in nodes.items():
+        use_task_group = (
+            node.resource_type == DbtResourceType.MODEL
+            and test_behavior == TestBehavior.AFTER_EACH
+            and node.has_test
+        )
+
         task_meta = create_task_metadata(
-            node=node,
-            execution_mode=execution_mode,
-            args=task_args,
-            use_name_as_task_id_prefix=test_behavior != TestBehavior.AFTER_EACH,
+            node=node, execution_mode=execution_mode, args=task_args, use_task_group=use_task_group
         )
+
         if task_meta and node.resource_type != DbtResourceType.TEST:
-            if node.resource_type == DbtResourceType.MODEL and test_behavior == TestBehavior.AFTER_EACH:
+            if use_task_group:
                 with TaskGroup(dag=dag, group_id=node.name, parent_group=task_group) as model_task_group:
                     task = create_airflow_task(task_meta, dag, task_group=model_task_group)
                     test_meta = create_test_task_metadata(
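
For reviewers, a minimal self-contained sketch of the task-naming rule the two hunks above introduce (DbtNode, DbtResourceType, and TestBehavior here are simplified stand-ins, not the real cosmos classes): a model now gets the short "run" task id only when it will be nested inside a per-model TaskGroup, which additionally requires that the model has at least one attached dbt test.

    from dataclasses import dataclass
    from enum import Enum


    class DbtResourceType(Enum):
        MODEL = "model"
        SEED = "seed"
        TEST = "test"


    class TestBehavior(Enum):
        AFTER_EACH = "after_each"
        AFTER_ALL = "after_all"


    @dataclass
    class DbtNode:
        name: str
        resource_type: DbtResourceType
        has_test: bool = False


    def task_id_for(node: DbtNode, test_behavior: TestBehavior) -> str:
        # Mirrors create_task_metadata/build_airflow_graph after this change:
        # the short "run" id is used only when the model task will live inside
        # a TaskGroup named after the model (full id becomes "<name>.run").
        use_task_group = (
            node.resource_type == DbtResourceType.MODEL
            and test_behavior == TestBehavior.AFTER_EACH
            and node.has_test
        )
        if node.resource_type == DbtResourceType.MODEL:
            return "run" if use_task_group else f"{node.name}_run"
        return f"{node.name}_{node.resource_type.value}"


    # A tested model is grouped; an untested model keeps a standalone task id.
    assert task_id_for(DbtNode("parent", DbtResourceType.MODEL, has_test=True), TestBehavior.AFTER_EACH) == "run"
    assert task_id_for(DbtNode("child", DbtResourceType.MODEL, has_test=False), TestBehavior.AFTER_EACH) == "child_run"
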
diff --git a/cosmos/dbt/graph.py b/cosmos/dbt/graph.py
index 81a81efb4..1ad6c1737 100644
--- a/cosmos/dbt/graph.py
+++ b/cosmos/dbt/graph.py
@@ -50,6 +50,7 @@ class DbtNode:
     file_path: Path
     tags: list[str] = field(default_factory=lambda: [])
     config: dict[str, Any] = field(default_factory=lambda: {})
+    has_test: bool = False


 class DbtGraph:
@@ -264,6 +265,8 @@ def load_via_dbt_ls(self) -> None:
         self.nodes = nodes
         self.filtered_nodes = nodes

+        self.update_node_dependency()
+
         logger.info("Total nodes: %i", len(self.nodes))
         logger.info("Total filtered nodes: %i", len(self.nodes))

@@ -308,6 +311,8 @@ def load_via_custom_parser(self) -> None:
             project_dir=self.project.dir, nodes=nodes, select=self.select, exclude=self.exclude
         )

+        self.update_node_dependency()
+
         logger.info("Total nodes: %i", len(self.nodes))
         logger.info("Total filtered nodes: %i", len(self.nodes))

@@ -337,11 +342,28 @@ def load_from_dbt_manifest(self) -> None:
                 tags=node_dict["tags"],
                 config=node_dict["config"],
             )
+
             nodes[node.unique_id] = node

         self.nodes = nodes
         self.filtered_nodes = select_nodes(
             project_dir=self.project.dir, nodes=nodes, select=self.select, exclude=self.exclude
         )
+
+        self.update_node_dependency()
+
         logger.info("Total nodes: %i", len(self.nodes))
         logger.info("Total filtered nodes: %i", len(self.nodes))
+
+    def update_node_dependency(self) -> None:
+        """
+        Update the property `has_test` on every node that is covered by a dbt test.
+
+        Updates in-place:
+        * self.filtered_nodes
+        """
+        for node in self.filtered_nodes.values():
+            if node.resource_type == DbtResourceType.TEST:
+                for node_id in node.depends_on:
+                    if node_id in self.filtered_nodes:
+                        self.filtered_nodes[node_id].has_test = True
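
The new `update_node_dependency` hook above is what feeds the graph-building change: every test node marks each upstream node it exercises with `has_test = True`. A standalone sketch of that traversal (toy stand-ins again; the `stg_orders` ids are hypothetical examples, not taken from this PR):

    from dataclasses import dataclass, field
    from enum import Enum


    class DbtResourceType(Enum):
        MODEL = "model"
        TEST = "test"


    @dataclass
    class DbtNode:
        unique_id: str
        resource_type: DbtResourceType
        depends_on: list = field(default_factory=list)
        has_test: bool = False


    def update_node_dependency(filtered_nodes: dict) -> None:
        # Same traversal as DbtGraph.update_node_dependency: walk the test
        # nodes and flag every upstream node they exercise.
        for node in filtered_nodes.values():
            if node.resource_type == DbtResourceType.TEST:
                for node_id in node.depends_on:
                    if node_id in filtered_nodes:
                        filtered_nodes[node_id].has_test = True


    nodes = {
        "model.jaffle_shop.stg_orders": DbtNode("model.jaffle_shop.stg_orders", DbtResourceType.MODEL),
        "test.jaffle_shop.not_null_stg_orders": DbtNode(
            "test.jaffle_shop.not_null_stg_orders",
            DbtResourceType.TEST,
            depends_on=["model.jaffle_shop.stg_orders"],
        ),
    }
    update_node_dependency(nodes)
    assert nodes["model.jaffle_shop.stg_orders"].has_test is True
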
diff --git a/docs/getting_started/dbt-airflow-concepts.rst b/docs/getting_started/dbt-airflow-concepts.rst
index 8dfe00582..291cec19c 100644
--- a/docs/getting_started/dbt-airflow-concepts.rst
+++ b/docs/getting_started/dbt-airflow-concepts.rst
@@ -10,28 +10,18 @@ differences, they also share similar concepts. This page aims to list some of
 these concepts and help those who may be new to Airflow or dbt and are
 considering to use Cosmos.

+.. table::
+   :align: left
+   :widths: auto
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-| Airflow naming | dbt naming   | Description                                                                     | Differences                                                                 | References                                                                           |
-+================+==============+=================================================================================+=============================================================================+======================================================================================+
-| DAG            | Workflow     | Pipeline (Direct Acyclic Graph) that contains a group of steps                  | Airflow expects upstream tasks to have passed to run downstream tasks.     | https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/dags.html        |
-|                |              |                                                                                 | dbt can run a subset of tasks assuming upstream tasks were run.            | https://docs.getdbt.com/docs/introduction                                            |
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-| Task           | Node         | Step within a pipeline (DAG or workflow)                                        | In dbt, these are usually transformations that run on a remote database.   | https://docs.getdbt.com/reference/node-selection/syntax                              |
-|                |              |                                                                                 | In Airflow, steps can be anything, running locally in Airflow or remotely. | https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/tasks.html        |
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-| Language       | Language     | Programming or declarative language used to define pipelines and steps.        | In dbt, users write SQL, YML and Python to define the steps of a pipeline. | https://docs.getdbt.com/docs/introduction#dbt-optimizes-your-workflow                |
-|                |              |                                                                                 | Airflow expects steps and pipelines are written in Python.                 | https://airflow.apache.org/docs/apache-airflow/stable/public-airflow-interface.html  |
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-| Variables      | Variables    | Key-value configuration that can be used in steps and avoids hard-coded values |                                                                             | https://docs.getdbt.com/docs/build/project-variables                                 |
-|                |              |                                                                                 |                                                                             | https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/variables.html    |
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-| Templating     | Macros       | Jinja templating used to access variables, configuration and reference steps   | dbt encourages using jinja templating for control structures (if and for). | https://docs.getdbt.com/docs/build/jinja-macros                                      |
-|                |              |                                                                                 | Native in Airflow/Python, used to define variables, macros and filters.    | https://airflow.apache.org/docs/apache-airflow/stable/templates-ref.html             |
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-| Connection     | Profile      | Configuration to connect to databases or other services                        |                                                                             | https://airflow.apache.org/docs/apache-airflow/stable/howto/connection.html          |
-|                |              |                                                                                 |                                                                             | https://docs.getdbt.com/docs/core/connect-data-platform/connection-profiles          |
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
-| Providers      | Adapter      | Additional Python libraries that support specific databases or services        |                                                                             | https://airflow.apache.org/docs/apache-airflow-providers/                            |
-|                |              |                                                                                 |                                                                             | https://docs.getdbt.com/guides/dbt-ecosystem/adapter-development/1-what-are-adapters |
-+----------------+--------------+---------------------------------------------------------------------------------+-----------------------------------------------------------------------------+--------------------------------------------------------------------------------------+
+   ===================================================================================================  ====================================================================================================  ====================================================================================  ======================================================================================================================================================
+   Airflow naming                                                                                       dbt naming                                                                                            Description                                                                           Differences
+   ===================================================================================================  ====================================================================================================  ====================================================================================  ======================================================================================================================================================
+   `DAG <https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/dags.html>`_               `Workflow <https://docs.getdbt.com/docs/introduction>`_                                              Pipeline (Directed Acyclic Graph) that contains a group of steps                     Airflow expects upstream tasks to have passed to run downstream tasks. dbt can run a subset of tasks assuming upstream tasks were run.
+   `Task <https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/tasks.html>`_             `Node <https://docs.getdbt.com/reference/node-selection/syntax>`_                                    Step within a pipeline (DAG or workflow)                                             In dbt, these are usually transformations that run on a remote database. In Airflow, steps can be anything, running locally in Airflow or remotely.
+   `Language <https://airflow.apache.org/docs/apache-airflow/stable/public-airflow-interface.html>`_   `Language <https://docs.getdbt.com/docs/introduction#dbt-optimizes-your-workflow>`_                  Programming or declarative language used to define pipelines and steps.              In dbt, users write SQL, YML and Python to define the steps of a pipeline. Airflow expects steps and pipelines to be written in Python.
+   `Variables <https://airflow.apache.org/docs/apache-airflow/2.7.1/core-concepts/variables.html>`_    `Variables <https://docs.getdbt.com/docs/build/project-variables>`_                                  Key-value configuration that can be used in steps and avoids hard-coded values
+   `Templating <https://airflow.apache.org/docs/apache-airflow/stable/templates-ref.html>`_            `Macros <https://docs.getdbt.com/docs/build/jinja-macros>`_                                          Jinja templating used to access variables, configuration and reference steps         dbt encourages using jinja templating for control structures (if and for). Native in Airflow/Python, used to define variables, macros and filters.
+   `Connection <https://airflow.apache.org/docs/apache-airflow/stable/howto/connection.html>`_         `Profile <https://docs.getdbt.com/docs/core/connect-data-platform/connection-profiles>`_             Configuration to connect to databases or other services
+   `Providers <https://airflow.apache.org/docs/apache-airflow-providers/>`_                            `Adapter <https://docs.getdbt.com/guides/dbt-ecosystem/adapter-development/1-what-are-adapters>`_    Additional Python libraries that support specific databases or services
+   ===================================================================================================  ====================================================================================================  ====================================================================================  ======================================================================================================================================================
diff --git a/tests/airflow/test_graph.py b/tests/airflow/test_graph.py
index 7b539bb5b..bd3777209 100644
--- a/tests/airflow/test_graph.py
+++ b/tests/airflow/test_graph.py
@@ -36,6 +36,7 @@
     file_path=SAMPLE_PROJ_PATH / "gen2/models/parent.sql",
     tags=["has_child"],
     config={"materialized": "view"},
+    has_test=True,
 )
 test_parent_node = DbtNode(
     name="test_parent", unique_id="test_parent", resource_type=DbtResourceType.TEST, depends_on=["parent"], file_path=""
@@ -49,15 +50,8 @@
     tags=["nightly"],
     config={"materialized": "table"},
 )
-test_child_node = DbtNode(
-    name="test_child",
-    unique_id="test_child",
-    resource_type=DbtResourceType.TEST,
-    depends_on=["child"],
-    file_path="",
-)

-sample_nodes_list = [parent_seed, parent_node, test_parent_node, child_node, test_child_node]
+sample_nodes_list = [parent_seed, parent_node, test_parent_node, child_node]
 sample_nodes = {node.unique_id: node for node in sample_nodes_list}


@@ -93,21 +87,18 @@ def test_build_airflow_graph_with_after_each():
         "seed_parent_seed",
         "parent.run",
         "parent.test",
-        "child.run",
-        "child.test",
+        "child_run",
     ]
+
     assert topological_sort == expected_sort

     task_groups = dag.task_group_dict
-    assert len(task_groups) == 2
+    assert len(task_groups) == 1

     assert task_groups["parent"].upstream_task_ids == {"seed_parent_seed"}
     assert list(task_groups["parent"].children.keys()) == ["parent.run", "parent.test"]

-    assert task_groups["child"].upstream_task_ids == {"parent.test"}
-    assert list(task_groups["child"].children.keys()) == ["child.run", "child.test"]
-
     assert len(dag.leaves) == 1
-    assert dag.leaves[0].task_id == "child.test"
+    assert dag.leaves[0].task_id == "child_run"

 @pytest.mark.skipif(
@@ -231,7 +222,7 @@ def test_create_task_metadata_model(caplog):
     assert metadata.arguments == {"models": "my_model"}


-def test_create_task_metadata_model_use_name_as_task_id_prefix(caplog):
+def test_create_task_metadata_model_use_task_group(caplog):
     child_node = DbtNode(
         name="my_model",
         unique_id="my_folder.my_model",
@@ -241,14 +232,12 @@ def test_create_task_metadata_model_use_name_as_task_id_prefix(caplog):
         tags=[],
         config={},
     )
-    metadata = create_task_metadata(
-        child_node, execution_mode=ExecutionMode.LOCAL, args={}, use_name_as_task_id_prefix=False
-    )
+    metadata = create_task_metadata(child_node, execution_mode=ExecutionMode.LOCAL, args={}, use_task_group=True)
     assert metadata.id == "run"


-@pytest.mark.parametrize("use_name_as_task_id_prefix", (None, True, False))
-def test_create_task_metadata_seed(caplog, use_name_as_task_id_prefix):
+@pytest.mark.parametrize("use_task_group", (None, True, False))
+def test_create_task_metadata_seed(caplog, use_task_group):
     sample_node = DbtNode(
         name="my_seed",
         unique_id="my_folder.my_seed",
@@ -258,14 +247,14 @@
         tags=[],
         config={},
     )
-    if use_name_as_task_id_prefix is None:
+    if use_task_group is None:
         metadata = create_task_metadata(sample_node, execution_mode=ExecutionMode.DOCKER, args={})
     else:
         metadata = create_task_metadata(
             sample_node,
             execution_mode=ExecutionMode.DOCKER,
             args={},
-            use_name_as_task_id_prefix=use_name_as_task_id_prefix,
+            use_task_group=use_task_group,
         )
     assert metadata.id == "my_seed_seed"
     assert metadata.operator_class == "cosmos.operators.docker.DbtSeedDockerOperator"
diff --git a/tests/dbt/test_graph.py b/tests/dbt/test_graph.py
index 2547a4885..317dce0bb 100644
--- a/tests/dbt/test_graph.py
+++ b/tests/dbt/test_graph.py
@@ -361,3 +361,32 @@ def test_load_via_load_via_custom_parser(pipeline_name):
     assert dbt_graph.nodes == dbt_graph.filtered_nodes
     # the custom parser does not add dbt test nodes
     assert len(dbt_graph.nodes) == 8
+
+
+@patch("cosmos.dbt.graph.DbtGraph.update_node_dependency", return_value=None)
+def test_update_node_dependency_called(mock_update_node_dependency):
+    dbt_project = DbtProject(name="jaffle_shop", root_dir=DBT_PROJECTS_ROOT_DIR, manifest_path=SAMPLE_MANIFEST)
+    dbt_graph = DbtGraph(project=dbt_project)
+    dbt_graph.load()
+
+    assert mock_update_node_dependency.called
+
+
+def test_update_node_dependency_target_exist():
+    dbt_project = DbtProject(name="jaffle_shop", root_dir=DBT_PROJECTS_ROOT_DIR, manifest_path=SAMPLE_MANIFEST)
+    dbt_graph = DbtGraph(project=dbt_project)
+    dbt_graph.load()
+
+    for node in dbt_graph.nodes.values():
+        if node.resource_type == DbtResourceType.TEST:
+            for node_id in node.depends_on:
+                assert dbt_graph.nodes[node_id].has_test is True
+
+
+def test_update_node_dependency_test_not_exist():
+    dbt_project = DbtProject(name="jaffle_shop", root_dir=DBT_PROJECTS_ROOT_DIR, manifest_path=SAMPLE_MANIFEST)
+    dbt_graph = DbtGraph(project=dbt_project, exclude=["config.materialized:test"])
+    dbt_graph.load_from_dbt_manifest()
+
+    for node in dbt_graph.filtered_nodes.values():
+        assert node.has_test is False
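
Net effect, as pinned down by `test_build_airflow_graph_with_after_each`: `parent` (which has a test) keeps its TaskGroup, while `child` (which has none) loses its group and its per-model test task, so the DAG has one task fewer and its leaf changes from `child.test` to `child_run`. Illustrated with the topological sorts from that test, before and after this change:

    # Task ids of the example DAG in tests/airflow/test_graph.py.
    before = ["seed_parent_seed", "parent.run", "parent.test", "child.run", "child.test"]
    after = ["seed_parent_seed", "parent.run", "parent.test", "child_run"]
    assert len(after) == len(before) - 1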