From 1ede2db01bf5be886d9ece1e718449bbc3899075 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Tue, 13 Jun 2023 10:41:48 -0400 Subject: [PATCH 01/43] Making breadcrumbs clickable --- website/docs/docs/about-setup.md | 2 +- ...t-cloud-features.md => about-dbt-cloud.md} | 5 ++-- .../cloud/dbt-cloud-ide/dbt-cloud-tips.md | 2 +- .../dbt-cloud-ide/develop-in-the-cloud.md | 2 +- .../docs/cloud/manage-access/sso-overview.md | 2 +- website/docs/docs/introduction.md | 2 +- .../run-your-dbt-projects.md | 2 +- website/sidebars.js | 26 +++++++++++-------- website/static/_redirects | 4 +++ 9 files changed, 28 insertions(+), 19 deletions(-) rename website/docs/docs/cloud/about-cloud/{dbt-cloud-features.md => about-dbt-cloud.md} (98%) diff --git a/website/docs/docs/about-setup.md b/website/docs/docs/about-setup.md index 3fb868b8448..c3c0b46ad74 100644 --- a/website/docs/docs/about-setup.md +++ b/website/docs/docs/about-setup.md @@ -11,7 +11,7 @@ dbt compiles and runs your analytics code against your data platform, enabling y **dbt Core** is an open-source command line tool that can be installed locally in your environment, and communication with databases is facilitated through adapters. -If you're not sure which is the right solution for you, read our [What is dbt?](/docs/introduction) and our [dbt Cloud features](/docs/cloud/about-cloud/dbt-cloud-features) articles to help you decide. If you still have questions, don't hesitate to [contact us](https://www.getdbt.com/contact/). +If you're not sure which is the right solution for you, read our [What is dbt?](/docs/introduction) and our [dbt Cloud features](/docs/cloud/about-cloud/about-dbt-cloud) articles to help you decide. If you still have questions, don't hesitate to [contact us](https://www.getdbt.com/contact/). To begin configuring dbt now, select the option that is right for you. 
diff --git a/website/docs/docs/cloud/about-cloud/dbt-cloud-features.md b/website/docs/docs/cloud/about-cloud/about-dbt-cloud.md similarity index 98% rename from website/docs/docs/cloud/about-cloud/dbt-cloud-features.md rename to website/docs/docs/cloud/about-cloud/about-dbt-cloud.md index f9232a7500b..bed7f0745a9 100644 --- a/website/docs/docs/cloud/about-cloud/dbt-cloud-features.md +++ b/website/docs/docs/cloud/about-cloud/about-dbt-cloud.md @@ -1,6 +1,7 @@ --- -title: "dbt Cloud features" -id: "dbt-cloud-features" +title: "About dbt Cloud" +id: "about-dbt-cloud" +sidebar_label: "About dbt Cloud" hide_table_of_contents: true --- diff --git a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md index 9349f863118..ab7fd5d900f 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md @@ -60,5 +60,5 @@ There are default keyboard shortcuts that can help make development more product ## Related docs - [Quickstart guide](/quickstarts) -- [About dbt Cloud](/docs/cloud/about-cloud/dbt-cloud-features) +- [About dbt Cloud](/docs/cloud/about-cloud/about-dbt-cloud) - [Develop in the Cloud](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) diff --git a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md index e3199984d05..b808b0a3921 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md @@ -133,7 +133,7 @@ The dbt Cloud IDE makes it possible to [build and view](/docs/collaborate/build- ## Related docs -- [dbt Cloud features](/docs/cloud/about-cloud/dbt-cloud-features) +- [dbt Cloud features](/docs/cloud/about-cloud/about-dbt-cloud) - [IDE user interface](/docs/cloud/dbt-cloud-ide/ide-user-interface) - [Version control basics](/docs/collaborate/git/version-control-basics) - [dbt 
Commands](/reference/dbt-commands) diff --git a/website/docs/docs/cloud/manage-access/sso-overview.md b/website/docs/docs/cloud/manage-access/sso-overview.md index 668be790e58..d4e74ada4c8 100644 --- a/website/docs/docs/cloud/manage-access/sso-overview.md +++ b/website/docs/docs/cloud/manage-access/sso-overview.md @@ -1,5 +1,5 @@ --- -title: "SSO Overview" +title: "Single sign-on (SSO) Overview" id: "sso-overview" --- diff --git a/website/docs/docs/introduction.md b/website/docs/docs/introduction.md index a28e401feb4..656d5305877 100644 --- a/website/docs/docs/introduction.md +++ b/website/docs/docs/introduction.md @@ -35,7 +35,7 @@ You can learn about plans and pricing on [www.getdbt.com](https://www.getdbt.com ### dbt Cloud -dbt Cloud is the fastest and most reliable way to deploy dbt. Develop, test, schedule, and investigate data models all in one web-based UI. Learn more about [dbt Cloud features](/docs/cloud/about-cloud/dbt-cloud-features) and try one of the [dbt Cloud quickstarts](/quickstarts). +dbt Cloud is the fastest and most reliable way to deploy dbt. Develop, test, schedule, and investigate data models all in one web-based UI. Learn more about [dbt Cloud features](/docs/cloud/about-cloud/about-dbt-cloud) and try one of the [dbt Cloud quickstarts](/quickstarts). ### dbt Core diff --git a/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md b/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md index 9bd57e0b280..32b149a2ccb 100644 --- a/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md +++ b/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md @@ -2,7 +2,7 @@ title: "Run your dbt projects" id: "run-your-dbt-projects" --- -You can run your dbt projects with [dbt Cloud](/docs/cloud/about-cloud/dbt-cloud-features) and [dbt Core](https://github.com/dbt-labs/dbt-core). dbt Cloud is a hosted application where you can develop directly from a web browser. 
dbt Core is an open source project where you can develop from the command line. +You can run your dbt projects with [dbt Cloud](/docs/cloud/about-cloud/about-dbt-cloud) and [dbt Core](https://github.com/dbt-labs/dbt-core). dbt Cloud is a hosted application where you can develop directly from a web browser. dbt Core is an open source project where you can develop from the command line. Among other features, dbt Cloud provides a development environment to help you build, test, run, and [version control](/docs/collaborate/git-version-control) your project faster. It also includes an easier way to share your [dbt project's documentation](/docs/collaborate/build-and-view-your-docs) with your team. These development tasks are directly built into dbt Cloud for an _integrated development environment_ (IDE). Refer to [Develop in the Cloud](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) for more details. diff --git a/website/sidebars.js b/website/sidebars.js index f329bb3bbbd..e682395f2d2 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -7,6 +7,7 @@ const sidebarSettings = { collapsed: true, link: { type: "doc", id: "docs/supported-data-platforms" }, items: [ + "docs/supported-data-platforms", "docs/connect-adapters", "docs/community-adapters", "docs/contribute-core-adapters", @@ -15,8 +16,9 @@ const sidebarSettings = { { type: "category", label: "About dbt Cloud", + link: { type: "doc", id: "docs/cloud/about-cloud/about-dbt-cloud" }, items: [ - "docs/cloud/about-cloud/dbt-cloud-features", + "docs/cloud/about-cloud/about-dbt-cloud", "docs/cloud/about-cloud/architecture", "docs/cloud/about-cloud/tenancy", "docs/cloud/about-cloud/regions-ip-addresses", @@ -32,17 +34,20 @@ const sidebarSettings = { type: "category", label: "Set up dbt", collapsed: true, + link: { type: "doc", id: "docs/about-setup" }, items: [ "docs/about-setup", { type: "category", label: "dbt Cloud", collapsed: true, + link: { type: "doc", id: "docs/cloud/about-cloud-setup" }, items: [ 
"docs/cloud/about-cloud-setup", { type: "category", label: "Connect data platform", + link: { type: "doc", id: "docs/cloud/connect-data-platform/about-connections" }, items: [ "docs/cloud/connect-data-platform/about-connections", "docs/cloud/connect-data-platform/connect-starburst-trino", @@ -56,13 +61,15 @@ const sidebarSettings = { { type: "category", label: "Manage access", + link: { type: "doc", id: "docs/cloud/manage-access/about-user-access" }, items: [ "docs/cloud/manage-access/about-user-access", - "docs/cloud/manage-access/seats-and-users", { type: "category", - label: "Permissions", + label: "User permissions and licenses", + link: { type: "doc", id: "docs/cloud/manage-access/seats-and-users" }, items: [ + "docs/cloud/manage-access/seats-and-users", "docs/cloud/manage-access/self-service-permissions", "docs/cloud/manage-access/enterprise-permissions", ], @@ -70,7 +77,8 @@ const sidebarSettings = { { type: "category", - label: "Single sign-on", + label: "Single sign-on and Oauth", + link: { type: "doc", id: "docs/cloud/manage-access/sso-overview" }, items: [ "docs/cloud/manage-access/sso-overview", "docs/cloud/manage-access/auth0-migration", @@ -78,16 +86,10 @@ const sidebarSettings = { "docs/cloud/manage-access/set-up-sso-okta", "docs/cloud/manage-access/set-up-sso-google-workspace", "docs/cloud/manage-access/set-up-sso-azure-active-directory", - ], - }, // SSO - { - type: "category", - label: "OAuth with data platforms", - items: [ "docs/cloud/manage-access/set-up-snowflake-oauth", "docs/cloud/manage-access/set-up-bigquery-oauth", ], - }, // oauth + }, // SSO "docs/cloud/manage-access/audit-log", ], }, // Manage access @@ -101,6 +103,7 @@ const sidebarSettings = { { type: "category", label: "Azure DevOps", + link: { type: "doc", id: "docs/cloud/git/connect-azure-devops" }, items: [ "docs/cloud/git/connect-azure-devops", "docs/cloud/git/setup-azure", @@ -115,6 +118,7 @@ const sidebarSettings = { label: "Develop in the IDE", link: { type: "doc", id: 
"docs/cloud/dbt-cloud-ide/develop-in-the-cloud" }, items: [ + "docs/cloud/dbt-cloud-ide/develop-in-the-cloud", "docs/cloud/dbt-cloud-ide/ide-user-interface", "docs/cloud/dbt-cloud-ide/lint-format", "docs/cloud/dbt-cloud-ide/dbt-cloud-tips", diff --git a/website/static/_redirects b/website/static/_redirects index 3297acc799f..23d07c6e76b 100644 --- a/website/static/_redirects +++ b/website/static/_redirects @@ -1,3 +1,7 @@ +## Breadcrumb name changes + +/docs/cloud/about-cloud/dbt-cloud-features /docs/cloud/about-cloud/about-dbt-cloud 301 + ## quickstarts redirect again /docs/quickstarts/dbt-cloud/bigquery /quickstarts/bigquery 301 From 7b11fefd8eab8ee15140190886cf92de6a04b708 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 15 Jun 2023 14:32:58 -0400 Subject: [PATCH 02/43] Adding pagination to frontmatter for demo --- website/docs/docs/supported-data-platforms.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/website/docs/docs/supported-data-platforms.md b/website/docs/docs/supported-data-platforms.md index 031d4aeb6fe..e9f80e4e2e2 100644 --- a/website/docs/docs/supported-data-platforms.md +++ b/website/docs/docs/supported-data-platforms.md @@ -4,6 +4,8 @@ id: "supported-data-platforms" sidebar_label: "Supported data platforms" description: "Connect dbt to any data platform in dbt Cloud or dbt Core, using a dedicated adapter plugin" hide_table_of_contents: true +pagination_next: "docs/connect-adapters" +pagination_prev: null --- dbt connects to and runs SQL against your database, warehouse, lake, or query engine. These SQL-speaking platforms are collectively referred to as _data platforms_. dbt connects with data platforms by using a dedicated adapter plugin for each. Plugins are built as Python modules that dbt Core discovers if they are installed on your system. Read [What are Adapters](/guides/dbt-ecosystem/adapter-development/1-what-are-adapters) for more info. 
From 47ba5b916a7e3d5c7ce27a9d85d434abeca0bf8f Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Wed, 5 Jul 2023 10:43:46 -0400 Subject: [PATCH 03/43] Updates to sidebar --- website/sidebars.js | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/website/sidebars.js b/website/sidebars.js index e682395f2d2..31c8173d59d 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -97,7 +97,9 @@ const sidebarSettings = { { type: "category", label: "Configure Git", + link: { type: "doc", id: "docs/cloud/git/import-a-project-by-git-url" }, items: [ + "docs/cloud/git/import-a-project-by-git-url", "docs/cloud/git/connect-github", "docs/cloud/git/connect-gitlab", { @@ -110,7 +112,6 @@ const sidebarSettings = { "docs/cloud/git/authenticate-azure", ], }, - "docs/cloud/git/import-a-project-by-git-url", ], }, // Supported Git providers { @@ -127,12 +128,13 @@ const sidebarSettings = { { type: "category", label: "Secure your tenant", + link: { type: "doc", id: "docs/cloud/secure/ip-restrictions" }, items: [ + "docs/cloud/secure/ip-restrictions", "docs/cloud/secure/about-privatelink", "docs/cloud/secure/snowflake-privatelink", "docs/cloud/secure/redshift-privatelink", "docs/cloud/secure/databricks-privatelink", - "docs/cloud/secure/ip-restrictions", ], }, // PrivateLink ], @@ -143,12 +145,14 @@ const sidebarSettings = { collapsed: true, link: { type: "doc", id: "docs/core/about-core-setup", }, items: [ + "docs/core/about-core-setup", "docs/core/about-the-cli", { type: "category", label: "Install dbt", link: { type: "doc", id: "docs/core/installation", }, items: [ + "docs/core/installation", "docs/core/homebrew-install", "docs/core/pip-install", "docs/core/docker-install", @@ -160,6 +164,7 @@ const sidebarSettings = { label: "Connect data platform", link: { type: "doc", id: "docs/core/connect-data-platform/about-core-connections" }, items: [ + 
"docs/core/connect-data-platform/about-core-connections", "docs/core/connect-data-platform/profiles.yml", "docs/core/connect-data-platform/connection-profiles", "docs/core/connect-data-platform/bigquery-setup", @@ -213,17 +218,20 @@ const sidebarSettings = { type: "category", label: "Build dbt projects", collapsed: true, + link: { type: "doc", id: "docs/build/projects" }, items: [ "docs/build/projects", { type: "category", label: "Build your DAG", collapsed: true, + link: { type: "doc", id: "docs/build/sources" }, items: [ "docs/build/sources", { type: "category", label: "Models", + link: { type: "doc", id: "docs/build/models" }, items: [ "docs/build/models", "docs/build/sql-models", @@ -241,6 +249,7 @@ const sidebarSettings = { type: "category", label: "Enhance your models", collapsed: true, + link: { type: "doc", id: "docs/build/tests" }, items: [ "docs/build/tests", "docs/build/materializations", @@ -251,6 +260,7 @@ const sidebarSettings = { type: "category", label: "Enhance your code", collapsed: true, + link: { type: "doc", id: "docs/build/jinja-macros" }, items: [ "docs/build/jinja-macros", "docs/build/project-variables", @@ -264,6 +274,7 @@ const sidebarSettings = { type: "category", label: "Organize your outputs", collapsed: true, + link: { type: "doc", id: "docs/build/custom-schemas" }, items: [ "docs/build/custom-schemas", "docs/build/custom-databases", @@ -286,6 +297,7 @@ const sidebarSettings = { label: "Deploy with dbt Cloud", link: { type: "doc", id: "docs/deploy/dbt-cloud-job" }, items: [ + "docs/deploy/dbt-cloud-job", "docs/deploy/artifacts", "docs/deploy/job-scheduler", "docs/deploy/job-settings", @@ -309,6 +321,7 @@ const sidebarSettings = { { type: "category", label: "Environments", + link: { type: "doc", id: "docs/collaborate/environments/environments-in-dbt" }, items: [ "docs/collaborate/environments/environments-in-dbt", "docs/collaborate/environments/dbt-cloud-environments", @@ -318,6 +331,7 @@ const sidebarSettings = { { type: "category", 
label: "Git version control", + link: { type: "doc", id: "docs/collaborate/git-version-control" }, items: [ "docs/collaborate/git-version-control", "docs/collaborate/git/version-control-basics", @@ -329,6 +343,7 @@ const sidebarSettings = { { type: "category", label: "Document your dbt projects", + link: { type: "doc", id: "docs/collaborate/documentation" }, items: [ "docs/collaborate/documentation", "docs/collaborate/build-and-view-your-docs", @@ -340,6 +355,7 @@ const sidebarSettings = { collapsed: true, link: { type: "doc", id: "docs/collaborate/govern/about-model-governance" }, items: [ + "docs/collaborate/govern/about-model-governance", "docs/collaborate/govern/model-access", "docs/collaborate/govern/model-contracts", "docs/collaborate/govern/model-versions", @@ -351,6 +367,7 @@ const sidebarSettings = { type: "category", label: "Use the dbt Semantic Layer", collapsed: true, + link: { type: "doc", id: "docs/use-dbt-semantic-layer/quickstart-semantic-layer" }, items: [ "docs/use-dbt-semantic-layer/quickstart-semantic-layer", "docs/use-dbt-semantic-layer/dbt-semantic-layer", @@ -362,11 +379,13 @@ const sidebarSettings = { type: "category", label: "dbt Cloud APIs", collapsed: true, + link: { type: "doc", id: "docs/dbt-cloud-apis/overview" }, items: [ "docs/dbt-cloud-apis/overview", { type: "category", label: "Authentication", + link: { type: "doc", id: "docs/dbt-cloud-apis/user-tokens" }, items: [ "docs/dbt-cloud-apis/user-tokens", "docs/dbt-cloud-apis/service-tokens", @@ -378,12 +397,14 @@ const sidebarSettings = { label: "Discovery API", link: { type: "doc", id: "docs/dbt-cloud-apis/discovery-api" }, items: [ + "docs/dbt-cloud-apis/discovery-api", "docs/dbt-cloud-apis/discovery-use-cases-and-examples", "docs/dbt-cloud-apis/project-state", "docs/dbt-cloud-apis/discovery-querying", { type: "category", label: "Schema", + link: { type: "doc", id: "docs/dbt-cloud-apis/discovery-schema-environment" }, items: [ "docs/dbt-cloud-apis/discovery-schema-environment", 
"docs/dbt-cloud-apis/discovery-schema-model", @@ -409,6 +430,7 @@ const sidebarSettings = { { type: "category", label: "Available dbt versions", + link: { type: "doc", id: "docs/dbt-versions/core" }, items: [ "docs/dbt-versions/core", "docs/dbt-versions/upgrade-core-in-cloud", @@ -416,6 +438,7 @@ const sidebarSettings = { { type: "category", label: "dbt Cloud Release Notes", + link: { type: "doc", id: "docs/dbt-versions/dbt-cloud-release-notes" }, items: [ "docs/dbt-versions/dbt-cloud-release-notes", { From 5ef71b07deb35998212dc5ee84bfb5a60d074b0d Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 20 Jul 2023 10:47:17 -0400 Subject: [PATCH 04/43] Changing collapsible behavior --- website/docusaurus.config.js | 1 + 1 file changed, 1 insertion(+) diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index af285961145..111c3d1b4ae 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -51,6 +51,7 @@ var siteSettings = { docs:{ sidebar: { hideable: true, + autoCollapseCategories: true, }, }, image: "/img/avatar.png", From dfbf23f9ca3940abae88adecd7f7a1a076da33d1 Mon Sep 17 00:00:00 2001 From: gwen windflower Date: Thu, 14 Sep 2023 18:05:32 -0500 Subject: [PATCH 05/43] Spike out mesh guide --- .../how-we-mesh/mesh-1-intro.md | 35 +++++++++++++++++++ .../how-we-mesh/mesh-2-structures.md | 30 ++++++++++++++++ .../how-we-mesh/mesh-3-implementation.md | 29 +++++++++++++++ .../how-we-mesh/mesh-4-conclusion.md | 11 ++++++ 4 files changed, 105 insertions(+) create mode 100644 website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md create mode 100644 website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md create mode 100644 website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md create mode 100644 website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md 
b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md new file mode 100644 index 00000000000..8ebf96a5070 --- /dev/null +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -0,0 +1,35 @@ +--- +title: "Intro to dbt Mesh" +description: Getting started with dbt Mesh patterns +hoverSnippet: Learn how to get started with dbt Mesh +--- + +## What is dbt Mesh? + +Historically, building data teams has involved two extremes, building a centralized team or using embedded analysts. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts, allowing embeds to develop domain expertise while the central team focuses on building a strong operational foundation. A major difficulty of this model, though, is managing the complexity of dependencies, governance, and workflows between all groups — creating friction in monorepos or complexity and silos in multi-repos. Ideally, you want teams to be able to work independently, but also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. + +dbt Mesh is not a product, but a pattern, enabled a convergence of several features in dbt Cloud. It’s inspired by dbt’s best practices and ideas from [data mesh](https://en.wikipedia.org/wiki/Data_mesh). These features include: + +- Cross-project references - this is the core feature that enables a mesh structure. `ref`s now work across projects in dbt Cloud-enabled projects on Enterprise plans. +- Governance - dbt Cloud’s new governance features allow you to manage access and permissions across projects. + - Groups - groups allow you to assign models to subsets of models within a project. + - Access - access configs allow you to control who can view and reference models both within and across projects. +- Versioning - building a dbt Mesh involves treating your data models as stable APIs. 
To achieve this you need mechanisms to version your models and allow graceful adoption and deprecation of models as they evolve. +- Contracts - data contracts set strict expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream consumers. + +## Who is dbt Mesh for? + +dbt Mesh is not for every organization! If you're just starting your dbt journey, don't worry about building a dbt Mesh right away, it increases some meta-complexity around managing your projects that could distract from building initial value in dbt. However, if you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring a dbt Mesh: + +- The number of models in your project is degrading performance and slowing down development. +- Teams have developed separate workflows and need to decouple development. +- Security and governance requirements are increasing and would benefit from increased isolation. + +dbt Cloud is designed to coordinate the features above and simplify the meta-complexities (such as scoped CI and multi-project lineage) to solve for these problems. + +## Learning goals + +- Understand the purpose and tradeoffs of building a dbt Mesh. +- Develop an intuition for various dbt Mesh patterns and how to design a dbt Mesh for your organization. +- Establish recommended steps to incrementally adopt a dbt Mesh pattern in your dbt implementation. +- Offer tooling to help you more quickly and easily implement your dbt Mesh plan. 
diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md new file mode 100644 index 00000000000..2b57e50f2f4 --- /dev/null +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -0,0 +1,30 @@ +--- +title: "Deciding how to structure your mesh" +description: Getting started with dbt Mesh patterns +hoverSnippet: Learn how to get started with dbt Mesh +--- + +## Exploring mesh patterns + +Building a mesh is not a one-size-fits-all process. In fact, it's the opposite, it's about customizing your project structure to fit _your_ team and _your_ data. Often we've had to fit the data team and project structure into our company's org chart, or manage everything in one project to handle the constraints of our data and warehouse. dbt Mesh allows us to mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. + +## Vertical splits + +Vertical splits are about separating out layers of transformation in the DAG order. For example, splitting up staging and mart layers. Often the vertical separation will be based around security and governance requirements, such as separating out PII data from non-PII data and restricting raw data access to a platform team that's responsible for landing and cleaning data. + +## Horizontal splits + +Horizonal splits are about splitting up the data based on source or domain. Often the horizontal separation will be based around team consumption patterns, such as splitting out marketing data and financial data. Another common vector of horizontal splitting is data from different sources, such as click event data and transactional ecommerce data. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. 
+ +## Combining these divisions + +- These are not either/or techniques, you can and should combine them in any way that makes sense for your organization. + +- **DRY applies to underlying data not just code.** Regardless of your split, you should not be sourcing the same rows and columns into multiple meshes. Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface error for conflicts and erodes the single source of truth we're trying to create in our dbt project. + +## Monorepo vs multi-repo + +- A dbt Mesh can exist as multiple projects in a single repo (monorepo) or as multiple projects in their own repositories (multi-repo). +- Monorepos are often easier to get started with, but can become unwieldy as the number of models and teams grow. +- If you're a smaller team looking primarily to speed up and simplify development, a monorepo is likely the right choice. +- If you're a larger team with multiple groups, and need to decouple projects for security and enablement of different development styles and rhythms, a multi-repo is your best bet. diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md new file mode 100644 index 00000000000..10d1b1cbaab --- /dev/null +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md @@ -0,0 +1,29 @@ +--- +title: "Implementing your mesh plan" +description: Getting started with dbt Mesh patterns +hoverSnippet: Learn how to get started with dbt Mesh +--- + +## Implementing a dbt Mesh + +Let's examine an outline of steps to start implementing a dbt Mesh in your organization. + +### Research your current structure + +- Look at your selectors to figure out how people are grouping models right now. +- Talk to teams about what sort of separation is naturally existing right now + - Are there various domains people are focused on? 
+ - Are there various sizes, shapes, and sources of data that get handled separately (such as click event data)? + - Are there people focused on separate levels of transformation, such as landing and staging data or building marts? + +## Add groups and access + +Once you have a sense of some initial groupings, implement group and access permissions within a project. + +- Incrementally start building your jobs based on these groups (we would recommend in parallel to your production jobs until you’re sure about them) to feel out that you’ve drawn the lines in the right place. + +## Do the splits + +- When you’ve confirmed the right groups, use `dbt-meshify` to pull chunks out into their own projects. + - Do _one_ group at a time, using the groups as your selectors. + - Do _not_ refactor as you migrate, however tempting that may be. Focus on getting 1-to-1 parity and log any issues you find in doing the migration for later. Once you’ve fully landed the project then you can start optimizing it for its new life as part of the mesh. diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md b/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md new file mode 100644 index 00000000000..50273f109df --- /dev/null +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md @@ -0,0 +1,11 @@ +--- +title: "Conclusion" +description: Getting started with dbt Mesh patterns +hoverSnippet: Learn how to get started with dbt Mesh +--- + +## Conclusion + +dbt Mesh is a powerful new pattern for data transformation. It helps adapt teams and their data towards each other, rather than making arbitrary decisions based on the constraints of either one. By creating alignment between your people and data flows, developers can move faster, analysts can be more productive, and data consumers can be more confident in the data they use. + +You can incrementally adopt the ideas in this guide in your organization as you hit constraints. 
There's no pressure to adopt this as the _right pattern_ to build with. That said, familiarizing yourself with dbt Mesh concepts and thinking through how they can apply to your organization will help you make better decisions as you grow. We hope this guide has given you a good starting point to do that. From f08b0cd65e436898995ee6112d292f1e9fbd8c0a Mon Sep 17 00:00:00 2001 From: gwen windflower Date: Fri, 15 Sep 2023 11:40:45 -0500 Subject: [PATCH 06/43] Start bullet, bolds, and expansion of mesh guide --- .../how-we-mesh/mesh-1-intro.md | 26 +++++++-------- .../how-we-mesh/mesh-2-structures.md | 13 ++++++-- .../how-we-mesh/mesh-4-conclusion.md | 6 ++-- website/sidebars.js | 33 +++++++++++++++---- 4 files changed, 51 insertions(+), 27 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 8ebf96a5070..611d40a6567 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -10,26 +10,26 @@ Historically, building data teams has involved two extremes, building a centrali dbt Mesh is not a product, but a pattern, enabled a convergence of several features in dbt Cloud. It’s inspired by dbt’s best practices and ideas from [data mesh](https://en.wikipedia.org/wiki/Data_mesh). These features include: -- Cross-project references - this is the core feature that enables a mesh structure. `ref`s now work across projects in dbt Cloud-enabled projects on Enterprise plans. -- Governance - dbt Cloud’s new governance features allow you to manage access and permissions across projects. - - Groups - groups allow you to assign models to subsets of models within a project. - - Access - access configs allow you to control who can view and reference models both within and across projects. -- Versioning - building a dbt Mesh involves treating your data models as stable APIs. 
To achieve this you need mechanisms to version your models and allow graceful adoption and deprecation of models as they evolve. -- Contracts - data contracts set strict expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream consumers. +- **Cross-project references** - this is the core feature that enables a mesh structure. `ref`s now work across projects in dbt Cloud-enabled projects on Enterprise plans. +- **Governance** - dbt Cloud’s new governance features allow you to manage access and permissions across projects. + - **Groups** - groups allow you to assign models to subsets of models within a project. + - **Access** - access configs allow you to control who can view and reference models both within and across projects. +- **Versioning** - building a dbt Mesh involves treating your data models as stable APIs. To achieve this you need mechanisms to version your models and allow graceful adoption and deprecation of models as they evolve. +- **Contracts** - data contracts set strict expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream consumers. ## Who is dbt Mesh for? dbt Mesh is not for every organization! If you're just starting your dbt journey, don't worry about building a dbt Mesh right away, it increases some meta-complexity around managing your projects that could distract from building initial value in dbt. However, if you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring a dbt Mesh: -- The number of models in your project is degrading performance and slowing down development. -- Teams have developed separate workflows and need to decouple development. -- Security and governance requirements are increasing and would benefit from increased isolation. 
+- **The number of models** in your project is degrading performance and slowing down development. +- Teams have developed **separate workflows** and need to decouple development. +- **Security and governance** requirements are increasing and would benefit from increased isolation. dbt Cloud is designed to coordinate the features above and simplify the meta-complexities (such as scoped CI and multi-project lineage) to solve for these problems. ## Learning goals -- Understand the purpose and tradeoffs of building a dbt Mesh. -- Develop an intuition for various dbt Mesh patterns and how to design a dbt Mesh for your organization. -- Establish recommended steps to incrementally adopt a dbt Mesh pattern in your dbt implementation. -- Offer tooling to help you more quickly and easily implement your dbt Mesh plan. +- Understand the **purpose and tradeoffs** of building a dbt Mesh. +- Develop an intuition for various **dbt Mesh patterns** and how to design a dbt Mesh for your organization. +- Establish recommended steps to **incrementally adopt** a dbt Mesh pattern in your dbt implementation. +- Offer **tooling** to help you more quickly and easily implement your dbt Mesh plan. diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index 2b57e50f2f4..816cba694b0 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -6,15 +6,22 @@ hoverSnippet: Learn how to get started with dbt Mesh ## Exploring mesh patterns -Building a mesh is not a one-size-fits-all process. In fact, it's the opposite, it's about customizing your project structure to fit _your_ team and _your_ data. Often we've had to fit the data team and project structure into our company's org chart, or manage everything in one project to handle the constraints of our data and warehouse. 
dbt Mesh allows us to mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. +Building a dbt Mesh is not a one-size-fits-all process. In fact, it's the opposite, it's about customizing your project structure to fit _your_ team and _your_ data. Often we've had to fit the data team and project structure into our company's org chart, or manage everything in one project to handle the constraints of our data and warehouse. dbt Mesh allows us to mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. Let's explore some language for discussing the design of these patterns. ## Vertical splits -Vertical splits are about separating out layers of transformation in the DAG order. For example, splitting up staging and mart layers. Often the vertical separation will be based around security and governance requirements, such as separating out PII data from non-PII data and restricting raw data access to a platform team that's responsible for landing and cleaning data. +Vertical splits are about separating out layers of transformation in the DAG order. Let's look at some examples. + +- **Splitting up staging and mart layers.** Creating a more tightly-controlled, shared set of components that other projects build on but can't edit. +- **Isolating earlier models for security and governance requirements.** Separating out and masking PII data so that downstream consumers can't access it is a common use case for a vertical split. +- **Protecting complex or expensive data.** If you have a large or complex model that's expensive to run, you might want to isolate it so that it's safer from accidental selection and easier to debug when it has issues. ## Horizontal splits -Horizonal splits are about splitting up the data based on source or domain. 
Often the horizontal separation will be based around team consumption patterns, such as splitting out marketing data and financial data. Another common vector of horizontal splitting is data from different sources, such as click event data and transactional ecommerce data. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. +Horizonal splits are about splitting up the data based on source or domain. Let's consider some possibilites for horizontal splitting. + +- Often the horizontal separation will be based around team consumption patterns, such as splitting out marketing data and financial data. +- Another common vector of horizontal splitting is data from different sources, such as click event data and transactional ecommerce data. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. ## Combining these divisions diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md b/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md index 50273f109df..d96bbbf741e 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md @@ -1,11 +1,9 @@ --- -title: "Conclusion" +title: "Go forth and mesh!" description: Getting started with dbt Mesh patterns hoverSnippet: Learn how to get started with dbt Mesh --- -## Conclusion - dbt Mesh is a powerful new pattern for data transformation. It helps adapt teams and their data towards each other, rather than making arbitrary decisions based on the constraints of either one. By creating alignment between your people and data flows, developers can move faster, analysts can be more productive, and data consumers can be more confident in the data they use. 
-You can incrementally adopt the ideas in this guide in your organization as you hit constraints. There's no pressure to adopt this as the _right pattern_ to build with. That said, familiarizing yourself with dbt Mesh concepts and thinking through how they can apply to your organization will help you make better decisions as you grow. We hope this guide has given you a good starting point to do that. +It's important to reiterate that you can _incrementally_ adopt the ideas in this guide in your organization as you hit constraints. The collection of features tha enable dbt Mesh work effectively as independent tools, and there's no pressure to adopt this as the _right pattern_ to build with. That said, familiarizing yourself with the tooling and dbt Mesh concepts, as well as thinking through how they can apply to your organization, will help you make better decisions as you grow. We hope this guide has given you a good starting point to do that. diff --git a/website/sidebars.js b/website/sidebars.js index fde16bc4983..a515f0a7f66 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -266,7 +266,7 @@ const sidebarSettings = { "docs/build/validation", "docs/build/metricflow-time-spine", "docs/build/metricflow-cli", - ] + ], }, { type: "category", @@ -275,8 +275,8 @@ const sidebarSettings = { items: [ "docs/build/dimensions", "docs/build/entities", - "docs/build/measures" - ] + "docs/build/measures", + ], }, { type: "category", @@ -287,7 +287,7 @@ const sidebarSettings = { "docs/build/derived", "docs/build/ratio", "docs/build/simple", - ] + ], }, ], }, @@ -463,7 +463,10 @@ const sidebarSettings = { { type: "category", label: "Job", - link: { type: "doc", id: "docs/dbt-cloud-apis/discovery-schema-job" }, + link: { + type: "doc", + id: "docs/dbt-cloud-apis/discovery-schema-job", + }, items: [ "docs/dbt-cloud-apis/discovery-schema-job-model", "docs/dbt-cloud-apis/discovery-schema-job-models", @@ -486,7 +489,10 @@ const sidebarSettings = { { type: "category", label: 
"Environment", - link: { type: "doc", id: "docs/dbt-cloud-apis/discovery-schema-environment" }, + link: { + type: "doc", + id: "docs/dbt-cloud-apis/discovery-schema-environment", + }, items: [ { type: "category", @@ -925,7 +931,20 @@ const sidebarSettings = { }, { type: "category", - label: "Materializations best practices", + label: "How we build our dbt Mesh projects", + link: { + type: "doc", + id: "guides/best-practices/how-we-mesh/mesh-1-intro", + }, + items: [ + "guides/best-practices/how-we-mesh/mesh-2-structures", + "guides/best-practices/how-we-mesh/mesh-3-implementation", + "guides/best-practices/how-we-mesh/mesh-4-conclusion", + ], + }, + { + type: "category", + label: "Materialization best practices", link: { type: "doc", id: "guides/best-practices/materializations/materializations-guide-1-guide-overview", From f27a91ea13a1da7db2ce6d84f5b9ec6f76feb0bd Mon Sep 17 00:00:00 2001 From: gwen windflower Date: Fri, 15 Sep 2023 12:01:28 -0500 Subject: [PATCH 07/43] Finish mesh patterns copy --- .../how-we-mesh/mesh-2-structures.md | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index 816cba694b0..e7d213dd2e4 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -18,20 +18,22 @@ Vertical splits are about separating out layers of transformation in the DAG ord ## Horizontal splits -Horizonal splits are about splitting up the data based on source or domain. Let's consider some possibilites for horizontal splitting. +Horizonal splits are about splitting up the data based on source or domain. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. Let's consider some possibilites for horizontal splitting. 
-- Often the horizontal separation will be based around team consumption patterns, such as splitting out marketing data and financial data. -- Another common vector of horizontal splitting is data from different sources, such as click event data and transactional ecommerce data. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. +- **Team consumption patterns.** For example, splitting out marketing data and financial data. +- **Data from different sources.** For example, click event data and transactional ecommerce data. +- **Team workflows.** If two embedded groups operate in different project management tools at different paces, or are staffed differently, you may want to split the projects up so they can move independently. ## Combining these divisions -- These are not either/or techniques, you can and should combine them in any way that makes sense for your organization. - -- **DRY applies to underlying data not just code.** Regardless of your split, you should not be sourcing the same rows and columns into multiple meshes. Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface error for conflicts and erodes the single source of truth we're trying to create in our dbt project. +- **These are not either/or techniques**. You can and should combine them in any way that makes sense for your organization. +- **Pick one type of split and focus on that first**. If you have a hub-and-spoke team topology for example, handle breaking out the central platform project before you split the remainder into domains. Then if you need to break those domains up vertically you can shift back to that. +- **DRY applies to underlying data not just code.** Regardless of your splits, you should not be sourcing the same rows and columns into multiple nodes. 
Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface error for conflicts and erodes the single source of truth we're trying to create in our dbt project. ## Monorepo vs multi-repo -- A dbt Mesh can exist as multiple projects in a single repo (monorepo) or as multiple projects in their own repositories (multi-repo). -- Monorepos are often easier to get started with, but can become unwieldy as the number of models and teams grow. -- If you're a smaller team looking primarily to speed up and simplify development, a monorepo is likely the right choice. -- If you're a larger team with multiple groups, and need to decouple projects for security and enablement of different development styles and rhythms, a multi-repo is your best bet. +A dbt Mesh can exist as multiple projects in a single repo (monorepo) or as multiple projects in their own repositories (multi-repo). + +- **Monorepos are often easier to get started with**, but can become unwieldy as the number of models and teams grow. +- If you're a **smaller team** looking primarily to speed up and simplify development, a **monorepo** is likely the right choice. +- If you're a **larger team with multiple groups**, and need to decouple projects for security and enablement of different development styles and rhythms, a **multi-repo setup** is your best bet. 
From d2eda8258e33cf4c4b1ceb35447ef42a6326a7f9 Mon Sep 17 00:00:00 2001 From: gwen windflower Date: Fri, 15 Sep 2023 12:33:47 -0500 Subject: [PATCH 08/43] Bold and expand mesh implementation --- .../how-we-mesh/mesh-3-implementation.md | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md index 10d1b1cbaab..ae7f1d120b1 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md @@ -4,14 +4,14 @@ description: Getting started with dbt Mesh patterns hoverSnippet: Learn how to get started with dbt Mesh --- -## Implementing a dbt Mesh - Let's examine an outline of steps to start implementing a dbt Mesh in your organization. -### Research your current structure +## Research your current structure -- Look at your selectors to figure out how people are grouping models right now. -- Talk to teams about what sort of separation is naturally existing right now +- **Look at your selectors** to figure out how people are grouping models right now. +- **Examine jobs that you run**, look at how they're defined. +- **Look at your lineage graph** to see how models are connected. +- **Talk to teams** about what sort of separation is naturally existing right now. - Are there various domains people are focused on? - Are there various sizes, shapes, and sources of data that get handled separately (such as click event data)? - Are there people focused on separate levels of transformation, such as landing and staging data or building marts? @@ -20,10 +20,12 @@ Let's examine an outline of steps to start implementing a dbt Mesh in your organ Once you have a sense of some initial groupings, implement group and access permissions within a project. 
-- Incrementally start building your jobs based on these groups (we would recommend in parallel to your production jobs until you’re sure about them) to feel out that you’ve drawn the lines in the right place. +- TODO: Dave +- **Validate these groups by incrementally migrating your jobs** to be based on them. We would recommend in parallel to your production jobs until you’re sure about them. This will help you feel out if you’ve drawn the lines in the right place. ## Do the splits -- When you’ve confirmed the right groups, use `dbt-meshify` to pull chunks out into their own projects. - - Do _one_ group at a time, using the groups as your selectors. - - Do _not_ refactor as you migrate, however tempting that may be. Focus on getting 1-to-1 parity and log any issues you find in doing the migration for later. Once you’ve fully landed the project then you can start optimizing it for its new life as part of the mesh. +- When you’ve **confirmed the right groups** it's time to split the projects out. +- Once again, **use `dbt-meshify`** to pull chunks out into their own projects. + - **Do _one_ group at a time**, using the groups as your selectors. + - **Do _not_ refactor as you migrate**, however tempting that may be. Focus on getting 1-to-1 parity and log any issues you find in doing the migration for later. Once you’ve fully landed the project then you can start optimizing it for its new life as part of the mesh. 
From cd7188eeb3699ec246fe88899ecdafc1827cfa68 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 5 Oct 2023 15:29:14 -0400 Subject: [PATCH 09/43] Sidebar project updates --- website/docs/docs/about-setup.md | 2 ++ website/docs/docs/build/about-metricflow.md | 2 ++ website/docs/docs/build/analyses.md | 1 + .../docs/docs/build/build-metrics-intro.md | 2 ++ website/docs/docs/build/custom-schemas.md | 1 + .../docs/docs/build/custom-target-names.md | 2 +- website/docs/docs/build/materializations.md | 1 + website/docs/docs/build/metrics-overview.md | 1 + website/docs/docs/build/models.md | 2 ++ website/docs/docs/build/project-variables.md | 1 + website/docs/docs/build/projects.md | 2 ++ website/docs/docs/build/semantic-models.md | 1 + website/docs/docs/build/simple.md | 1 + website/docs/docs/cloud/about-cloud-setup.md | 2 ++ .../docs/cloud/about-cloud/about-dbt-cloud.md | 2 ++ .../docs/docs/cloud/about-cloud/browsers.md | 1 + website/docs/docs/cloud/billing.md | 2 ++ .../about-connections.md | 2 ++ .../connect-apache-spark.md | 1 + .../cloud/dbt-cloud-ide/dbt-cloud-tips.md | 1 + .../dbt-cloud-ide/develop-in-the-cloud.md | 2 ++ .../docs/docs/cloud/git/authenticate-azure.md | 1 + .../docs/cloud/git/connect-azure-devops.md | 1 + .../cloud/git/import-a-project-by-git-url.md | 2 ++ .../docs/cloud/manage-access/about-access.md | 2 ++ .../docs/cloud/manage-access/audit-log.md | 2 ++ .../manage-access/cloud-seats-and-users.md | 2 ++ .../manage-access/enterprise-permissions.md | 1 + .../manage-access/set-up-bigquery-oauth.md | 1 + .../docs/cloud/manage-access/sso-overview.md | 3 +- .../cloud/secure/databricks-privatelink.md | 1 + .../docs/docs/cloud/secure/ip-restrictions.md | 2 ++ .../cloud-build-and-view-your-docs.md | 1 + .../docs/docs/collaborate/documentation.md | 2 ++ .../docs/docs/collaborate/explore-projects.md | 2 ++ .../docs/collaborate/git-version-control.md | 2 ++ .../docs/collaborate/git/merge-conflicts.md | 1 + 
.../govern/about-model-governance.md | 2 ++ .../govern/project-dependencies.md | 1 + website/docs/docs/contribute-core-adapters.md | 1 + website/docs/docs/core/about-core-setup.md | 2 ++ .../about-core-connections.md | 2 ++ .../connect-data-platform/upsolver-setup.md | 1 + .../docs/docs/core/dbt-core-environments.md | 1 + .../docs/docs/core/installation-overview.md | 2 ++ website/docs/docs/core/source-install.md | 1 + .../docs/dbt-cloud-apis/admin-cloud-api.md | 1 + .../docs/docs/dbt-cloud-apis/apis-overview.md | 2 ++ .../docs/docs/dbt-cloud-apis/discovery-api.md | 1 + .../docs/dbt-cloud-apis/discovery-querying.md | 1 + .../dbt-cloud-apis/schema-discovery-job.mdx | 2 ++ .../docs/dbt-cloud-apis/sl-api-overview.md | 1 + .../docs/docs/dbt-cloud-apis/sl-manifest.md | 1 + .../docs/docs/dbt-cloud-apis/user-tokens.md | 1 + website/docs/docs/dbt-cloud-environments.md | 1 + website/docs/docs/dbt-support.md | 2 ++ .../docs/docs/dbt-versions/core-versions.md | 2 ++ .../dbt-versions/experimental-features.md | 1 + .../docs/docs/dbt-versions/release-notes.md | 2 ++ .../docs/docs/deploy/deployment-overview.md | 2 ++ website/docs/docs/deploy/deployment-tools.md | 1 + website/docs/docs/deploy/jobs.md | 3 +- website/docs/docs/deploy/monitor-jobs.md | 1 + website/docs/docs/environments-in-dbt.md | 1 + website/docs/docs/introduction.md | 2 ++ .../run-your-dbt-projects.md | 1 + .../running-a-dbt-project/using-threads.md | 2 +- .../use-dbt-semantic-layer/quickstart-sl.md | 2 ++ .../use-dbt-semantic-layer/sl-architecture.md | 1 + website/sidebars.js | 33 +++++++++++-------- 70 files changed, 120 insertions(+), 18 deletions(-) diff --git a/website/docs/docs/about-setup.md b/website/docs/docs/about-setup.md index c3c0b46ad74..b5ade88eae4 100644 --- a/website/docs/docs/about-setup.md +++ b/website/docs/docs/about-setup.md @@ -3,6 +3,8 @@ title: About dbt setup id: about-setup description: "About setup of dbt Core and Cloud" sidebar_label: "About dbt setup" +pagination_next: 
"docs/environments-in-dbt" +pagination_prev: null --- dbt compiles and runs your analytics code against your data platform, enabling you and your team to collaborate on a single source of truth for metrics, insights, and business definitions. There are two options for deploying dbt: diff --git a/website/docs/docs/build/about-metricflow.md b/website/docs/docs/build/about-metricflow.md index 68879911597..b1d226bc957 100644 --- a/website/docs/docs/build/about-metricflow.md +++ b/website/docs/docs/build/about-metricflow.md @@ -4,6 +4,8 @@ id: about-metricflow description: "Learn more about MetricFlow and its key concepts" sidebar_label: About MetricFlow tags: [Metrics, Semantic Layer] +pagination_next: "docs/build/join-logic" +pagination_prev: null --- This guide introduces MetricFlow's fundamental ideas for new users. MetricFlow, which powers the dbt Semantic Layer, helps you define and manage the logic for your company's metrics. It's an opinionated set of abstractions and helps data consumers retrieve metric datasets from a data platform quickly and efficiently. diff --git a/website/docs/docs/build/analyses.md b/website/docs/docs/build/analyses.md index cd74c2e052d..74b138ac67a 100644 --- a/website/docs/docs/build/analyses.md +++ b/website/docs/docs/build/analyses.md @@ -2,6 +2,7 @@ title: "Analyses" description: "Read this tutorial to learn how to use custom analyses when building in dbt." 
id: "analyses" +pagination_next: null --- ## Overview diff --git a/website/docs/docs/build/build-metrics-intro.md b/website/docs/docs/build/build-metrics-intro.md index a6fab61d576..a405737c99f 100644 --- a/website/docs/docs/build/build-metrics-intro.md +++ b/website/docs/docs/build/build-metrics-intro.md @@ -5,6 +5,8 @@ description: "Learn about MetricFlow and build your metrics with semantic models sidebar_label: Build your metrics tags: [Metrics, Semantic Layer, Governance] hide_table_of_contents: true +pagination_next: "docs/build/sl-getting-started" +pagination_prev: null --- Use MetricFlow in dbt to centrally define your metrics. As a key component of the [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-sl), MetricFlow is responsible for SQL query construction and defining specifications for dbt semantic models and metrics. diff --git a/website/docs/docs/build/custom-schemas.md b/website/docs/docs/build/custom-schemas.md index ad9fe997483..b20d4130725 100644 --- a/website/docs/docs/build/custom-schemas.md +++ b/website/docs/docs/build/custom-schemas.md @@ -1,6 +1,7 @@ --- title: "Custom schemas" id: "custom-schemas" +pagination_next: "docs/build/custom-databases" --- By default, all dbt models are built in the schema specified in your target. In dbt projects with lots of models, it may be useful to instead build some models in schemas other than your target schema – this can help logically group models together. diff --git a/website/docs/docs/build/custom-target-names.md b/website/docs/docs/build/custom-target-names.md index 4e14f36b784..ac7036de572 100644 --- a/website/docs/docs/build/custom-target-names.md +++ b/website/docs/docs/build/custom-target-names.md @@ -2,7 +2,7 @@ title: "Custom target names" id: "custom-target-names" description: "You can define a custom target name for any dbt Cloud job to correspond to settings in your dbt project." 
- +pagination_next: null --- ## dbt Cloud Scheduler diff --git a/website/docs/docs/build/materializations.md b/website/docs/docs/build/materializations.md index 463651ccc77..64d650e452b 100644 --- a/website/docs/docs/build/materializations.md +++ b/website/docs/docs/build/materializations.md @@ -2,6 +2,7 @@ title: "Materializations" description: "Read this tutorial to learn how to use materializations when building in dbt." id: "materializations" +pagination_next: "docs/build/incremental-models" --- ## Overview diff --git a/website/docs/docs/build/metrics-overview.md b/website/docs/docs/build/metrics-overview.md index e6d875386ee..12e12f70c72 100644 --- a/website/docs/docs/build/metrics-overview.md +++ b/website/docs/docs/build/metrics-overview.md @@ -4,6 +4,7 @@ id: metrics-overview description: "Metrics can be defined in the same or separate YAML files from semantic models within the same dbt project repo." sidebar_label: "Creating metrics" tags: [Metrics, Semantic Layer] +pagination_next: "docs/build/cumulative" --- Once you've created your semantic models, it's time to start adding metrics! Metrics can be defined in the same YAML files as your semantic models, or split into separate YAML files into any other subdirectories (provided that these subdirectories are also within the same dbt project repo) diff --git a/website/docs/docs/build/models.md b/website/docs/docs/build/models.md index e0683158e6d..d10eb5ed01a 100644 --- a/website/docs/docs/build/models.md +++ b/website/docs/docs/build/models.md @@ -2,6 +2,8 @@ title: "About dbt models" description: "Read this tutorial to learn how to use models when building in dbt." 
id: "models" +pagination_next: "docs/build/sql-models" +pagination_prev: null --- ## Overview diff --git a/website/docs/docs/build/project-variables.md b/website/docs/docs/build/project-variables.md index a69132d6a3b..1fe45b18ac0 100644 --- a/website/docs/docs/build/project-variables.md +++ b/website/docs/docs/build/project-variables.md @@ -1,6 +1,7 @@ --- title: "Project variables" id: "project-variables" +pagination_next: "docs/build/environment-variables" --- dbt provides a mechanism, [variables](/reference/dbt-jinja-functions/var), to provide data to models for diff --git a/website/docs/docs/build/projects.md b/website/docs/docs/build/projects.md index 0d7dd889fa6..b4b04e3334d 100644 --- a/website/docs/docs/build/projects.md +++ b/website/docs/docs/build/projects.md @@ -1,6 +1,8 @@ --- title: "About dbt projects" id: "projects" +pagination_next: null +pagination_prev: null --- A dbt project informs dbt about the context of your project and how to transform your data (build your data sets). By design, dbt enforces the top-level structure of a dbt project such as the `dbt_project.yml` file, the `models` directory, the `snapshots` directory, and so on. Within the directories of the top-level, you can organize your project in any way that meets the needs of your organization and data pipeline. diff --git a/website/docs/docs/build/semantic-models.md b/website/docs/docs/build/semantic-models.md index bb56bd212e6..d3592c0e3dd 100644 --- a/website/docs/docs/build/semantic-models.md +++ b/website/docs/docs/build/semantic-models.md @@ -6,6 +6,7 @@ keywords: - dbt metrics layer sidebar_label: Semantic models tags: [Metrics, Semantic Layer] +pagination_next: "docs/build/dimensions" --- Semantic models serve as the foundation for defining data in MetricFlow, which powers the dbt Semantic Layer. You can think of semantic models as nodes in your semantic graph, connected via entities as edges. 
MetricFlow takes semantic models defined in YAML configuration files as inputs and creates a semantic graph that can be used to query metrics. diff --git a/website/docs/docs/build/simple.md b/website/docs/docs/build/simple.md index 7022ca9d007..1803e952a69 100644 --- a/website/docs/docs/build/simple.md +++ b/website/docs/docs/build/simple.md @@ -4,6 +4,7 @@ id: simple description: "Use simple metrics to directly reference a single measure." sidebar_label: Simple tags: [Metrics, Semantic Layer] +pagination_next: null --- Simple metrics are metrics that directly reference a single measure, without any additional measures involved. They are aggregations over a column in your data platform and can be filtered by one or multiple dimensions. diff --git a/website/docs/docs/cloud/about-cloud-setup.md b/website/docs/docs/cloud/about-cloud-setup.md index baa2465472e..d65c2aead2c 100644 --- a/website/docs/docs/cloud/about-cloud-setup.md +++ b/website/docs/docs/cloud/about-cloud-setup.md @@ -3,6 +3,8 @@ title: About dbt Cloud setup id: about-cloud-setup description: "Configuration settings for dbt Cloud." sidebar_label: "About dbt Cloud setup" +pagination_next: "docs/dbt-cloud-environments" +pagination_prev: null --- dbt Cloud is the fastest and most reliable way to deploy your dbt jobs. It contains a myriad of settings that can be configured by admins, from the necessities (data platform integration) to security enhancements (SSO) and quality-of-life features (RBAC). 
This portion of our documentation will take you through the various settings found by clicking on the gear icon in the dbt Cloud UI, including: diff --git a/website/docs/docs/cloud/about-cloud/about-dbt-cloud.md b/website/docs/docs/cloud/about-cloud/about-dbt-cloud.md index f1d8b32cdb1..67ad9778fe1 100644 --- a/website/docs/docs/cloud/about-cloud/about-dbt-cloud.md +++ b/website/docs/docs/cloud/about-cloud/about-dbt-cloud.md @@ -4,6 +4,8 @@ id: "dbt-cloud-features" sidebar_label: "dbt Cloud features" description: "Explore dbt Cloud's features and learn why dbt Cloud is the fastest way to deploy dbt" hide_table_of_contents: true +pagination_next: "docs/cloud/about-cloud/architecture" +pagination_prev: null --- dbt Cloud is the fastest and most reliable way to deploy dbt. Develop, test, schedule, document, and investigate data models all in one browser-based UI. In addition to providing a hosted architecture for running dbt across your organization, dbt Cloud comes equipped with turnkey support for scheduling jobs, CI/CD, hosting documentation, monitoring & alerting, and an integrated development environment (IDE). diff --git a/website/docs/docs/cloud/about-cloud/browsers.md b/website/docs/docs/cloud/about-cloud/browsers.md index 2fc5a8b4b4d..12665bc7b72 100644 --- a/website/docs/docs/cloud/about-cloud/browsers.md +++ b/website/docs/docs/cloud/about-cloud/browsers.md @@ -2,6 +2,7 @@ title: "Supported browsers" id: "browsers" description: "dbt Cloud supports the latest browsers like Chrome and Firefox." +pagination_next: null --- To have the best experience with dbt Cloud, we recommend using the latest versions of the following browsers: diff --git a/website/docs/docs/cloud/billing.md b/website/docs/docs/cloud/billing.md index 61251f6e41d..f84bdeda87e 100644 --- a/website/docs/docs/cloud/billing.md +++ b/website/docs/docs/cloud/billing.md @@ -3,6 +3,8 @@ title: "Billing" id: billing description: "dbt Cloud billing information." 
sidebar_label: Billing +pagination_next: null +pagination_prev: null --- dbt Cloud offers a variety of [plans and pricing](https://www.getdbt.com/pricing/) to fit your organization’s needs. With flexible billing options that appeal to large enterprises and small businesses and [server availability](/docs/cloud/about-cloud/regions-ip-addresses) worldwide, dbt Cloud is the fastest and easiest way to begin transforming your data. diff --git a/website/docs/docs/cloud/connect-data-platform/about-connections.md b/website/docs/docs/cloud/connect-data-platform/about-connections.md index 65bfac3a90d..1d3a52225d7 100644 --- a/website/docs/docs/cloud/connect-data-platform/about-connections.md +++ b/website/docs/docs/cloud/connect-data-platform/about-connections.md @@ -3,6 +3,8 @@ title: "About data platform connections" id: about-connections description: "Information about data platform connections" sidebar_label: "About data platform connections" +pagination_next: "docs/cloud/connect-data-platform/connect-starburst-trino" +pagination_prev: null --- dbt Cloud can connect with a variety of data platform providers including: - [Amazon Redshift](/docs/cloud/connect-data-platform/connect-redshift-postgresql-alloydb) diff --git a/website/docs/docs/cloud/connect-data-platform/connect-apache-spark.md b/website/docs/docs/cloud/connect-data-platform/connect-apache-spark.md index 670b628547b..0186d821a54 100644 --- a/website/docs/docs/cloud/connect-data-platform/connect-apache-spark.md +++ b/website/docs/docs/cloud/connect-data-platform/connect-apache-spark.md @@ -3,6 +3,7 @@ title: "Connect Apache Spark" id: connect-apache-spark description: "Setup instructions for connecting Apache Spark to dbt Cloud" sidebar_label: "Connect Apache Spark" +pagination_next: null --- diff --git a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md index 7803c897317..7126361cb6b 100644 --- 
a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md @@ -3,6 +3,7 @@ title: "Tips and tricks" id: dbt-cloud-tips description: "Check out any dbt Cloud and IDE-related tips." sidebar_label: "Tips and tricks" +pagination_next: null --- # dbt Cloud tips diff --git a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md index 582d3cbd4ba..400f988b72d 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md @@ -4,6 +4,8 @@ id: develop-in-the-cloud description: "Develop, test, run, and build in the Cloud IDE. With the Cloud IDE, you can compile dbt code into SQL and run it against your database directly" sidebar_label: Develop in the IDE tags: [IDE] +pagination_next: "docs/cloud/dbt-cloud-ide/ide-user-interface" +pagination_prev: null --- The dbt Cloud integrated development environment (IDE) is a single interface for building, testing, running, and version-controlling dbt projects from your browser. With the Cloud IDE, you can compile dbt code into SQL and run it against your database directly. diff --git a/website/docs/docs/cloud/git/authenticate-azure.md b/website/docs/docs/cloud/git/authenticate-azure.md index 03020ccca73..8f1361043ce 100644 --- a/website/docs/docs/cloud/git/authenticate-azure.md +++ b/website/docs/docs/cloud/git/authenticate-azure.md @@ -3,6 +3,7 @@ title: "Authenticate with Azure DevOps" id: "authenticate-azure" description: "dbt Cloud developers need to authenticate with Azure DevOps." 
sidebar_label: "Authenticate with Azure DevOps" +pagination_next: null --- diff --git a/website/docs/docs/cloud/git/connect-azure-devops.md b/website/docs/docs/cloud/git/connect-azure-devops.md index bc5bb81dd24..ebbb356969f 100644 --- a/website/docs/docs/cloud/git/connect-azure-devops.md +++ b/website/docs/docs/cloud/git/connect-azure-devops.md @@ -1,6 +1,7 @@ --- title: "Connect to Azure DevOps" id: "connect-azure-devops" +pagination_next: "docs/cloud/git/setup-azure" --- diff --git a/website/docs/docs/cloud/git/import-a-project-by-git-url.md b/website/docs/docs/cloud/git/import-a-project-by-git-url.md index ba53baa33ea..83846bb1f0b 100644 --- a/website/docs/docs/cloud/git/import-a-project-by-git-url.md +++ b/website/docs/docs/cloud/git/import-a-project-by-git-url.md @@ -1,6 +1,8 @@ --- title: "Import a project by git URL" id: "import-a-project-by-git-url" +pagination_next: "docs/cloud/git/connect-github" +pagination_prev: null --- In dbt Cloud, you can import a git repository from any valid git URL that points to a dbt project. There are some important considerations to keep in mind when doing this. diff --git a/website/docs/docs/cloud/manage-access/about-access.md b/website/docs/docs/cloud/manage-access/about-access.md index f9f97bc555d..e7c2e24312d 100644 --- a/website/docs/docs/cloud/manage-access/about-access.md +++ b/website/docs/docs/cloud/manage-access/about-access.md @@ -2,6 +2,8 @@ title: "About user access in dbt Cloud" description: "Learn how dbt Cloud administrators can use dbt Cloud's permissioning model to control user-level access in a dbt Cloud account." 
id: "about-user-access" +pagination_next: "docs/cloud/manage-access/audit-log" +pagination_prev: null --- :::info "User access" is not "Model access" diff --git a/website/docs/docs/cloud/manage-access/audit-log.md b/website/docs/docs/cloud/manage-access/audit-log.md index 98bf660b259..b90bceef570 100644 --- a/website/docs/docs/cloud/manage-access/audit-log.md +++ b/website/docs/docs/cloud/manage-access/audit-log.md @@ -3,6 +3,8 @@ title: "The audit log for dbt Cloud Enterprise" id: audit-log description: "You can troubleshoot possible issues and provide security audits by reviewing event activity in your organization." sidebar_label: "Audit log" +pagination_next: null +pagination_prev: "docs/cloud/manage-access/about-user-access" --- To review actions performed by people in your organization, dbt provides logs of audited user and system events in real time. The audit log appears as events happen and includes details such as who performed the action, what the action was, and when it was performed. You can use these details to troubleshoot access issues, perform security audits, or analyze specific events. diff --git a/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md b/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md index 04dfbe093c3..1337bf15ff1 100644 --- a/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md +++ b/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md @@ -3,6 +3,8 @@ title: "Users and licenses" description: "Learn how dbt Cloud administrators can use licenses and seats to control access in a dbt Cloud account." id: "seats-and-users" sidebar: "Users and licenses" +pagination_next: "docs/cloud/manage-access/self-service-permissions" +pagination_prev: null --- In dbt Cloud, _licenses_ are used to allocate users to your account. 
There are three different types of licenses in dbt Cloud: diff --git a/website/docs/docs/cloud/manage-access/enterprise-permissions.md b/website/docs/docs/cloud/manage-access/enterprise-permissions.md index 5bf3623b105..bcd6a324e6e 100644 --- a/website/docs/docs/cloud/manage-access/enterprise-permissions.md +++ b/website/docs/docs/cloud/manage-access/enterprise-permissions.md @@ -3,6 +3,7 @@ title: "Enterprise permissions" id: "enterprise-permissions" description: "Permission sets for Enterprise plans." hide_table_of_contents: true #For the sake of the tables on this page +pagination_next: null --- import Permissions from '/snippets/_enterprise-permissions-table.md'; diff --git a/website/docs/docs/cloud/manage-access/set-up-bigquery-oauth.md b/website/docs/docs/cloud/manage-access/set-up-bigquery-oauth.md index 516a340c951..cb02bdb0417 100644 --- a/website/docs/docs/cloud/manage-access/set-up-bigquery-oauth.md +++ b/website/docs/docs/cloud/manage-access/set-up-bigquery-oauth.md @@ -2,6 +2,7 @@ title: "Set up BigQuery OAuth" description: "Learn how dbt Cloud administrators can use licenses and seats to control access in a dbt Cloud account." id: "set-up-bigquery-oauth" +pagination_next: null --- :::info Enterprise Feature diff --git a/website/docs/docs/cloud/manage-access/sso-overview.md b/website/docs/docs/cloud/manage-access/sso-overview.md index 6e9eb3d945b..f613df7907e 100644 --- a/website/docs/docs/cloud/manage-access/sso-overview.md +++ b/website/docs/docs/cloud/manage-access/sso-overview.md @@ -1,7 +1,8 @@ --- title: "Single sign-on (SSO) Overview" id: "sso-overview" - +pagination_next: "docs/cloud/manage-access/set-up-sso-saml-2.0" +pagination_prev: null --- This overview explains how users are provisioned in dbt Cloud via Single Sign-On (SSO). 
diff --git a/website/docs/docs/cloud/secure/databricks-privatelink.md b/website/docs/docs/cloud/secure/databricks-privatelink.md index c136cd8a0f9..a2c9e208459 100644 --- a/website/docs/docs/cloud/secure/databricks-privatelink.md +++ b/website/docs/docs/cloud/secure/databricks-privatelink.md @@ -3,6 +3,7 @@ title: "Configuring Databricks PrivateLink" id: databricks-privatelink description: "Configuring PrivateLink for Databricks" sidebar_label: "PrivateLink for Databricks" +pagination_next: null --- The following steps will walk you through the setup of a Databricks AWS PrivateLink endpoint in the dbt Cloud multi-tenant environment. diff --git a/website/docs/docs/cloud/secure/ip-restrictions.md b/website/docs/docs/cloud/secure/ip-restrictions.md index 237de991c02..093d2a1c876 100644 --- a/website/docs/docs/cloud/secure/ip-restrictions.md +++ b/website/docs/docs/cloud/secure/ip-restrictions.md @@ -3,6 +3,8 @@ title: "Configuring IP restrictions" id: ip-restrictions description: "Configuring IP restrictions to outside traffic from accessing your dbt Cloud environment" sidebar_label: "IP restrictions" +pagination_next: "docs/cloud/secure/about-privatelink" +pagination_prev: null --- import SetUpPages from '/snippets/_available-tiers-iprestrictions.md'; diff --git a/website/docs/docs/collaborate/cloud-build-and-view-your-docs.md b/website/docs/docs/collaborate/cloud-build-and-view-your-docs.md index 36f4781bfde..a54b8fcdc2b 100644 --- a/website/docs/docs/collaborate/cloud-build-and-view-your-docs.md +++ b/website/docs/docs/collaborate/cloud-build-and-view-your-docs.md @@ -2,6 +2,7 @@ title: "Build and view your docs with dbt Cloud" id: "build-and-view-your-docs" description: "Automatically generate project documentation as you run jobs." +pagination_next: null --- dbt enables you to generate documentation for your project and data warehouse, and renders the documentation in a website. For more information, see [Documentation](/docs/collaborate/documentation). 
diff --git a/website/docs/docs/collaborate/documentation.md b/website/docs/docs/collaborate/documentation.md index 429b5187152..0fa00c7cca2 100644 --- a/website/docs/docs/collaborate/documentation.md +++ b/website/docs/docs/collaborate/documentation.md @@ -2,6 +2,8 @@ title: "About documentation" description: "Learn how good documentation for your dbt models helps stakeholders discover and understand your datasets." id: "documentation" +pagination_next: "docs/collaborate/build-and-view-your-docs" +pagination_prev: null --- ## Related documentation diff --git a/website/docs/docs/collaborate/explore-projects.md b/website/docs/docs/collaborate/explore-projects.md index a4c914259ef..ae91929fbbc 100644 --- a/website/docs/docs/collaborate/explore-projects.md +++ b/website/docs/docs/collaborate/explore-projects.md @@ -2,6 +2,8 @@ title: "Explore your dbt projects (beta)" sidebar_label: "Explore dbt projects (beta)" description: "Learn about dbt Explorer and how to interact with it to understand, improve, and leverage your data pipelines." +pagination_next: null +pagination_prev: null --- With dbt Explorer, you can view your project's [resources](/docs/build/projects) (such as models, tests, and metrics) and their lineage to gain a better understanding of its latest production state. Navigate and manage your projects within dbt Cloud to help your data consumers discover and leverage your dbt resources. 
diff --git a/website/docs/docs/collaborate/git-version-control.md b/website/docs/docs/collaborate/git-version-control.md index 4444f381bb5..f5d4263ab97 100644 --- a/website/docs/docs/collaborate/git-version-control.md +++ b/website/docs/docs/collaborate/git-version-control.md @@ -3,6 +3,8 @@ title: "About git" id: git-version-control description: "Git overview" sidebar_label: "About git" +pagination_next: "docs/collaborate/git/version-control-basics" +pagination_prev: null --- A [version control](https://en.wikipedia.org/wiki/Version_control) system allows you and your teammates to work collaboratively, safely, and simultaneously on a single project. Version control helps you track all the code changes made in your dbt project. diff --git a/website/docs/docs/collaborate/git/merge-conflicts.md b/website/docs/docs/collaborate/git/merge-conflicts.md index b109cacb511..c3c19b1e2a1 100644 --- a/website/docs/docs/collaborate/git/merge-conflicts.md +++ b/website/docs/docs/collaborate/git/merge-conflicts.md @@ -1,6 +1,7 @@ --- title: "Merge conflicts" id: "merge-conflicts" +pagination_next: null --- [Merge conflicts](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts) in the [dbt Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) often occur when multiple users are simultaneously making edits to the same section in the same file. This makes it difficult for Git to decide what changes to incorporate in the final merge. 
diff --git a/website/docs/docs/collaborate/govern/about-model-governance.md b/website/docs/docs/collaborate/govern/about-model-governance.md index efeb2836bc6..bbc430845d2 100644 --- a/website/docs/docs/collaborate/govern/about-model-governance.md +++ b/website/docs/docs/collaborate/govern/about-model-governance.md @@ -2,6 +2,8 @@ title: "About model governance" id: about-model-governance description: "Information about new features related to model governance" +pagination_next: "docs/collaborate/govern/model-access" +pagination_prev: null --- diff --git a/website/docs/docs/collaborate/govern/project-dependencies.md b/website/docs/docs/collaborate/govern/project-dependencies.md index 1dbc967e74e..6108c890ac1 100644 --- a/website/docs/docs/collaborate/govern/project-dependencies.md +++ b/website/docs/docs/collaborate/govern/project-dependencies.md @@ -3,6 +3,7 @@ title: "Project dependencies" id: project-dependencies sidebar_label: "Project dependencies" description: "Reference public models across dbt projects" +pagination_next: null --- :::caution Closed Beta - dbt Cloud Enterprise diff --git a/website/docs/docs/contribute-core-adapters.md b/website/docs/docs/contribute-core-adapters.md index 6e66a5d28ff..553361ee1a2 100644 --- a/website/docs/docs/contribute-core-adapters.md +++ b/website/docs/docs/contribute-core-adapters.md @@ -1,6 +1,7 @@ --- title: "Contribute to adapters" id: "contribute-core-adapters" +pagination_next: null --- The dbt Community exists to allow analytics practitioners share their knowledge, help others and collectively to drive forward the discipline of analytics engineering. There are opportunities here for everyone to contribute whether you're at the beginning your analytics engineering journey or you are a seasoned data professional. 
diff --git a/website/docs/docs/core/about-core-setup.md b/website/docs/docs/core/about-core-setup.md index 0408e529b2d..d119a10f8b4 100644 --- a/website/docs/docs/core/about-core-setup.md +++ b/website/docs/docs/core/about-core-setup.md @@ -3,6 +3,8 @@ title: About dbt Core setup id: about-core-setup description: "Configuration settings for dbt Core." sidebar_label: "About dbt Core setup" +pagination_next: "docs/core/about-the-cli" +pagination_prev: null --- dbt Core is an [open-source](https://github.com/dbt-labs/dbt-core) tool that enables data teams to transform data using analytics engineering best practices. You can install dbt locally in your environment and use dbt Core on the command line. It can communicate with databases through adapters. diff --git a/website/docs/docs/core/connect-data-platform/about-core-connections.md b/website/docs/docs/core/connect-data-platform/about-core-connections.md index 802e197514c..a85a32cc031 100644 --- a/website/docs/docs/core/connect-data-platform/about-core-connections.md +++ b/website/docs/docs/core/connect-data-platform/about-core-connections.md @@ -4,6 +4,8 @@ id: "about-core-connections" description: "Information about data platform connections in dbt Core" sidebar_label: "About data platform connections in dbt Core" hide_table_of_contents: true +pagination_next: "docs/core/connect-data-platform/profiles.yml" +pagination_prev: null --- dbt Core can connect with a variety of data platform providers including: diff --git a/website/docs/docs/core/connect-data-platform/upsolver-setup.md b/website/docs/docs/core/connect-data-platform/upsolver-setup.md index 68cfa3045cd..6b2f410fc07 100644 --- a/website/docs/docs/core/connect-data-platform/upsolver-setup.md +++ b/website/docs/docs/core/connect-data-platform/upsolver-setup.md @@ -14,6 +14,7 @@ meta: slack_channel_link: 'https://join.slack.com/t/upsolvercommunity/shared_invite/zt-1zo1dbyys-hj28WfaZvMh4Z4Id3OkkhA' platform_name: 'Upsolver' config_page: 
'/reference/resource-configs/upsolver-configs' +pagination_next: null ---

Overview of {frontMatter.meta.pypi_package}

diff --git a/website/docs/docs/core/dbt-core-environments.md b/website/docs/docs/core/dbt-core-environments.md index 5daf17bddf9..c7f340557fd 100644 --- a/website/docs/docs/core/dbt-core-environments.md +++ b/website/docs/docs/core/dbt-core-environments.md @@ -1,6 +1,7 @@ --- title: "dbt Core environments" id: "dbt-core-environments" +pagination_next: "docs/running-a-dbt-project/run-your-dbt-projects" --- dbt makes it easy to maintain separate production and development environments through the use of [targets](/reference/dbt-jinja-functions/target.md) within a [profile](/docs/core/connect-data-platform/profiles.yml). A typical profile, when using dbt locally (for example, running from your command line), will have a target named `dev` and have this set as the default. This means that while making changes, your objects will be built in your _development_ target without affecting production queries made by your end users. Once you are confident in your changes, you can deploy the code to _production_, by running your dbt project with a _prod_ target. diff --git a/website/docs/docs/core/installation-overview.md b/website/docs/docs/core/installation-overview.md index f1fdb800fdf..47b4269eb15 100644 --- a/website/docs/docs/core/installation-overview.md +++ b/website/docs/docs/core/installation-overview.md @@ -2,6 +2,8 @@ title: "About installing dbt" id: "installation" description: "You can install dbt Core using a few different tested methods." +pagination_next: "docs/core/homebrew-install" +pagination_prev: null --- You can install dbt Core on the command line by using one of these methods: diff --git a/website/docs/docs/core/source-install.md b/website/docs/docs/core/source-install.md index be9918223fe..42086159c03 100644 --- a/website/docs/docs/core/source-install.md +++ b/website/docs/docs/core/source-install.md @@ -1,6 +1,7 @@ --- title: "Install from source" description: "You can install dbt Core from its GitHub code source." 
+pagination_next: null --- dbt Core and almost all of its adapter plugins are open source software. As such, the codebases are freely available to download and build from source. You might install from source if you want the latest code or want to install dbt from a specific commit. This might be helpful when you are contributing changes, or if you want to debug a past change. diff --git a/website/docs/docs/dbt-cloud-apis/admin-cloud-api.md b/website/docs/docs/dbt-cloud-apis/admin-cloud-api.md index 8a5712f40df..168ec0c80f4 100644 --- a/website/docs/docs/dbt-cloud-apis/admin-cloud-api.md +++ b/website/docs/docs/dbt-cloud-apis/admin-cloud-api.md @@ -1,6 +1,7 @@ --- title: "dbt Cloud Administrative API" id: "admin-cloud-api" +pagination_next: "docs/dbt-cloud-apis/discovery-api" --- The dbt Cloud Administrative API is enabled by default for [Team and Enterprise plans](https://www.getdbt.com/pricing/). It can be used to: diff --git a/website/docs/docs/dbt-cloud-apis/apis-overview.md b/website/docs/docs/dbt-cloud-apis/apis-overview.md index b7d722747d8..eef64992af9 100644 --- a/website/docs/docs/dbt-cloud-apis/apis-overview.md +++ b/website/docs/docs/dbt-cloud-apis/apis-overview.md @@ -2,6 +2,8 @@ title: "APIs Overview" description: "Learn how dbt accounts on the Team and Enterprise plans can query the dbt Cloud APIs." id: "overview" +pagination_next: "docs/dbt-cloud-apis/user-tokens" +pagination_prev: null --- ## Overview diff --git a/website/docs/docs/dbt-cloud-apis/discovery-api.md b/website/docs/docs/dbt-cloud-apis/discovery-api.md index e4441aa55a2..747128cf7bc 100644 --- a/website/docs/docs/dbt-cloud-apis/discovery-api.md +++ b/website/docs/docs/dbt-cloud-apis/discovery-api.md @@ -1,5 +1,6 @@ --- title: "About the Discovery API" +pagination_next: "docs/dbt-cloud-apis/discovery-use-cases-and-examples" --- Every time dbt Cloud runs a project, it generates and stores information about the project. 
The metadata includes details about your project’s models, sources, and other nodes along with their execution results. With the dbt Cloud Discovery API, you can query this comprehensive information to gain a better understanding of your DAG and the data it produces. diff --git a/website/docs/docs/dbt-cloud-apis/discovery-querying.md b/website/docs/docs/dbt-cloud-apis/discovery-querying.md index ba1365e632b..35c092adb4b 100644 --- a/website/docs/docs/dbt-cloud-apis/discovery-querying.md +++ b/website/docs/docs/dbt-cloud-apis/discovery-querying.md @@ -2,6 +2,7 @@ title: "Query the Discovery API" id: "discovery-querying" sidebar_label: "Query the Discovery API" +pagination_next: "docs/dbt-cloud-apis/discovery-schema-environment" --- The Discovery API supports ad-hoc queries and integrations. If you are new to the API, refer to [About the Discovery API](/docs/dbt-cloud-apis/discovery-api) for an introduction. diff --git a/website/docs/docs/dbt-cloud-apis/schema-discovery-job.mdx b/website/docs/docs/dbt-cloud-apis/schema-discovery-job.mdx index bb30786e19d..8b02c5601ad 100644 --- a/website/docs/docs/dbt-cloud-apis/schema-discovery-job.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-discovery-job.mdx @@ -2,6 +2,8 @@ title: "Job object schema" sidebar_label: "Job" id: "discovery-schema-job" +pagination_next: "docs/dbt-cloud-apis/discovery-schema-job-model" +pagination_prev: null --- import { QueryArgsTable, SchemaTable } from "./schema"; diff --git a/website/docs/docs/dbt-cloud-apis/sl-api-overview.md b/website/docs/docs/dbt-cloud-apis/sl-api-overview.md index 42416765904..a401dc54094 100644 --- a/website/docs/docs/dbt-cloud-apis/sl-api-overview.md +++ b/website/docs/docs/dbt-cloud-apis/sl-api-overview.md @@ -4,6 +4,7 @@ id: sl-api-overview description: "Integrate and query metrics and dimensions in downstream tools using the Semantic Layer APIs" tags: [Semantic Layer, API] hide_table_of_contents: true +pagination_next: "docs/dbt-cloud-apis/sl-jdbc" --- diff --git 
a/website/docs/docs/dbt-cloud-apis/sl-manifest.md b/website/docs/docs/dbt-cloud-apis/sl-manifest.md index 47304accea3..6ecac495869 100644 --- a/website/docs/docs/dbt-cloud-apis/sl-manifest.md +++ b/website/docs/docs/dbt-cloud-apis/sl-manifest.md @@ -4,6 +4,7 @@ id: sl-manifest description: "Learn about the semantic manifest.json file and how you can use artifacts to gain insights about your dbt Semantic Layer." tags: [Semantic Layer, APIs] sidebar_label: "Semantic manifest" +pagination_next: null --- diff --git a/website/docs/docs/dbt-cloud-apis/user-tokens.md b/website/docs/docs/dbt-cloud-apis/user-tokens.md index e56d8b2f974..11bb4f54dec 100644 --- a/website/docs/docs/dbt-cloud-apis/user-tokens.md +++ b/website/docs/docs/dbt-cloud-apis/user-tokens.md @@ -1,6 +1,7 @@ --- title: "User tokens" id: "user-tokens" +pagination_next: "docs/dbt-cloud-apis/service-tokens" --- ## User API tokens diff --git a/website/docs/docs/dbt-cloud-environments.md b/website/docs/docs/dbt-cloud-environments.md index f61ec5ef72b..55bec30be92 100644 --- a/website/docs/docs/dbt-cloud-environments.md +++ b/website/docs/docs/dbt-cloud-environments.md @@ -2,6 +2,7 @@ title: "dbt Cloud environments" id: "dbt-cloud-environments" description: "Learn about dbt Cloud's development environment to execute your project in the IDE" +pagination_next: null --- An environment determines how dbt Cloud will execute your project in both the dbt Cloud IDE (for development) and scheduled jobs (for deployment). 
diff --git a/website/docs/docs/dbt-support.md b/website/docs/docs/dbt-support.md index f63e016b03e..513d5fff588 100644 --- a/website/docs/docs/dbt-support.md +++ b/website/docs/docs/dbt-support.md @@ -1,6 +1,8 @@ --- title: "dbt support" id: "dbt-support" +pagination_next: null +pagination_prev: null --- ## dbt Core support diff --git a/website/docs/docs/dbt-versions/core-versions.md b/website/docs/docs/dbt-versions/core-versions.md index 2a5ce6daeb7..5e8e437f0b1 100644 --- a/website/docs/docs/dbt-versions/core-versions.md +++ b/website/docs/docs/dbt-versions/core-versions.md @@ -2,6 +2,8 @@ title: "About dbt Core versions" id: "core" description: "Learn about semantic versioning for dbt Core, and how long those versions are supported." +pagination_next: "docs/dbt-versions/upgrade-core-in-cloud" +pagination_prev: null --- dbt Core releases follow [semantic versioning](https://semver.org/) guidelines. For more on how we use semantic versions, see [How dbt Core uses semantic versioning](#how-dbt-core-uses-semantic-versioning). diff --git a/website/docs/docs/dbt-versions/experimental-features.md b/website/docs/docs/dbt-versions/experimental-features.md index 5ed0cf037ca..a621bd4ac44 100644 --- a/website/docs/docs/dbt-versions/experimental-features.md +++ b/website/docs/docs/dbt-versions/experimental-features.md @@ -3,6 +3,7 @@ title: "Preview new and experimental features in dbt Cloud" id: "experimental-features" sidebar_label: "Preview new dbt Cloud features" description: "Gain early access to many new dbt Labs experimental features by enabling this in your profile." +pagination_next: null --- dbt Labs often tests experimental features before deciding to continue on the [Product lifecycle](https://docs.getdbt.com/docs/dbt-versions/product-lifecycles#dbt-cloud). 
diff --git a/website/docs/docs/dbt-versions/release-notes.md b/website/docs/docs/dbt-versions/release-notes.md index db25af163ae..6f7be90e60d 100644 --- a/website/docs/docs/dbt-versions/release-notes.md +++ b/website/docs/docs/dbt-versions/release-notes.md @@ -2,6 +2,8 @@ title: "About dbt Cloud Release Notes" id: "dbt-cloud-release-notes" description: "Release notes for dbt Cloud" +pagination_next: null +pagination_prev: null --- dbt provides release notes for dbt Cloud so you can see recent and historical changes. Generally, you'll see release notes for these changes: diff --git a/website/docs/docs/deploy/deployment-overview.md b/website/docs/docs/deploy/deployment-overview.md index 5883ecaa3f1..dc464dbd0ac 100644 --- a/website/docs/docs/deploy/deployment-overview.md +++ b/website/docs/docs/deploy/deployment-overview.md @@ -4,6 +4,8 @@ id: "deployments" sidebar: "Use dbt Cloud's capabilities to seamlessly run a dbt job in production." hide_table_of_contents: true tags: ["scheduler"] +pagination_next: "docs/deploy/job-scheduler" +pagination_prev: null --- Use dbt Cloud's capabilities to seamlessly run a dbt job in production or staging environments. Rather than run dbt commands manually from the command line, you can leverage the [dbt Cloud's in-app scheduling](/docs/deploy/job-scheduler) to automate how and when you execute dbt. 
diff --git a/website/docs/docs/deploy/deployment-tools.md b/website/docs/docs/deploy/deployment-tools.md index 80622880c2c..6fba9caf6e8 100644 --- a/website/docs/docs/deploy/deployment-tools.md +++ b/website/docs/docs/deploy/deployment-tools.md @@ -2,6 +2,7 @@ title: "Integrate with other orchestration tools" id: "deployment-tools" sidebar_label: "Integrate with other tools" +pagination_next: null --- Alongside [dbt Cloud](/docs/deploy/jobs), discover other ways to schedule and run your dbt jobs with the help of tools such as Airflow, Prefect, Dagster, automation server, Cron, and Azure Data Factory (ADF), diff --git a/website/docs/docs/deploy/jobs.md b/website/docs/docs/deploy/jobs.md index e8ca864d65f..92390907cd0 100644 --- a/website/docs/docs/deploy/jobs.md +++ b/website/docs/docs/deploy/jobs.md @@ -1,8 +1,9 @@ --- title: "Jobs in dbt Cloud" -sidebar_label: "Jobs" +sidebar_label: "About Jobs" description: "Learn about deploy jobs and continuous integration (CI) jobs in dbt Cloud and what their differences are." tags: [scheduler] +pagination_next: "docs/deploy/deploy-jobs" --- In dbt Cloud, there are two types of jobs: diff --git a/website/docs/docs/deploy/monitor-jobs.md b/website/docs/docs/deploy/monitor-jobs.md index c4c5fcb73a5..aeff14a10e3 100644 --- a/website/docs/docs/deploy/monitor-jobs.md +++ b/website/docs/docs/deploy/monitor-jobs.md @@ -3,6 +3,7 @@ title: "Monitor jobs and alerts" id: "monitor-jobs" description: "Monitor your dbt Cloud job and set up alerts to ensure seamless orchestration and optimize your data transformations" tags: ["scheduler"] +pagination_next: "docs/deploy/run-visibility" --- Monitor your dbt Cloud jobs to help identify improvement and set up alerts to proactively alert the right people or team. 
diff --git a/website/docs/docs/environments-in-dbt.md b/website/docs/docs/environments-in-dbt.md index 54eaa68f667..86e41b18ae4 100644 --- a/website/docs/docs/environments-in-dbt.md +++ b/website/docs/docs/environments-in-dbt.md @@ -2,6 +2,7 @@ title: "About environments" id: "environments-in-dbt" hide_table_of_contents: true +pagination_next: null --- In software engineering, environments are used to enable engineers to develop and test code without impacting the users of their software. Typically, there are two types of environments in dbt: diff --git a/website/docs/docs/introduction.md b/website/docs/docs/introduction.md index 8ef11fcd17f..1852cca90d5 100644 --- a/website/docs/docs/introduction.md +++ b/website/docs/docs/introduction.md @@ -1,6 +1,8 @@ --- title: "What is dbt?" id: "introduction" +pagination_next: null +pagination_prev: null --- diff --git a/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md b/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md index 32b149a2ccb..fe4a748015a 100644 --- a/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md +++ b/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md @@ -1,6 +1,7 @@ --- title: "Run your dbt projects" id: "run-your-dbt-projects" +pagination_prev: null --- You can run your dbt projects with [dbt Cloud](/docs/cloud/about-cloud/about-dbt-cloud) and [dbt Core](https://github.com/dbt-labs/dbt-core). dbt Cloud is a hosted application where you can develop directly from a web browser. dbt Core is an open source project where you can develop from the command line. 
diff --git a/website/docs/docs/running-a-dbt-project/using-threads.md b/website/docs/docs/running-a-dbt-project/using-threads.md index 519ce8aab81..a2b8231ff3b 100644 --- a/website/docs/docs/running-a-dbt-project/using-threads.md +++ b/website/docs/docs/running-a-dbt-project/using-threads.md @@ -3,7 +3,7 @@ title: "Using threads" id: "using-threads" sidebar_label: "Use threads" description: "Understand what threads mean and how to use them." - +pagination_next: null --- When dbt runs, it creates a directed acyclic graph (DAG) of links between models. The number of threads represents the maximum number of paths through the graph dbt may work on at once – increasing the number of threads can minimize the run time of your project. diff --git a/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md b/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md index 3bbc11cea3f..23d4fd43443 100644 --- a/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md +++ b/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md @@ -6,6 +6,8 @@ sidebar_label: "Get started with the dbt Semantic Layer" tags: [Semantic Layer] meta: api_name: dbt Semantic Layer APIs +pagination_next: "docs/use-dbt-semantic-layer/setup-sl" +pagination_prev: null --- diff --git a/website/docs/docs/use-dbt-semantic-layer/sl-architecture.md b/website/docs/docs/use-dbt-semantic-layer/sl-architecture.md index 89cd9bc6ddc..6708959ca8c 100644 --- a/website/docs/docs/use-dbt-semantic-layer/sl-architecture.md +++ b/website/docs/docs/use-dbt-semantic-layer/sl-architecture.md @@ -4,6 +4,7 @@ id: sl-architecture description: "dbt Semantic Layer product architecture and related questions." 
sidebar_label: "Architecture" tags: [Semantic Layer] +pagination_next: null --- import NewSLChanges from '/snippets/_new-sl-changes.md'; diff --git a/website/sidebars.js b/website/sidebars.js index d287ebe2cf5..47b5dc16ea5 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -18,9 +18,9 @@ const sidebarSettings = { { type: "category", label: "About dbt Cloud", - link: { type: "doc", id: "docs/cloud/about-cloud/about-dbt-cloud" }, + link: { type: "doc", id: "docs/cloud/about-cloud/dbt-cloud-features" }, items: [ - "docs/cloud/about-cloud/about-dbt-cloud", + "docs/cloud/about-cloud/dbt-cloud-features", "docs/cloud/about-cloud/architecture", "docs/cloud/about-cloud/tenancy", "docs/cloud/about-cloud/regions-ip-addresses", @@ -238,7 +238,7 @@ const sidebarSettings = { type: "category", label: "Build your DAG", collapsed: true, - link: { type: "doc", id: "docs/build/sources" }, + link: { type: "doc", id: "docs/build/models" }, items: [ { type: "category", @@ -267,12 +267,14 @@ const sidebarSettings = { link: { type: "doc", id: "docs/build/build-metrics-intro" }, collapsed: true, items: [ + "docs/build/build-metrics-intro", "docs/build/sl-getting-started", { type: "category", label: "About MetricFlow", link: { type: "doc", id: "docs/build/about-metricflow" }, items: [ + "docs/build/about-metricflow", "docs/build/join-logic", "docs/build/validation", "docs/build/metricflow-time-spine", @@ -284,6 +286,7 @@ const sidebarSettings = { label: "Semantic models", link: { type: "doc", id: "docs/build/semantic-models" }, items: [ + "docs/build/semantic-models", "docs/build/dimensions", "docs/build/entities", "docs/build/measures" @@ -294,6 +297,7 @@ const sidebarSettings = { label: "Metrics", link: { type: "doc", id: "docs/build/metrics-overview" }, items: [ + "docs/build/metrics-overview", "docs/build/cumulative", "docs/build/derived", "docs/build/ratio", @@ -306,7 +310,7 @@ const sidebarSettings = { type: "category", label: "Enhance your models", collapsed: true, - link: 
{ type: "doc", id: "docs/build/tests" }, + link: { type: "doc", id: "docs/build/materializations" }, items: [ "docs/build/materializations", "docs/build/incremental-models", @@ -316,7 +320,7 @@ const sidebarSettings = { type: "category", label: "Enhance your code", collapsed: true, - link: { type: "doc", id: "docs/build/jinja-macros" }, + link: { type: "doc", id: "docs/build/project-variables" }, items: [ "docs/build/project-variables", "docs/build/environment-variables", @@ -346,6 +350,7 @@ const sidebarSettings = { collapsed: true, link: { type: "doc", id: "docs/deploy/deployments" }, items: [ + "docs/deploy/deployments", "docs/deploy/job-scheduler", "docs/deploy/deploy-environments", "docs/deploy/continuous-integration", @@ -354,6 +359,7 @@ const sidebarSettings = { label: "Jobs", link: { type: "doc", id: "docs/deploy/jobs" }, items: [ + "docs/deploy/jobs", "docs/deploy/deploy-jobs", "docs/deploy/ci-jobs", "docs/deploy/job-commands", @@ -364,6 +370,7 @@ const sidebarSettings = { label: "Monitor jobs and alerts", link: { type: "doc", id: "docs/deploy/monitor-jobs" }, items: [ + "docs/deploy/monitor-jobs", "docs/deploy/run-visibility", "docs/deploy/job-notifications", "docs/deploy/webhooks", @@ -378,6 +385,7 @@ const sidebarSettings = { { type: "category", label: "Collaborate with others", + link: { type: "doc", id: "docs/collaborate/explore-projects" }, items: [ "docs/collaborate/explore-projects", { @@ -423,7 +431,7 @@ const sidebarSettings = { type: "category", label: "Use the dbt Semantic Layer", collapsed: true, - link: { type: "doc", id: "docs/use-dbt-semantic-layer/quickstart-semantic-layer" }, + link: { type: "doc", id: "docs/use-dbt-semantic-layer/quickstart-sl" }, items: [ "docs/use-dbt-semantic-layer/quickstart-sl", "docs/use-dbt-semantic-layer/setup-sl", @@ -452,6 +460,7 @@ const sidebarSettings = { label: "Administrative API", link: { type: "doc", id: "docs/dbt-cloud-apis/admin-cloud-api" }, items: [ + "docs/dbt-cloud-apis/admin-cloud-api", { type: 
"link", label: "API v2 (legacy docs)", @@ -483,11 +492,13 @@ const sidebarSettings = { label: "Schema", link: { type: "doc", id: "docs/dbt-cloud-apis/discovery-schema-environment" }, items: [ + "docs/dbt-cloud-apis/discovery-schema-environment", { type: "category", label: "Job", link: { type: "doc", id: "docs/dbt-cloud-apis/discovery-schema-job" }, items: [ + "docs/dbt-cloud-apis/discovery-schema-job", "docs/dbt-cloud-apis/discovery-schema-job-model", "docs/dbt-cloud-apis/discovery-schema-job-models", "docs/dbt-cloud-apis/discovery-schema-job-metric", @@ -507,11 +518,6 @@ const sidebarSettings = { ], }, { - type: "category", - label: "Environment", - link: { type: "doc", id: "docs/dbt-cloud-apis/discovery-schema-environment" }, - items: [ - { type: "category", label: "Applied", items: [ @@ -525,9 +531,7 @@ const sidebarSettings = { // items: [ // // insert pages here // ], - // }, - ], - }, + // }, ], }, ], @@ -537,6 +541,7 @@ const sidebarSettings = { label: "Semantic Layer APIs", link: { type: "doc", id: "docs/dbt-cloud-apis/sl-api-overview" }, items: [ + "docs/dbt-cloud-apis/sl-api-overview", "docs/dbt-cloud-apis/sl-jdbc", "docs/dbt-cloud-apis/sl-graphql", "docs/dbt-cloud-apis/sl-manifest", From e51b0ac1993af79c7ac80e8d1ef2e944b769f995 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 5 Oct 2023 15:43:52 -0400 Subject: [PATCH 10/43] Link fixes --- website/docs/docs/about-setup.md | 2 +- website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md | 2 +- website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md | 2 +- website/docs/docs/deploy/job-scheduler.md | 2 +- website/docs/docs/introduction.md | 2 +- .../docs/docs/running-a-dbt-project/run-your-dbt-projects.md | 2 +- website/docs/reference/references-overview.md | 2 ++ 7 files changed, 8 insertions(+), 6 deletions(-) diff --git a/website/docs/docs/about-setup.md b/website/docs/docs/about-setup.md index b5ade88eae4..e3042db3309 100644 --- 
a/website/docs/docs/about-setup.md +++ b/website/docs/docs/about-setup.md @@ -13,7 +13,7 @@ dbt compiles and runs your analytics code against your data platform, enabling y **dbt Core** is an open-source command line tool that can be installed locally in your environment, and communication with databases is facilitated through adapters. -If you're not sure which is the right solution for you, read our [What is dbt?](/docs/introduction) and our [dbt Cloud features](/docs/cloud/about-cloud/about-dbt-cloud) articles to help you decide. If you still have questions, don't hesitate to [contact us](https://www.getdbt.com/contact/). +If you're not sure which is the right solution for you, read our [What is dbt?](/docs/introduction) and our [dbt Cloud features](/docs/cloud/about-cloud/dbt-cloud-features) articles to help you decide. If you still have questions, don't hesitate to [contact us](https://www.getdbt.com/contact/). To begin configuring dbt now, select the option that is right for you. diff --git a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md index 7126361cb6b..ab9c3b9f956 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-tips.md @@ -60,5 +60,5 @@ There are default keyboard shortcuts that can help make development more product ## Related docs - [Quickstart guide](/quickstarts) -- [About dbt Cloud](/docs/cloud/about-cloud/about-dbt-cloud) +- [About dbt Cloud](/docs/cloud/about-cloud/dbt-cloud-features) - [Develop in the Cloud](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) diff --git a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md index 400f988b72d..f0c011659f9 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md @@ -134,7 +134,7 @@ The dbt Cloud IDE makes it 
possible to [build and view](/docs/collaborate/build- ## Related docs -- [dbt Cloud features](/docs/cloud/about-cloud/about-dbt-cloud) +- [dbt Cloud features](/docs/cloud/about-cloud/dbt-cloud-features) - [IDE user interface](/docs/cloud/dbt-cloud-ide/ide-user-interface) - [Version control basics](/docs/collaborate/git/version-control-basics) - [dbt Commands](/reference/dbt-commands) diff --git a/website/docs/docs/deploy/job-scheduler.md b/website/docs/docs/deploy/job-scheduler.md index 8ade670f1cc..fba76f677a7 100644 --- a/website/docs/docs/deploy/job-scheduler.md +++ b/website/docs/docs/deploy/job-scheduler.md @@ -85,7 +85,7 @@ The scheduler prevents queue clog by canceling runs that aren't needed, ensuring To prevent over-scheduling, users will need to take action by either refactoring the job so it runs faster or modifying its [schedule](/docs/deploy/deploy-jobs#schedule-days). ## Related docs -- [dbt Cloud architecture](/docs/cloud/about-cloud/architecture#about-dbt-cloud-architecture) +- [dbt Cloud architecture](/docs/cloud/about-cloud/architecture#dbt-cloud-features-architecture) - [Job commands](/docs/deploy/job-commands) - [Job notifications](/docs/deploy/job-notifications) - [Webhooks](/docs/deploy/webhooks) diff --git a/website/docs/docs/introduction.md b/website/docs/docs/introduction.md index 1852cca90d5..8604146b5f4 100644 --- a/website/docs/docs/introduction.md +++ b/website/docs/docs/introduction.md @@ -37,7 +37,7 @@ You can learn about plans and pricing on [www.getdbt.com](https://www.getdbt.com ### dbt Cloud -dbt Cloud is the fastest and most reliable way to deploy dbt. Develop, test, schedule, and investigate data models all in one web-based UI. Learn more about [dbt Cloud features](/docs/cloud/about-cloud/about-dbt-cloud) and try one of the [dbt Cloud quickstarts](/quickstarts). +dbt Cloud is the fastest and most reliable way to deploy dbt. Develop, test, schedule, and investigate data models all in one web-based UI. 
Learn more about [dbt Cloud features](/docs/cloud/about-cloud/dbt-cloud-features) and try one of the [dbt Cloud quickstarts](/quickstarts). ### dbt Core diff --git a/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md b/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md index fe4a748015a..740fe5cc3fd 100644 --- a/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md +++ b/website/docs/docs/running-a-dbt-project/run-your-dbt-projects.md @@ -3,7 +3,7 @@ title: "Run your dbt projects" id: "run-your-dbt-projects" pagination_prev: null --- -You can run your dbt projects with [dbt Cloud](/docs/cloud/about-cloud/about-dbt-cloud) and [dbt Core](https://github.com/dbt-labs/dbt-core). dbt Cloud is a hosted application where you can develop directly from a web browser. dbt Core is an open source project where you can develop from the command line. +You can run your dbt projects with [dbt Cloud](/docs/cloud/about-cloud/dbt-cloud-features) and [dbt Core](https://github.com/dbt-labs/dbt-core). dbt Cloud is a hosted application where you can develop directly from a web browser. dbt Core is an open source project where you can develop from the command line. Among other features, dbt Cloud provides a development environment to help you build, test, run, and [version control](/docs/collaborate/git-version-control) your project faster. It also includes an easier way to share your [dbt project's documentation](/docs/collaborate/build-and-view-your-docs) with your team. These development tasks are directly built into dbt Cloud for an _integrated development environment_ (IDE). Refer to [Develop in the Cloud](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) for more details. 
diff --git a/website/docs/reference/references-overview.md b/website/docs/reference/references-overview.md index 16afd01607c..85a374c5aa3 100644 --- a/website/docs/reference/references-overview.md +++ b/website/docs/reference/references-overview.md @@ -4,6 +4,8 @@ id: "references-overview" sidebar_label: "About References" description: "Connect dbt to any data platform in dbt Cloud or dbt Core, using a dedicated adapter plugin" hide_table_of_contents: true +pagination_next: null +pagination_prev: null --- The References section contains reference materials for developing with dbt, which includes dbt Cloud and dbt Core. From 35395144c769f40bc9c092c40e913a4fd68969c5 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 12 Oct 2023 16:45:58 -0400 Subject: [PATCH 11/43] Update website/docs/docs/cloud/manage-access/about-access.md --- website/docs/docs/cloud/manage-access/about-access.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/cloud/manage-access/about-access.md b/website/docs/docs/cloud/manage-access/about-access.md index e7c2e24312d..d394c79baa3 100644 --- a/website/docs/docs/cloud/manage-access/about-access.md +++ b/website/docs/docs/cloud/manage-access/about-access.md @@ -2,7 +2,7 @@ title: "About user access in dbt Cloud" description: "Learn how dbt Cloud administrators can use dbt Cloud's permissioning model to control user-level access in a dbt Cloud account." 
id: "about-user-access" -pagination_next: "docs/cloud/manage-access/audit-log" +pagination_next: "docs/cloud/manage-access/seats-and-users" pagination_prev: null --- From a3c37cdeb35c24270208df7da384c23d55710c36 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 12 Oct 2023 16:49:22 -0400 Subject: [PATCH 12/43] Delete website/static/_redirects --- website/static/_redirects | 880 -------------------------------------- 1 file changed, 880 deletions(-) delete mode 100644 website/static/_redirects diff --git a/website/static/_redirects b/website/static/_redirects deleted file mode 100644 index 426d2f8d13f..00000000000 --- a/website/static/_redirects +++ /dev/null @@ -1,880 +0,0 @@ - -## refocus deploy page -/docs/collaborate/environments/environments-in-dbt /docs/environments-in-dbt 301 -/docs/collaborate/environments/dbt-cloud-environments /docs/deploy/dbt-cloud-environments 301 -/docs/collaborate/environments/dbt-core-environments /docs/core/dbt-core-environments 301 - -/docs/cloud/manage-access/licenses-and-groups /docs/cloud/manage-access/about-user-access 301 - -/docs/deploy/cloud-ci-job /docs/deploy/continuous-integration 301 -## Breadcrumb name changes - -/docs/cloud/about-cloud/dbt-cloud-features /docs/cloud/about-cloud/about-dbt-cloud 301 - -## quickstarts redirect again - -/docs/quickstarts/dbt-cloud/bigquery /quickstarts/bigquery 301 -/docs/quickstarts/dbt-cloud/databricks /quickstarts/databricks 301 -/docs/quickstarts/dbt-cloud/redshift /quickstarts/redshift 301 -/docs/quickstarts/dbt-cloud/snowflake /quickstarts/snowflake 301 -/docs/quickstarts/dbt-cloud/starburst-galaxy /quickstarts/starburst-galaxy 301 -/docs/quickstarts/dbt-core/codespace /quickstarts/codespace 301 -/docs/quickstarts/dbt-core/manual-install /quickstarts/manual-install 301 - -/docs/deploy/project-state /reference/node-selection/syntax 301 -/reference/global-configs /reference/global-configs/about-global-configs 301 - 
-/docs/quickstarts/overview /quickstarts 301 - -## supported data platform - -/docs/supported-data-platforms#verified-adapters /docs/supported-data-platforms 301 -/docs/supported-data-platforms#community-adapters /docs/community-adapters 301 -/docs/supported-data-platforms#adapter-installation /docs/connect-adapters 301 -/docs/supported-data-platforms#adapter-taxonomy /docs/supported-data-platforms 301 -/docs/supported-data-platforms#verified-by-dbt-labs /docs/supported-data-platforms 301 -/docs/supported-data-platforms#maintainers /docs/connect-adapters#maintainers 301 -/docs/supported-data-platforms#contributing-to-dbt-core-adapters /docs/contribute-core-adapters 301 -/docs/supported-data-platforms#contributing-to-a-pre-existing-adapter /docs/contribute-core-adapters#contribute-to-a-pre-existing-adapter 301 -/docs/supported-data-platforms#creating-a-new-adapter /docs/contribute-core-adapters#create-a-new-adapter 301 - -## dbt core setup changes - -/docs/core/connection-profiles /docs/core/connect-data-platform/connection-profiles 301 -/reference/warehouse-setups/bigquery-setup /docs/core/connect-data-platform/bigquery-setup 301 -/reference/warehouse-setups/postgres-setup /docs/core/connect-data-platform/postgres-setup 301 -/reference/warehouse-setups/redshift-setup /docs/core/connect-data-platform/redshift-setup 301 -/reference/warehouse-setups/snowflake-setup /docs/core/connect-data-platform/snowflake-setup 301 -/reference/warehouse-setups/mssql-setup /docs/core/connect-data-platform/mssql-setup 301 -/reference/warehouse-setups/trino-setup /docs/core/connect-data-platform/trino-setup 301 -/reference/warehouse-setups/singlestore-setup /docs/core/connect-data-platform/singlestore-setup 301 -/reference/warehouse-setups/spark-setup /docs/core/connect-data-platform/spark-setup 301 -/reference/warehouse-setups/databricks-setup /docs/core/connect-data-platform/databricks-setup 301 -/reference/warehouse-setups/hive-setup /docs/core/connect-data-platform/hive-setup 301 
-/reference/warehouse-setups/exasol-setup /docs/core/connect-data-platform/exasol-setup 301 -/reference/warehouse-setups/oracle-setup /docs/core/connect-data-platform/oracle-setup 301 -/reference/warehouse-setups/azuresynapse-setup /docs/core/connect-data-platform/azuresynapse-setup 301 -/reference/warehouse-setups/dremio-setup /docs/core/connect-data-platform/dremio-setup 301 -/reference/warehouse-setups/clickhouse-setup /docs/core/connect-data-platform/clickhouse-setup 301 -/reference/warehouse-setups/materialize-setup /docs/core/connect-data-platform/materialize-setup 301 -/reference/warehouse-setups/rockset-setup /docs/core/connect-data-platform/rockset-setup 301 -/reference/warehouse-setups/firebolt-setup /docs/core/connect-data-platform/firebolt-setup 301 -/reference/warehouse-setups/teradata-setup /docs/core/connect-data-platform/teradata-setup 301 -/reference/warehouse-setups/athena-setup /docs/core/connect-data-platform/athena-setup 301 -/reference/warehouse-setups/vertica-setup /docs/core/connect-data-platform/vertica-setup 301 -/reference/warehouse-setups/tidb-setup /docs/core/connect-data-platform/tidb-setup 301 -/reference/warehouse-setups/glue-setup /docs/core/connect-data-platform/glue-setup 301 -/reference/warehouse-setups/mindsdb-setup /docs/core/connect-data-platform/mindsdb-setup 301 -/reference/warehouse-setups/greenplum-setup /docs/core/connect-data-platform/greenplum-setup 301 -/reference/warehouse-setups/impala-setup /docs/core/connect-data-platform/impala-setup 301 -/reference/warehouse-setups/layer-setup /docs/core/connect-data-platform/layer-setup 301 -/reference/warehouse-setups/iomete-setup /docs/core/connect-data-platform/iomete-setup 301 -/reference/warehouse-setups/duckdb-setup /docs/core/connect-data-platform/duckdb-setup 301 -/reference/warehouse-setups/sqlite-setup /docs/core/connect-data-platform/sqlite-setup 301 -/reference/warehouse-setups/mysql-setup /docs/core/connect-data-platform/mysql-setup 301 
-/reference/warehouse-setups/ibmdb2-setup /docs/core/connect-data-platform/ibmdb2-setup 301 -/reference/warehouse-setups/alloydb-setup /docs/core/connect-data-platform/alloydb-setup 301 -/reference/warehouse-setups/doris-setup /docs/core/connect-data-platform/doris-setup 301 -/reference/warehouse-setups/infer-setup /docs/core/connect-data-platform/infer-setup 301 -/reference/warehouse-setups/databend-setup /docs/core/connect-data-platform/databend-setup 301 -/reference/warehouse-setups/fal-setup /docs/core/connect-data-platform/fal-setup 301 -/reference/warehouse-setups/decodable-setup /docs/core/connect-data-platform/decodable-setup - -# Discovery redirect -/docs/dbt-cloud-apis/metadata-schema-source /docs/dbt-cloud-apis/discovery-schema-source 301 -/docs/dbt-cloud-apis/metadata-schema-sources /docs/dbt-cloud-apis/discovery-schema-sources 301 -/docs/dbt-cloud-apis/metadata-schema-test /docs/dbt-cloud-apis/discovery-schema-test 301 -/docs/dbt-cloud-apis/metadata-schema-tests /docs/dbt-cloud-apis/discovery-schema-tests 301 -/docs/dbt-cloud-apis/metadata-schema-seed /docs/dbt-cloud-apis/discovery-schema-seed 301 -/docs/dbt-cloud-apis/metadata-schema-seeds /docs/dbt-cloud-apis/discovery-schema-seeds 301 -/docs/dbt-cloud-apis/metadata-schema-snapshots /docs/dbt-cloud-apis/discovery-schema-snapshots 301 -/docs/dbt-cloud-apis/metadata-schema-model /docs/dbt-cloud-apis/discovery-schema-model 301 -/docs/dbt-cloud-apis/metadata-schema-models /docs/dbt-cloud-apis/discovery-schema-models 301 -/docs/dbt-cloud-apis/metadata-schema-modelByEnv /docs/dbt-cloud-apis/discovery-schema-modelByEnv 301 -/docs/dbt-cloud-apis/metadata-schema-metrics /docs/dbt-cloud-apis/discovery-schema-metrics 301 -/docs/dbt-cloud-apis/metadata-schema-metric /docs/dbt-cloud-apis/discovery-schema-metric 301 -/docs/dbt-cloud-apis/metadata-schema-exposures /docs/dbt-cloud-apis/discovery-schema-exposures 301 -/docs/dbt-cloud-apis/metadata-schema-exposure /docs/dbt-cloud-apis/discovery-schema-exposure 301 
-/docs/dbt-cloud-apis/metadata-use-case-guides /docs/dbt-cloud-apis/discovery-use-cases-and-examples 301 -/docs/dbt-cloud-apis/metadata-api /docs/dbt-cloud-apis/discovery-api 301 -/docs/dbt-cloud-apis/metadata-querying /docs/dbt-cloud-apis/discovery-querying 301 - - -/docs/core/connection-profiles#understanding-threads /docs/running-a-dbt-project/using-threads 301 - -# PrivateLink to Secure redirects -/docs/cloud/privatelink/about-privatelink /docs/cloud/secure/about-privatelink 301 -/docs/cloud/privatelink/snowflake-privatelink /docs/cloud/secure/about-privatelink 301 -/docs/cloud/privatelink/redshift-privatelink /docs/cloud/secure/about-privatelink 301 -/docs/cloud/privatelink/databricks-privatelink /docs/cloud/secure/about-privatelink 301 -/docs/cloud/privatelink/ip-restrictions /docs/cloud/secure/about-privatelink 301 - -/docs/deploy/dbt-cloud-job#create-and-schedule-jobs /docs/deploy/dbt-cloud-job#create-and-schedule-jobs 301 -/docs/cloud/dbt-cloud-tips /docs/cloud/dbt-cloud-ide/dbt-cloud-tips 301 -/docs/cloud/develop-in-the-cloud /docs/cloud/dbt-cloud-ide/develop-in-the-cloud 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab /docs/deploy/dbt-cloud-job#model-timing 301 - -/docs/quickstarts/dbt-core/quickstart /quickstarts/manual-install 301 -docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab /docs/deploy/dbt-cloud-job#model-timing 301 - -/docs/dbt-versions/release-notes/January-2022/model-timing-more /docs/deploy/dbt-cloud-job#model-timing 301 -/docs/deploy/deployments#dbt-cloud /docs/deploy/dbt-cloud-job 301 -/docs/deploy/deployments#airflow /docs/deploy/deployment-tools 301 -/docs/deploy/deployments#prefect /docs/deploy/deployment-tools 301 -/docs/deploy/deployments#run-dbt-in-production /docs/deploy/deployments 301 -/docs/deploy/deployments#on-prefect-2 /docs/deploy/deployment-tools 301 -/docs/deploy/deployments#on-prefect-1 /docs/deploy/deployment-tools 301 -/docs/deploy/deployments#dagster /docs/deploy/deployment-tools 301 
-/docs/deploy/deployments#automation-servers /docs/deploy/deployment-tools 301 -/docs/deploy/deployments#cron /docs/deploy/deployment-tools 301 - -# New Cloud directory redirects -/docs/collaborate/manage-access/enterprise-permissions#permission-sets /docs/cloud/manage-access/enterprise-permissions#permission-sets 301 -/docs/get-started/privatelink/about-privatelink /docs/cloud/privatelink/about-privatelink 301 -/docs/get-started/privatelink/snowflake-privatelink /docs/cloud/privatelink/snowflake-privatelink 301 -/docs/get-started/privatelink/redshift-privatelink /docs/cloud/privatelink/redshift-privatelink 301 -/docs/get-started/privatelink/databricks-privatelink /docs/cloud/privatelink/databricks-privatelink 301 -/docs/get-started/dbt-cloud-features /docs/cloud/about-cloud/dbt-cloud-features 301 -/docs/deploy/regions-ip-addresses /docs/cloud/about-cloud/regions-ip-addresses 301 -/docs/deploy/architecture /docs/cloud/about-cloud/architecture 301 -/docs/deploy/single-tenant /docs/cloud/about-cloud/tenancy 301 -/docs/deploy/multi-tenant /docs/cloud/about-cloud/tenancy 301 -/docs/cloud/manage-access/about-access 301 /docs/cloud/manage-access/about-user-access 301 -/docs/collaborate/git/connect-github /docs/cloud/git/connect-github 301 -/docs/collaborate/git/connect-gitlab /docs/cloud/git/connect-gitlab 301 -/docs/collaborate/git/connect-azure-devops /docs/cloud/git/connect-azure-devops 301 -/docs/collaborate/git/setup-azure /docs/cloud/git/setup-azure 301 -/docs/collaborate/git/authenticate-azure /docs/cloud/git/authenticate-azure 301 -/docs/collaborate/git/import-a-project-by-git-url /docs/cloud/git/import-a-project-by-git-url 301 -/docs/collaborate/publish/about-publishing-models /docs/collaborate/govern/about-model-governance 301 -/docs/collaborate/publish/model-contracts /docs/collaborate/govern/model-contracts 301 -/docs/collaborate/publish/model-access /docs/collaborate/govern/model-access 301 -/docs/collaborate/publish/model-versions 
/docs/collaborate/govern/model-versions 301 -/docs/collaborate/manage-access/about-access /docs/cloud/manage-access/about-user-access 301 -/docs/collaborate/manage-access/seats-and-users /docs/cloud/manage-access/seats-and-users 301 -/docs/collaborate/manage-access/self-service-permissions /docs/cloud/manage-access/self-service-permissions 301 -/docs/collaborate/manage-access/enterprise-permissions /docs/cloud/manage-access/enterprise-permissions 301 -/docs/collaborate/manage-access/sso-overview /docs/cloud/manage-access/sso-overview 301 -/docs/collaborate/manage-access/set-up-sso-saml-2.0 /docs/cloud/manage-access/set-up-sso-saml-2.0 301 -/docs/collaborate/manage-access/set-up-sso-okta /docs/cloud/manage-access/set-up-sso-okta 301 -/docs/collaborate/manage-access/set-up-sso-google-workspace /docs/cloud/manage-access/set-up-sso-google-workspace 301 -/docs/collaborate/manage-access/set-up-sso-azure-active-directory /docs/cloud/manage-access/set-up-sso-azure-active-directory 301 -/docs/collaborate/manage-access/set-up-snowflake-oauth /docs/cloud/manage-access/set-up-snowflake-oauth 301 -/docs/collaborate/manage-access/set-up-bigquery-oauth /docs/cloud/manage-access/set-up-bigquery-oauth 301 -/docs/collaborate/manage-access/audit-log /docs/cloud/manage-access/audit-log 301 -/docs/get-started/develop-in-the-cloud /docs/cloud/develop-in-the-cloud 301 -/docs/get-started/dbt-cloud-tips /docs/cloud/dbt-cloud-tips 301 -/docs/get-started/installation /docs/core/installation 301 -/docs/get-started/about-the-cli /docs/core/about-the-cli 301 -/docs/get-started/homebrew-install /docs/core/homebrew-install 301 -/docs/get-started/pip-install /docs/core/pip-install 301 -/docs/get-started/docker-install /docs/core/docker-install 301 -/docs/get-started/source-install /docs/core/source-install 301 -/docs/get-started/connection-profiles /docs/core/connection-profiles 301 -/docs/get-started/run-your-dbt-projects /docs/running-a-dbt-project/run-your-dbt-projects 301 
-/docs/get-started/learning-more/refactoring-legacy-sql /guides/migration/tools/refactoring-legacy-sql 301 -/docs/get-started/learning-more/using-jinja /guides/advanced/using-jinja 301 - -# Quickstart redirects - -/docs/get-started/getting-started/set-up-dbt-cloud /quickstarts 301 -/docs/get-started/getting-started/getting-set-up/setting-up-snowflake /docs/quickstarts/dbt-cloud/snowflake 301 -/docs/get-started/getting-started/getting-set-up/setting-up-redshift /docs/quickstarts/dbt-cloud/redshift 301 -/docs/get-started/getting-started/getting-set-up/setting-up-databricks /quickstarts/databricks 301 -/docs/get-started/getting-started/getting-set-up/setting-up-bigquery /docs/quickstarts/dbt-cloud/bigquery 301 -/docs/get-started/getting-started/getting-set-up/setting-up-databricks /quickstarts/databricks 301 -/docs/get-started/getting-started/getting-set-up/setting-up-redshift /docs/quickstarts/dbt-cloud/redshift 301 -/docs/get-started/getting-started/getting-set-up/setting-up-snowflake /docs/quickstarts/dbt-cloud/snowflake 301 -/docs/get-started/getting-started/building-your-first-project/schedule-a-job /quickstarts/bigquery 301 -/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project /docs/quickstarts/dbt-cloud/bigquery#add-tests-to-your-models 301 -/docs/get-started/getting-started/building-your-first-project/build-your-first-models /quickstarts/bigquery?step=8 301 -/docs/get-started/getting-started/overview /quickstarts 301 -/docs/get-started/getting-started-dbt-core /docs/quickstarts/dbt-core/quickstart 301 - -/docs/get-started/develop-in-the-cloud#set-up-environments /docs/get-started/develop-in-the-cloud 301 -/docs/get-started/develop-in-the-cloud#developer-credentials /docs/get-started/develop-in-the-cloud 301 -/docs/getting-started/develop-in-the-cloud#setting-up-developer-credentials /docs/get-started/develop-in-the-cloud 301 
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-redshift-and-postgres /docs/get-started/connect-your-database#connecting-to-postgres-redshift-and-alloydb 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-snowflake /docs/get-started/connect-your-database#connecting-to-snowflake 301 -/docs/get-started/connect-your-database#connecting-to-snowflake /docs/cloud/connect-data-platform/connect-snowflake 301 -/docs/get-started/connect-your-database#connecting-to-postgres-redshift-and-alloydb /cloud/connect-data-platform/connect-redshift-postgresql-alloydb 301 -/docs/cloud/connect-data-platform/connect-your-database /docs/cloud/connect-data-platform/about-connections 301 -/faqs/connecting-to-two-dbs-not-allowed /faqs/warehouse/connecting-to-two-dbs-not-allowed 301 -/docs/dbt-cloud/cloud-ide/ide-beta /docs/get-started/develop-in-the-cloud 301 - -# Adding new path for quickstarts - -/docs/quickstarts/dbt-cloud/bigquery /quickstarts/bigquery 301 -/quickstarts/databricks /quickstarts/databricks 301 -/docs/quickstarts/dbt-cloud/redshift /quickstarts/redshift 301 -/docs/quickstarts/dbt-cloud/snowflake /quickstarts/snowflake 301 -/quickstarts/starburst-galaxy /quickstarts/starburst-galaxy 301 -/quickstarts/codespace /quickstarts/codespace 301 -/quickstarts/manual-install /quickstarts/manual-install 301 - -## dbt cloud feature page changes - -/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab /docs/get-started/dbt-cloud-features#model-timing-dashboard 301 -/docs/dbt-cloud /docs/get-started/getting-started/set-up-dbt-cloud -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version /docs/dbt-versions/upgrade-core-in-cloud 301 -/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/get-started/develop-in-the-cloud 301 -/docs/dbt-cloud/cloud-overview /docs/get-started/getting-started/set-up-dbt-cloud 301 -/docs/dbt-cloud/using-dbt-cloud/artifacts /docs/deploy/artifacts 301 - 
-/docs/building-a-dbt-project/building-models/python-models /docs/build/python-models 301 -/docs/deploy/regions /docs/deploy/regions-ip-addresses 301 - -## adapter redirects using diff formats - -/advanced/adapter-development/1-what-are-adapters /guides/dbt-ecosystem/adapter-development/1-what-are-adapters 301! -/advanced/adapter-development/2-prerequisites-for-a-new-adapter /guides/dbt-ecosystem/adapter-development/2-prerequisites-for-a-new-adapter 301! -/advanced/adapter-development/3-building-a-new-adapter /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301! -/advanced/adapter-development/4-testing-a-new-adapter /guides/dbt-ecosystem/adapter-development/4-testing-a-new-adapter 301! -/advanced/adapter-development/5-documenting-a-new-adapter /guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter 301! -/advanced/adapter-development/6-promoting-a-new-adapter /guides/dbt-ecosystem/adapter-development/6-promoting-a-new-adapter 301! -/advanced/adapter-development/7-verifying-a-new-adapter /guides/dbt-ecosystem/adapter-development/7-verifying-a-new-adapter 301! -/guides/advanced/adapter-development/1-what-are-adapters /guides/dbt-ecosystem/adapter-development/1-what-are-adapters 301! -/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter /guides/dbt-ecosystem/adapter-development/2-prerequisites-for-a-new-adapter 301! -/guides/advanced/adapter-development/3-building-a-new-adapter /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301! -/guides/advanced/adapter-development/4-testing-a-new-adapter /guides/dbt-ecosystem/adapter-development/4-testing-a-new-adapter 301! -/guides/advanced/adapter-development/5-documenting-a-new-adapter /guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter 301! -/guides/advanced/adapter-development/6-promoting-a-new-adapter /guides/dbt-ecosystem/adapter-development/6-promoting-a-new-adapter 301! 
-/guides/advanced/adapter-development/7-verifying-a-new-adapter /guides/dbt-ecosystem/adapter-development/7-verifying-a-new-adapter 301! - -/guides/legacy/debugging-errors /guides/best-practices/debugging-errors 301! -/guides/legacy/writing-custom-generic-tests /guides/best-practices/writing-custom-generic-tests 301! -/guides/legacy/creating-new-materializations /guides/advanced/creating-new-materializations 301! - -# add new redirects to the top because they will override later ones - -# getting started guide - -/guides/getting-started /docs/get-started/getting-started/overview 301 -/docs/get-started/getting-started/building-your-first-project /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301 -/docs/get-started/getting-started/create-a-project /docs/get-started/getting-started/set-up-dbt-cloud 301 -/guides/getting-started/building-your-first-project /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301 - -/guides/getting-started/building-your-first-project/build-your-first-models /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301 -/guides/getting-started/building-your-first-project/schedule-a-job /docs/get-started/getting-started/building-your-first-project/schedule-a-job 301 -/guides/getting-started/building-your-first-project/test-and-document-your-project /docs/get-started/getting-started/building-your-first-project/test-and-document-your-project 301 -/guides/getting-started/create-a-project /docs/get-started/getting-started/building-your-first-project/build-your-first-models301 -/guides/getting-started/getting-set-up /docs/get-started/getting-started/set-up-dbt-cloud 301 -/guides/getting-started/getting-set-up/setting-up-bigquery /docs/get-started/getting-started/getting-set-up/setting-up-bigquery 301 -/guides/getting-started/getting-set-up/setting-up-databricks /docs/get-started/getting-started/getting-set-up/setting-up-databricks 301 
-/guides/getting-started/getting-set-up/setting-up-redshift /docs/get-started/getting-started/getting-set-up/setting-up-redshift 301 -/guides/getting-started/getting-set-up/setting-up-snowflake /docs/get-started/getting-started/getting-set-up/setting-up-snowflake 301 -/guides/getting-started/getting-started /docs/get-started/getting-started/set-up-dbt-cloud 301 -/guides/getting-started/learning-more /docs/get-started/getting-started-dbt-core 301 -/guides/getting-started/learning-more/getting-started-dbt-core /docs/get-started/getting-started-dbt-core 301 -/guides/getting-started/learning-more/refactoring-legacy-sql /docs/get-started/learning-more/refactoring-legacy-sql 301 -/guides/getting-started/learning-more/using-jinja /docs/get-started/learning-more/using-jinja 301 -/docs/dbt-cloud/cloud-quickstart /docs/get-started/getting-started/set-up-dbt-cloud 301 -/docs/cloud-quickstart /docs/dbt-cloud/cloud-quickstart 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud /docs/get-started/getting-started/set-up-dbt-cloud 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database /docs/cloud/connect-data-platform/about-connections 301 -/docs/get-started/connect-your-database /docs/cloud/connect-data-platform/about-connections 301 -/docs/running-a-dbt-project/profile /docs/get-started/connection-profiles 301 - -# other redirects - -/guides/best-practices/materializations/guides/best-practices/materializations/1-overview /guides/best-practices/materializations/1-guide-overview - -/docs/deploy/understanding-state /docs/deploy/about-state 301! -/guides/legacy/understanding-state /docs/deploy/about-state 301! -/guides/migration/versions/Older%20versions/understanding-state /docs/deploy/about-state 301! 
- -/docs/collaborate/git/resolve-merge-conflicts /docs/collaborate/git/merge-conflicts 301 -/docs/collaborate/environments /docs/collaborate/environments/environments-in-dbt 301 -/docs/running-a-dbt-project/running-dbt-in-production /docs/deploy/deployments 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications /docs/deploy/job-notifications 301 -/docs/dbt-cloud/using-dbt-cloud /docs/develop/develop-in-the-cloud 301 -/docs/dbt-cloud/january-2020-pricing-updates https://www.getdbt.com/pricing/ 301 -/docs/dbt-cloud/dbt-cloud-enterprise https://www.getdbt.com/pricing/ 301 -/docs/building-a-dbt-project/archival /docs/build/snapshots 301 -/docs/about/license /community/resources/contributor-license-agreements 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository /docs/collaborate/git/managed-repository 301 -/docs/dbt-cloud/release-notes /docs/dbt-versions/dbt-cloud-release-notes 301 -/docs/dbt-cloud/dbt-cloud-enterprise/audit-log /docs/collaborate/manage-access/audit-log 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth /docs/collaborate/manage-access/set-up-bigquery-oauth 301 -/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth /docs/collaborate/manage-access/set-up-snowflake-oauth 301 -/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta /docs/collaborate/manage-access/set-up-sso-okta 301 -/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory /docs/collaborate/manage-access/set-up-sso-azure-active-directory 301 -/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite /docs/collaborate/manage-access/set-up-sso-google-workspace 301 -/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0 /docs/collaborate/manage-access/set-up-sso-saml-2.0 301 -/docs/dbt-cloud/dbt-cloud-enterprise/sso-overview /docs/collaborate/manage-access/sso-overview 301 -/docs/dbt-cloud/access-control/enterprise-permissions 
/docs/collaborate/manage-access/enterprise-permissions 301 -/docs/dbt-cloud/access-control/self-service-permissions /docs/collaborate/manage-access/self-service-permissions 301 -/docs/dbt-cloud/access-control/cloud-seats-and-users /docs/collaborate/manage-access/seats-and-users 301 -/docs/dbt-cloud/access-control/access-control-overview /docs/collaborate/manage-access/about-access 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation /docs/collaborate/build-and-view-your-docs 301 -/docs/building-a-dbt-project/documentation /docs/collaborate/documentation 301 -/docs/building-a-dbt-project/managing-environments /docs/collaborate/environments/environments-in-dbt 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url /docs/collaborate/git/import-a-project-by-git-url 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure /docs/collaborate/git/authenticate-azure 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure /docs/collaborate/git/setup-azure 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops /docs/collaborate/git/connect-azure-devops 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab /docs/collaborate/git/connect-gitlab 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application /docs/collaborate/git/connect-github 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/setting-up / 301 -/docs/dbt-cloud/cloud-ide/handling-merge-conflicts /docs/collaborate/git/resolve-merge-conflicts 301 -/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/collaborate/cloud-build-and-view-your-docs 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories /docs/collaborate/git/pr-template 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration /docs/deploy/cloud-ci-job 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles /docs/deploy/dashboard-status-tiles 301 
-/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness /docs/deploy/source-freshness 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-notifications /docs/deploy/job-notifications 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule /docs/deploy/job-triggers 301 -/docs/dbt-cloud/deployments/airgapped-deployment /docs/deploy/airgapped-deployment 301 -/docs/dbt-cloud/deployments/single-tenant-deployment /docs/deploy/single-tenant 301 -/docs/dbt-cloud/deployments/multi-tenant-deployment /docs/deploy/multi-tenant 301 -/docs/dbt-cloud/deployments/deployment-architecture /docs/deploy/architecture 301 -/docs/dbt-cloud/deployments/deployment-overview /docs/deploy/deployments 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name /docs/build/custom-target-names 301 -/docs/building-a-dbt-project/building-models/using-custom-aliases /docs/build/custom-aliases 301 -/docs/building-a-dbt-project/building-models/using-custom-databases /docs/build/custom-databases 301 -/docs/building-a-dbt-project/building-models/using-custom-schemas /docs/build/custom-schemas 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposures /docs/dbt-cloud-apis/metadata-schema-exposures 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposure /docs/dbt-cloud-apis/metadata-schema-exposure 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-tests /docs/dbt-cloud-apis/metadata-schema-tests 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-test /docs/dbt-cloud-apis/metadata-schema-test 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-snapshots /docs/dbt-cloud-apis/metadata-schema-snapshots 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seeds /docs/dbt-cloud-apis/metadata-schema-seeds 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seed /docs/dbt-cloud-apis/metadata-schema-seed 301 
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-sources /docs/dbt-cloud-apis/metadata-schema-sources 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-source /docs/dbt-cloud-apis/metadata-schema-source 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metrics /docs/dbt-cloud-apis/metadata-schema-metrics 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metric /docs/dbt-cloud-apis/metadata-schema-metric 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-modelByEnv /docs/dbt-cloud-apis/metadata-schema-modelByEnv 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-models /docs/dbt-cloud-apis/metadata-schema-models 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model /docs/dbt-cloud-apis/metadata-schema-model 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying /docs/dbt-cloud-apis/metadata-querying 301 -/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview /docs/dbt-cloud-apis/metadata-api 301 -/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api /docs/dbt-cloud-apis/admin-cloud-api 301 -/docs/dbt-cloud/dbt-cloud-api/service-tokens /docs/dbt-cloud-apis/service-tokens 301 -/docs/dbt-cloud/dbt-cloud-api/user-tokens /docs/dbt-cloud-apis/user-tokens 301 -/docs/dbt-cloud/dbt-cloud-api/cloud-apis /docs/dbt-cloud-apis/overview 301 -/docs/building-a-dbt-project/hooks-operations /docs/build/hooks-operations 301 -/docs/building-a-dbt-project/analyses /docs/build/analyses 301 -/docs/building-a-dbt-project/package-management /docs/build/packages 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables /docs/build/environment-variables 301 -/docs/building-a-dbt-project/building-models/using-variables /docs/build/project-variables 301 -/docs/building-a-dbt-project/jinja-macros /docs/build/jinja-macros 301 -/docs/building-a-dbt-project/building-models/configuring-incremental-models /docs/build/incremental-models 301 
-/docs/building-a-dbt-project/building-models/materializations /docs/build/materializations 301 -/docs/building-a-dbt-project/tests /docs/build/tests 301 -/docs/building-a-dbt-project/metrics /docs/build/metrics 301 -/docs/building-a-dbt-project/exposures /docs/build/exposures 301 -/docs/building-a-dbt-project/snapshots /docs/build/snapshots 301 -/docs/building-a-dbt-project/seeds /docs/build/seeds 301 -/docs/building-a-dbt-project/building-models /docs/build/sql-models 301 -/docs/building-a-dbt-project/using-sources /docs/build/sources 301 -/docs/building-a-dbt-project/projects /docs/build/projects 301 -/docs/building-a-dbt-project/building-models/python-models /docs/build/python-models 301 -/docs/building-a-dbt-project/macros /docs/guides/building-packages 301 -/docs/building-a-dbt-project/setting-up /docs/guides/building-packages 301 -/docs/building-a-dbt-project/dbt-jinja-functions /docs/guides/building-packages 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions /docs/dbt-versions/upgrade-core-in-cloud 301 -/docs/core-versions /docs/dbt-versions/core 301 -/docs/dbt-cloud/cloud-dbt-cloud-support /docs/dbt-support 301 -/docs/about/viewpoint /community/resources/viewpoint 301 -/docs/viewpoint /community/resources/viewpoint 301 -/dbt-cli/configure-your-profile /docs/get-started/connection-profiles 301 -/docs/running-a-dbt-project/using-the-cli /docs/get-started/about-the-cli 301 -/dbt-cli/install/from-source /docs/get-started/source-install 301 -/dbt-cli/install/docker /docs/get-started/docker-install 301 -/dbt-cli/install/pip /docs/get-started/pip-install 301 -/dbt-cli/install/homebrew /docs/get-started/homebrew-install 301 -/dbt-cli/install/overview /docs/get-started/installation 301 -/docs/dbt-cloud/cloud-ide/the-dbt-ide /docs/get-started/dbt-cloud-features 301 -/useful*components https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/adding-page-components.md 301 -/guides/legacy/managing-environments 
/docs/building-a-dbt-project/managing-environments 301 -/docs/running-a-dbt-project/dbt-api /docs/introduction 301 -/img/docs/dbt-cloud/dbt-cloud-enterprise/icon.png https://www.getdbt.com/ui/img/dbt-icon.png 301! -/dbt-cli/installation-guides/centos /docs/get-started/installation 301 -/dbt-cli/installation-guides/centos /docs/get-started/installation 301 -/dbt-cli/installation-guides/install-from-source /dbt-cli/install/from-source 301 -/dbt-cli/installation-guides/macos /docs/get-started/installation 301 -/dbt-cli/installation-guides/ubuntu-debian /docs/get-started/installation 301 -/dbt-cli/installation-guides/windows /docs/get-started/installation 301 -/dbt-cli/installation /docs/get-started/installation 301 -/dbt-jinja-functions /reference/dbt-jinja-functions 301 -/docs /docs/introduction 301 -/docs/adapter /docs/writing-code-in-dbt/jinja-context/adapter 301 -/docs/analyses /docs/building-a-dbt-project/analyses 301 -/docs/api-variable /docs/writing-code-in-dbt/api-variable 301 -/docs/archival /docs/building-a-dbt-project/archival 301 -/docs/artifacts /docs/dbt-cloud/using-dbt-cloud/artifacts 301 -/docs/bigquery-configs /reference/resource-configs/bigquery-configs 301 -/reference/resource-properties/docs /reference/resource-configs/docs 301 -/reference/resource-properties/latest-version /reference/resource-properties/latest_version 301 -/docs/building-a-dbt-project/building-models/bigquery-configs /reference/resource-configs/bigquery-configs 301 -/docs/building-a-dbt-project/building-models/configuring-models /reference/model-configs -/docs/building-a-dbt-project/building-models/enable-and-disable-models /reference/resource-configs/enabled 301 -/docs/building-a-dbt-project/building-models/redshift-configs /reference/resource-configs/redshift-configs 301 -/docs/building-a-dbt-project/building-models/snowflake-configs /reference/resource-configs/snowflake-configs 301 -/docs/building-a-dbt-project/building-models/spark-configs 
/reference/resource-configs/spark-configs 301 -/docs/building-a-dbt-project/building-models/tags /reference/resource-configs/tags 301 -/docs/building-a-dbt-project/building-models/using-sql-headers /reference/resource-configs/sql_header 301 -/docs/building-a-dbt-project/dbt-projects /docs/building-a-dbt-project/projects 301 -/docs/building-a-dbt-project/dbt-projects/configuring-query-comments /reference/project-configs/query-comment 301 -/docs/building-a-dbt-project/dbt-projects/configuring-quoting /reference/project-configs/quoting 301 -/docs/building-a-dbt-project/dbt-projects/creating-a-project /docs/building-a-dbt-project/projects#creating-a-dbt-project 301 -/docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions /reference/project-configs/require-dbt-version 301 -/docs/building-a-dbt-project/dbt-projects/use-an-existing-project /docs/building-a-dbt-project/projects#using-an-existing-project 301 -/docs/building-a-dbt-project/hooks /docs/building-a-dbt-project/hooks-operations 301 -/docs/building-a-dbt-project/testing-and-documentation /docs/building-a-dbt-project/tests 301 -/docs/building-a-dbt-project/testing-and-documentation/documentation /docs/building-a-dbt-project/testing-and-documentation/documentation 301 -/docs/building-a-dbt-project/testing-and-documentation/documentation-website /docs/building-a-dbt-project/testing-and-documentation/documentation 301 -/docs/building-a-dbt-project/testing-and-documentation/schemayml-files /reference/declaring-properties 301 -/docs/building-a-dbt-project/testing-and-documentation/testing /docs/building-a-dbt-project/tests 301 -/docs/building-a-dbt-project/using-operations /docs/building-a-dbt-project/hooks-operations 301 -/docs/building-models /docs/building-a-dbt-project/building-models 301 -/docs/building-packages /guides/legacy/building-packages 301 -/docs/centos /dbt-cli/installation 301 -/docs/clean /reference/commands/clean 301 -/docs/cloud-choosing-a-dbt-version 
/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 301 -/docs/cloud-configuring-dbt-cloud /docs/dbt-cloud/cloud-configuring-dbt-cloud 301 -/docs/cloud-enabling-continuous-integration-with-github /docs/deploy/cloud-ci-job 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github /docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github/ /docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration 301 -/docs/cloud-generating-documentation /docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation 301 -/docs/cloud-import-a-project-by-git-url /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url 301 -/docs/cloud-installing-the-github-application /docs/cloud/git/connect-github 301 -/docs/cloud-managing-permissions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-managing-permissions 301 -/docs/cloud-overview /docs/dbt-cloud/cloud-overview 301 -/docs/cloud-seats-and-users /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-seats-and-users 301 -/docs/cloud-setting-a-custom-target-name /docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name 301 -/docs/cloud-snapshotting-source-freshness /docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness 301 -/docs/cloud-supported-dbt-versions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 301 -/docs/cloud-using-a-custom-cron-schedule /docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule 301 -/docs/cloud-using-a-managed-repository /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository 301 -/docs/cmd-docs /reference/commands/cmd-docs 301 -/docs/command-line-interface /reference/dbt-commands 301 -/docs/compile /reference/commands/compile 301 -/docs/config /docs/writing-code-in-dbt/jinja-context/config 301 -/docs/configure-your-profile /dbt-cli/configure-your-profile 301 
-/docs/configuring-incremental-models /docs/building-a-dbt-project/building-models/configuring-incremental-models 301 -/docs/configuring-models /reference/model-configs 301 -/docs/configuring-query-comments /docs/building-a-dbt-project/dbt-projects/configuring-query-comments 301 -/docs/configuring-quoting /docs/building-a-dbt-project/dbt-projects/configuring-quoting 301 -/docs/configuring-resources-from-the-project-file /docs/building-a-dbt-project/dbt-projects/configuring-resources-from-the-project-file 301 -/docs/connecting-your-database /docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database 301 -/docs/contributor-license-agreements /docs/contributing/contributor-license-agreements 301 -/docs/creating-a-project /docs/building-a-dbt-project/dbt-projects/creating-a-project 301 -/docs/creating-new-materializations /guides/legacy/creating-new-materializations 301 -/docs/creating-date-partitioned-tables /docs/guides/database-specific-guides/creating-date-partitioned-tables 301 -/docs/custom-schema-tests /guides/legacy/writing-custom-generic-tests 301 -/docs/database-specific-guides / 301 -/docs/dbt-api /docs/running-a-dbt-project/dbt-api 301 -/docs/dbt-cloud-enterprise /docs/dbt-cloud/dbt-cloud-enterprise 301 -/docs/dbt-cloud/cloud-configuring-repositories /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories 301 -/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version /docs/dbt-versions/upgrade-core-in-cloud 301 -/docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions/ /docs/dbt-cloud/access-control/enterprise-permissions 301 -/docs/dbt-cloud/on-premises/architecture /dbt-cloud/on-premises/dependencies 301 -/docs/dbt-projects /docs/building-a-dbt-project/dbt-projects 301 -/docs/dbt_projectyml-file /docs/building-a-dbt-project/dbt-projects/dbt_projectyml-file 301 -/docs/debug /reference/commands/debug 301 -/docs/debug-method /docs/writing-code-in-dbt/jinja-context/debug-method 301 -/docs/deps 
/reference/commands/deps 301 -/docs/doc /docs/writing-code-in-dbt/jinja-context/doc 301 -/docs/documentation /docs/building-a-dbt-project/documentation 301 -/docs/documentation-website /docs/building-a-dbt-project/documentation 301 -/docs/dont-nest-your-curlies /docs/building-a-dbt-project/dont-nest-your-curlies 301 -/docs/enable-and-disable-models /reference/resource-configs/enabled 301 -/docs/enterprise-permissions /docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions 301 -/docs/env_var /docs/writing-code-in-dbt/jinja-context/env_var 301 -/docs/exceptions /docs/writing-code-in-dbt/jinja-context/exceptions 301 -/docs/execute /docs/writing-code-in-dbt/jinja-context/execute 301 -/docs/exit-codes /reference/exit-codes 301 -/docs/flags /docs/writing-code-in-dbt/jinja-context/flags 301 -/docs/fromjson /docs/writing-code-in-dbt/jinja-context/fromjson 301 -/docs/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 301 -/docs/global-cli-flags /reference/global-cli-flags 301 -/docs/graph /docs/writing-code-in-dbt/jinja-context/graph 301 -/docs/guides/building-packages /guides/legacy/building-packages 301 -/docs/guides/creating-new-materializations /guides/legacy/creating-new-materializations 301 -/docs/guides/debugging-errors /guides/legacy/debugging-errors 301 -/docs/guides/debugging-schema-names /guides/legacy/debugging-schema-names 301 -/docs/guides/getting-help /guides/legacy/getting-help 301 -/docs/guides/managing-environments /guides/legacy/managing-environments 301 -/docs/guides/navigating-the-docs /guides/legacy/navigating-the-docs 301 -/docs/guides/understanding-state /guides/legacy/understanding-state 301 -/docs/guides/videos /guides/legacy/videos 301 -/docs/guides/writing-custom-generic-tests /guides/legacy/writing-custom-generic-tests 301 -/docs/guides/writing-custom-schema-tests /guides/legacy/writing-custom-generic-tests 301 -/docs/guides/best-practices#choose-your-materializations-wisely 
/guides/legacy/best-practices#choose-your-materializations-wisely 301 -/docs/guides/best-practices#version-control-your-dbt-project /guides/legacy/best-practices#version-control-your-dbt-project 301 -/docs/best-practices /guides/legacy/best-practices 301 -/docs/guides/best-practices /guides/best-practices 301 -/docs/hooks /docs/building-a-dbt-project/hooks-operations 301 -/docs/init /reference/commands/init 301 -/docs/install-from-source /dbt-cli/installation 301 -/docs/installation /docs/core/installation 301 -/docs/invocation_id /docs/writing-code-in-dbt/jinja-context/invocation_id 301 -/docs/jinja-context /docs/writing-code-in-dbt/jinja-context 301 -/docs/license /docs/about/license 301 -/docs/list /reference/commands/list 301 -/docs/log /docs/writing-code-in-dbt/jinja-context/log 301 -/docs/macos /dbt-cli/installation 301 -/docs/macros /guides/legacy/building-packages 301 -/docs/maintaining-multiple-environments-with-dbt / 301 -/docs/managing-environments /guides/legacy/managing-environments 301 -/docs/materializations /docs/building-a-dbt-project/building-models/materializations 301 -/docs/model-selection-syntax /reference/node-selection/syntax 301 -/docs/modules /docs/writing-code-in-dbt/jinja-context/modules 301 -/docs/on-run-end-context /docs/writing-code-in-dbt/jinja-context/on-run-end-context 301 -/docs/overview /docs/introduction 301 -/docs/performance-optimization / 301 -/docs/package-management /docs/building-a-dbt-project/package-management 301 -/docs/profile-bigquery /reference/warehouse-profiles/bigquery-profile 301 -/docs/profile-mssql /reference/warehouse-profiles/mssql-profile 301 -/docs/profile-postgres /reference/warehouse-profiles/postgres-profile 301 -/docs/profile-presto /reference/warehouse-profiles/presto-profile 301 -/docs/profile-redshift /reference/warehouse-profiles/redshift-profile 301 -/docs/profile-snowflake /reference/warehouse-profiles/snowflake-profile 301 -/docs/profile-spark /reference/warehouse-profiles/spark-profile 301 
-/docs/redshift-configs /reference/resource-configs/redshift-configs 301 -/docs/spark-configs /reference/resource-configs/spark-configs 301 -/docs/redshift-v2 /reference/warehouse-profiles/redshift-profile 301 -/docs/ref /docs/writing-code-in-dbt/jinja-context/ref 301 -/docs/requiring-specific-dbt-versions /docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions 301 -/docs/requiring-dbt-versions / 301 -/docs/return /docs/writing-code-in-dbt/jinja-context/return 301 -/docs/rpc /reference/commands/rpc 301 -/docs/run /reference/commands/run 301 -/docs/run-operation /reference/commands/run-operation 301 -/docs/run_query /docs/writing-code-in-dbt/jinja-context/run_query 301 -/docs/run_started_at /docs/writing-code-in-dbt/jinja-context/run_started_at 301 -/docs/running-a-dbt-project/command-line-interface /reference/dbt-commands 301 -/docs/running-a-dbt-project/command-line-interface/clean /reference/commands/clean 301 -/docs/running-a-dbt-project/command-line-interface/cmd-docs /reference/commands/cmd-docs 301 -/docs/running-a-dbt-project/command-line-interface/compile /reference/commands/compile 301 -/docs/running-a-dbt-project/command-line-interface/debug /reference/commands/debug 301 -/docs/running-a-dbt-project/command-line-interface/deps /reference/commands/deps 301 -/docs/running-a-dbt-project/command-line-interface/exit-codes /reference/exit-codes 301 -/docs/running-a-dbt-project/command-line-interface/global-cli-flags /reference/global-cli-flags 301 -/docs/running-a-dbt-project/command-line-interface/init /reference/commands/init 301 -/docs/running-a-dbt-project/command-line-interface/list /reference/commands/list 301 -/docs/running-a-dbt-project/command-line-interface/model-selection-syntax /reference/model-selection-syntax 301 -/docs/running-a-dbt-project/command-line-interface/rpc /reference/commands/rpc 301 -/docs/running-a-dbt-project/command-line-interface/run /reference/commands/run 301 
-/docs/running-a-dbt-project/command-line-interface/run-operation /reference/commands/run-operation 301 -/docs/running-a-dbt-project/command-line-interface/seed /reference/commands/seed 301 -/docs/running-a-dbt-project/command-line-interface/snapshot /reference/commands/snapshot 301 -/docs/running-a-dbt-project/command-line-interface/source /reference/commands/source 301 -/docs/running-a-dbt-project/command-line-interface/test /reference/commands/test 301 -/docs/running-a-dbt-project/command-line-interface/version /reference/global-cli-flags#version 301 -/docs/running-a-dbt-project/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 301 -/docs/running-a-dbt-project/using-the-command-line-interface/centos /dbt-cli/installation-guides/centos 301 -/docs/running-a-dbt-project/using-the-command-line-interface/configure-your-profile /dbt-cli/configure-your-profile 301 -/docs/running-a-dbt-project/using-the-command-line-interface/install-from-source /dbt-cli/installation-guides/install-from-source 301 -/docs/running-a-dbt-project/using-the-command-line-interface/installation /dbt-cli/installation 301 -/docs/running-a-dbt-project/using-the-command-line-interface/macos /dbt-cli/installation-guides/macos 301 -/docs/running-a-dbt-project/using-the-command-line-interface/ubuntu-debian /dbt-cli/installation-guides/ubuntu-debian 301 -/docs/running-a-dbt-project/using-the-command-line-interface/windows /dbt-cli/installation-guides/windows 301 -/docs/running-dbt-in-production /docs/running-a-dbt-project/running-dbt-in-production 301 -/docs/schema /docs/writing-code-in-dbt/jinja-context/schema 301 -/docs/schemas /docs/writing-code-in-dbt/jinja-context/schemas 301 -/docs/schemayml-files /reference/declaring-properties 301 -/docs/seed /reference/commands/seed 301 -/docs/seeds /docs/building-a-dbt-project/seeds 301 -/docs/setting-up-enterprise-sso-with-azure-active-directory 
/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory 301 -/docs/setting-up-snowflake-sso /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth 301 -/docs/setting-up-sso-with-google-gsuite /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite 301 -/docs/setting-up-sso-with-okta /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta 301 -/docs/snapshot /reference/commands/snapshot 301 -/docs/snapshots /docs/building-a-dbt-project/snapshots 301 -/docs/snowflake-configs /reference/resource-configs/snowflake-configs 301 -/docs/source /reference/commands/source 301 -/docs/statement-blocks /docs/writing-code-in-dbt/jinja-context/statement-blocks 301 -/docs/supported-databases/profile-bigquery /reference/bigquery-profile 301 -/docs/supported-databases/profile-mssql /reference/mssql-profile 301 -/docs/supported-databases/profile-postgres /reference/postgres-profile 301 -/docs/supported-databases/profile-presto /reference/presto-profile 301 -/docs/supported-databases/profile-redshift /reference/redshift-profile 301 -/docs/supported-databases/profile-snowflake /reference/snowflake-profile 301 -/docs/supported-databases/profile-spark /reference/spark-profile 301 -/docs/tags /reference/resource-configs/tags 301 -/docs/target /docs/writing-code-in-dbt/jinja-context/target 301 -/docs/test /reference/commands/test 301 -/docs/testing /docs/building-a-dbt-project/tests 301 -/docs/testing-and-documentation /docs/building-a-dbt-project/tests 301 -/docs/the-dbt-ide /docs/cloud/about-cloud/dbt-cloud-features 301 -/docs/this /docs/writing-code-in-dbt/jinja-context/this 301 -/docs/tojson /docs/writing-code-in-dbt/jinja-context/tojson 301 -/docs/ubuntu-debian /dbt-cli/installation 301 -/docs/use-an-existing-project /docs/building-a-dbt-project/dbt-projects/use-an-existing-project 301 -/docs/using-custom-aliases /docs/building-a-dbt-project/building-models/using-custom-aliases 301 -/docs/using-custom-database 
/docs/building-a-dbt-project/building-models/using-custom-databases 301 -/docs/using-custom-schemas /docs/building-a-dbt-project/building-models/using-custom-schemas 301 -/docs/using-dbt-cloud /docs/dbt-cloud/using-dbt-cloud 301 -/docs/using-jinja /guides/getting-started/learning-more/using-jinja 301 -/docs/using-operations /docs/building-a-dbt-project/hooks-operations 301 -/docs/using-sources /docs/building-a-dbt-project/using-sources 301 -/docs/using-sql-headers /reference/resource-configs/sql_header 301 -/docs/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 301 -/docs/using-the-dbt-ide /docs/running-a-dbt-project/using-the-dbt-ide 301 -/docs/using-variables /docs/building-a-dbt-project/building-models/using-variables 301 -/docs/var /docs/writing-code-in-dbt/jinja-context/var 301 -/docs/version /reference/global-cli-flags#version 301 -/docs/videos /guides/legacy/videos 301 -/docs/warehouse-specific-configurations / 301 -/docs/windows /dbt-cli/installation 301 -/docs/writing-code-in-dbt/api-variable / 301 -/docs/writing-code-in-dbt/class-reference /reference/dbt-classes 301 -/docs/writing-code-in-dbt/extending-dbts-programming-environment/creating-new-materializations /guides/legacy/creating-new-materializations 301 -/docs/writing-code-in-dbt/extending-dbts-programming-environment/custom-schema-tests /guides/legacy/writing-custom-schema-tests 301 -/docs/writing-code-in-dbt/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 301 -/docs/writing-code-in-dbt/jinja-context/adapter /reference/dbt-jinja-functions/adapter 301 -/docs/writing-code-in-dbt/jinja-context/as_text /reference/dbt-jinja-functions/as_text 301 -/docs/writing-code-in-dbt/jinja-context/builtins /reference/dbt-jinja-functions/builtins 301 -/docs/writing-code-in-dbt/jinja-context/config /reference/dbt-jinja-functions/config 301 -/docs/writing-code-in-dbt/jinja-context/dbt-project-yml-context /reference/dbt-jinja-functions/dbt-project-yml-context 301 
-/docs/writing-code-in-dbt/jinja-context/dbt_version /reference/dbt-jinja-functions/dbt_version 301 -/docs/writing-code-in-dbt/jinja-context/debug-method /reference/dbt-jinja-functions/debug-method 301 -/docs/writing-code-in-dbt/jinja-context/doc /reference/dbt-jinja-functions/doc 301 -/docs/writing-code-in-dbt/jinja-context/env_var /reference/dbt-jinja-functions/env_var 301 -/docs/writing-code-in-dbt/jinja-context/exceptions /reference/dbt-jinja-functions/exceptions 301 -/docs/writing-code-in-dbt/jinja-context/execute /reference/dbt-jinja-functions/execute 301 -/docs/writing-code-in-dbt/jinja-context/flags /reference/dbt-jinja-functions/flags 301 -/docs/writing-code-in-dbt/jinja-context/fromjson /reference/dbt-jinja-functions/fromjson 301 -/docs/writing-code-in-dbt/jinja-context/fromyaml /reference/dbt-jinja-functions/fromyaml 301 -/docs/writing-code-in-dbt/jinja-context/graph /reference/dbt-jinja-functions/graph 301 -/docs/writing-code-in-dbt/jinja-context/invocation_id /reference/dbt-jinja-functions/invocation_id 301 -/docs/writing-code-in-dbt/jinja-context/log /reference/dbt-jinja-functions/log 301 -/docs/writing-code-in-dbt/jinja-context/modules /reference/dbt-jinja-functions/modules 301 -/docs/writing-code-in-dbt/jinja-context/on-run-end-context /reference/dbt-jinja-functions/on-run-end-context 301 -/docs/writing-code-in-dbt/jinja-context/profiles-yml-context /reference/dbt-jinja-functions/profiles-yml-context 301 -/docs/writing-code-in-dbt/jinja-context/project_name /reference/dbt-jinja-functions/project_name 301 -/docs/writing-code-in-dbt/jinja-context/ref /reference/dbt-jinja-functions/ref 301 -/docs/writing-code-in-dbt/jinja-context/return /reference/dbt-jinja-functions/return 301 -/docs/writing-code-in-dbt/jinja-context/run_query /reference/dbt-jinja-functions/run_query 301 -/docs/writing-code-in-dbt/jinja-context/run_started_at /reference/dbt-jinja-functions/run_started_at 301 -/docs/writing-code-in-dbt/jinja-context/schema 
/reference/dbt-jinja-functions/schema 301 -/docs/writing-code-in-dbt/jinja-context/schemas /reference/dbt-jinja-functions/schemas 301 -/docs/writing-code-in-dbt/jinja-context/source /reference/dbt-jinja-functions/source 301 -/docs/writing-code-in-dbt/jinja-context/statement-blocks /reference/dbt-jinja-functions/statement-blocks 301 -/docs/writing-code-in-dbt/jinja-context/target /reference/dbt-jinja-functions/target 301 -/docs/writing-code-in-dbt/jinja-context/this /reference/dbt-jinja-functions/this 301 -/docs/writing-code-in-dbt/jinja-context/tojson /reference/dbt-jinja-functions/tojson 301 -/docs/writing-code-in-dbt/jinja-context/toyaml /reference/dbt-jinja-functions/toyaml 301 -/docs/writing-code-in-dbt/jinja-context/var /reference/dbt-jinja-functions/var 301 -/docs/writing-code-in-dbt/macros /docs/building-a-dbt-project/jinja-macros 301 -/docs/writing-code-in-dbt/using-jinja /guides/getting-started/learning-more/using-jinja 301 -/faqs/getting-help/ /guides/legacy/getting-help 301 -/migration-guide/upgrading-to-0-17-0 /guides/migration/versions 301 -/migration-guide/upgrading-to-0-18-0 /guides/migration/versions 301 -/reference / 301 -/reference/accounts /dbt-cloud/api 301 -/reference/api /dbt-cloud/api 301 -/reference/bigquery-profile /reference/warehouse-profile/bigquery-profile 301 -/reference/connections /dbt-cloud/api 301 -/reference/data-test-configs /reference/test-configs 301 -/reference/declaring-properties /reference/configs-and-properties 301 -/reference/dbt-artifacts /reference/artifacts/dbt-artifacts 301 -/reference/environments /dbt-cloud/api 301 -/reference/events /reference/events-logging 301 -/reference/jobs /dbt-cloud/api 301 -/reference/model-selection-syntax /reference/node-selection/syntax 301 -/reference/project-configs/on-run-end /reference/project-configs/on-run-start-on-run-end 301 -/reference/project-configs/on-run-start /reference/project-configs/on-run-start-on-run-end 301 -/reference/repositories /dbt-cloud/api 301 
-/reference/resource-configs/post-hook /reference/resource-configs/pre-hook-post-hook 301 -/reference/resource-configs/pre-hook /reference/resource-configs/pre-hook-post-hook 301 -/reference/resource-properties/tags /reference/resource-configs/tags 301 -/reference/resource-properties/meta /reference/resource-configs/meta 301 -/reference/runs /dbt-cloud/api 301 -/reference/using-the-dbt-cloud-api /dbt-cloud/api 301 -https://tutorial.getdbt.com/* https://docs.getdbt.com/:splat 301! -/reference/model-selection-syntax/#test-selection-examples /reference/node-selection/test-selection-examples 301 -/docs/building-a-dbt-project/building-models/using-custom-database /docs/building-a-dbt-project/building-models/using-custom-databases 301 -/dbt-cloud/api /dbt-cloud/api-v2 301 -/dbt-cloud/api-v2-old /dbt-cloud/api-v2-legacy 301 -/dbt-cloud/api-v4 /docs/dbt-cloud-apis/admin-cloud-api -/reference/project-configs/source-paths /reference/project-configs/model-paths 301 -/reference/project-configs/data-paths /reference/project-configs/seed-paths 301 -/reference/project-configs/modules-paths /reference/project-configs/packages-install-path 301 -/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications /docs/dbt-cloud/using-dbt-cloud/cloud-notifications 301 -/reference/warehouse-profiles/presto-profile /reference/profiles.yml 301 -/setting-up /guides/getting-started/getting-set-up/setting-up-bigquery 301 -/tutorial/setting-up /quickstarts 301 -/tutorial/test-and-document-your-project /guides/getting-started/building-your-first-project/test-and-document-your-project 301 -/tutorial/build-your-first-models /guides/getting-started/building-your-first-project/build-your-first-models 301 -/tutorial/deploy-your-project /guides/getting-started/building-your-first-project/schedule-a-job 301 -/tutorial/using-jinja /guides/getting-started/learning-more/using-jinja 301 -/tutorial/2b-create-a-project-dbt-cli /guides/getting-started/learning-more/getting-started-dbt-core 301 
-/tutorial/create-a-project-dbt-cli /guides/getting-started/learning-more/getting-started-dbt-core 301 -/tutorial/2a-create-a-project-dbt-cloud /guides/getting-started 301 -/tutorial/create-a-project-dbt-cloud /guides/getting-started 301 -/tutorial/getting-started /guides/getting-started 301 -/docs/dbt-cloud/cloud-changelog /docs/dbt-cloud/release-notes 301 -/faqs/all /docs/faqs 301! -/faqs/_ /docs/faqs/:splat 301 -/faqs/dbt-jinja-functions /reference/dbt-jinja-functions 301 -/tutorial/learning-more/_ /guides/getting-started/learning-more/:splat 301 -/tutorial/getting-set-up/\_ /guides/getting-started/getting-set-up/:splat 301 -/tutorial/building-your-first-project/\* /guides/getting-started/building-your-first-project/:splat 301 -/tutorial/refactoring-legacy-sql /guides/migration/tools/refactoring-legacy-sql 301 -/blog/change-data-capture-metrics /blog/change-data-capture 301 -/blog/intelligent-slim-ci /docs/deploy/continuous-integration 301 -/blog/model-timing-tab /blog/how-we-shaved-90-minutes-off-model 301 -/reference/warehouse-setups/resource-configs/materialize-configs/indexes /reference/resource-configs/materialize-configs#indexes 301 -/docs/build/building-models /docs/build/models 301 -/docs/build/bigquery-profile /reference/resource-configs/bigquery-configs 301 -/reference/warehouse-profiles/bigquery-setup /reference/warehouse-setups/bigquery-setup 301 -/date-trunc-sql /blog/date-trunc-sql 301 -/docs/using-hooks / 301 -/blog/how-we-structure-our-dbt-projects /guides/best-practices/how-we-structure/1-guide-overview 301 - -/data-testing-why-you-need-it-and-how-to-get-started https://www.getdbt.com/blog/data-quality-testing/ 301 - -# supported data platforms page - -/docs/profile /docs/supported-data-platforms 301 -/docs/available-adapters /docs/supported-data-platforms 301 -/docs/supported-databases /docs/supported-data-platforms 301 - -# migration and legacy guides - -/docs/guides/migration-guide/upgrading-to-0-14-0 /guides/migration/versions 301 
-/docs/guides/migration-guide/upgrading-to-0-15-0 /guides/migration/versions 301 -/docs/guides/migration-guide/upgrading-to-0-16-0 /guides/migration/versions 301 -/docs/guides/migration-guide/upgrading-to-0-17-0 /guides/migration/versions 301 -/docs/guides/migration-guide/upgrading-to-0-18-0 /guides/migration/versions 301 -/docs/guides/migration-guide/upgrading-to-0-19-0 /guides/migration/versions 301 -/docs/guides/migration-guide/upgrading-from-0-10-to-0-11 /guides/migration/versions 301 -/docs/guides/migration-guide/upgrading-to-014 /guides/migration/versions 301 -/docs/upgrading-to-014 /guides/migration/versions 301 -/docs/upgrading-to-0-14-1 /guides/migration/versions 301 -/docs/upgrading-to-0-16-0 /guides/migration/versions 301 -/docs/guides/migration-guide/upgrading-to-0-20-0 /guides/migration/versions/upgrading-to-v0.20 301 -/docs/guides/migration-guide/upgrading-to-0-21-0 /guides/migration/versions/upgrading-to-v0.21 301 -/docs/guides/migration-guide/upgrading-to-1-0-0 /guides/migration/versions/upgrading-to-v1.0 301 -/docs/guides/migration-guide/upgrading-to-v1.0 /guides/migration/versions/upgrading-to-v1.0 301 -/docs/guides/getting-help /guides/legacy/getting-help 301 -/docs/guides/migration-guide/_ /guides/migration/versions/:splat 301! -/docs/guides/_ /guides/legacy/:splat 301! 
- -# adapter development docs - -/docs/contributing/what-are-adapters /guides/advanced/adapter-development/1-what-are-adapters 301 -/docs/contributing/adapter-development/1-what-are-adapters /guides/advanced/adapter-development/1-what-are-adapters 301 -/docs/contributing/prerequisites-for-a-new-adapter /guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter 301 - -/docs/contributing/adapter-development/2-prerequisites-for-a-new-adapter /guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter 301 -/docs/contributing/building-a-new-adapter /guides/advanced/adapter-development/3-building-a-new-adapter 301 - -/docs/contributing/adapter-development/3-building-a-new-adapter /guides/advanced/adapter-development/3-building-a-new-adapter 301 - -/v0.13/docs/building-a-new-adapter /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301 -/docs/building-a-new-adapter /guides/advanced/adapter-development/3-building-a-new-adapter 301 - -/docs/contributing/testing-a-new-adapter /guides/advanced/adapter-development/4-testing-a-new-adapter 301 -/docs/contributing/adapter-development/4-testing-a-new-adapter /guides/advanced/adapter-development/4-testing-a-new-adapter 301 - -/docs/contributing/documenting-a-new-adapter /guides/advanced/adapter-development/5-documenting-a-new-adapter 301 -/docs/contributing/adapter-development/5-documenting-a-new-adapter /guides/advanced/adapter-development/5-documenting-a-new-adapter 301 - -/docs/contributing/promoting-a-new-adapter /guides/advanced/adapter-development/6-promoting-a-new-adapter 301 -/docs/contributing/adapter-development/6-promoting-a-new-adapter /guides/advanced/adapter-development/6-promoting-a-new-adapter 301 - -/docs/contributing/verifying-a-new-adapter /guides/advanced/adapter-development/7-verifying-a-new-adapter 301 -/docs/contributing/adapter-development/7-verifying-a-new-adapter /guides/advanced/adapter-development/7-verifying-a-new-adapter 301 - 
-/docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer /docs/use-dbt-semantic-layer/dbt-semantic-layer 301! -/reference/warehouse-profiles/impala-profile /reference/warehouse-setups/impala-setup 301 -/reference/warehouse-profiles/exasol-profile /reference/warehouse-setups/exasol-setup 301 -/reference/warehouse-profiles/layer-profile /reference/warehouse-setups/layer-setup 301 -/reference/warehouse-profiles/postgres-profile /reference/warehouse-setups/postgres-setup 301 -/reference/warehouse-profiles/greenplum-profile /reference/warehouse-setups/greenplum-setup 301 -/reference/warehouse-profiles/alloydb-profile /reference/warehouse-setups/alloydb-setup 301 -/reference/warehouse-profiles/azuresynapse-profile /reference/warehouse-setups/azuresynapse-setup 301 -/reference/warehouse-profiles/snowflake-profile /reference/warehouse-setups/snowflake-setup 301 -/reference/warehouse-profiles/rockset-profile /reference/warehouse-setups/rockset-setup 301 -/reference/warehouse-profiles/trino-profile /reference/warehouse-setups/trino-setup 301 -/reference/warehouse-profiles/glue-profile /reference/warehouse-setups/glue-setup 301 -/reference/warehouse-profiles/duckdb-profile /reference/warehouse-setups/duckdb-setup 301 -/reference/warehouse-profiles/vertica-profile /reference/warehouse-setups/vertica-setup 301 -/reference/warehouse-profiles/clickhouse-profile /reference/warehouse-setups/clickhouse-setup 301 -/reference/warehouse-profiles/athena-profile /reference/warehouse-setups/athena-setup 301 -/reference/warehouse-profiles/iomete-profile /reference/warehouse-setups/iomete-setup 301 -/reference/warehouse-profiles/mssql-profile /reference/warehouse-setups/mssql-setup 301 -/reference/warehouse-profiles/tidb-profile /reference/warehouse-setups/tidb-setup 301 -/reference/warehouse-profiles/materialize-profile /reference/warehouse-setups/materialize-setup 301 -/reference/warehouse-profiles/redshift-profile /reference/warehouse-setups/redshift-setup 301 
-/reference/warehouse-profiles/databricks-profile /reference/warehouse-setups/databricks-setup 301 -/reference/warehouse-profiles/bigquery-profile /reference/warehouse-setups/bigquery-setup 301 -/reference/warehouse-profiles/dremio-profile /reference/warehouse-setups/dremio-setup 301 -/reference/warehouse-profiles/oracle-profile /reference/warehouse-setups/oracle-setup 301 -/reference/warehouse-profiles/teradata-profile /reference/warehouse-setups/teradata-setup 301 -/reference/warehouse-profiles/singlestore-profile /reference/warehouse-setups/singlestore-setup 301 -/reference/warehouse-profiles/sqlite-profile /reference/warehouse-setups/sqlite-setup 301 -/reference/warehouse-profiles/spark-profile /reference/warehouse-setups/spark-setup 301 -/reference/warehouse-profiles/mindsdb-profile /reference/warehouse-setups/mindsdb-setup 301 -/reference/warehouse-profiles/ibmdb2-profile /reference/warehouse-setups/ibmdb2-setup 301 -/reference/warehouse-profiles/firebolt-profile /reference/warehouse-setups/firebolt-setup 301 -/reference/warehouse-profiles/mysql-profile /reference/warehouse-setups/mysql-setup 301 -/reference/warehouse-profiles/hive-profile /reference/warehouse-setups/hive-setup 301 -/reference/using-sources /docs/build/sources 301 - -# ide ia redirects - -/docs/dbt-cloud/cloud-ide/the-dbt-ide /docs/getting-started/dbt-cloud-features 301! -/docs/dbt-cloud/cloud-ide/handling-merge-conflicts /docs/collaborate/git/resolve-merge-conflicts 301! -/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/getting-started/develop-in-the-cloud 301! -/docs/dbt-cloud/cloud-ide/ide-beta /docs/getting-started/develop-in-the-cloud 301! -/docs/running-a-dbt-project/using-the-dbt-ide /docs/getting-started/develop-in-the-cloud 301! -/dbt-cloud/cloud-ide/the-ide-git-button /docs/collaborate/git/version-control-basics 301! -/docs/building-a-dbt-project/setting-up /guides/legacy/building-packages 301! -/docs/building-a-dbt-project/dbt-jinja-functions /reference/dbt-jinja-functions 301! 
- -# Community docs - -/docs/contributing/long-lived-discussions-guidelines /community/resources/forum-guidelines 301 -/docs/guides/legacy/navigating-the-docs.md /community/contribute 301 -/community/writing-on-discourse/ /community/contributing/contributing-online-community 301 -/community/contributing/ /community/contribute 301 -/docs/contributing/contributor-license-agreements /community/resources/contributor-license-agreements 301 -/community/maintaining-a-channel /community/resources/maintaining-a-channel 301 -/docs/contributing/oss-expectations /community/resources/oss-expectations 301 -/docs/slack-rules-of-the-road /community/resources/community-rules-of-the-road 301 -/docs/contributing/slack-rules-of-the-road /community/resources/community-rules-of-the-road 301 -/community/resources/slack-rules-of-the-road /community/resources/community-rules-of-the-road 301 -/blog/getting-started-with-the-dbt-semantic-layer /blog/understanding-the-components-of-the-dbt-semantic-layer 301! -/docs/getting-started/develop-in-the-cloud#creating-a-development-environment /docs/get-started/develop-in-the-cloud#set-up-and-access-the-cloud-ide 301 -/docs/cloud-developer-ide /docs/build/custom-target-names#dbt-cloud-ide 301 -/website/docs/docs/contributing/building-a-new-adapter.md /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301 -/guides/legacy/getting-help /community/resources/getting-help 301 - -# Blog docs - -/blog/tags/release-notes /docs/dbt-versions/dbt-cloud-release-notes 301 - -# Faq docs - -/faqs/dbt-jinja-functions /reference/dbt-jinja-functions 301 - -/website/docs/docs/contributing/documenting-a-new-adapter.md /guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter 301 - -/docs/docs/contributing/documenting-a-new-adapter /docs/contributing/documenting-a-new-adapter 301 - - -/v0.8/reference / 301 -/v0.10/reference / 301 -/v0.12/reference / 301 -/v0.13/reference / 301 -/v0.13/docs/requiring-dbt-versions / 301 
-/v0.14/docs/cloud-developer-ide / 301 -/v0.15/docs/cloud-import-a-project-by-git-url /docs/cloud/git/import-a-project-by-git-url 301 - -/v0.15/docs/configure-your-profile /docs/core/connection-profiles 301 - -# Removing on premise Cloud content -/docs/dbt-cloud/on-premises/dependencies /docs/deploy/single-tenant 301 -/docs/dbt-cloud/on-premises/faqs /docs/deploy/single-tenant 301 -/docs/dbt-cloud/on-premises/index /docs/deploy/single-tenant 301 -/docs/dbt-cloud/on-premises/installation /docs/deploy/single-tenant 301 -/docs/dbt-cloud/on-premises/prerequisites /docs/deploy/single-tenant 301 -/docs/dbt-cloud/on-premises/setup /docs/deploy/single-tenant 301 -/docs/dbt-cloud/on-premises/system-requirements /docs/deploy/single-tenant 301 -/docs/dbt-cloud/on-premises/upgrading-kots /docs/deploy/single-tenant 301 From 3f2f682179f2b9106ebdbf2f08f9da2c27074ca9 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 12 Oct 2023 16:54:55 -0400 Subject: [PATCH 13/43] Update sidebars.js --- website/sidebars.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/website/sidebars.js b/website/sidebars.js index e1db4400661..70d0d4547c1 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -142,6 +142,9 @@ const sidebarSettings = { "docs/cloud/secure/about-privatelink", "docs/cloud/secure/snowflake-privatelink", "docs/cloud/secure/databricks-privatelink", + "docs/cloud/secure/redshift-privatelink", + "docs/cloud/secure/postgres-privatelink", + "docs/cloud/secure/ip-restrictions", ], }, // PrivateLink "docs/cloud/billing", From dc2f6a31eeffb31e4cd7b1b561e49e21ff949ade Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Thu, 12 Oct 2023 16:55:47 -0400 Subject: [PATCH 14/43] Update sidebars.js --- website/sidebars.js | 1 - 1 file changed, 1 deletion(-) diff --git a/website/sidebars.js b/website/sidebars.js index 70d0d4547c1..f8e8d725a92 100644 --- a/website/sidebars.js +++ 
b/website/sidebars.js @@ -144,7 +144,6 @@ const sidebarSettings = { "docs/cloud/secure/databricks-privatelink", "docs/cloud/secure/redshift-privatelink", "docs/cloud/secure/postgres-privatelink", - "docs/cloud/secure/ip-restrictions", ], }, // PrivateLink "docs/cloud/billing", From 1edef5246c5d185ea5f7b09f48b42ccdc3acbeaf Mon Sep 17 00:00:00 2001 From: mirnawong1 <89008547+mirnawong1@users.noreply.github.com> Date: Fri, 13 Oct 2023 14:30:46 +0100 Subject: [PATCH 15/43] Update project-dependencies.md --- .../govern/project-dependencies.md | 39 ++++++++++++++----- 1 file changed, 29 insertions(+), 10 deletions(-) diff --git a/website/docs/docs/collaborate/govern/project-dependencies.md b/website/docs/docs/collaborate/govern/project-dependencies.md index 7785e428678..13fc76bb743 100644 --- a/website/docs/docs/collaborate/govern/project-dependencies.md +++ b/website/docs/docs/collaborate/govern/project-dependencies.md @@ -5,16 +5,11 @@ sidebar_label: "Project dependencies" description: "Reference public models across dbt projects" --- -:::caution Closed Beta - dbt Cloud Enterprise -"Project" dependencies and cross-project `ref` are features of dbt Cloud Enterprise, currently in Closed Beta. To access these features while they are in beta, please contact your account team at dbt Labs. - -**Prerequisites:** In order to add project dependencies and resolve cross-project `ref`, you must: -- Have the feature enabled (speak to your account team) -- Use dbt v1.6 for **both** the upstream ("producer") project and the downstream ("consumer") project. 
-- Have a deployment environment in the upstream ("producer") project [that is set to be your production environment](/docs/deploy/deploy-environments#set-as-production-environment-beta) -- Have a successful run of the upstream ("producer") project +:::caution Available in Publib Preview for dbt Cloud Enterprise accounts +"Project" dependencies and cross-project `ref` are features of dbt Cloud Enterprise, currently in [Public Preview](/docs/dbt-versions/product-lifecycles#dbt-cloud). To access these features while they are in beta, please contact your account team at dbt Labs. ::: + For a long time, dbt has supported code reuse and extension by installing other projects as [packages](/docs/build/packages). When you install another project as a package, you are pulling in its full source code, and adding it to your own. This enables you to call macros and run models defined in that other project. While this is a great way to reuse code, share utility macros, and establish a starting point for common transformations, it's not a great way to enable collaboration across teams and at scale, especially at larger organizations. @@ -23,6 +18,30 @@ This year, dbt Labs is introducing an expanded notion of `dependencies` across m - **Packages** — Familiar and pre-existing type of dependency. You take this dependency by installing the package's full source code (like a software library). - **Projects** — A _new_ way to take a dependency on another project. Using a metadata service that runs behind the scenes, dbt Cloud resolves references on-the-fly to public models defined in other projects. You don't need to parse or run those upstream models yourself. Instead, you treat your dependency on those models as an API that returns a dataset. The maintainer of the public model is responsible for guaranteeing its quality and stability. 
+ +## Prerequisites + +In order to add project dependencies and resolve cross-project `ref`, you must: +- Have the feature enabled (speak to your account team) +- Use dbt v1.6 for **both** the upstream ("producer") project and the downstream ("consumer") project. +- Have a deployment environment in the upstream ("producer") project [that is set to be your production environment](/docs/deploy/deploy-environments#set-as-production-environment-beta) +- Have a successful run of the upstream ("producer") project + +### About dependencies.yml + +There are differences between using a `dependencies.yml` compared to a `packages.yml` file: + +- **`dependencies.yml`** + - Primarily designed for dbt Mesh and cross-project reference workflow. + - Supports Projects and non-private dbt Packages (private packages aren't supported yet. Refer to [FAQs](#faqs) for more info). + - Helps maintain your project's organization by allowing you to specify hub packages like `dbt_utils`, reducing the need for multiple YAML files. + - Does not support conditional configuration using Jinja-in-yaml + +- **`packages.yml`** +- Does not contribute to the dbt Mesh workflow. +- Serves as a list of dbt Packages (such as dbt projects) that you want to download into your root or parent dbt project. +- Can only include packages, including private packages (doesn't support Projects) + ## Example As an example, let's say you work on the Marketing team at the Jaffle Shop. The name of your team's project is `jaffle_marketing`: @@ -36,7 +55,7 @@ name: jaffle_marketing As part of your modeling of marketing data, you need to take a dependency on two other projects: -- `dbt_utils` as a [package](#packages-use-case): An collection of utility macros that you can use while writing the SQL for your own models. This package is, open-source public, and maintained by dbt Labs. +- `dbt_utils` as a [package](#packages-use-case): A collection of utility macros that you can use while writing the SQL for your own models. 
This package is, open-source public, and maintained by dbt Labs. - `jaffle_finance` as a [project use-case](#projects-use-case): Data models about the Jaffle Shop's revenue. This project is private and maintained by your colleagues on the Finance team. You want to select from some of this project's final models, as a starting point for your own work. @@ -99,7 +118,7 @@ There are a few cases where installing another internal project as a package can - Unified deployments — In a production environment, if the central data platform team of Jaffle Shop wanted to schedule the deployment of models across both `jaffle_finance` and `jaffle_marketing`, they could use dbt's [selection syntax](/reference/node-selection/syntax) to create a new "passthrough" project that installed both projects as packages. - Coordinated changes — In development, if you wanted to test the effects of a change to a public model in an upstream project (`jaffle_finance.monthly_revenue`) on a downstream model (`jaffle_marketing.roi_by_channel`) _before_ introducing changes to a staging or production environment, you can install the `jaffle_finance` package as a package within `jaffle_marketing`. The installation can point to a specific git branch, however, if you find yourself frequently needing to perform end-to-end testing across both projects, we recommend you re-examine if this represents a stable interface boundary. -These are the exceptions, rather than the rule. Installing another team's project as a package adds complexity, latency, and risk of unnecessary costs. By defining clear interface boundaries across teams, by serving one team's public models as "APIs" to another, and by enabling practitioners to develop with a more narrowly-defined scope, we can enable more people to contribute, with more confidence, while requiring less context upfront. +These are the exceptions, rather than the rule. 
Installing another team's project as a package adds complexity, latency, and risk of unnecessary costs. By defining clear interface boundaries across teams, by serving one team's public models as "APIs" to another, and by enabling practitioners to develop with a more narrowly defined scope, we can enable more people to contribute, with more confidence, while requiring less context upfront. ## FAQs From d7dee94f0c87e01ab2d5f1a30f3a8d312a7176cc Mon Sep 17 00:00:00 2001 From: mirnawong1 Date: Fri, 13 Oct 2023 15:06:30 +0100 Subject: [PATCH 16/43] clarify --- website/docs/docs/build/packages.md | 24 ++++++++++++++--- .../govern/project-dependencies.md | 26 +++++++++++-------- 2 files changed, 35 insertions(+), 15 deletions(-) diff --git a/website/docs/docs/build/packages.md b/website/docs/docs/build/packages.md index 74e25262994..6756534fccf 100644 --- a/website/docs/docs/build/packages.md +++ b/website/docs/docs/build/packages.md @@ -3,7 +3,7 @@ title: "Packages" id: "packages" --- -## What is a package? + Software engineers frequently modularize code into libraries. These libraries help programmers operate with leverage: they can spend more time focusing on their unique business logic, and less time implementing code that someone else has already spent the time perfecting. In dbt, libraries like these are called _packages_. dbt's packages are so powerful because so many of the analytic problems we encountered are shared across organizations, for example: @@ -22,13 +22,14 @@ dbt _packages_ are in fact standalone dbt projects, with models and macros that * Models in the package will be materialized when you `dbt run`. * You can use `ref` in your own models to refer to models from the package. * You can use macros in the package in your own project. 
+* It's important to note that defining and installing dbt packages is different from [defining and installing Python packages](/docs/build/python-models#using-pypi-packages) -:::note Using Python packages - -Defining and installing dbt packages is different from [defining and installing Python packages](/docs/build/python-models#using-pypi-packages). +:::info Project Dependencies versus Packages + Packages are different to Project dependencies, a feature that allows cross-project `ref`. Refer to [Project dependencies](/docs/collaborate/govern/project-dependencies) for more info about the difference between `dependencies.yml` and `packages.yml`. ::: + ## How do I add a package to my project? 1. Add a file named `dependencies.yml` or `packages.yml` to your dbt project. This should be at the same level as your `dbt_project.yml` file. 2. Specify the package(s) you wish to add using one of the supported syntaxes, for example: @@ -366,3 +367,18 @@ packages: ``` + +### About dependencies.yml + +There are some important differences between using a `dependencies.yml` compared to a `packages.yml` file: + +- `dependencies.yml` + - Primarily designed for dbt Mesh and cross-project reference workflow. + - Supports both Projects and non-private dbt packages (private packages aren't supported yet). + - Helps maintain your project's organization by allowing you to specify hub packages like `dbt_utils`, reducing the need for multiple YAML files. + - Does not support conditional configuration using Jinja-in-yaml (Refer to [FAQs](#faqs) for more info). + +- `packages.yml` + - Does not contribute to the dbt Mesh workflow. + - Serves as a list of dbt Packages (such as dbt projects) that you want to download into your root or parent dbt project. 
+ - Can only include packages, including private packages (doesn't support Projects) diff --git a/website/docs/docs/collaborate/govern/project-dependencies.md b/website/docs/docs/collaborate/govern/project-dependencies.md index 13fc76bb743..698f551749a 100644 --- a/website/docs/docs/collaborate/govern/project-dependencies.md +++ b/website/docs/docs/collaborate/govern/project-dependencies.md @@ -5,8 +5,11 @@ sidebar_label: "Project dependencies" description: "Reference public models across dbt projects" --- -:::caution Available in Publib Preview for dbt Cloud Enterprise accounts -"Project" dependencies and cross-project `ref` are features of dbt Cloud Enterprise, currently in [Public Preview](/docs/dbt-versions/product-lifecycles#dbt-cloud). To access these features while they are in beta, please contact your account team at dbt Labs. +:::info Available in Public Preview for dbt Cloud Enterprise accounts + +Project dependencies and cross-project `ref` are features available in [dbt Cloud Enterprise](https://www.getdbt.com/pricing), currently in [Public Preview](/docs/dbt-versions/product-lifecycles#dbt-cloud). + +Enterprise users can use these features by designating a [public model](/docs/collaborate/govern/model-access) and adding a [cross-project ref](#how-to-use-ref). ::: @@ -26,21 +29,22 @@ In order to add project dependencies and resolve cross-project `ref`, you must: - Use dbt v1.6 for **both** the upstream ("producer") project and the downstream ("consumer") project. - Have a deployment environment in the upstream ("producer") project [that is set to be your production environment](/docs/deploy/deploy-environments#set-as-production-environment-beta) - Have a successful run of the upstream ("producer") project +- Have a multi-tenant or single-tenant [dbt Cloud Enterprise](https://www.getdbt.com/pricing) account -- IS DEPLOYMENT /TENANCY INFO RIGHT? 
### About dependencies.yml -There are differences between using a `dependencies.yml` compared to a `packages.yml` file: +There are some important differences between using a `dependencies.yml` compared to a `packages.yml` file: -- **`dependencies.yml`** +- `dependencies.yml` - Primarily designed for dbt Mesh and cross-project reference workflow. - - Supports Projects and non-private dbt Packages (private packages aren't supported yet. Refer to [FAQs](#faqs) for more info). + - Supports both Projects and non-private dbt packages (private packages aren't supported yet). - Helps maintain your project's organization by allowing you to specify hub packages like `dbt_utils`, reducing the need for multiple YAML files. - - Does not support conditional configuration using Jinja-in-yaml + - Does not support conditional configuration using Jinja-in-yaml (Refer to [FAQs](#faqs) for more info). -- **`packages.yml`** -- Does not contribute to the dbt Mesh workflow. -- Serves as a list of dbt Packages (such as dbt projects) that you want to download into your root or parent dbt project. -- Can only include packages, including private packages (doesn't support Projects) +- `packages.yml` + - Does not contribute to the dbt Mesh workflow. + - Serves as a list of dbt Packages (such as dbt projects) that you want to download into your root or parent dbt project. + - Can only include packages, including private packages (doesn't support Projects) ## Example @@ -85,7 +89,7 @@ When you're building on top of another team's work, resolving the references in - You don't need to mirror any conditional configuration of the upstream project such as `vars`, environment variables, or `target.name`. You can reference them directly wherever the Finance team is building their models in production. 
Even if the Finance team makes changes like renaming the model, changing the name of its schema, or [bumping its version](/docs/collaborate/govern/model-versions), your `ref` would still resolve successfully. - You eliminate the risk of accidentally building those models with `dbt run` or `dbt build`. While you can select those models, you can't actually build them. This prevents unexpected warehouse costs and permissions issues. This also ensures proper ownership and cost allocation for each team's models. -### Usage +### How to use ref **Writing `ref`:** Models referenced from a `project`-type dependency must use [two-argument `ref`](/reference/dbt-jinja-functions/ref#two-argument-variant), including the project name: From eb39713b6310b1e59e5b97591b20a1d925811869 Mon Sep 17 00:00:00 2001 From: mirnawong1 Date: Fri, 13 Oct 2023 15:10:01 +0100 Subject: [PATCH 17/43] add guide link --- .../docs/docs/collaborate/govern/project-dependencies.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/website/docs/docs/collaborate/govern/project-dependencies.md b/website/docs/docs/collaborate/govern/project-dependencies.md index 698f551749a..e2020c95320 100644 --- a/website/docs/docs/collaborate/govern/project-dependencies.md +++ b/website/docs/docs/collaborate/govern/project-dependencies.md @@ -110,6 +110,8 @@ with monthly_revenue as ( **Cycle detection:** Currently, "project" dependencies can only go in one direction, meaning that the `jaffle_finance` project could not add a new model that depends, in turn, on `jaffle_marketing.roi_by_channel`. dbt will check for cycles across projects and raise errors if any are detected. We are considering support for this pattern in the future, whereby dbt would still check for node-level cycles while allowing cycles at the project level. +For more guidance on how to use dbt Mesh, refer to the dedicated [dbt Mesh guide](/guides/best-practices/how-we-mesh/mesh-1-intro). 
+ ### Comparison If you were to instead install the `jaffle_finance` project as a `package` dependency, you would instead be pulling down its full source code and adding it to your runtime environment. This means: @@ -131,3 +133,7 @@ These are the exceptions, rather than the rule. Installing another team's projec If you're using private packages with the [git token method](/docs/build/packages#git-token-method), you must define them in the `packages.yml` file instead of the `dependencies.yml` file. This is because conditional rendering (like Jinja-in-yaml) is not supported. + + +## Related docs +- [dbt Mesh guide](/guides/best-practices/how-we-mesh/mesh-1-intro) From c0a47631c0cb71fd8347d0706e4822981bb1b946 Mon Sep 17 00:00:00 2001 From: dave-connors-3 <73915542+dave-connors-3@users.noreply.github.com> Date: Fri, 13 Oct 2023 09:22:32 -0500 Subject: [PATCH 18/43] Update website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md --- website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 611d40a6567..1a5d7f090ea 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -6,7 +6,7 @@ hoverSnippet: Learn how to get started with dbt Mesh ## What is dbt Mesh? -Historically, building data teams has involved two extremes, building a centralized team or using embedded analysts. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts, allowing embeds to develop domain expertise while the central team focuses on building a strong operational foundation. 
A major difficultly of this model though is managing the compplexity of dependencies, goverance, and workflows between all groups — creating friction in monorepos or complexity and silos in multi-repos. Ideally, you want to teams to be able to work independently, but also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. +Historically, building data teams has involved two extremes, building a centralized team or using embedded analysts. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts allows embeds to develop domain expertise while the central team focuses on building a strong operational foundation. A major difficultly of this model is managing the complexity of dependencies, governance, and workflows between all groups — attempting to manage this can often create friction in monorepos or complexity and silos in multiple repos. Ideally, teams should be able to work independently and also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. dbt Mesh is not a product, but a pattern, enabled a convergence of several features in dbt Cloud. It’s inspired by dbt’s best practices and ideas from [data mesh](https://en.wikipedia.org/wiki/Data_mesh). 
These features include: From 31e921920f9d8a9f51f16d098281ef29a3f5a5aa Mon Sep 17 00:00:00 2001 From: Dave Connors Date: Fri, 13 Oct 2023 09:27:18 -0500 Subject: [PATCH 19/43] dave edits on page one --- .../how-we-mesh/mesh-1-intro.md | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 611d40a6567..95c315bb22f 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -6,28 +6,28 @@ hoverSnippet: Learn how to get started with dbt Mesh ## What is dbt Mesh? -Historically, building data teams has involved two extremes, building a centralized team or using embedded analysts. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts, allowing embeds to develop domain expertise while the central team focuses on building a strong operational foundation. A major difficultly of this model though is managing the compplexity of dependencies, goverance, and workflows between all groups — creating friction in monorepos or complexity and silos in multi-repos. Ideally, you want to teams to be able to work independently, but also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. +Historically, building data teams has involved two extremes, building a centralized team or using embedded analysts. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts, allowing embeds to develop domain expertise while the central team focuses on building a strong operational foundation. 
A major difficulty of this model though is managing the compplexity of dependencies, goverance, and workflows between all groups — creating friction in monorepos or complexity and silos in multi-repos. Ideally, you want to teams to be able to work independently, but also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. -dbt Mesh is not a product, but a pattern, enabled a convergence of several features in dbt Cloud. It’s inspired by dbt’s best practices and ideas from [data mesh](https://en.wikipedia.org/wiki/Data_mesh). These features include: +dbt Mesh is not a product, it is a pattern enabled by a convergence of several features in dbt Cloud. It’s inspired by dbt’s best practices and ideas from [data mesh](https://en.wikipedia.org/wiki/Data_mesh). These features include: -- **Cross-project references** - this is the core feature that enables a mesh structure. `ref`s now work across projects in dbt Cloud-enabled projects on Enterprise plans. -- **Governance** - dbt Cloud’s new governance features allow you to manage access and permissions across projects. +- **Cross-project references** - this is foundational feature that enables a mesh structure. `ref`s now work across dbt Cloud projects on Enterprise plans. +- **Governance** - dbt's new governance features allow you to manage access to your dbt models both within and across projects. - **Groups** - groups allow you to assign models to subsets of models within a project. - - **Access** - access configs allow you to control who can view and reference models both within and across projects. -- **Versioning** - building a dbt Mesh involves treating your data models as stable APIs. To achieve this you need mechanisms to version your models and allow graceful adoption and deprecation of models as they evolve. 
-- **Contracts** - data contracts set strict expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream consumers. + - **Access** - access configs allow you to control who can reference models. +- **Versioning** - building a dbt Mesh involves treating your data models as stable APIs. Model versioning is the mechanism to allow graceful adoption and deprecation of models as they evolve. +- **Contracts** - data contracts set strict expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream consumers' data products. ## Who is dbt Mesh for? -dbt Mesh is not for every organization! If you're just starting your dbt journey, don't worry about building a dbt Mesh right away, it increases some meta-complexity around managing your projects that could distract from building initial value in dbt. However, if you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring a dbt Mesh: +dbt Mesh is not for every organization! If you're just starting your dbt journey, don't worry about building a dbt Mesh right away. It increases some meta-complexity around managing your projects that could distract from building initial value in dbt. However, if you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring a dbt Mesh: - **The number of models** in your project is degrading performance and slowing down development. -Teams have developed **separate workflows** and need to decouple development. +- Teams have developed **separate workflows** and need to decouple development from each other. - **Security and governance** requirements are increasing and would benefit from increased isolation.
dbt Cloud is designed to coordinate the features above and simplify the meta-complexities (such as scoped CI and multi-project lineage) to solve for these problems. -## Learning goals +## Learning goals ✏️ - Understand the **purpose and tradeoffs** of building a dbt Mesh. - Develop an intuition for various **dbt Mesh patterns** and how to design a dbt Mesh for your organization. From d37f3d8abcc67d89f0372addcfbf12d51eaabdca Mon Sep 17 00:00:00 2001 From: Dave Connors Date: Fri, 13 Oct 2023 09:28:44 -0500 Subject: [PATCH 20/43] tweak intro --- website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 4bd19c10c7e..4cd11ecea91 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -6,7 +6,7 @@ hoverSnippet: Learn how to get started with dbt Mesh ## What is dbt Mesh? -Historically, building data teams has involved two extremes, building a centralized team or using embedded analysts. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts allows embeds to develop domain expertise while the central team focuses on building a strong operational foundation. A major difficulty of this model is managing the complexity of dependencies, governance, and workflows between all groups — attempting to manage this can often create friction in monorepos or complexity and silos in multiple repos. Ideally, teams should be able to work independently and also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. 
+Historically, building data teams has involved two extremes: building a centralized team or using embedded analysts across your org. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts allows embeds to develop domain expertise while the central team focuses on building a strong operational foundation. A major difficulty of this model is managing the complexity of dependencies, governance, and workflows between all groups — attempting to manage this can often create friction in monorepos or complexity and silos in multiple repos. Ideally, teams should be able to work independently and also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. dbt Mesh is not a product, it is a pattern enabled by a convergence of several features in dbt Cloud. It’s inspired by dbt’s best practices and ideas from [data mesh](https://en.wikipedia.org/wiki/Data_mesh). These features include: From bc47f9896d1c4804b7b9e2a6ee4315d4485c2b6b Mon Sep 17 00:00:00 2001 From: Dave Connors Date: Fri, 13 Oct 2023 09:40:28 -0500 Subject: [PATCH 21/43] tweak page 2 --- .../how-we-mesh/mesh-2-structures.md | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index e7d213dd2e4..5e34139cc13 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -6,29 +6,32 @@ hoverSnippet: Learn how to get started with dbt Mesh ## Exploring mesh patterns -Building a dbt Mesh is not a one-size-fits-all process. In fact, it's the opposite, it's about customizing your project structure to fit _your_ team and _your_ data. 
Often we've had to fit the data team and project structure into our company's org chart, or manage everything in one project to handle the constraints of our data and warehouse. dbt Mesh allows us to mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. Let's explore some language for discussing the design of these patterns. +Building a dbt Mesh is not a one-size-fits-all process. In fact, it's the opposite! It's about customizing your project structure to fit _your_ team and _your_ data. Often we've had to fit the data team and project structure into our company's org chart, or manage everything in one project to handle the constraints of our data and warehouse. dbt Mesh allows us to mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. -## Vertical splits +## DAG Splitting -Vertical splits are about separating out layers of transformation in the DAG order. Let's look at some examples. +The first (and perhaps most difficult!) decision when migrating to a mesh is deciding where to draw the line in your DAG to define the interfaces between the functional areas in your project. Let's explore some language for discussing the design of these patterns. +### Vertical splits + +Vertical splits separate out layers of transformation in the DAG order. Let's look at some examples. - **Splitting up staging and mart layers.** Creating a more tightly-controlled, shared set of components that other projects build on but can't edit. - **Isolating earlier models for security and governance requirements.** Separating out and masking PII data so that downstream consumers can't access it is a common use case for a vertical split. 
-- **Protecting complex or expensive data.** If you have a large or complex model that's expensive to run, you might want to isolate it so that it's safer from accidental selection and easier to debug when it has issues. +- **Protecting complex or expensive data.** If you have a large or complex model that's expensive to run, you might want to isolate it so that it's safer from accidental selection, independently deployable, and easier to debug when it has issues. -## Horizontal splits +### Horizontal splits -Horizonal splits are about splitting up the data based on source or domain. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. Let's consider some possibilites for horizontal splitting. +Horizontal splits separate your DAG based on source or domain. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. Let's consider some possibilities for horizontal splitting. -- **Team consumption patterns.** For example, splitting out marketing data and financial data. +- **Team consumption patterns.** For example, splitting out the marketing team's data flow into a separate project. - **Data from different sources.** For example, click event data and transactional ecommerce data. - **Team workflows.** If two embedded groups operate in different project management tools at different paces, or are staffed differently, you may want to split the projects up so they can move independently. ## Combining these divisions -- **These are not either/or techniques**. You can and should combine them in any way that makes sense for your organization. -- **Pick one type of split and focus on that first**. If you have a hub-and-spoke team topology for example, handle breaking out the central platform project before you split the remainder into domains.
Then if you need to break those domains up vertically you can shift back to that. -- **DRY applies to underlying data not just code.** Regardless of your splits, you should not be sourcing the same rows and columns into multiple nodes. Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface error for conflicts and erodes the single source of truth we're trying to create in our dbt project. +- **These are not either/or techniques**. You should consider both types of divisions, and combine them in any way that makes sense for your organization. +- **Pick one type of split and focus on that first**. If you have a hub-and-spoke team topology for example, handle breaking out the central platform project before you split the remainder into domains. Then if you need to break those domains up horizontally you can focus on that after the fact. +- **DRY applies to underlying data not just code.** Regardless of your splits, you should not be sourcing the same rows and columns into multiple nodes. Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface area for conflicts and erodes the single source of truth we're trying to create in our dbt projects. 
## Monorepo vs multi-repo From 0f1521194f9b1259791fc0779274a87e163d9ac1 Mon Sep 17 00:00:00 2001 From: Dave Connors Date: Fri, 13 Oct 2023 10:37:05 -0500 Subject: [PATCH 22/43] replace the `TODO Dave` with content --- .../how-we-mesh/mesh-2-structures.md | 2 +- .../how-we-mesh/mesh-3-implementation.md | 118 ++++++++++++++++-- 2 files changed, 110 insertions(+), 10 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index 5e34139cc13..921ceeece41 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -31,7 +31,7 @@ Horizonal splits separate your DAG based on source or domain. These splits are o - **These are not either/or techniques**. You should consider both types of divisions, and combine them in any way that makes sense for your organization. - **Pick one type of split and focus on that first**. If you have a hub-and-spoke team topology for example, handle breaking out the central platform project before you split the remainder into domains. Then if you need to break those domains up horizontally you can focus on that after the fact. -- **DRY applies to underlying data not just code.** Regardless of your splits, you should not be sourcing the same rows and columns into multiple nodes. Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface area for conflicts and erodes the single source of truth we're trying to create in our dbt projects. +- **DRY applies to underlying data not just code.** Regardless of your splits, you should not be sourcing the same rows and columns into multiple nodes. 
Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface area for conflicts and erodes the single source of truth we're trying to create in our dbt project. ## Monorepo vs multi-repo diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md index ae7f1d120b1..9a532885091 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md @@ -6,10 +6,22 @@ hoverSnippet: Learn how to get started with dbt Mesh Let's examine an outline of steps to start implementing a dbt Mesh in your organization. +## Our example project + +We've provided a set of example projects you can use to explore the topics covered here. If you want to follow along with the next section you can take the baseline [Jaffle Shop](https://github.com/dbt-labs/jaffle-shop) project, and we'll split it into 3 separate projects in a multi-repo dbt Mesh. Note that you'll need to leverage dbt Cloud for this, as cross-project references are powered via dbt Cloud's APIs. + +- **[Platform](https://github.com/dbt-labs/jaffle-shop-mesh-platform)** - containing our centralized staging models. +- **[Marketing](https://github.com/dbt-labs/jaffle-shop-mesh-marketing)** - containing our marketing marts. +- **[Finance](https://github.com/dbt-labs/jaffle-shop-mesh-finance)** - containing our finance marts. + +You can also just read along and look at the already ‘meshified’ versions of the project. + ## Research your current structure +While we've already decided how to break apart our jaffle shop project, the right place to start in the real world is understanding how your project is already being built and deployed: + - **Look at your selectors** to figure out how people are grouping models right now. -- **Examine jobs that you run**, look at how they're defined. 
+- **Examine jobs that you run**, look at how they are grouping and deploying models. - **Look at your lineage graph** to see how models are connected. - **Talk to teams** about what sort of separation is naturally existing right now. - Are there various domains people are focused on? @@ -18,14 +30,102 @@ Let's examine an outline of steps to start implementing a dbt Mesh in your organ ## Add groups and access -Once you have a sense of some initial groupings, implement group and access permissions within a project. +Once you have a sense of some initial groupings, the first step is to implement **group and access permissions** within a project. + +- First we'll create a [group](/docs/build/groups) to define the owner of a set of models. + +```yml +# in models/__groups.yml + +groups: + - name: marketing + owner: + - name: Ben Jaffleck + email: ben.jaffleck@jaffleshop.com +``` + +- Then, we can add models to that group using the `group:` key in the model's yml entry. + +```yml +# in models/marketing/__models.yml + +models: + - name: fct_marketing_model + group: marketing + - name: stg_marketing_model + group: marketing +``` + +- Once models are added to the group, we will **add [access](/docs/collaborate/govern/model-access) settings to the models** based on their connections between groups, *opting for the most private access that will maintain current functionality*. This means that any model that has *only* relationships to other models in the same group should be `private` , and any model that has cross-group relationships, or is a terminal node in the group DAG should be `protected` so that other parts of the DAG can continue to reference it. + +```yml +# in models/marketing/__models.yml + +models: + - name: fct_marketing_model + group: marketing + access: protected + - name: stg_marketing_model + group: marketing + access: private +``` + +- **Validate these groups by incrementally migrating your jobs** to execute these groups specifically via selection syntax. 
We would recommend doing this in parallel to your production jobs until you’re sure about them. This will help you feel out if you’ve drawn the lines in the right place. +- If you find yourself **consistently making changes across multiple groups** when you update logic, that’s a sign that **you may want to rethink your groupings**. + +## Split your projects + +- When you’ve **confirmed the right groups**, it's time to split your projects. + - **Do _one_ group at a time**! + - **Do _not_ refactor as you migrate**, however tempting that may be. Focus on getting 1-to-1 parity and log any issues you find in doing the migration for later. Once you’ve fully migrated the project then you can start optimizing it for its new life as part of your mesh. +- Start by splitting your project within the same repository for full git tracking and easy reversion if you need to start from scratch. +- **Move your grouped models into a subfolder**. This will include any model in the selected group, its associated yml entry, as well as its parent or child resources as appropriate depending on where this group sits in your DAG. + - Note that just like in your dbt project, circular references are not allowed! Project B cannot have parents and children in Project A, for example. +- Copy any macros used by the resources you moved. +- Create a new `packages.yml` file in your subdirectory with the packages that are used by the resources you moved. +- Create a new `dbt_project.yml` file in the subdirectory. +- For any model that has a cross-project dependency (this may be in the files you moved, or in the files that remain in your project): + - update the relevant `{{ ref() }}` function to have two arguments, where the first is the name of the source project and the second is the name of the model: e.g. `{{ ref('jaffle_shop', 'my_upstream_model') }}` + - Update the upstream, cross-project parents’ `access` configs to `public`, ensuring any project can safely `{{ ref() }}` those models.
+ - We *highly* recommend adding a [model contract](/docs/collaborate/govern/model-contracts) to the upstream models to ensure the data shape is consistent and reliable for your downstream consumers. +- **Create a `dependencies.yml` file** ([docs](/docs/collaborate/govern/project-dependencies)) for the downstream project, declaring the upstream project as a dependency. + +```yml + +# in dependencies.yml +projects: + - name: jaffle_shop +``` + + +## Connecting existing projects via the mesh + +Some organizations may already be coordinating across multiple dbt projects. Most often this is via: + +1. Installing parent projects as dbt packages +2. Using `{{ source() }}` functions to read the outputs of a parent project as inputs to a child project. + +This has a few drawbacks: + +1. If using packages, each project has to include *all* resources from *all* projects in its manifest, slowing down dbt and the development cycle. +2. If using sources, there are breakages in the lineage, as there's no real connection between the parent and child projects. -- TODO: Dave -- **Validate these groups by incrementally migrating your jobs** to be based on them. We would recommend in parallel to your production jobs until you’re sure about them. This will help you feel out if you’ve drawn the lines in the right place. +The migration steps here are much simpler than splitting up a monolith! -## Do the splits +1. If using the `package` method: + 1. In the parent project: + 1. mark all models being imported downstream as `public` and add a model contract. + 2. In the child project: + 1. Remove the package entry from `packages.yml` + 2. Add the upstream project to your `dependencies.yml` + 3. Update the `{{ ref() }}` functions to models from the upstream project to include the project name argument. +1. If using `source` method: + 1. In the parent project: + 1. mark all models being imported downstream as `public` and add a model contract. + 2. In the child project: + 2. 
Add the upstream project to your `dependencies.yml` + 3. Replace the `{{ source() }}` functions with cross project `{{ ref() }}` functions. + 4. Remove the unnecessary `source` definitions. -- When you’ve **confirmed the right groups** it's time to split the projects out. -- Once again, **use `dbt-meshify`** to pull chunks out into their own projects. - - **Do _one_ group at a time**, using the groups as your selectors. - - **Do _not_ refactor as you migrate**, however tempting that may be. Focus on getting 1-to-1 parity and log any issues you find in doing the migration for later. Once you’ve fully landed the project then you can start optimizing it for its new life as part of the mesh. +**** +We recommend using the `dbt-meshify` [command line tool]() to help you do this. From fde5c98f55c6c51c00144d29afbbcaadf72be1ec Mon Sep 17 00:00:00 2001 From: mirnawong1 Date: Fri, 13 Oct 2023 16:52:45 +0100 Subject: [PATCH 23/43] fold in jerco's feedback --- website/docs/docs/build/packages.md | 10 ++++++++-- .../docs/collaborate/govern/project-dependencies.md | 11 +++++++---- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/website/docs/docs/build/packages.md b/website/docs/docs/build/packages.md index 6756534fccf..b2fae578bba 100644 --- a/website/docs/docs/build/packages.md +++ b/website/docs/docs/build/packages.md @@ -25,8 +25,14 @@ dbt _packages_ are in fact standalone dbt projects, with models and macros that * It's important to note that defining and installing dbt packages is different from [defining and installing Python packages](/docs/build/python-models#using-pypi-packages) -:::info Project Dependencies versus Packages - Packages are different to Project dependencies, a feature that allows cross-project `ref`. Refer to [Project dependencies](/docs/collaborate/govern/project-dependencies) for more info about the difference between `dependencies.yml` and `packages.yml`. 
+:::info `dependencies.yml` has replaced `packages.yml` +Starting from dbt v1.6, `dependencies.yml` has replaced `packages.yml`. It can include both types of dependencies: packages and projects. + +- "Package" dependencies are a way of adding the source code from someone else's dbt project into your own, like a library. +- "Project" dependencies are a different way of building on top of someone else's work in dbt. Refer to [Project dependencies](/docs/collaborate/govern/project-dependencies) for more info. +- +You can rename `packages.yml` to `dependencies.yml`, _unless_ you need to use Jinja within your packages specification. For example,adding an environment variable with a git token in a private git package specification. + ::: diff --git a/website/docs/docs/collaborate/govern/project-dependencies.md b/website/docs/docs/collaborate/govern/project-dependencies.md index e2020c95320..1faa2f746a9 100644 --- a/website/docs/docs/collaborate/govern/project-dependencies.md +++ b/website/docs/docs/collaborate/govern/project-dependencies.md @@ -22,15 +22,18 @@ This year, dbt Labs is introducing an expanded notion of `dependencies` across m - **Projects** — A _new_ way to take a dependency on another project. Using a metadata service that runs behind the scenes, dbt Cloud resolves references on-the-fly to public models defined in other projects. You don't need to parse or run those upstream models yourself. Instead, you treat your dependency on those models as an API that returns a dataset. The maintainer of the public model is responsible for guaranteeing its quality and stability. +Starting in dbt v1.6 or higher, `packages.yml` has been renamed to `dependencies.yml`. However, if you need to use Jinja within your packages config, such as an environment variable for your private package, you need to keep using `packages.yml` for your packages for now. Refer to [FAQs](#faqs) for more info.
+ ## Prerequisites In order to add project dependencies and resolve cross-project `ref`, you must: - Have the feature enabled (speak to your account team) -- Use dbt v1.6 for **both** the upstream ("producer") project and the downstream ("consumer") project. +- Use dbt v1.6 or higher for **both** the upstream ("producer") project and the downstream ("consumer") project. - Have a deployment environment in the upstream ("producer") project [that is set to be your production environment](/docs/deploy/deploy-environments#set-as-production-environment-beta) - Have a successful run of the upstream ("producer") project -- Have a multi-tenant or single-tenant [dbt Cloud Enterprise](https://www.getdbt.com/pricing) account -- IS DEPLOYMENT /TENANCY INFO RIGHT? +- Have a multi-tenant or single-tenant [dbt Cloud Enterprise](https://www.getdbt.com/pricing) account (Azure ST is not supported but coming soon) + ## Example As an example, let's say you work on the Marketing team at the Jaffle Shop. The name of your team's project is `jaffle_marketing`: From 66836f046e9a4b290aee721117d58134160fd628 Mon Sep 17 00:00:00 2001 From: mirnawong1 Date: Fri, 13 Oct 2023 16:54:59 +0100 Subject: [PATCH 24/43] clarify --- website/docs/docs/build/packages.md | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/website/docs/docs/build/packages.md b/website/docs/docs/build/packages.md index b2fae578bba..e3c45823b0b 100644 --- a/website/docs/docs/build/packages.md +++ b/website/docs/docs/build/packages.md @@ -26,12 +26,11 @@ dbt _packages_ are in fact standalone dbt projects, with models and macros that :::info `dependencies.yml` has replaced `packages.yml` -Starting from dbt v1.6, `dependencies.yml` has replaced `packages.yml`. It can include both types of dependencies: packages and projects. - -- "Package" dependencies are a way of adding the source code from someone else's dbt project into your own, like a library. 
-- "Project" dependencies are a different way of building on top of someone else's work in dbt. Refer to [Project dependencies](/docs/collaborate/govern/project-dependencies) for more info. +Starting from dbt v1.6, `dependencies.yml` has replaced `packages.yml`. This file can now contain both types of dependencies: "package" and "project" dependencies. +- "Package" dependencies lets you add source code from someone else's dbt project into your own, like a library. +- "Project" dependencies provide a different way to build on top of someone else's work in dbt. Refer to [Project dependencies](/docs/collaborate/govern/project-dependencies) for more info. - -You can rename `packages.yml` to `dependencies.yml`, _unless_ you need to use Jinja within your packages specification. For example,adding an environment variable with a git token in a private git package specification. +You can rename `packages.yml` to `dependencies.yml`, _unless_ you need to use Jinja within your packages specification. This could be necessary, for example, if you want to add an environment variable with a git token in a private git package specification. ::: From 6a5a3c4549ed5b3e5a08d437a78727b3147677c4 Mon Sep 17 00:00:00 2001 From: Dave Connors Date: Fri, 13 Oct 2023 11:23:53 -0500 Subject: [PATCH 25/43] rewrite intro --- .../how-we-mesh/mesh-1-intro.md | 32 +++++++++++-------- .../how-we-mesh/mesh-4-conclusion.md | 9 ------ website/sidebars.js | 1 - 3 files changed, 18 insertions(+), 24 deletions(-) delete mode 100644 website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 4cd11ecea91..3dc9f86c010 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -6,30 +6,34 @@ hoverSnippet: Learn how to get started with dbt Mesh ## What is dbt Mesh? 
-Historically, building data teams has involved two extremes: building a centralized team or using embedded analysts across your org. More recently, hub-and-spoke models have become popular as a way to balance the tradeoffs: using a centralized platform team _and_ embedded analysts allows embeds to develop domain expertise while the central team focuses on building a strong operational foundation. A major difficulty of this model is managing the complexity of dependencies, governance, and workflows between all groups — attempting to manage this can often create friction in monorepos or complexity and silos in multiple repos. Ideally, teams should be able to work independently and also be able to collaborate; sharing data, code, and best practices. dbt Mesh provides the tooling for teams to finally achieve this. +Organizations of all sizes rely upon dbt to manage their data transformations, from small startups to large enterprises. At scale, it can be challenging to coordinate all the organizational and technical requirements demanded by your stakeholders within the scope of a single dbt project. To date, there also hasn't been a first-class way to effectively manage the dependencies, governance, and workflows between multiple dbt projects. -dbt Mesh is not a product, it is a pattern enabled by a convergence of several features in dbt Cloud. It’s inspired by dbt’s best practices and ideas from [data mesh](https://en.wikipedia.org/wiki/Data_mesh). These features include: +Regardless of your organization's size and complexity, dbt should empower data teams to work independently and collaboratively; sharing data, code, and best practices without sacrificing security or autonomy. dbt Mesh provides the tooling for teams to finally achieve this. -- **Cross-project references** - this is foundational feature that enables a mesh structure. `ref`s now work across dbt Cloud projects on Enterprise plans. 
+dbt Mesh is not a single product: it is a pattern enabled by a convergence of several features in dbt: + +- **[Cross-project references](/docs/collaborate/govern/project-dependencies#usage)** - this is the foundational feature that enables the multi-project deployments. `{{ ref() }}`s now work across dbt Cloud projects on Enterprise plans. +- **[dbt Explorer](/docs/collaborate/explore-projects)** - dbt Cloud's metadata-powered documentation platform, complete with full, cross-project lineage. - **Governance** - dbt's new governance features allow you to manage access to your dbt models both within and across projects. - - **Groups** - groups allow you to assign models to subsets of models within a project. - - **Access** - access configs allow you to control who can reference models. -- **Versioning** - building a dbt Mesh involves treating your data models as stable APIs. Model versioning is the mechanism to allow graceful adoption and deprecation of models as they evolve. -- **Contracts** - data contracts set strict expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream comsumers' data products. + - **[Groups](/docs/build/groups)** - groups allow you to assign models to subsets of models within a project. + - **[Access](/docs/collaborate/govern/model-access)** - access configs allow you to control who can reference models. +- **[Model Versions](/docs/collaborate/govern/model-versions)** - When coordinating across projects and teams, we recommend treating your data models as stable APIs. Model versioning is the mechanism to allow graceful adoption and deprecation of models as they evolve. +- **[Model Contracts](/docs/collaborate/govern/model-contracts)** - data contracts set explicit expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream comsumers' data products. ## Who is dbt Mesh for? 
-dbt Mesh is not for every organization! If you're just starting your dbt journey, don't worry about building a dbt Mesh right away. It increases some meta-complexity around managing your projects that could distract from building initial value in dbt. However, if you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring a dbt Mesh:
+The multi-project architecture is for organizations with mature, complex transformation workflows in dbt who want to increase the flexibility and performance of their dbt projects. If you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring this paradigm:
 
 - **The number of models** in your project is degrading performance and slowing down development.
 - Teams have developed **separate workflows** and need to decouple development from each other.
 - **Security and governance** requirements are increasing and would benefit from increased isolation.
 
-dbt Cloud is designed to coordinate the features above and simplify the meta-complexities (such as scoped CI and multi-project lineage) to solve for these problems.
+dbt Cloud is designed to coordinate the features above and simplify the complexity to solve for these problems.
+
+If you're just starting your dbt journey, don't worry about building a multi-project architecture right away. You can _incrementally_ adopt the features in this guide as you scale. The collection of features work effectively as independent tools. Familiarizing yourself with the tooling and features that make up a multi-project architecture, and how they can apply to your organization will help you make better decisions as you grow.
 
-## Learning goals ✏️
+## Learning goals
 
-- Understand the **purpose and tradeoffs** of building a dbt Mesh.
-- Develop an intuition for various **dbt Mesh patterns** and how to design a dbt Mesh for your organization.
-- Establish recommended steps to **incrementally adopt** a dbt Mesh pattern in your dbt implementation. -- Offer **tooling** to help you more quickly and easily implement your dbt Mesh plan. +- Understand the **purpose and tradeoffs** of building a multi-project architecture. +- Develop an intuition for various **dbt Mesh patterns** and how to design a multi-project architecture for your organization. +- Establish recommended steps to **incrementally adopt** these patterns in your dbt implementation. diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md b/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md deleted file mode 100644 index d96bbbf741e..00000000000 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-4-conclusion.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -title: "Go forth and mesh!" -description: Getting started with dbt Mesh patterns -hoverSnippet: Learn how to get started with dbt Mesh ---- - -dbt Mesh is a powerful new pattern for data transformation. It helps adapt teams and their data towards each other, rather than making arbitrary decisions based on the constraints of either one. By creating alignment between your people and data flows, developers can move faster, analysts can be more productive, and data consumers can be more confident in the data they use. - -It's important to reiterate that you can _incrementally_ adopt the ideas in this guide in your organization as you hit constraints. The collection of features tha enable dbt Mesh work effectively as independent tools, and there's no pressure to adopt this as the _right pattern_ to build with. That said, familiarizing yourself with the tooling and dbt Mesh concepts, as well as thinking through how they can apply to your organization, will help you make better decisions as you grow. We hope this guide has given you a good starting point to do that. 
diff --git a/website/sidebars.js b/website/sidebars.js index dab84017dd9..819c4d30779 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -952,7 +952,6 @@ const sidebarSettings = { items: [ "guides/best-practices/how-we-mesh/mesh-2-structures", "guides/best-practices/how-we-mesh/mesh-3-implementation", - "guides/best-practices/how-we-mesh/mesh-4-conclusion", ], }, { From c6f75fc29181a2991eb0f46f941448dbbd3bf2e0 Mon Sep 17 00:00:00 2001 From: Dave Connors Date: Fri, 13 Oct 2023 12:25:23 -0500 Subject: [PATCH 26/43] updated mesh 2 --- .../how-we-mesh/mesh-2-structures.md | 50 +++++++++++-------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index 921ceeece41..38b1f96f12b 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -1,42 +1,52 @@ --- -title: "Deciding how to structure your mesh" +title: Deciding how to structure your mesh description: Getting started with dbt Mesh patterns hoverSnippet: Learn how to get started with dbt Mesh --- - ## Exploring mesh patterns -Building a dbt Mesh is not a one-size-fits-all process. In fact, it's the opposite! It's about customizing your project structure to fit _your_ team and _your_ data. Often we've had to fit the data team and project structure into our company's org chart, or manage everything in one project to handle the constraints of our data and warehouse. dbt Mesh allows us to mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. +When adopting a multi-project architecture, where do we draw the lines between our projects? 
+ +How should we organize our data workflows in a world where instead of having a single dbt DAG, we have multiple projects speaking to each other, each comprised of their own DAG? + +Adopting the dbt Mesh pattern is not a one-size-fits-all process. In fact, it's the opposite! It's about customizing your project structure to fit _your_ team and _your_ data. Now we can mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. + +While there is not a single best way to implement this pattern, there are some common decisions points that will be helpful for you to consider. + +At a high level you’ll need to decide: + +- Where to draw the lines between your dbt Projects -- ie how do you determine where to split your DAG and which models go in which project? +- How to manage your code -- do you want multiple dbt Projects living in the same repository (mono-repo) or do you want to have multiple repos with one repo per project? + +## Define your project interfaces by splitting your DAG -## DAG Splitting +The first (and perhaps most difficult!) decision when migrating to a multi-project architecture is deciding where to draw the line in your DAG to define the interfaces between your projects. Let's explore some language for discussing the design of these patterns. -The first (and perhaps most difficult!) decision when migrating to a mesh is deciding where to draw the line in your DAG to define the interfaces between the functional areas in your project. Let's explore some language for discussing the design of these patterns. ### Vertical splits -Vertical splits separate out layers of transformation in the DAG order. Let's look at some examples. +Vertical splits separate out layers of transformation in DAG order. Let's look at some examples. 
-- **Splitting up staging and mart layers.** Creating a more tightly-controlled, shared set of components that other projects build on but can't edit.
-- **Isolating earlier models for security and governance requirements.** Separating out and masking PII data so that downstream consumers can't access it is a common use case for a vertical split.
-- **Protecting complex or expensive data.** If you have a large or complex model that's expensive to run, you might want to isolate it so that it's safer from accidental selection, independently deployable, and easier to debug when it has issues.
+- **Splitting up staging and mart layers.** to create a more tightly-controlled, shared set of components that other projects build on but can't edit.
+- **Isolating earlier models for security and governance requirements.** to separate out and mask PII data so that downstream consumers can't access it is a common use case for a vertical split.
+- **Protecting complex or expensive data.** to isolate large or complex models that are expensive to run so that they are safe from accidental selection, independently deployable, and easier to debug when they have issues.
 
 ### Horizontal splits
 
-Horizonal splits separate your DAG based on source or domain. These splits are often based around the shape and size of the data and how it's used, rather than the security or governance requirements. Let's consider some possibilites for horizontal splitting.
+Horizontal splits separate your DAG based on source or domain. These splits are often based around the shape and size of the data and how it's used. Let's consider some possibilities for horizontal splitting.
 
 - **Team consumption patterns.** For example, splitting out the marketing team's data flow into a separate project.
-- **Data from different sources.** For example, click event data and transactional ecommerce data.
-- **Team workflows.** If two embedded groups operate in different project management tools at different paces, or are staffed differently, you may want to split the projects up so they can move independently.
+- **Data from different sources.** For example, clickstream event data and transactional ecommerce data may need to be modeled independently of each other.
+- **Team workflows.** For example, if two embedded groups operate at different paces, you may want to split the projects up so they can move independently.
 
-## Combining these divisions
+### Combining these strategies
 
-- **These are not either/or techniques**. You should consider both types of divisions, and combine them in any way that makes sense for your organization.
+- **These are not either/or techniques**. You should consider both types of splits, and combine them in any way that makes sense for your organization.
 - **Pick one type of split and focus on that first**. If you have a hub-and-spoke team topology for example, handle breaking out the central platform project before you split the remainder into domains. Then if you need to break those domains up horizontally you can focus on that after the fact.
-- **DRY applies to underlying data not just code.** Regardless of your splits, you should not be sourcing the same rows and columns into multiple nodes. Working within a mesh structure it becomes increasingly important that we don’t duplicate work, which creates surface area for conflicts and erodes the single source of truth we're trying to create in our dbt project.
+- **DRY applies to underlying data not just code.** Regardless of your strategy, you should not be sourcing the same rows and columns into multiple nodes. When working within a mesh pattern it becomes increasingly important that we don't duplicate logic or data.
-## Monorepo vs multi-repo +## Determine your git strategy -A dbt Mesh can exist as multiple projects in a single repo (monorepo) or as multiple projects in their own repositories (multi-repo). +A multi-project architecture can exist in a single repo (monorepo) or as multiple projects in their own repositories (multi-repo). -- **Monorepos are often easier to get started with**, but can become unwieldy as the number of models and teams grow. -- If you're a **smaller team** looking primarily to speed up and simplify development, a **monorepo** is likely the right choice. -- If you're a **larger team with multiple groups**, and need to decouple projects for security and enablement of different development styles and rhythms, a **multi-repo setup** is your best bet. +- If you're a **smaller team** looking primarily to speed up and simplify development, a **monorepo** is likely the right choice, but can become unwieldy as the number of projects, models and contributors grow. +- If you’re a **larger team with multiple groups**, and need to decouple projects for security and enablement of different development styles and rhythms, a **multi-repo setup** is your best bet. 
From 14960125c79b76e04030d45c28813edb028df6a3 Mon Sep 17 00:00:00 2001 From: Dave Connors Date: Fri, 13 Oct 2023 12:46:03 -0500 Subject: [PATCH 27/43] tweaks --- .../how-we-mesh/mesh-3-implementation.md | 81 +++++++++---------- 1 file changed, 40 insertions(+), 41 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md index 9a532885091..bf866aedeb0 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md @@ -4,25 +4,11 @@ description: Getting started with dbt Mesh patterns hoverSnippet: Learn how to get started with dbt Mesh --- -Let's examine an outline of steps to start implementing a dbt Mesh in your organization. +As mentioned before, the key decision to migrating to a multi-project architecture is understanding how your project is already being grouped, built, and deployed. We can use this information to inform our decision to split our project apart. -## Our example project - -We've provided a set of example projects you can use to explore the topics covered here. If you want to follow along with the next section you can take the baseline [Jaffle Shop](https://github.com/dbt-labs/jaffle-shop) project, and we'll split it into 3 separate projects in a multi-repo dbt Mesh. Note that you'll need to leverage dbt Cloud for this, as cross-project references are powered via dbt Cloud's APIs. - -- **[Platform](https://github.com/dbt-labs/jaffle-shop-mesh-platform)** - containing our centralized staging models. -- **[Marketing](https://github.com/dbt-labs/jaffle-shop-mesh-marketing)** - containing our marketing marts. -- **[Finance](https://github.com/dbt-labs/jaffle-shop-mesh-finance)** - containing our finance marts. - -You can also just read along and look at the already ‘meshified’ versions of the project. 
- -## Research your current structure - -While we've already decided how to break apart our jaffle shop project, the right place to start in the real world is understanding how your project is already being built and deployed: - -- **Look at your selectors** to figure out how people are grouping models right now. -- **Examine jobs that you run**, look at how they are grouping and deploying models. -- **Look at your lineage graph** to see how models are connected. +- **Examine your jobs** which sets of models are most often built together? +- **Look at your lineage graph** how are models connected? +- **Look at your selectors** defined in `selectors.yml` - how are people already defining resource groups? - **Talk to teams** about what sort of separation is naturally existing right now. - Are there various domains people are focused on? - Are there various sizes, shapes, and sources of data that get handled separately (such as click event data)? @@ -30,7 +16,7 @@ While we've already decided how to break apart our jaffle shop project, the righ ## Add groups and access -Once you have a sense of some initial groupings, the first step is to implement **group and access permissions** within a project. +Once you have a sense of some initial groupings, the first step is to implement **group and access permissions** within a single project. - First we'll create a [group](/docs/build/groups) to define the owner of a set of models. @@ -71,24 +57,20 @@ models: ``` - **Validate these groups by incrementally migrating your jobs** to execute these groups specifically via selection syntax. We would recommend doing this in parallel to your production jobs until you’re sure about them. This will help you feel out if you’ve drawn the lines in the right place. -- If you find yourself **consistently making changes across multiple groups** when you update logic, that’s a sign that **you may want to rethink your groupings**. 
+- If you find yourself **consistently making changes across multiple groups** when you update logic, that’s a sign that **you may want to rethink your groups**. ## Split your projects -- When you’ve **confirmed the right groups**, it's time to split your projects. - - **Do _one_ group at a time**! - - **Do _not_ refactor as you migrate**, however tempting that may be. Focus on getting 1-to-1 parity and log any issues you find in doing the migration for later. Once you’ve fully migrated the project then you can start optimizing it for its new life as part of your mesh. -- Start by splitting your project within the same repository for full git tracking and easy reversion if you need to start from scratch. -- **Move your grouped models into a subfolder**. This will include any model in the selected group, it's associated yml entry, as well as its parent or child resources as appropriate depending on where this group sits in your DAG. - - Note that just like in your dbt project, circular refereneces are not allowed! Project B cannot have parents and children in Project A, for example. -- Copy any macros used by the resources you moved. -- Create a new `packages.yml` file in your subdirectory with the packages that are used by the resources you moved. -- Create a new `dbt_project.yml` file in the subdirectory. -- For any model that has a cross-project dependency (this may be in the files you moved, or in the files that remain in your project): - - update the relevant `{{ ref() }}` function to have two arguments, where the first is the name of the source project and the second is the name of the model: e.g. `{{ ref('jaffle_shop', 'my_upstream_model') }}` - - Update the upstream, cross-project parents’ `access` configs to `public` , ensuring any project can safely `{{ ref() }}` those models. 
 - We *highly* recommend adding a [model contract](/docs/collaborate/govern/model-contracts) to the upstream models to ensure the data shape is consistent and reliable for your downstream consumers.
-- **Create a `dependencies.yml` file** ([docs](/docs/collaborate/govern/project-dependencies)) for the downstream project, declaring the upstream project as a dependency.
+1. **Move your grouped models into a subfolder**. This will include any model in the selected group, its associated yml entry, as well as its parent or child resources as appropriate depending on where this group sits in your DAG.
+    1. Note that just like in your dbt project, circular references are not allowed! Project B cannot have parents and children in Project A, for example.
+2. **Create a new `dbt_project.yml` file** in the subdirectory.
+3. **Copy any macros** used by the resources you moved.
+4. **Create a new `packages.yml` file** in your subdirectory with the packages that are used by the resources you moved.
+5. **Update `{{ ref }}` functions** — For any model that has a cross-project dependency (this may be in the files you moved, or in the files that remain in your project):
+    1. Update the `{{ ref() }}` function to have two arguments, where the first is the name of the source project and the second is the name of the model: e.g. `{{ ref('jaffle_shop', 'my_upstream_model') }}`
+    2. Update the upstream, cross-project parents’ `access` configs to `public` , ensuring any project can safely `{{ ref() }}` those models.
+    3. We *highly* recommend adding a [model contract](/docs/collaborate/govern/model-contracts) to the upstream models to ensure the data shape is consistent and reliable for your downstream consumers.
+6. **Create a `dependencies.yml` file** ([docs](/docs/collaborate/govern/project-dependencies)) for the downstream project, declaring the upstream project as a dependency.
```yml @@ -97,8 +79,15 @@ projects: - name: jaffle_shop ``` +### Best practices + +- When you’ve **confirmed the right groups**, it's time to split your projects. + - **Do *one* group at a time**! + - **Do *not* refactor as you migrate**, however tempting that may be. Focus on getting 1-to-1 parity and log any issues you find in doing the migration for later. Once you’ve fully migrated the project then you can start optimizing it for its new life as part of your mesh. +- Start by splitting your project within the same repository for full git tracking and easy reversion if you need to start from scratch. + -## Connecting existing projects via the mesh +## Connecting existing projects Some organizations may already be coordinating across multiple dbt projects. Most often this is via: @@ -114,7 +103,7 @@ The migration steps here are much simpler than splitting up a monolith! 1. If using the `package` method: 1. In the parent project: - 1. mark all models being imported downstream as `public` and add a model contract. + 1. mark all models being referenced downstream as `public` and add a model contract. 2. In the child project: 1. Remove the package entry from `packages.yml` 2. Add the upstream project to your `dependencies.yml` @@ -123,9 +112,19 @@ The migration steps here are much simpler than splitting up a monolith! 1. In the parent project: 1. mark all models being imported downstream as `public` and add a model contract. 2. In the child project: - 2. Add the upstream project to your `dependencies.yml` - 3. Replace the `{{ source() }}` functions with cross project `{{ ref() }}` functions. - 4. Remove the unnecessary `source` definitions. + 1. Add the upstream project to your `dependencies.yml` + 2. Replace the `{{ source() }}` functions with cross project `{{ ref() }}` functions. + 3. Remove the unnecessary `source` definitions. 
+ +## Additional Resources +### Our example projects + +We've provided a set of example projects you can use to explore the topics covered here. We've split our [Jaffle Shop](https://github.com/dbt-labs/jaffle-shop) project into 3 separate projects in a multi-repo dbt Mesh. Note that you'll need to leverage dbt Cloud to use multi-project architecture, as cross-project references are powered via dbt Cloud's APIs. + +- **[Platform](https://github.com/dbt-labs/jaffle-shop-mesh-platform)** - containing our centralized staging models. +- **[Marketing](https://github.com/dbt-labs/jaffle-shop-mesh-marketing)** - containing our marketing marts. +- **[Finance](https://github.com/dbt-labs/jaffle-shop-mesh-finance)** - containing our finance marts. + +### dbt-meshify -**** -We recommend using the `dbt-meshify` [command line tool]() to help you do this. +We recommend using the `dbt-meshify` [command line tool]() to help you do this. This comes with CLI operations to automate most of the above steps. From 04987dc70ccf04b6c6e5c831346fab2b291c293c Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 13:59:23 -0400 Subject: [PATCH 28/43] update wording --- .../docs/guides/best-practices/how-we-mesh/mesh-2-structures.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index 38b1f96f12b..c263958d37b 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -46,7 +46,7 @@ Horizonal splits separate your DAG based on source or domain. These splits are o ## Determine your git strategy -A multi-project architecture can exist in a single repo (monorepo) or as multiple projects in their own repositories (multi-repo). 
+A multi-project architecture can exist in a single repo (monorepo) or as multiple projects, each in their own repositories (multi-repo). - If you're a **smaller team** looking primarily to speed up and simplify development, a **monorepo** is likely the right choice, but can become unwieldy as the number of projects, models and contributors grow. - If you’re a **larger team with multiple groups**, and need to decouple projects for security and enablement of different development styles and rhythms, a **multi-repo setup** is your best bet. From 6d82874240decf79dbd6c49880b88f8d44addf14 Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 14:00:23 -0400 Subject: [PATCH 29/43] update wording --- .../docs/guides/best-practices/how-we-mesh/mesh-2-structures.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index c263958d37b..826f92b38a7 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -46,7 +46,7 @@ Horizonal splits separate your DAG based on source or domain. These splits are o ## Determine your git strategy -A multi-project architecture can exist in a single repo (monorepo) or as multiple projects, each in their own repositories (multi-repo). +A multi-project architecture can exist in a single repo (monorepo) or as multiple projects, with each one being in their own repository (multi-repo). - If you're a **smaller team** looking primarily to speed up and simplify development, a **monorepo** is likely the right choice, but can become unwieldy as the number of projects, models and contributors grow. - If you’re a **larger team with multiple groups**, and need to decouple projects for security and enablement of different development styles and rhythms, a **multi-repo setup** is your best bet. 
From 1c06f4af767bca31a37b4a692b3199eb664cd6cb Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 14:09:04 -0400 Subject: [PATCH 30/43] update mesh to dbt Mesh --- .../docs/guides/best-practices/how-we-mesh/mesh-2-structures.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index 826f92b38a7..caebc1b025b 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -1,5 +1,5 @@ --- -title: Deciding how to structure your mesh +title: Deciding how to structure your dbt Mesh description: Getting started with dbt Mesh patterns hoverSnippet: Learn how to get started with dbt Mesh --- From 811f602deb51cd7eb8c2dd182ea595dfe3fb37e5 Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 15:51:14 -0400 Subject: [PATCH 31/43] Update website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md Co-authored-by: Leona B. Campbell <3880403+runleonarun@users.noreply.github.com> --- website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 3dc9f86c010..029cd7a0cd1 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -15,7 +15,7 @@ dbt Mesh is not a single product: it is a pattern enabled by a convergence of se - **[Cross-project references](/docs/collaborate/govern/project-dependencies#usage)** - this is the foundational feature that enables the multi-project deployments. `{{ ref() }}`s now work across dbt Cloud projects on Enterprise plans. 
- **[dbt Explorer](/docs/collaborate/explore-projects)** - dbt Cloud's metadata-powered documentation platform, complete with full, cross-project lineage. - **Governance** - dbt's new governance features allow you to manage access to your dbt models both within and across projects. - - **[Groups](/docs/build/groups)** - groups allow you to assign models to subsets of models within a project. + - **[Groups](/docs/collaborate/govern/model-access#groups)** - groups allow you to assign models to subsets within a project. - **[Access](/docs/collaborate/govern/model-access)** - access configs allow you to control who can reference models. - **[Model Versions](/docs/collaborate/govern/model-versions)** - When coordinating across projects and teams, we recommend treating your data models as stable APIs. Model versioning is the mechanism to allow graceful adoption and deprecation of models as they evolve. - **[Model Contracts](/docs/collaborate/govern/model-contracts)** - data contracts set explicit expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream comsumers' data products. From 25da30db2972611b4b4736e4a90a7317a3627aed Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 15:52:59 -0400 Subject: [PATCH 32/43] Apply suggestions from code review Co-authored-by: Leona B. 
Campbell <3880403+runleonarun@users.noreply.github.com> --- .../guides/best-practices/how-we-mesh/mesh-1-intro.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 029cd7a0cd1..8f991f96372 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -16,15 +16,15 @@ dbt Mesh is not a single product: it is a pattern enabled by a convergence of se - **[dbt Explorer](/docs/collaborate/explore-projects)** - dbt Cloud's metadata-powered documentation platform, complete with full, cross-project lineage. - **Governance** - dbt's new governance features allow you to manage access to your dbt models both within and across projects. - **[Groups](/docs/collaborate/govern/model-access#groups)** - groups allow you to assign models to subsets within a project. - - **[Access](/docs/collaborate/govern/model-access)** - access configs allow you to control who can reference models. -- **[Model Versions](/docs/collaborate/govern/model-versions)** - When coordinating across projects and teams, we recommend treating your data models as stable APIs. Model versioning is the mechanism to allow graceful adoption and deprecation of models as they evolve. -- **[Model Contracts](/docs/collaborate/govern/model-contracts)** - data contracts set explicit expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream comsumers' data products. + - **[Access](/docs/collaborate/govern/model-access#access-modifiers)** - access configs allow you to control who can reference models. +- **[Model Versions](/docs/collaborate/govern/model-versions)** - when coordinating across projects and teams, we recommend treating your data models as stable APIs. 
Model versioning is the mechanism to allow graceful adoption and deprecation of models as they evolve. +- **[Model Contracts](/docs/collaborate/govern/model-contracts)** - data contracts set explicit expectations on the shape of the data to ensure data changes upstream of dbt or within a project's logic don't break downstream consumers' data products. ## Who is dbt Mesh for? -The multi-project architecture is for organizations with mature, complex transformation workflows in dbt who want to increase the flexibilty and performance of their dbt projects. If you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring this paradigm: +The multi-project architecture helps organizations with mature, complex transformation workflows in dbt increase the flexibility and performance of their dbt projects. If you're already using dbt and your project has started to experience any of the following, you're likely ready to start exploring this paradigm: -- **The number of models** in your project is degrading performance and slowing down development. +- The **number of models** in your project is degrading performance and slowing down development. - Teams have developed **separate workflows** and need to decouple development from each other. - **Security and governance** requirements are increasing and would benefit from increased isolation. 
From 4ae3be4cb3ba969410fb5ec6d79ebb4b298bbb88 Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 15:59:27 -0400 Subject: [PATCH 33/43] clean up wording --- .../best-practices/how-we-mesh/mesh-2-structures.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index caebc1b025b..4d904515f00 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -11,11 +11,11 @@ How should we organize our data workflows in a world where instead of having a s Adopting the dbt Mesh pattern is not a one-size-fits-all process. In fact, it's the opposite! It's about customizing your project structure to fit _your_ team and _your_ data. Now we can mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. -While there is not a single best way to implement this pattern, there are some common decisions points that will be helpful for you to consider. +While there is not a single best way to implement this pattern, there are some common decision points that will be helpful for you to consider. -At a high level you’ll need to decide: +At a high level, you’ll need to decide: -- Where to draw the lines between your dbt Projects -- ie how do you determine where to split your DAG and which models go in which project? +- Where to draw the lines between your dbt Projects -- i.e. how do you determine where to split your DAG and which models go in which project? - How to manage your code -- do you want multiple dbt Projects living in the same repository (mono-repo) or do you want to have multiple repos with one repo per project? 
## Define your project interfaces by splitting your DAG @@ -32,17 +32,17 @@ Vertical splits separate out layers of transformation in DAG order. Let's look a ### Horizontal splits -Horizonal splits separate your DAG based on source or domain. These splits are often based around the shape and size of the data and how it's used. Let's consider some possibilites for horizontal splitting. +Horizontal splits separate your DAG based on source or domain. These splits are often based around the shape and size of the data and how it's used. Let's consider some possibilities for horizontal splitting. - **Team consumption patterns.** For example, splitting out the marketing team's data flow into a separate project. - **Data from different sources.** For example, clickstream event data and transactional ecommerce data may need to be modeled independently of each other. -- **Team workflows.** For example, if two embedded groups who operate at different paces, you may want to split the projects up so they can move independently. +- **Team workflows.** For example, if two embedded groups operate at different paces, you may want to split the projects up so they can move independently. ### Combining these strategies - **These are not either/or techniques**. You should consider both types of splits, and combine them in any way that makes sense for your organization. - **Pick one type of split and focus on that first**. If you have a hub-and-spoke team topology for example, handle breaking out the central platform project before you split the remainder into domains. Then if you need to break those domains up horizontally you can focus on that after the fact. -- **DRY applies to underlying data not just code.** Regardless of your strategy, you should not be sourcing the same rows and columns into multiple nodes. When working within a mesh pattern it becomes increasingly important that we don't duplicate logic or data. 
+- **DRY applies to underlying data, not just code.** Regardless of your strategy, you should not be sourcing the same rows and columns into multiple nodes. When working within a mesh pattern it becomes increasingly important that we don't duplicate logic or data. ## Determine your git strategy From cae5f847cca7acfd6f30454f9e65f6155cecc8cc Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 16:06:40 -0400 Subject: [PATCH 34/43] clean up wording on third page --- .../how-we-mesh/mesh-3-implementation.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md index bf866aedeb0..4561a6be1b7 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md @@ -4,10 +4,10 @@ description: Getting started with dbt Mesh patterns hoverSnippet: Learn how to get started with dbt Mesh --- -As mentioned before, the key decision to migrating to a multi-project architecture is understanding how your project is already being grouped, built, and deployed. We can use this information to inform our decision to split our project apart. +As mentioned before, the key decision in migrating to a multi-project architecture is understanding how your project is already being grouped, built, and deployed. We can use this information to inform our decision to split our project apart. -- **Examine your jobs** which sets of models are most often built together? -- **Look at your lineage graph** how are models connected? +- **Examine your jobs** - which sets of models are most often built together? +- **Look at your lineage graph** - how are models connected? - **Look at your selectors** defined in `selectors.yml` - how are people already defining resource groups? 
- **Talk to teams** about what sort of separation is naturally existing right now. - Are there various domains people are focused on? @@ -30,7 +30,7 @@ groups: email: ben.jaffleck@jaffleshop.com ``` -- Then, we can add models to that group using the `group:` key in the model's yml entry. +- Then, we can add models to that group using the `group:` key in the model's YAML entry. ```yml # in models/marketing/__models.yml @@ -61,7 +61,7 @@ models: ## Split your projects -1. **Move your grouped models into a subfolder**. This will include any model in the selected group, it's associated yml entry, as well as its parent or child resources as appropriate depending on where this group sits in your DAG. +1. **Move your grouped models into a subfolder**. This will include any model in the selected group, it's associated YAML entry, as well as its parent or child resources as appropriate depending on where this group sits in your DAG. 1. Note that just like in your dbt project, circular refereneces are not allowed! Project B cannot have parents and children in Project A, for example. 2. **Create a new `dbt_project.yml` file** in the subdirectory. 3. **Copy any macros** used by the resources you moved. From 311eb598ecba7f9a1eca783a6801eca9c8030765 Mon Sep 17 00:00:00 2001 From: Jason Ganz Date: Fri, 13 Oct 2023 16:27:11 -0400 Subject: [PATCH 35/43] Apply suggestions from code review Co-authored-by: Leona B. 
Campbell <3880403+runleonarun@users.noreply.github.com> --- .../best-practices/how-we-mesh/mesh-2-structures.md | 12 ++++++------ .../how-we-mesh/mesh-3-implementation.md | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md index 4d904515f00..937515954af 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-2-structures.md @@ -5,11 +5,11 @@ hoverSnippet: Learn how to get started with dbt Mesh --- ## Exploring mesh patterns -When adopting a multi-project architecture, where do we draw the lines between our projects? +When adopting a multi-project architecture, where do you draw the lines between projects? -How should we organize our data workflows in a world where instead of having a single dbt DAG, we have multiple projects speaking to each other, each comprised of their own DAG? +How should you organize data workflows in a world where instead of having a single dbt DAG, you have multiple projects speaking to each other, each comprised of their own DAG? -Adopting the dbt Mesh pattern is not a one-size-fits-all process. In fact, it's the opposite! It's about customizing your project structure to fit _your_ team and _your_ data. Now we can mold our organizational knowledge graph to our organizational people graph, bringing people and data closer together rather than compromising one for the other. +Adopting the dbt Mesh pattern is not a one-size-fits-all process. In fact, it's the opposite! It's about customizing your project structure to fit _your_ team and _your_ data. Now you can mold your organizational knowledge graph to your organizational people graph, bringing people and data closer together rather than compromising one for the other. 
While there is not a single best way to implement this pattern, there are some common decision points that will be helpful for you to consider. @@ -26,9 +26,9 @@ The first (and perhaps most difficult!) decision when migrating to a multi-proje Vertical splits separate out layers of transformation in DAG order. Let's look at some examples. -- **Splitting up staging and mart layers.** to create a more tightly-controlled, shared set of components that other projects build on but can't edit. -- **Isolating earlier models for security and governance requirements.** to separate out and mask PII data so that downstream consumers can't access it is a common use case for a vertical split. -- **Protecting complex or expensive data.** to isolate large or complex models that are expensive to run so that they are safe from accidental selection, independently deployable, and easier to debug when they have issues. +- **Splitting up staging and mart layers** to create a more tightly-controlled, shared set of components that other projects build on but can't edit. +- **Isolating earlier models for security and governance requirements** to separate out and mask PII data so that downstream consumers can't access it is a common use case for a vertical split. +- **Protecting complex or expensive data** to isolate large or complex models that are expensive to run so that they are safe from accidental selection, independently deployable, and easier to debug when they have issues. 
### Horizontal splits diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md index 4561a6be1b7..cfbbc7a1f28 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-3-implementation.md @@ -8,17 +8,17 @@ As mentioned before, the key decision in migrating to a multi-project architectu - **Examine your jobs** - which sets of models are most often built together? - **Look at your lineage graph** - how are models connected? -- **Look at your selectors** defined in `selectors.yml` - how are people already defining resource groups? -- **Talk to teams** about what sort of separation is naturally existing right now. +- **Look at your selectors** defined in `selectors.yml` - how do people already define resource groups? +- **Talk to teams** about what sort of separation naturally exists right now. - Are there various domains people are focused on? - Are there various sizes, shapes, and sources of data that get handled separately (such as click event data)? - Are there people focused on separate levels of transformation, such as landing and staging data or building marts? ## Add groups and access -Once you have a sense of some initial groupings, the first step is to implement **group and access permissions** within a single project. +Once you have a sense of some initial groupings, you can first implement **group and access permissions** within a single project. -- First we'll create a [group](/docs/build/groups) to define the owner of a set of models. +- First you can create a [group](/docs/build/groups) to define the owner of a set of models. 
```yml # in models/__groups.yml @@ -42,7 +42,7 @@ models: group: marketing ``` -- Once models are added to the group, we will **add [access](/docs/collaborate/govern/model-access) settings to the models** based on their connections between groups, *opting for the most private access that will maintain current functionality*. This means that any model that has *only* relationships to other models in the same group should be `private` , and any model that has cross-group relationships, or is a terminal node in the group DAG should be `protected` so that other parts of the DAG can continue to reference it. +- Once you've added models to the group, you can **add [access](/docs/collaborate/govern/model-access) settings to the models** based on their connections between groups, *opting for the most private access that will maintain current functionality*. This means that any model that has *only* relationships to other models in the same group should be `private` , and any model that has cross-group relationships, or is a terminal node in the group DAG should be `protected` so that other parts of the DAG can continue to reference it. ```yml # in models/marketing/__models.yml From d6fe5689a68cc3fd204dfef0cdad9342a55ab67a Mon Sep 17 00:00:00 2001 From: mirnawong1 <89008547+mirnawong1@users.noreply.github.com> Date: Mon, 16 Oct 2023 11:51:58 -0400 Subject: [PATCH 36/43] Update packages.md --- website/docs/docs/build/packages.md | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/website/docs/docs/build/packages.md b/website/docs/docs/build/packages.md index e3c45823b0b..8d18a55e949 100644 --- a/website/docs/docs/build/packages.md +++ b/website/docs/docs/build/packages.md @@ -373,17 +373,3 @@ packages: -### About dependencies.yml - -There are some important differences between using a `dependencies.yml` compared to a `packages.yml` file: - -- `dependencies.yml` - - Primarily designed for dbt Mesh and cross-project reference workflow. 
- - Supports both Projects and non-private dbt packages (private packages aren't supported yet). - - Helps maintain your project's organization by allowing you to specify hub packages like `dbt_utils`, reducing the need for multiple YAML files. - - Does not support conditional configuration using Jinja-in-yaml (Refer to [FAQs](#faqs) for more info). - -- `packages.yml` - - Does not contribute to the dbt Mesh workflow. - - Serves as a list of dbt Packages (such as dbt projects) that you want to download into your root or parent dbt project. - - Can only include packages, including private packages (doesn't support Projects) From a4ad635ccc91b4752cb422b76344cbff7acd0cb7 Mon Sep 17 00:00:00 2001 From: Ly Nguyen Date: Mon, 16 Oct 2023 15:58:42 -0700 Subject: [PATCH 37/43] Add "card" landing pages --- website/docs/docs/build/enhance-your-code.md | 36 ++++++++++++++ .../docs/docs/build/enhance-your-models.md | 21 +++++++++ .../docs/docs/build/organize-your-outputs.md | 36 ++++++++++++++ .../docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md | 35 ++++++++++++++ .../git/git-configuration-in-dbt-cloud.md | 35 ++++++++++++++ .../docs/cloud/secure/secure-your-tenant.md | 47 +++++++++++++++++++ .../collaborate/collaborate-with-others.md | 36 ++++++++++++++ .../docs/dbt-cloud-apis/authentication.md | 20 ++++++++ website/sidebars.js | 16 +++---- 9 files changed, 274 insertions(+), 8 deletions(-) create mode 100644 website/docs/docs/build/enhance-your-code.md create mode 100644 website/docs/docs/build/enhance-your-models.md create mode 100644 website/docs/docs/build/organize-your-outputs.md create mode 100644 website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md create mode 100644 website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md create mode 100644 website/docs/docs/cloud/secure/secure-your-tenant.md create mode 100644 website/docs/docs/collaborate/collaborate-with-others.md create mode 100644 website/docs/docs/dbt-cloud-apis/authentication.md diff --git 
a/website/docs/docs/build/enhance-your-code.md b/website/docs/docs/build/enhance-your-code.md new file mode 100644 index 00000000000..ba7ca6e538c --- /dev/null +++ b/website/docs/docs/build/enhance-your-code.md @@ -0,0 +1,36 @@ +--- +title: "Enhance your code" +description: "Learn how you can enhance your code" +--- + +
+ + + + + +
+
+
+ + + + + +
\ No newline at end of file diff --git a/website/docs/docs/build/enhance-your-models.md b/website/docs/docs/build/enhance-your-models.md new file mode 100644 index 00000000000..84b8bb5e9f8 --- /dev/null +++ b/website/docs/docs/build/enhance-your-models.md @@ -0,0 +1,21 @@ +--- +title: "Enhance your models" +description: "Learn how you can enhance your models" +--- + +
+ + + + + +
+
\ No newline at end of file diff --git a/website/docs/docs/build/organize-your-outputs.md b/website/docs/docs/build/organize-your-outputs.md new file mode 100644 index 00000000000..e207484be22 --- /dev/null +++ b/website/docs/docs/build/organize-your-outputs.md @@ -0,0 +1,36 @@ +--- +title: "Organize your outputs" +description: "Learn how you can organize your outputs" +--- + +
+ + + + + +
+
+
+ + + + + +
\ No newline at end of file diff --git a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md new file mode 100644 index 00000000000..d68a0f46165 --- /dev/null +++ b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md @@ -0,0 +1,35 @@ +--- +title: "dbt Cloud IDE" +description: "Learn how to configure Git in dbt Cloud" +--- + +
+ + + + + +
+
+
+ + + + +
\ No newline at end of file diff --git a/website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md b/website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md new file mode 100644 index 00000000000..7b6513cdae0 --- /dev/null +++ b/website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md @@ -0,0 +1,35 @@ +--- +title: "Git configuration in dbt Cloud" +description: "Learn about the Git providers supported in dbt Cloud" +--- + +
+ + + + + +
+
+
+ + + + +
\ No newline at end of file diff --git a/website/docs/docs/cloud/secure/secure-your-tenant.md b/website/docs/docs/cloud/secure/secure-your-tenant.md new file mode 100644 index 00000000000..14a12d83d2e --- /dev/null +++ b/website/docs/docs/cloud/secure/secure-your-tenant.md @@ -0,0 +1,47 @@ +--- +title: "Secure your tenant" +description: "Learn how to secure your tenant for dbt Cloud" +--- + +
+ + + + + + + +
+
+
+ + + + + + +
\ No newline at end of file diff --git a/website/docs/docs/collaborate/collaborate-with-others.md b/website/docs/docs/collaborate/collaborate-with-others.md new file mode 100644 index 00000000000..ac895478a75 --- /dev/null +++ b/website/docs/docs/collaborate/collaborate-with-others.md @@ -0,0 +1,36 @@ +--- +title: "Collaborate with others" +description: "Learn how dbt Cloud makes it easier to collaborate with others" +--- + +
+ + + + + +
+
+
+ + + + + +
\ No newline at end of file diff --git a/website/docs/docs/dbt-cloud-apis/authentication.md b/website/docs/docs/dbt-cloud-apis/authentication.md new file mode 100644 index 00000000000..c75389cf0f3 --- /dev/null +++ b/website/docs/docs/dbt-cloud-apis/authentication.md @@ -0,0 +1,20 @@ +--- +title: "Authentication" +description: "Learn how to authenticate with user tokens and service account tokens " +--- + +
+ + + + + +
\ No newline at end of file diff --git a/website/sidebars.js b/website/sidebars.js index 1d9e1e8ce53..f962370b8d8 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -103,7 +103,7 @@ const sidebarSettings = { { type: "category", label: "Configure Git", - link: { type: "doc", id: "docs/cloud/git/import-a-project-by-git-url" }, + link: { type: "doc", id: "docs/cloud/git/git-configuration-in-dbt-cloud" }, items: [ "docs/cloud/git/import-a-project-by-git-url", "docs/cloud/git/connect-github", @@ -125,7 +125,7 @@ const sidebarSettings = { label: "Develop in the IDE", link: { type: "doc", - id: "docs/cloud/dbt-cloud-ide/develop-in-the-cloud", + id: "docs/cloud/dbt-cloud-ide/dbt-cloud-ide", }, items: [ "docs/cloud/dbt-cloud-ide/develop-in-the-cloud", @@ -137,7 +137,7 @@ const sidebarSettings = { { type: "category", label: "Secure your tenant", - link: { type: "doc", id: "docs/cloud/secure/ip-restrictions" }, + link: { type: "doc", id: "docs/cloud/secure/secure-your-tenant" }, items: [ "docs/cloud/secure/ip-restrictions", "docs/cloud/secure/about-privatelink", @@ -313,7 +313,7 @@ const sidebarSettings = { type: "category", label: "Enhance your models", collapsed: true, - link: { type: "doc", id: "docs/build/materializations" }, + link: { type: "doc", id: "docs/build/enhance-your-models" }, items: [ "docs/build/materializations", "docs/build/incremental-models", @@ -323,7 +323,7 @@ const sidebarSettings = { type: "category", label: "Enhance your code", collapsed: true, - link: { type: "doc", id: "docs/build/project-variables" }, + link: { type: "doc", id: "docs/build/enhance-your-code" }, items: [ "docs/build/project-variables", "docs/build/environment-variables", @@ -335,7 +335,7 @@ const sidebarSettings = { type: "category", label: "Organize your outputs", collapsed: true, - link: { type: "doc", id: "docs/build/custom-schemas" }, + link: { type: "doc", id: "docs/build/organize-your-outputs" }, items: [ "docs/build/custom-schemas", "docs/build/custom-databases", 
@@ -389,7 +389,7 @@ const sidebarSettings = { { type: "category", label: "Collaborate with others", - link: { type: "doc", id: "docs/collaborate/explore-projects" }, + link: { type: "doc", id: "docs/collaborate/collaborate-with-others" }, items: [ "docs/collaborate/explore-projects", { @@ -461,7 +461,7 @@ const sidebarSettings = { { type: "category", label: "Authentication", - link: { type: "doc", id: "docs/dbt-cloud-apis/user-tokens" }, + link: { type: "doc", id: "docs/dbt-cloud-apis/authentication" }, items: [ "docs/dbt-cloud-apis/user-tokens", "docs/dbt-cloud-apis/service-tokens", From 1fd1b7ac6a91b6f1961b61f6eeca875415018ab7 Mon Sep 17 00:00:00 2001 From: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> Date: Mon, 16 Oct 2023 19:53:45 -0400 Subject: [PATCH 38/43] Adding category links to sidebar --- website/docs/docs/build/enhance-your-code.md | 2 ++ website/docs/docs/build/enhance-your-models.md | 2 ++ website/docs/docs/build/organize-your-outputs.md | 2 ++ website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md | 2 ++ .../docs/docs/cloud/git/git-configuration-in-dbt-cloud.md | 2 ++ website/docs/docs/cloud/secure/secure-your-tenant.md | 2 ++ website/docs/docs/collaborate/collaborate-with-others.md | 2 ++ website/docs/docs/dbt-cloud-apis/authentication.md | 2 ++ website/sidebars.js | 8 ++++++++ 9 files changed, 24 insertions(+) diff --git a/website/docs/docs/build/enhance-your-code.md b/website/docs/docs/build/enhance-your-code.md index ba7ca6e538c..5f2d48f6f5a 100644 --- a/website/docs/docs/build/enhance-your-code.md +++ b/website/docs/docs/build/enhance-your-code.md @@ -1,6 +1,8 @@ --- title: "Enhance your code" description: "Learn how you can enhance your code" +pagination_next: "docs/build/project-variables" +pagination_prev: null ---
diff --git a/website/docs/docs/build/enhance-your-models.md b/website/docs/docs/build/enhance-your-models.md index 84b8bb5e9f8..68aa19d5502 100644 --- a/website/docs/docs/build/enhance-your-models.md +++ b/website/docs/docs/build/enhance-your-models.md @@ -1,6 +1,8 @@ --- title: "Enhance your models" description: "Learn how you can enhance your models" +pagination_next: "docs/build/materializations" +pagination_prev: null ---
diff --git a/website/docs/docs/build/organize-your-outputs.md b/website/docs/docs/build/organize-your-outputs.md index e207484be22..ad5efeda1c7 100644 --- a/website/docs/docs/build/organize-your-outputs.md +++ b/website/docs/docs/build/organize-your-outputs.md @@ -1,6 +1,8 @@ --- title: "Organize your outputs" description: "Learn how you can organize your outputs" +pagination_next: "docs/build/custom-schemas" +pagination_prev: null ---
diff --git a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md index d68a0f46165..3c41432bc62 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/dbt-cloud-ide.md @@ -1,6 +1,8 @@ --- title: "dbt Cloud IDE" description: "Learn how to configure Git in dbt Cloud" +pagination_next: "docs/cloud/dbt-cloud-ide/develop-in-the-cloud" +pagination_prev: null ---
diff --git a/website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md b/website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md index 7b6513cdae0..fb8c0186236 100644 --- a/website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md +++ b/website/docs/docs/cloud/git/git-configuration-in-dbt-cloud.md @@ -1,6 +1,8 @@ --- title: "Git configuration in dbt Cloud" description: "Learn about the Git providers supported in dbt Cloud" +pagination_next: "docs/cloud/git/import-a-project-by-git-url" +pagination_prev: null ---
diff --git a/website/docs/docs/cloud/secure/secure-your-tenant.md b/website/docs/docs/cloud/secure/secure-your-tenant.md index 14a12d83d2e..45fac692ca9 100644 --- a/website/docs/docs/cloud/secure/secure-your-tenant.md +++ b/website/docs/docs/cloud/secure/secure-your-tenant.md @@ -1,6 +1,8 @@ --- title: "Secure your tenant" description: "Learn how to secure your tenant for dbt Cloud" +pagination_next: "docs/cloud/secure/ip-restrictions" +pagination_prev: null ---
diff --git a/website/docs/docs/collaborate/collaborate-with-others.md b/website/docs/docs/collaborate/collaborate-with-others.md index ac895478a75..7875a8044b6 100644 --- a/website/docs/docs/collaborate/collaborate-with-others.md +++ b/website/docs/docs/collaborate/collaborate-with-others.md @@ -1,6 +1,8 @@ --- title: "Collaborate with others" description: "Learn how dbt Cloud makes it easier to collaborate with others" +pagination_next: "docs/collaborate/explore-projects" +pagination_prev: null ---
diff --git a/website/docs/docs/dbt-cloud-apis/authentication.md b/website/docs/docs/dbt-cloud-apis/authentication.md index c75389cf0f3..7deadd68f18 100644 --- a/website/docs/docs/dbt-cloud-apis/authentication.md +++ b/website/docs/docs/dbt-cloud-apis/authentication.md @@ -1,6 +1,8 @@ --- title: "Authentication" description: "Learn how to authenticate with user tokens and service account tokens " +pagination_next: "docs/dbt-cloud-apis/user-tokens" +pagination_prev: null ---
diff --git a/website/sidebars.js b/website/sidebars.js index f962370b8d8..b4da9d911d7 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -105,6 +105,7 @@ const sidebarSettings = { label: "Configure Git", link: { type: "doc", id: "docs/cloud/git/git-configuration-in-dbt-cloud" }, items: [ + "docs/cloud/git/git-configuration-in-dbt-cloud", "docs/cloud/git/import-a-project-by-git-url", "docs/cloud/git/connect-github", "docs/cloud/git/connect-gitlab", @@ -128,6 +129,7 @@ const sidebarSettings = { id: "docs/cloud/dbt-cloud-ide/dbt-cloud-ide", }, items: [ + "docs/cloud/dbt-cloud-ide/dbt-cloud-ide", "docs/cloud/dbt-cloud-ide/develop-in-the-cloud", "docs/cloud/dbt-cloud-ide/ide-user-interface", "docs/cloud/dbt-cloud-ide/lint-format", @@ -139,6 +141,7 @@ const sidebarSettings = { label: "Secure your tenant", link: { type: "doc", id: "docs/cloud/secure/secure-your-tenant" }, items: [ + "docs/cloud/secure/secure-your-tenant", "docs/cloud/secure/ip-restrictions", "docs/cloud/secure/about-privatelink", "docs/cloud/secure/snowflake-privatelink", @@ -315,6 +318,7 @@ const sidebarSettings = { collapsed: true, link: { type: "doc", id: "docs/build/enhance-your-models" }, items: [ + "docs/build/enhance-your-models", "docs/build/materializations", "docs/build/incremental-models", ], @@ -325,6 +329,7 @@ const sidebarSettings = { collapsed: true, link: { type: "doc", id: "docs/build/enhance-your-code" }, items: [ + "docs/build/enhance-your-code", "docs/build/project-variables", "docs/build/environment-variables", "docs/build/packages", @@ -337,6 +342,7 @@ const sidebarSettings = { collapsed: true, link: { type: "doc", id: "docs/build/organize-your-outputs" }, items: [ + "docs/build/organize-your-outputs", "docs/build/custom-schemas", "docs/build/custom-databases", "docs/build/custom-aliases", @@ -391,6 +397,7 @@ const sidebarSettings = { label: "Collaborate with others", link: { type: "doc", id: "docs/collaborate/collaborate-with-others" }, items: [ + 
"docs/collaborate/collaborate-with-others", "docs/collaborate/explore-projects", { type: "category", @@ -463,6 +470,7 @@ const sidebarSettings = { label: "Authentication", link: { type: "doc", id: "docs/dbt-cloud-apis/authentication" }, items: [ + "docs/dbt-cloud-apis/authentication", "docs/dbt-cloud-apis/user-tokens", "docs/dbt-cloud-apis/service-tokens", ], From 846f5d8e7276f469949aeee2323b9b3e61fee6cc Mon Sep 17 00:00:00 2001 From: Ly Nguyen <107218380+nghi-ly@users.noreply.github.com> Date: Mon, 16 Oct 2023 17:15:26 -0700 Subject: [PATCH 39/43] Update website/docs/docs/build/enhance-your-models.md --- website/docs/docs/build/enhance-your-models.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/build/enhance-your-models.md b/website/docs/docs/build/enhance-your-models.md index 68aa19d5502..46e7fa74353 100644 --- a/website/docs/docs/build/enhance-your-models.md +++ b/website/docs/docs/build/enhance-your-models.md @@ -9,7 +9,7 @@ pagination_prev: null From e88588bc1811e22305d07c2a6928a7c9e60e9d79 Mon Sep 17 00:00:00 2001 From: Ly Nguyen <107218380+nghi-ly@users.noreply.github.com> Date: Mon, 16 Oct 2023 17:45:12 -0700 Subject: [PATCH 40/43] Update website/docs/docs/cloud/secure/secure-your-tenant.md Co-authored-by: Matt Shaver <60105315+matthewshaver@users.noreply.github.com> --- website/docs/docs/cloud/secure/secure-your-tenant.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/cloud/secure/secure-your-tenant.md b/website/docs/docs/cloud/secure/secure-your-tenant.md index 45fac692ca9..95cb8adffba 100644 --- a/website/docs/docs/cloud/secure/secure-your-tenant.md +++ b/website/docs/docs/cloud/secure/secure-your-tenant.md @@ -9,7 +9,7 @@ pagination_prev: null From d56251ba1f6b4ae117cb9077de7efd1bf4da91a9 Mon Sep 17 00:00:00 2001 From: mirnawong1 Date: Mon, 16 Oct 2023 21:30:14 -0400 Subject: [PATCH 41/43] remove line --- website/docs/docs/collaborate/govern/project-dependencies.md | 1 - 1 file 
changed, 1 deletion(-) diff --git a/website/docs/docs/collaborate/govern/project-dependencies.md b/website/docs/docs/collaborate/govern/project-dependencies.md index 1faa2f746a9..793b79923cf 100644 --- a/website/docs/docs/collaborate/govern/project-dependencies.md +++ b/website/docs/docs/collaborate/govern/project-dependencies.md @@ -27,7 +27,6 @@ Starting in dbt v1.6 or higher, `packages.yml` has been renamed to `dependencies ## Prerequisites In order to add project dependencies and resolve cross-project `ref`, you must: -- Have the feature enabled (speak to your account team) - Use dbt v1.6 or higher for **both** the upstream ("producer") project and the downstream ("consumer") project. - Have a deployment environment in the upstream ("producer") project [that is set to be your production environment](/docs/deploy/deploy-environments#set-as-production-environment-beta) - Have a successful run of the upstream ("producer") project From d9c44bbfdd5eb04ea2559eb608c395155c5134ae Mon Sep 17 00:00:00 2001 From: mirnawong1 Date: Mon, 16 Oct 2023 21:44:44 -0400 Subject: [PATCH 42/43] fix build error --- website/docs/docs/core/about-core-setup.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/core/about-core-setup.md b/website/docs/docs/core/about-core-setup.md index 7794b481f72..a4d5ff09ee3 100644 --- a/website/docs/docs/core/about-core-setup.md +++ b/website/docs/docs/core/about-core-setup.md @@ -3,7 +3,7 @@ title: About dbt Core setup id: about-core-setup description: "Configuration settings for dbt Core." 
sidebar_label: "About dbt Core setup" -pagination_next: "docs/core/about-the-cli" +pagination_next: "docs/core/about-dbt-core" pagination_prev: null --- From e38f2966ff7e6a43d7a786cefc5b5153d62bc4aa Mon Sep 17 00:00:00 2001 From: mirnawong1 <89008547+mirnawong1@users.noreply.github.com> Date: Mon, 16 Oct 2023 22:10:55 -0700 Subject: [PATCH 43/43] Update website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md --- website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md index 8f991f96372..ba1660a8d82 100644 --- a/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md +++ b/website/docs/guides/best-practices/how-we-mesh/mesh-1-intro.md @@ -12,7 +12,7 @@ Regardless of your organization's size and complexity, dbt should empower data t dbt Mesh is not a single product: it is a pattern enabled by a convergence of several features in dbt: -- **[Cross-project references](/docs/collaborate/govern/project-dependencies#usage)** - this is the foundational feature that enables the multi-project deployments. `{{ ref() }}`s now work across dbt Cloud projects on Enterprise plans. +- **[Cross-project references](/docs/collaborate/govern/project-dependencies#how-to-use-ref)** - this is the foundational feature that enables the multi-project deployments. `{{ ref() }}`s now work across dbt Cloud projects on Enterprise plans. - **[dbt Explorer](/docs/collaborate/explore-projects)** - dbt Cloud's metadata-powered documentation platform, complete with full, cross-project lineage. - **Governance** - dbt's new governance features allow you to manage access to your dbt models both within and across projects. - **[Groups](/docs/collaborate/govern/model-access#groups)** - groups allow you to assign models to subsets within a project.