diff --git a/.github/ISSUE_TEMPLATE/internal-orch-team.yml b/.github/ISSUE_TEMPLATE/internal-orch-team.yml new file mode 100644 index 00000000000..8c4d61df10c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/internal-orch-team.yml @@ -0,0 +1,49 @@ +name: Orchestration team - Request changes to docs +description: File a docs update request that is not already tracked in Orch team's Release Plans (Notion database). +labels: ["content","internal-orch-team"] +body: + - type: markdown + attributes: + value: | + * You can ask questions or submit ideas for the dbt docs in [Issues](https://github.com/dbt-labs/docs-internal/issues/new/choose) + * Before you file an issue read the [Contributing guide](https://github.com/dbt-labs/docs-internal#contributing). + * Check to make sure someone hasn't already opened a similar [issue](https://github.com/dbt-labs/docs-internal/issues). + + - type: checkboxes + id: contributions + attributes: + label: Contributions + description: Please read the contribution docs before opening an issue or pull request. + options: + - label: I have read the contribution docs, and understand what's expected of me. + + - type: textarea + attributes: + label: Link to the page on docs.getdbt.com requiring updates + description: Please link to the page or pages you'd like to see improved. + validations: + required: true + + - type: textarea + attributes: + label: What part(s) of the page would you like to see updated? + description: | + - Give as much detail as you can to help us understand the change you want to see. + - Why should the docs be changed? What use cases does it support? + - What is the expected outcome? + validations: + required: true + + - type: textarea + attributes: + label: Reviewers/Stakeholders/SMEs + description: List the reviewers, stakeholders, and subject matter experts (SMEs) to collaborate with for the docs update. + validations: + required: true + + - type: textarea + attributes: + label: Related Jira tickets + description: Add any other context or screenshots about the feature request here. + validations: + required: false diff --git a/.github/workflows/repo-sync.yml b/.github/workflows/repo-sync.yml new file mode 100644 index 00000000000..8f2320217b2 --- /dev/null +++ b/.github/workflows/repo-sync.yml @@ -0,0 +1,111 @@ +name: Repo Sync + +# **What it does**: Syncs docs.getdbt.com public repo into the docs private repo +# This GitHub Actions workflow keeps the `current` branch of those two repos in sync. +# **Why we have it**: To keep the open-source repository up-to-date +# while still having an internal repository for sensitive work. 
+# For more details, see https://github.com/repo-sync/repo-sync#how-it-works + +on: + schedule: + - cron: '0 6,12,18 * * *' # Run at 6:00 AM, 12:00 PM, and 6:00 PM + +jobs: + repo-sync: + permissions: + contents: write + pull-requests: write + name: Repo Sync + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + # Use the INTERMEDIATE_BRANCH as the checkout reference + ref: ${{ secrets.INTERMEDIATE_BRANCH }} + token: ${{ secrets.GITHUB_TOKEN }} + # Fetch all history for all branches and tags + fetch-depth: 0 + + # Sync the source repo to the destination branch using repo-sync/github-sync + - uses: repo-sync/github-sync@v2 + name: Sync repo to branch + with: + # Source repository to sync from + source_repo: ${{ secrets.SOURCE_REPO }} + # Source branch to sync from + source_branch: current + # Destination branch to sync to + destination_branch: ${{ secrets.INTERMEDIATE_BRANCH }} + github_token: ${{ secrets.WORKFLOW_TOKEN }} + + - name: Ship pull request + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.WORKFLOW_TOKEN }} + result-encoding: string + script: | + const {owner, repo} = context.repo; + const head = '${{ secrets.INTERMEDIATE_BRANCH }}'; + const base = 'current' + + async function closePullRequest(prNumber) { + console.log('closing PR', prNumber) + await github.rest.pulls.update({ + owner, + repo, + pull_number: prNumber, + state: 'closed' + }); + console.log('closed PR', prNumber) + } + + console.log('Creating new PR') + let pull, pull_number + try { + const response = await github.rest.pulls.create({ + owner, + repo, + head, + base, + title: 'REPO SYNC - Public to Private', + body: 'This is an automated pull request to sync changes between the public and private repos.', + }); + pull = response.data + pull_number = pull.number + console.log('Created pull request successfully', pull.html_url) + + } catch (err) { + // Don't error/alert if there's no commits to sync + if (err.message?.includes('No commits')) { + console.log(err.message) + return + } + throw err + } + + const { data: prFiles } = await github.rest.pulls.listFiles({ owner, repo, pull_number }) + if (prFiles.length) { + console.log(prFiles.length, 'files have changed') + } else { + console.log('No files changed, closing') + await closePullRequest(pull_number) + return + } + + console.log('Checking for merge conflicts') + if (pull.mergeable_state === 'dirty') { + console.log('Pull request has a conflict', pull.html_url) + await closePullRequest(pull_number) + throw new Error('PR has a conflict, please resolve manually') + } + console.log('No detected merge conflicts') + + + console.log('Merging the PR') + await github.rest.pulls.merge({ + owner, + repo, + pull_number, + merge_method: 'merge', + }) + console.log('Merged the PR successfully') diff --git a/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md b/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md index 3aa9368a2ca..5098e9d29e3 100644 --- a/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md +++ b/website/blog/2021-11-23-how-to-upgrade-dbt-versions.md @@ -12,6 +12,18 @@ date: 2021-11-29 is_featured: true --- +:::tip February 2024 Update + +It's been a few years since dbt-core turned 1.0! Since then, we've committed to releasing zero breaking changes whenever possible and it's become much easier to upgrade dbt Core versions. 
+ +In 2024, we're taking this promise further by: +- Stabilizing interfaces for everyone — adapter maintainers, metadata consumers, and (of course) people writing dbt code everywhere — as discussed in [our November 2023 roadmap update](https://github.com/dbt-labs/dbt-core/blob/main/docs/roadmap/2023-11-dbt-tng.md). +- Introducing **Keep on latest version** in dbt Cloud. No more manual upgrades and no more need for _a second sandbox project_ just to try out new features in development. For more details, refer to [Upgrade Core version in Cloud](/docs/dbt-versions/upgrade-dbt-version-in-cloud). + +We're leaving the rest of this post as is, so we can all remember how it used to be. Enjoy a stroll down memory lane. + +::: + As we get closer to dbt v1.0 shipping in December, it's a perfect time to get your installation up to scratch. dbt 1.0 represents the culmination of over five years of development and refinement to the analytics engineering experience - smoothing off sharp edges, speeding up workflows and enabling whole new classes of work. Even with all the new shinies on offer, upgrading can be daunting – you rely on dbt to power your analytics workflow and can’t afford to change things just to discover that your daily run doesn’t work anymore. I’ve been there. This is the checklist I wish I had when I owned my last company’s dbt project. diff --git a/website/blog/2023-12-15-serverless-free-tier-data-stack-with-dlt-and-dbt-core.md b/website/blog/2023-12-15-serverless-free-tier-data-stack-with-dlt-and-dbt-core.md index d2c6652d883..aed00e1c650 100644 --- a/website/blog/2023-12-15-serverless-free-tier-data-stack-with-dlt-and-dbt-core.md +++ b/website/blog/2023-12-15-serverless-free-tier-data-stack-with-dlt-and-dbt-core.md @@ -23,6 +23,7 @@ Unfortunately, there are significantly fewer properties than ads - it seems many **The tools:** I want to be able to run my project on [Google Cloud Functions](https://cloud.google.com/functions) due to the generous free tier. [dlt](https://dlthub.com/) is a new Python library for declarative data ingestion which I have wanted to test for some time. Finally, I will use dbt Core for transformation. + ## The starting point If I want to have reliable information on the state of the market I will need to: diff --git a/website/blog/2024-02-13-dbt-explorer.md b/website/blog/2024-02-13-dbt-explorer.md new file mode 100644 index 00000000000..9dbc1b7db7c --- /dev/null +++ b/website/blog/2024-02-13-dbt-explorer.md @@ -0,0 +1,86 @@ +--- +title: "Column-Level Lineage, Model Performance, and Recommendations: ship trusted data products with dbt Explorer" +description: "Learn about how to get the most out of the new features in dbt Explorer" +slug: dbt-explorer + +authors: [dave_connors] + +tags: [analytics craft] +hide_table_of_contents: false + +date: 2024-02-13 +is_featured: true +--- + +## What’s in a data platform? + +[Raising a dbt project](https://docs.getdbt.com/blog/how-to-build-a-mature-dbt-project-from-scratch) is hard work. We, as data professionals, have poured ourselves into raising happy healthy data products, and we should be proud of the insights they’ve driven. It certainly wasn’t without its challenges though — we remember the terrible twos, where we worked hard to just get the platform to walk straight. We remember the angsty teenage years where tests kept failing, seemingly just to spite us. A lot of blood, sweat, and tears are shed in the service of clean data! 
+ +Once the project could dress and feed itself, we also worked hard to get buy-in from our colleagues who put their trust in our little project. Without deep trust and understanding of what we built, our colleagues who depend on your data (or even those involved in developing it with you — it takes a village after all!) are more likely to be in your DMs with questions than in their BI tools, generating insights. + +When our teammates ask about where the data in their reports come from, how fresh it is, or about the right calculation for a metric, what a joy! This means they want to put what we’ve built to good use — the challenge is that, historically, *it hasn’t been all that easy to answer these questions well.* That has often meant a manual, painstaking process of cross checking run logs and your dbt documentation site to get the stakeholder the information they need. + +Enter [dbt Explorer](https://www.getdbt.com/product/dbt-explorer)! dbt Explorer centralizes documentation, lineage, and execution metadata to reduce the work required to ship trusted data products faster. + + +## dbt Explorer: an upgrade to data discovery + +In the days of yore, answering a question about your data platform may have required a bit of cryptography, sifting through possibly-up-to-date documentation in your internal wiki, run logs to figure out when your models were executed, and slacking the data team member with the most tenure. In the past several years, dbt Docs helped centralize the documentation workflow and dramatically improved the documentation process. While useful, dbt Docs only ever provides a single point in time snapshot, and lacks any sense of your platform’s deployment and execution information. dbt Explorer supercharges the docs experience by providing stateful awareness of your data platform, making support and triage of your platform easier than ever — it even proactively lets you know what to focus on to build even higher quality data products! + +### Where’s this data coming from? + +Your stakeholders and fellow developers both need a way to orient themselves within your dbt project, and a way to know the full provenance of the number staring at them in their spreadsheet. *Where did this info come from? Does it include XYZ data source, or just ABC?* + +It’s the classic stakeholder question for a reason! Knowing data lineage inherently increases your level of trust in the reporting you use to make the right decisions. The dbt DAG has long served as the map of your data flows, tracing the flow from raw data to ready-to-query data mart. + + + + + +dbt Explorer builds on this experience in three key ways: + +- **Lineage 🤝 Docs** - dbt Explorer’s lineage is embedded into the documentation page for each resource, meaning there’s no need to toggle between your DAG and your docs, and lose valuable context. Similarly, when you’re navigating the DAG in full screen mode, clicking on a resource in your project loads a summary panel of the most critical info about the resource you’re interested in (including execution status, data contract info, you name it). Understanding the lineage via the DAG and the context from your written documentation is one workflow in Explorer, not two. +- **Cross project lineage -** if you’re using the new [dbt Mesh](https://www.getdbt.com/product/dbt-mesh) architecture, you may trace your data back to the end of the DAG and find its source is not raw data, but in fact the output of another team’s dbt project! 
Luckily, dbt Explorer provides first class support for visualizing and understanding cross project lineage when using the dbt Mesh: + - **Account View + Project DAG:** dbt Explorer provides a higher level view of the relationships between all your projects in your dbt Cloud Account — you can trace the lineage across the projects, and easily drill down into each project. When you click on a project in this view, the side panel includes a list of all the public models available for use. Double clicking opens up the lineage for that specific project, making it easy to traverse across your organization’s knowledge graph! + - **Cross Project Icons:** When you’re in a project’s lineage, dbt Explorer marks cross-project relationships to make it clear when there are dependencies that span multiple projects. Stakeholders can quickly understand which project owners they may need to contact if they need more information about a dataset. +- **Column level lineage -** long time listeners of the pod know that column level lineage is a frequently requested feature within dbt. It’s one thing to know how data flows between models, but the column level relationships help you understand *precisely* how data is used in models — this makes debugging data issues a lot simpler! We’re stoked to announce that dbt Explorer offers this feature embedded alongside your model lineage as well. + + + +With dbt Explorer, you can answer any question about your data’s lineage at any grain, whether its project to project, model to model, or column to column. + +### Ok but is it fresh? Is it *right*? + +Once the data’s journey to your BI tool is clear, there’s a natural second question one would ask before using it — is it, uh, *good data?* Just knowing where it came from is not enough to build trust in the data product — you need to know if it’s timely and accurate. + +dbt Explorer marries the execution metadata to the documentation experience — it reflects the latest state of your project across all your job runs in your [production environment,](https://docs.getdbt.com/docs/deploy/deploy-environments#set-as-production-environment) and embeds the execution information throughout the product. For each model, seed, or snapshot, Explorer displays its latest execution status, as well as statuses for any tests run against those resources. Sources show the latest source freshness info, and exposures embed the aggregate test and freshness info right into the details page! No more leaving the docs site to check the most recent logs to see what’s fresh and what’s not — Explorer centralizes everything so you don’t have to! + + + + + + +### Is the project healthy? Are we managing it properly? + +Beyond building solid data products and making sure they are trusted and used, developers need to know how they may improve their projects’ quality, or what areas may need some focus for refactoring and optimization in the next quarter. There’s always a balance between maintaining a data platform and adding new features to it. Historically, it’s been hard to know exactly where to invest time and effort to improve the health of your project — dbt Explorer provides two features that shine a light on possible areas for improvement within your project. + +#### Recommendations + +One of dbt’s more popular open source packages is [dbt_project_evaluator](https://github.com/dbt-labs/dbt-project-evaluator) , which tests your project against a set of well established dbt best practices. 
dbt Explorer now surfaces many of the same recommendations directly within the explorer UI using the metadata from the Discovery API, without any need to download and run the package! + +Each model and source has a `Recommendations` tab on its resource details page, with specific recommendations on how to improve the quality of that resource. Explorer also offers a global view, showing **all** the recommendations across the project, and includes some top-level metrics measuring the test and documentation coverage of the models in your project. These recommendations provide insight into how you can build a more well-documented, well-tested, and well-built project, leading to less confusion and more trust. + + + + +#### Model Performance Trends + +A huge pain point for analytics engineers is trying to understand if their [dbt models are taking longer or are running less efficiently over time](https://docs.getdbt.com/blog/how-we-shaved-90-minutes-off-model). A model that worked great when your data was small may not work so great when your platform matures! Unless things start to actively break, it can be hard to know where to focus your refactoring work. + +dbt Explorer now surfaces model execution metadata to take the guesswork out of fine-tuning your dbt runs. There’s a new high-level overview page to highlight models that are taking the longest to run, erroring the most, and that have the highest rate of test failures. Each model details page also has a new `Performance` tab, which shows that particular model’s execution history for up to three months of job runs. Spotting an ominous slow increase in runtimes may indicate it’s time for some refactoring — no need to comb through countless `run_results.json` files yourself! dbt Explorer gets you the data you need where you need it. + + + +## Bon voyage! + +They say the best time to ~~invest~~ ~~plant a tree~~ document your dbt project is yesterday, and the second best time is today. With all the bells and whistles that supercharge your documentation experience in dbt Explorer, there’s no time like the present! Leaning into your documentation and taking advantage of your metadata in dbt Explorer will lead to better data products shipped faster — get out there and explore! \ No newline at end of file diff --git a/website/docs/best-practices/clone-incremental-models.md b/website/docs/best-practices/clone-incremental-models.md index 11075b92161..b3b037847ae 100644 --- a/website/docs/best-practices/clone-incremental-models.md +++ b/website/docs/best-practices/clone-incremental-models.md @@ -7,7 +7,7 @@ hoverSnippet: Learn how to clone incremental models for CI jobs. --- Before you begin, you must be aware of a few conditions: -- `dbt clone` is only available with dbt version 1.6 and newer. Refer to our [upgrade guide](/docs/dbt-versions/upgrade-core-in-cloud) for help enabling newer versions in dbt Cloud +- `dbt clone` is only available with dbt version 1.6 and newer. Refer to our [upgrade guide](/docs/dbt-versions/upgrade-dbt-version-in-cloud) for help enabling newer versions in dbt Cloud - This strategy only works for warehouse that support zero copy cloning (otherwise `dbt clone` will just create pointer views). - Some teams may want to test that their incremental models run in both incremental mode and full-refresh mode.
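The clone-incremental-models guide above pairs naturally with a CI job. As a minimal sketch of the pattern, assuming your CI step has already downloaded the production run artifacts (`manifest.json`) into `prod-run-artifacts/` (both the path and the selectors here are illustrative, not prescribed by the guide):

```bash
# Clone, rather than rebuild, the incremental models that already exist in
# production. On warehouses with zero-copy cloning this is a cheap metadata
# operation; elsewhere `dbt clone` falls back to creating pointer views.
dbt clone --select "state:modified+,config.materialized:incremental" --state prod-run-artifacts

# Build the remaining modified models, deferring unchanged upstream
# references to their production versions recorded in the manifest.
dbt build --select "state:modified+" --defer --state prod-run-artifacts
```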
diff --git a/website/docs/best-practices/how-we-mesh/mesh-4-faqs.md b/website/docs/best-practices/how-we-mesh/mesh-4-faqs.md index 2b11c3563eb..2f984da9d04 100644 --- a/website/docs/best-practices/how-we-mesh/mesh-4-faqs.md +++ b/website/docs/best-practices/how-we-mesh/mesh-4-faqs.md @@ -273,7 +273,7 @@ The [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) allows users to develop -Yes, your account must be on [at least dbt v1.6](/docs/dbt-versions/upgrade-core-in-cloud) to take advantage of [cross-project dependencies](/docs/collaborate/govern/project-dependencies), one of the most crucial underlying capabilities required to implement a dbt Mesh. +Yes, your account must be on [at least dbt v1.6](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to take advantage of [cross-project dependencies](/docs/collaborate/govern/project-dependencies), one of the most crucial underlying capabilities required to implement a dbt Mesh. diff --git a/website/docs/docs/build/about-metricflow.md b/website/docs/docs/build/about-metricflow.md index 19d27bc60d2..fc83af63306 100644 --- a/website/docs/docs/build/about-metricflow.md +++ b/website/docs/docs/build/about-metricflow.md @@ -15,7 +15,7 @@ MetricFlow handles SQL query construction and defines the specification for dbt Before you start, consider the following guidelines: - Define metrics in YAML and query them using these [new metric specifications](https://github.com/dbt-labs/dbt-core/discussions/7456). -- You must be on [dbt version](/docs/dbt-versions/upgrade-core-in-cloud) 1.6 or higher to use MetricFlow. +- You must be on [dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) 1.6 or higher to use MetricFlow. - Use MetricFlow with Snowflake, BigQuery, Databricks, Postgres (dbt Core only), or Redshift. - Discover insights and query your metrics using the [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-sl) and its diverse range of [available integrations](/docs/use-dbt-semantic-layer/avail-sl-integrations). diff --git a/website/docs/docs/build/metricflow-commands.md b/website/docs/docs/build/metricflow-commands.md index 675a2d056ac..4693aa1a774 100644 --- a/website/docs/docs/build/metricflow-commands.md +++ b/website/docs/docs/build/metricflow-commands.md @@ -75,7 +75,7 @@ You can use the `dbt sl` prefix before the command name to execute them in the d - [`list dimensions`](#list) — Lists unique dimensions for metrics. - [`list dimension-values`](#list-dimension-values) — List dimensions with metrics. - [`list entities`](#list-entities) — Lists all unique entities. -- [`list saved queries`)(#list-saved-queries) — Lists available saved queries. Use the `--show-exports` flag to display each export listed under a saved query. +- [`list saved queries`](#list-saved-queries) — Lists available saved queries. Use the `--show-exports` flag to display each export listed under a saved query. - [`query`](#query) — Query metrics, saved queries, and dimensions you want to see in the command line interface. Refer to [query examples](#query-examples) to help you get started. diff --git a/website/docs/docs/dbt-versions/upgrade-core-in-cloud.md b/website/docs/docs/dbt-versions/upgrade-dbt-version-in-cloud.md --- a/website/docs/docs/dbt-versions/upgrade-core-in-cloud.md +++ b/website/docs/docs/dbt-versions/upgrade-dbt-version-in-cloud.md -Once you know what code changes you'll need to make, you can start implementing them. We recommend you create a separate dbt project, **Upgrade Project**, to test your changes before making them live in your main dbt project. In your **Upgrade Project**, connect to the same repository you use for your production project. This time, set the development environment [settings](/docs/dbt-versions/upgrade-core-in-cloud) to run the latest version of dbt Core. Next, check out a branch `dbt-version-upgrade`, make the appropriate updates to your project, and verify your dbt project compiles and runs with the new version in the IDE. If upgrading directly to the latest version results in too many issues, try testing your project iteratively on successive minor versions. There are years of development and a few breaking changes between distant versions of dbt Core (for example, 0.14 --> 1.0). The likelihood of experiencing problems upgrading between successive minor versions is much lower, which is why upgrading regularly is recommended. +Once you know what code changes you'll need to make, you can start implementing them. We recommend you create a separate dbt project, **Upgrade Project**, to test your changes before making them live in your main dbt project. In your **Upgrade Project**, connect to the same repository you use for your production project.
This time, set the development environment [settings](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to run the latest version of dbt Core. Next, check out a branch `dbt-version-upgrade`, make the appropriate updates to your project, and verify your dbt project compiles and runs with the new version in the IDE. If upgrading directly to the latest version results in too many issues, try testing your project iteratively on successive minor versions. There are years of development and a few breaking changes between distant versions of dbt Core (for example, 0.14 --> 1.0). The likelihood of experiencing problems upgrading between successive minor versions is much lower, which is why upgrading regularly is recommended. Once you have your project compiling and running on the latest version of dbt in the development environment for your `dbt-version-upgrade` branch, try replicating one of your production jobs to run off your branch's code. You can do this by creating a new deployment environment for testing, setting the custom branch to 'ON' and referencing your `dbt-version-upgrade` branch. You'll also need to set the dbt version in this environment to the latest dbt Core version. diff --git a/website/docs/docs/deploy/retry-jobs.md b/website/docs/docs/deploy/retry-jobs.md index beefb35379e..f439351aec5 100644 --- a/website/docs/docs/deploy/retry-jobs.md +++ b/website/docs/docs/deploy/retry-jobs.md @@ -9,7 +9,7 @@ If your dbt job run completed with a status of **Error**, you can rerun it from ## Prerequisites - You have a [dbt Cloud account](https://www.getdbt.com/signup). -- You must be using [dbt version](/docs/dbt-versions/upgrade-core-in-cloud) 1.6 or newer. +- You must be using [dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) 1.6 or newer. - The most recent run of the job hasn't completed successfully. The latest status of the run is **Error**. - The job command that failed in the run must be one that supports the [retry command](/reference/commands/retry). diff --git a/website/docs/docs/use-dbt-semantic-layer/exports.md b/website/docs/docs/use-dbt-semantic-layer/exports.md index e7cb767723f..5b33e825549 100644 --- a/website/docs/docs/use-dbt-semantic-layer/exports.md +++ b/website/docs/docs/use-dbt-semantic-layer/exports.md @@ -4,7 +4,7 @@ description: "Use exports to materialize tables to the data platform on a schedu sidebar_label: "Materialize with exports" --- -# Materialize queries with exports +# Materialize queries with exports The exports feature in the dbt Semantic Layer enhances the [saved queries](/docs/build/saved-queries) by allowing you to materialize commonly used queries directly within your data platform. @@ -18,7 +18,7 @@ Essentially, exports are like any other table in your data platform. They enable ## Prerequisites - You have a multi-tenant dbt Cloud account on a [Team or Enterprise](https://www.getdbt.com/pricing/) plan. (Single-tenant is not supported at this time) -- You are on [dbt version](/docs/dbt-versions/upgrade-core-in-cloud) 1.7 or newer. +- You are on [dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) 1.7 or newer. - You have the dbt Semantic Layer [configured](/docs/use-dbt-semantic-layer/setup-sl) in your dbt project. - You have a dbt Cloud environment with a [Job scheduler](/docs/deploy/job-scheduler) enabled. @@ -50,9 +50,9 @@ saved_queries: query_params: metrics: - YOUR_METRIC_NAME - group_bys: + group_by: - TimeDimension() - - ... # Additional group_bys + - ... # Additional group_by where: - ... 
# Additional where clauses exports: diff --git a/website/docs/docs/use-dbt-semantic-layer/gsheets.md b/website/docs/docs/use-dbt-semantic-layer/gsheets.md index d7525fa7b26..1fbc4e941d4 100644 --- a/website/docs/docs/use-dbt-semantic-layer/gsheets.md +++ b/website/docs/docs/use-dbt-semantic-layer/gsheets.md @@ -5,6 +5,8 @@ tags: [Semantic Layer] sidebar_label: "Google Sheets (beta)" --- +# Google Sheets + :::info Beta functionality Google Sheets integration with the dbt Semantic Layer is a [beta](/docs/dbt-versions/product-lifecycles#dbt-cloud) feature. ::: @@ -20,25 +22,27 @@ The dbt Semantic Layer offers a seamless integration with Google Sheets through - You must have a dbt Cloud Team or Enterprise [account](https://www.getdbt.com/pricing). Suitable for both Multi-tenant and Single-tenant deployment. - Single-tenant accounts should contact their account representative for necessary setup and enablement. -## Installing the add-on - -1. Navigate to the [dbt Semantic Layer for Sheets App](https://gsuite.google.com/marketplace/app/foo/392263010968) to install the add-on. +## Installing the add-on - - You can also find it in Google Sheets by going to [**Extensions -> Add-on -> Get add-ons**](https://support.google.com/docs/answer/2942256?hl=en&co=GENIE.Platform%3DDesktop&oco=0#zippy=%2Cinstall-add-ons%2Cinstall-an-add-on) and searching for it there. +1. Navigate to the [dbt Semantic Layer for Sheets App](https://gsuite.google.com/marketplace/app/foo/392263010968) to install the add-on. You can also find it in Google Sheets by going to [**Extensions -> Add-on -> Get add-ons**](https://support.google.com/docs/answer/2942256?hl=en&co=GENIE.Platform%3DDesktop&oco=0#zippy=%2Cinstall-add-ons%2Cinstall-an-add-on) and searching for it there. 2. After installing, open the Add-On menu and select the "dbt Semantic Layer for Sheets". This will open a custom menu to the right-hand side of your screen. -3. Authenticate with your Host, dbt Cloud Environment ID, and Service Token. -4. Start querying your metrics using the **Query Builder**. For more info on the menu functions, refer to [Custom menu functions](#custom-menu-functions). - -When querying your data with Google Sheets: +3. Find your **Host** and **Environment ID** in dbt Cloud. Navigate to **Account Settings** and select **Projects** on the left sidebar. Select your project and then navigate to the **Semantic Layer** settings. You'll need this to authenticate in Google Sheets in the following step. + - You can [generate your service token](/docs/dbt-cloud-apis/service-tokens) by clicking **Generate Service Token** within the Semantic Layer configuration page or navigating to **API tokens** in dbt Cloud. + +4. In Google Sheets, authenticate with your host, dbt Cloud environment ID, and service token. + + +5. Start querying your metrics using the **Query Builder**. For more info on the menu functions, refer to [Custom menu functions](#custom-menu-functions). + +When querying your data with Google Sheets: - It returns the data to the cell you have clicked on. - The custom menu operation has a timeout limit of six (6) minutes. - If you're using this extension, make sure you're signed into Chrome with the same Google profile you used to set up the Add-On. Log in with one Google profile at a time as using multiple Google profiles at once might cause issues. 
- ## Custom menu functions -The custom menu provides the following capabilities: +The custom menu provides the following capabilities: | Menu items | Description | |---------------|-------------------------------------------------------| diff --git a/website/docs/docs/use-dbt-semantic-layer/tableau.md b/website/docs/docs/use-dbt-semantic-layer/tableau.md index 05d78f30f55..11e24762684 100644 --- a/website/docs/docs/use-dbt-semantic-layer/tableau.md +++ b/website/docs/docs/use-dbt-semantic-layer/tableau.md @@ -5,6 +5,9 @@ tags: [Semantic Layer] sidebar_label: "Tableau (beta)" --- +# Tableau + + :::info Beta functionality The Tableau integration with the dbt Semantic Layer is a [beta feature](/docs/dbt-versions/product-lifecycles#dbt-cloud). ::: diff --git a/website/docs/reference/artifacts/dbt-artifacts.md b/website/docs/reference/artifacts/dbt-artifacts.md index 31525777500..58074c8b426 100644 --- a/website/docs/reference/artifacts/dbt-artifacts.md +++ b/website/docs/reference/artifacts/dbt-artifacts.md @@ -19,15 +19,19 @@ They could also be used to: dbt has produced artifacts since the release of dbt-docs in v0.11.0. Starting in dbt v0.19.0, we are committing to a stable and sustainable way of versioning, documenting, and validating dbt artifacts. -## When are artifacts produced? +### When are artifacts produced? Most dbt commands (and corresponding RPC methods) produce artifacts: -- [semantic manifest](/docs/dbt-cloud-apis/sl-manifest): Lives in the `/target` directory of your dbt project and stores various artifacts (such as compiled models and tests) generated during the execution of your project. +- [semantic manifest](/docs/dbt-cloud-apis/sl-manifest): produced whenever your dbt project is parsed - [manifest](/reference/artifacts/manifest-json): produced by commands that read and understand your project - [run results](/reference/artifacts/run-results-json): produced by commands that run, compile, or catalog nodes in your DAG - [catalog](catalog-json): produced by `docs generate` - [sources](/reference/artifacts/sources-json): produced by `source freshness` +## Where are artifacts produced? + +By default, artifacts are written to the `/target` directory of your dbt project. You can configure the location using the [`target-path`](/reference/project-configs/target-path#configuration). + ## Common metadata All artifacts produced by dbt include a `metadata` dictionary with these properties: diff --git a/website/docs/reference/commands/deps.md b/website/docs/reference/commands/deps.md index 900e4ae0041..980386b1c1d 100644 --- a/website/docs/reference/commands/deps.md +++ b/website/docs/reference/commands/deps.md @@ -60,26 +60,34 @@ Update your versions in packages.yml, then run dbt deps -dbt generates the `package-lock.yml` file in the _project_root_ where `packages.yml` is recorded, which contains all the resolved packages, the first time you run `dbt deps`. Each subsequent run records the packages installed in this file. If the subsequent `dbt deps` runs contain no updated packages in `dependencies.yml` or `packages.yml`, dbt-core installs from `package-lock.yml`. +## Predictable package installs -When you update the package spec and run `dbt deps` again, the `package-lock.yml` and `packages.yml` files update accordingly. +Starting in dbt Core v1.7, dbt generates a `package-lock.yml` file in the root of your project. This contains the complete set of resolved packages based on the `packages` configuration in `dependencies.yml` or `packages.yml`. 
Each subsequent invocation of `dbt deps` will install from the _locked_ set of packages specified in this file. Storing the complete set of required packages (with pinned versions) in version-controlled code ensures predictable installs in production and consistency across all developers and environments. -There are two flags related to `package-lock.yml`: -- `dbt deps --lock` — creates or updates the `package-lock.yml` file but does not install the packages. -- `dbt deps --upgrade` — creates or updates the `package-lock.yml` file with the most recent dependencies from `packages.yml`. Also install the packages unless the `--lock` flag is also passed. +The `package-lock.yml` file includes a `sha1_hash` of the `packages` config. This enables dbt to detect if the `packages` config has been updated, and to rerun dependency resolution. To only check for changes to the `packages` config and update the lock file accordingly without installing those packages, provide the `--lock` flag (that is, `dbt deps --lock`). + +It's possible to force package resolution to rerun, even if the `packages` config hasn't changed, by running `dbt deps --upgrade`. This enables you to get the latest commits from the `main` branch of an internally maintained `git` package while accepting the risk of unpredictable builds. An alternative to running `dbt deps --upgrade` in production is to "ignore" the lock file by adding `package-lock.yml` to your project's `.gitignore` file. If you pursue either approach, dbt Labs strongly recommends adding version pins for third-party packages within your `packages` config. + +## Add specific packages + +The `dbt deps` command can add or update an existing package configuration — no need to remember the exact syntax for package configurations. + +For Hub packages (default), which are the easiest to install: -Examples of the `--add-package` flag: ```shell -# add package from hub (--source arg defaults to "hub") dbt deps --add-package dbt-labs/dbt_utils@1.0.0 -# add package from hub with semantic version range +# with semantic version range dbt deps --add-package dbt-labs/snowplow@">=0.7.0,<0.8.0" +``` +For other package types, use the `--source` flag: +```shell # add package from git dbt deps --add-package https://github.com/fivetran/dbt_amplitude@v0.3.0 --source git # add package from local dbt deps --add-package /opt/dbt/redshift --source local ``` + diff --git a/website/docs/reference/commands/test.md b/website/docs/reference/commands/test.md index 373ad9b6db3..cad61a05ac5 100644 --- a/website/docs/reference/commands/test.md +++ b/website/docs/reference/commands/test.md @@ -3,6 +3,7 @@ title: "About dbt test command" sidebar_label: "test" id: "test" --- + `dbt test` runs tests defined on models, sources, snapshots, and seeds. It expects that you have already created those resources through the appropriate commands. @@ -29,3 +30,47 @@ dbt test --select "one_specific_model,test_type:generic" ``` For more information on writing tests, see the [Testing Documentation](/docs/build/data-tests). + + + + + +`dbt test` runs data tests defined on models, sources, snapshots, and seeds and unit tests defined on SQL models. It expects that you have already created those resources through the appropriate commands. + +The tests to run can be selected using the `--select` flag discussed [here](/reference/node-selection/syntax). 
+ +```bash +# run data and unit tests +dbt test + +# run only data tests +dbt test --select test_type:data + +# run only unit tests +dbt test --select test_type:unit + +# run tests for one_specific_model +dbt test --select "one_specific_model" + +# run tests for all models in package +dbt test --select "some_package.*" + +# run only data tests defined singularly +dbt test --select "test_type:singular" + +# run only data tests defined generically +dbt test --select "test_type:generic" + +# run data tests limited to one_specific_model +dbt test --select "one_specific_model,test_type:data" + +# run unit tests limited to one_specific_model +dbt test --select "one_specific_model,test_type:unit" +``` + +For more information on writing tests, read the [data testing](/docs/build/data-tests) and [unit testing](/docs/build/unit-tests) documentation. + + + + + diff --git a/website/docs/reference/commands/version.md b/website/docs/reference/commands/version.md new file mode 100644 index 00000000000..91619f2af7a --- /dev/null +++ b/website/docs/reference/commands/version.md @@ -0,0 +1,31 @@ +--- +title: "About dbt --version" +sidebar_label: "version" +id: "version" +--- + +The `--version` command-line flag returns information about the currently installed version of dbt Core or the dbt Cloud CLI. + +This flag is not supported when invoking dbt in other dbt Cloud runtimes (for example, the IDE or scheduled runs). + + + +```text +$ dbt --version +Core: + - installed: 1.7.6 + - latest: 1.7.6 - Up to date! +Plugins: + - snowflake: 1.7.1 - Up to date! +``` + + + + + +```text +$ dbt --version +dbt Cloud CLI - 0.35.7 (fae78a6f5f6f2d7dff3cab3305fe7f99bd2a36f3 2024-01-18T22:34:52Z) +``` + + diff --git a/website/docs/reference/dbt-commands.md b/website/docs/reference/dbt-commands.md index 2e534049dce..4c4c142ee0d 100644 --- a/website/docs/reference/dbt-commands.md +++ b/website/docs/reference/dbt-commands.md @@ -41,6 +41,7 @@ You can run dbt commands in your specific tool by prefixing them with `dbt`. Fo | [snapshot](/reference/commands/snapshot) | Executes "snapshot" jobs defined in a project | All | All [supported versions](/docs/dbt-versions/core) | | [source](/reference/commands/source) | Provides tools for working with source data (including validating that sources are "fresh") | All | All [supported versions](/docs/dbt-versions/core) | | [test](/reference/commands/test) | Executes tests defined in a project | All | All [supported versions](/docs/dbt-versions/core) | +| [--version](/reference/commands/version) | Displays the currently installed version of dbt CLI | dbt Core
dbt Cloud CLI | All [supported versions](/docs/dbt-versions/core) | diff --git a/website/docs/reference/global-configs/version-compatibility.md b/website/docs/reference/global-configs/version-compatibility.md index c7a1227adaf..fdc9bc1d48e 100644 --- a/website/docs/reference/global-configs/version-compatibility.md +++ b/website/docs/reference/global-configs/version-compatibility.md @@ -12,4 +12,9 @@ You can use the `VERSION_CHECK` config to disable this check and suppress the er dbt --no-version-check run Running with dbt=1.0.0 Found 13 models, 2 tests, 1 archives, 0 analyses, 204 macros, 2 operations.... -``` \ No newline at end of file +``` + +:::info Keep on latest version + + +::: diff --git a/website/docs/reference/global-configs/yaml-configurations.md b/website/docs/reference/global-configs/yaml-configurations.md index 73b8f558a9f..9bac8063c1f 100644 --- a/website/docs/reference/global-configs/yaml-configurations.md +++ b/website/docs/reference/global-configs/yaml-configurations.md @@ -19,6 +19,6 @@ config: -The exception: Some global configurations are actually set in `dbt_project.yml`, instead of `profiles.yml`, because they control where dbt places logs and artifacts. Those file paths are always relative to the location of `dbt_project.yml`. For more details, see ["Log and target paths"](#log-and-target-paths) below. +The exception: Some global configurations are actually set in `dbt_project.yml`, instead of `profiles.yml`, because they control where dbt places logs and artifacts. Those file paths are always relative to the location of `dbt_project.yml`. For more details, refer to [Log and target paths](/reference/global-configs/logs#log-and-target-paths). - \ No newline at end of file + diff --git a/website/docs/reference/project-configs/require-dbt-version.md b/website/docs/reference/project-configs/require-dbt-version.md index 6b17bb46120..1d00eeb0be0 100644 --- a/website/docs/reference/project-configs/require-dbt-version.md +++ b/website/docs/reference/project-configs/require-dbt-version.md @@ -3,6 +3,9 @@ datatype: version-range | [version-range] description: "Read this guide to understand the require-dbt-version configuration in dbt." default_value: None --- + + + ```yml @@ -19,6 +22,12 @@ When you set this configuration, dbt sends a helpful error message for any user If this configuration is not specified, no version check will occur. +:::info Keep on latest version + + + +::: + ### YAML quoting This configuration needs to be interpolated by the YAML parser as a string. As such, you should quote the value of the configuration, taking care to avoid whitespace. For example: @@ -77,6 +86,8 @@ Pinning to a specific dbt version is discouraged because it limits project flexi While you can restrict your project to run only with an exact version of dbt Core, we do not recommend this for dbt Core v1.0.0 and higher. 
+::: + In the following example, the project will only run with dbt v1.5: diff --git a/website/docs/reference/resource-configs/store_failures_as.md b/website/docs/reference/resource-configs/store_failures_as.md index dd61030afb8..005193a5381 100644 --- a/website/docs/reference/resource-configs/store_failures_as.md +++ b/website/docs/reference/resource-configs/store_failures_as.md @@ -17,7 +17,7 @@ You can configure it in all the same places as `store_failures`, including singu #### Singular test -[Singular test](https://docs.getdbt.com/docs/build/tests#singular-data-tests) in `tests/singular/check_something.sql` file +[Singular test](https://docs.getdbt.com/docs/build/data-tests#singular-data-tests) in `tests/singular/check_something.sql` file ```sql {{ config(store_failures_as="table") }} @@ -29,7 +29,7 @@ where 1=0 #### Generic test -[Generic tests](https://docs.getdbt.com/docs/build/tests#generic-data-tests) in `models/_models.yml` file +[Generic tests](https://docs.getdbt.com/docs/build/data-tests#generic-data-tests) in `models/_models.yml` file ```yaml models: diff --git a/website/docs/reference/resource-properties/unit-tests.md b/website/docs/reference/resource-properties/unit-tests.md new file mode 100644 index 00000000000..40c3414e373 --- /dev/null +++ b/website/docs/reference/resource-properties/unit-tests.md @@ -0,0 +1,204 @@ +--- +title: "About unit tests property" +sidebar_label: "Unit tests" +resource_types: [models] +datatype: test +--- + + +Unit tests validate your SQL modeling logic on a small set of static inputs before you materialize your full model in production. They support a test-driven development approach, improving both the efficiency of developers and reliability of code. + +To run only your unit tests, use the command: +`dbt test --select test_type:unit` + + + +```yml + +unit_tests: + - name: # this is the unique name of the test + model: + versions: #optional + include: #optional + exclude: #optional + config: + meta: {dictionary} + tags: | [] + given: + - input: # optional for seeds + format: dict | csv + # if format csv, either define dictionary of rows or name of fixture + rows: + - {dictionary} + fixture: + - input: ... # declare additional inputs + expect: + format: dict | csv + # if format csv, either define dictionary of rows or name of fixture + rows: + - {dictionary} + fixture: + overrides: # optional: configuration for the dbt execution environment + macros: + is_incremental: true | false + dbt_utils.current_timestamp: str + # ... any other jinja function from https://docs.getdbt.com/reference/dbt-jinja-functions + # ... any other context property + vars: {dictionary} + env_vars: {dictionary} + - name: ... # declare additional unit tests + + ``` + + + + +## About writing unit tests + +Unit tests are currently limited to testing SQL models and only models in your current project. + +### Versions +If your model has multiple versions, the default unit test will run on *all* versions of your model. To specify version(s) of your model to unit test, use `include` or `exclude` for the desired versions in your model versions config: + +```yaml + +# my test_is_valid_email_address unit test will run on all versions of my_model +unit_tests: + - name: test_is_valid_email_address + model: my_model + ... + +# my test_is_valid_email_address unit test will run on ONLY version 2 of my_model +unit_tests: + - name: test_is_valid_email_address + model: my_model + versions: + include: + - 2 + ... 
+ +# my test_is_valid_email_address unit test will run on all versions EXCEPT 1 of my_model +unit_tests: + - name: test_is_valid_email_address + model: my_model + versions: + exclude: + - 1 + ... + +``` + +### Format + +When using `format: dict` you must supply an in-line dictionary for `rows:` (this is the default, if you don’t specify a `format`) + +```yml + +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: dict + rows: + - {id: 1, name: gerda} + - {id: 2, b: michelle} + ... +``` + +When `format: csv`, can either supply: + - An inline csv string for `rows:` + + ```yaml + unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,name + 1,gerda + 2,michelle + ... + ``` + + + - The name of a csv file in the `tests/fixtures` directory in your project (or the directory configured for [test-paths](https://docs.getdbt.com/reference/project-configs/test-paths)) for `fixture`: + + ```yaml + unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: my_model_a_fixture + ... + ``` + + ```csv + # tests/fixtures/my_model_a_fixture.csv + 1,gerda + 2,michelle + ``` + +### Input + +- `input:` string that represents a `ref` or `source` call: + - `ref('my_model')` or `ref('my_model', v='2')` or `ref('dougs_project', 'users')` + - `source('source_schema', 'source_name')` +- `input:` is optional for seeds: + - If you don’t supply an input for a seed, we will use the seed *as* the input. + - If you do supply an input for a seed, we will use that input instead. +- You can also have “empty” inputs, by setting rows to an empty list `rows: []` + +## Examples +```yml + +unit_tests: + - name: test_is_valid_email_address # this is the unique name of the test + model: dim_customers # name of the model I'm unit testing + given: # the mock data for your inputs + - input: ref('stg_customers') + rows: + - {customer_id: 1, email: cool@example.com, email_top_level_domain: example.com} + - {customer_id: 2, email: cool@unknown.com, email_top_level_domain: unknown.com} + - {customer_id: 3, email: badgmail.com, email_top_level_domain: gmail.com} + - {customer_id: 4, email: missingdot@gmailcom, email_top_level_domain: gmail.com} + - input: ref('top_level_email_domains') + rows: + - {tld: example.com} + - {tld: gmail.com} + expect: # the expected output given the inputs above + rows: + - {customer_id: 1, is_valid_email_address: true} + - {customer_id: 2, is_valid_email_address: false} + - {customer_id: 3, is_valid_email_address: false} + - {customer_id: 4, is_valid_email_address: false} + +``` + +```yml + +unit_tests: + - name: test_is_valid_email_address # this is the unique name of the test + model: dim_customers # name of the model I'm unit testing + given: # the mock data for your inputs + - input: ref('stg_customers') + rows: + - {customer_id: 1, email: cool@example.com, email_top_level_domain: example.com} + - {customer_id: 2, email: cool@unknown.com, email_top_level_domain: unknown.com} + - {customer_id: 3, email: badgmail.com, email_top_level_domain: gmail.com} + - {customer_id: 4, email: missingdot@gmailcom, email_top_level_domain: gmail.com} + - input: ref('top_level_email_domains') + format: csv + rows: | + tld + example.com + gmail.com + expect: # the expected output given the inputs above + format: csv + fixture: valid_email_address_fixture_output + +``` diff --git a/website/docs/terms/data-lineage.md b/website/docs/terms/data-lineage.md index 
d0162c35616..42217db40d8 100644 --- a/website/docs/terms/data-lineage.md +++ b/website/docs/terms/data-lineage.md @@ -101,13 +101,13 @@ Is your DAG keeping up with best practices? Instead of manually auditing your DA Complex workflows also add to the difficulties a data lineage system will encounter. For example, consider the challenges in describing a data source's movement through a pipeline as it's filtered, pivoted, and joined with other tables. These challenges increase when the granularity of the data lineage shifts from the table to the column level. -As data lineage graphs mature and grow, it becomes clear that column or field-level lineage is often a needed layer of specificity that is not typically built-in to data lineage systems. [Some of the third party tooling](#third-party-tooling) from above can support column-level lineage. +As data lineage graphs mature and grow, it becomes clear that column- or field-level lineage is often a needed layer of specificity that is not typically built in to data lineage systems. Learn more about the [column-level lineage](/docs/collaborate/column-level-lineage) feature in [dbt Explorer](https://www.getdbt.com/product/dbt-explorer) and how it can help you gain insights. -## **Conclusion** +## Conclusion Data lineage is the holistic overview of how data moves through an organization or system, and is typically represented by a DAG. Analytics engineering practitioners use their DAG and data lineage to unpack root causes in broken pipelines, audit their models for inefficiencies, and promote greater transparency in their data work to business users. Overall, using your data lineage and DAG to know when your data is transformed and where it’s consumed is the foundation for good analytics work. -## **Further reading** +## Further reading DAGs, data lineage, and root cause analysis…tell me more! 
Check out some of our favorite resources of writing modular models, DRY code, and data modeling best practices: diff --git a/website/sidebars.js b/website/sidebars.js index 60606cc8dd4..6cb0c656079 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -277,9 +277,17 @@ const sidebarSettings = { "docs/build/python-models", ], }, + { + type: "category", + label: "Tests", + link: { type: "doc", id: "docs/build/data-tests" }, + items: [ + "docs/build/data-tests", + "docs/build/unit-tests", + ], + }, "docs/build/snapshots", "docs/build/seeds", - "docs/build/data-tests", "docs/build/jinja-macros", "docs/build/sources", "docs/build/exposures", @@ -426,6 +434,7 @@ const sidebarSettings = { link: { type: "doc", id: "docs/collaborate/explore-projects" }, items: [ "docs/collaborate/explore-projects", + "docs/collaborate/column-level-lineage", "docs/collaborate/model-performance", "docs/collaborate/project-recommendations", "docs/collaborate/explore-multiple-projects", @@ -611,7 +620,7 @@ const sidebarSettings = { link: { type: "doc", id: "docs/dbt-versions/core" }, items: [ "docs/dbt-versions/core", - "docs/dbt-versions/upgrade-core-in-cloud", + "docs/dbt-versions/upgrade-dbt-version-in-cloud", "docs/dbt-versions/product-lifecycles", "docs/dbt-versions/experimental-features", { @@ -810,7 +819,7 @@ const sidebarSettings = { }, { type: "category", - label: "For tests", + label: "For data tests", items: [ "reference/data-test-configs", "reference/resource-configs/fail_calc", @@ -821,6 +830,13 @@ const sidebarSettings = { "reference/resource-configs/where", ], }, + { + type: "category", + label: "For unit tests", + items: [ + "reference/resource-properties/unit-tests", + ], + }, { type: "category", label: "For sources", @@ -902,6 +918,7 @@ const sidebarSettings = { "reference/commands/snapshot", "reference/commands/source", "reference/commands/test", + "reference/commands/version", ], }, { diff --git a/website/snippets/_cloud-environments-info.md b/website/snippets/_cloud-environments-info.md index 2083d8f07ec..929337f86d4 100644 --- a/website/snippets/_cloud-environments-info.md +++ b/website/snippets/_cloud-environments-info.md @@ -29,8 +29,9 @@ Both development and deployment environments have a section called **General Set :::note About dbt version -- dbt Cloud allows users to select any dbt release. At this time, **environments must use a dbt version greater than or equal to v1.0.0;** [lower versions are no longer supported](/docs/dbt-versions/upgrade-core-in-cloud). +- dbt Cloud allows users to select any dbt release. At this time, **environments must use a dbt version greater than or equal to v1.0.0;** [lower versions are no longer supported](/docs/dbt-versions/upgrade-dbt-version-in-cloud). - If you select a current version with `(latest)` in the name, your environment will automatically install the latest stable version of the minor version selected. +- In 2024 we are introducing **Keep on latest version**, which removes the need for manually upgrading environments in the future, while ensuring you get access to the latest fixes and features. 
_This feature is currently in beta for select customers, rolling out to wider availability through February and March._ ::: ### Custom branch behavior diff --git a/website/snippets/_config-dbt-version-check.md b/website/snippets/_config-dbt-version-check.md new file mode 100644 index 00000000000..231143407ce --- /dev/null +++ b/website/snippets/_config-dbt-version-check.md @@ -0,0 +1,23 @@ + +Starting in 2024, when you select **Keep on latest version** in dbt Cloud, dbt will ignore the `require-dbt-version` config. Refer to [Keep on latest version](/docs/dbt-versions/upgrade-dbt-version-in-cloud#keep-on-latest-version) (available in beta) for more details. + +dbt Labs is committed to zero breaking changes for code in dbt projects, with ongoing releases to dbt Cloud and new versions of dbt Core. We also recommend these best practices: + +- **If you install dbt packages** — for use in your project, whether the package is maintained by your colleagues or a member of the open source dbt community, we recommend pinning the package to a specific revision or `version` boundary. Since v1.7, dbt manages this out-of-the-box by _locking_ the version/revision of packages in development in order to guarantee predictable builds in production. To learn more, refer to [Predictable package installs](/reference/commands/deps#predictable-package-installs). +- **If you maintain dbt packages** — whether on behalf of your colleagues or members of the open source community, we recommend writing defensive code that checks to verify that other required packages and global macros are available. For example, if your package depends on the availability of a `date_spine` macro in the global `dbt` namespace, you can write: + + + +```sql +{% macro a_few_days_in_september() %} + + {% if not dbt.get('date_spine') %} + {{ exceptions.raise_compiler_error("Expected to find the dbt.date_spine macro, but it could not be found") }} + {% endif %} + + {{ date_spine("day", "cast('2020-01-01' as date)", "cast('2030-12-31' as date)") }} + +{% endmacro %} +``` + + diff --git a/website/snippets/_new-sl-setup.md b/website/snippets/_new-sl-setup.md index a93f233d09c..e7039c8103a 100644 --- a/website/snippets/_new-sl-setup.md +++ b/website/snippets/_new-sl-setup.md @@ -6,7 +6,7 @@ You can set up the dbt Semantic Layer in dbt Cloud at the environment and projec - You must have a successful run in your new environment. :::tip -If you've configured the legacy Semantic Layer, it has been deprecated. dbt Labs strongly recommends that you [upgrade your dbt version](/docs/dbt-versions/upgrade-core-in-cloud) to dbt version 1.6 or higher to use the latest dbt Semantic Layer. Refer to the dedicated [migration guide](/guides/sl-migration) for details. +If you've configured the legacy Semantic Layer, it has been deprecated. dbt Labs strongly recommends that you [upgrade your dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to dbt version 1.6 or higher to use the latest dbt Semantic Layer. Refer to the dedicated [migration guide](/guides/sl-migration) for details. ::: 1. In dbt Cloud, create a new [deployment environment](/docs/deploy/deploy-environments#create-a-deployment-environment) or use an existing environment on dbt 1.6 or higher.
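Once the environment in the `_new-sl-setup` snippet above is configured and has a successful run, it can be worth sanity-checking the Semantic Layer end to end from the dbt Cloud CLI. A minimal sketch, where `revenue` stands in for a metric defined in your own project:

```bash
# List the metrics the Semantic Layer has parsed from your project.
dbt sl list metrics

# Query one metric grouped by the standard time dimension to confirm
# connectivity; substitute `revenue` with one of your own metric names.
dbt sl query --metrics revenue --group-by metric_time
```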
diff --git a/website/snippets/_new-sl-setup.md b/website/snippets/_new-sl-setup.md
index a93f233d09c..e7039c8103a 100644
--- a/website/snippets/_new-sl-setup.md
+++ b/website/snippets/_new-sl-setup.md
@@ -6,7 +6,7 @@ You can set up the dbt Semantic Layer in dbt Cloud at the environment and projec
 - You must have a successful run in your new environment.
 
 :::tip
-If you've configured the legacy Semantic Layer, it has been deprecated. dbt Labs strongly recommends that you [upgrade your dbt version](/docs/dbt-versions/upgrade-core-in-cloud) to dbt version 1.6 or higher to use the latest dbt Semantic Layer. Refer to the dedicated [migration guide](/guides/sl-migration) for details.
+The legacy Semantic Layer has been deprecated. If you've configured it, dbt Labs strongly recommends that you [upgrade your dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to dbt version 1.6 or higher to use the latest dbt Semantic Layer. Refer to the dedicated [migration guide](/guides/sl-migration) for details.
 :::
 
 1. In dbt Cloud, create a new [deployment environment](/docs/deploy/deploy-environments#create-a-deployment-environment) or use an existing environment on dbt 1.6 or higher.
diff --git a/website/snippets/_sl-deprecation-notice.md b/website/snippets/_sl-deprecation-notice.md
index 610b1574b7d..2c42dd199c7 100644
--- a/website/snippets/_sl-deprecation-notice.md
+++ b/website/snippets/_sl-deprecation-notice.md
@@ -1,5 +1,5 @@
 :::info Deprecation of dbt Metrics and the legacy dbt Semantic Layer
 dbt Labs has deprecated dbt Metrics and the legacy dbt Semantic Layer, both supported on dbt version 1.5 or lower. These changes went into effect on December 15th, 2023.
 
-To migrate and access [MetricFlow](/docs/build/build-metrics-intro) or the re-released dbt Semantic Layer, use the [dbt Semantic Layer migration guide](/guides/sl-migration) and [upgrade your version](/docs/dbt-versions/upgrade-core-in-cloud) in dbt Cloud.
+To migrate and access [MetricFlow](/docs/build/build-metrics-intro) or the re-released dbt Semantic Layer, use the [dbt Semantic Layer migration guide](/guides/sl-migration) and [upgrade your version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) in dbt Cloud.
 :::
diff --git a/website/snippets/_sl-faqs.md b/website/snippets/_sl-faqs.md
index 75583bfa2f6..2c1d305b200 100644
--- a/website/snippets/_sl-faqs.md
+++ b/website/snippets/_sl-faqs.md
@@ -40,7 +40,7 @@ You can use the upcoming feature, Exports, which will allow you to create a [pre
 
-If you're using the legacy Semantic Layer, we highly recommend you [upgrade your dbt version](/docs/dbt-versions/upgrade-core-in-cloud) to dbt v1.6 or higher to use the new dbt Semantic Layer. Refer to the dedicated [migration guide](/guides/sl-migration) for more info.
+If you're using the legacy Semantic Layer, we highly recommend you [upgrade your dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to dbt v1.6 or higher to use the new dbt Semantic Layer. Refer to the dedicated [migration guide](/guides/sl-migration) for more info.
 
diff --git a/website/snippets/_sl-measures-parameters.md b/website/snippets/_sl-measures-parameters.md
index 4bd32311fda..e41268a5867 100644
--- a/website/snippets/_sl-measures-parameters.md
+++ b/website/snippets/_sl-measures-parameters.md
@@ -2,7 +2,7 @@
 | --- | --- | --- |
 | [`name`](/docs/build/measures#name) | Provide a name for the measure, which must be unique and can't be repeated across all semantic models in your dbt project. | Required |
 | [`description`](/docs/build/measures#description) | Describes the calculated measure. | Optional |
-| [`agg`](/docs/build/measures#description) | dbt supports the following aggregations: `sum`, `max`, `min`, `count_distinct`, and `sum_boolean`. | Required |
+| [`agg`](/docs/build/measures#description) | dbt supports the following aggregations: `sum`, `max`, `min`, `avg`, `median`, `count_distinct`, and `sum_boolean`. | Required |
 | [`expr`](/docs/build/measures#expr) | Either reference an existing column in the table or use a SQL expression to create or derive a new one. | Optional |
 | [`non_additive_dimension`](/docs/build/measures#non-additive-dimensions) | Non-additive dimensions can be specified for measures that cannot be aggregated over certain dimensions, such as bank account balances, to avoid producing incorrect results. | Optional |
 | `agg_params` | Specific aggregation properties such as a percentile. | Optional |
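The `agg` row above now lists `avg` and `median`. A minimal, abridged sketch of a measure using one of the new aggregations; the model, dimension, and column names are illustrative, and a real semantic model declares more properties:

```yaml
# models/marts/orders.yml (illustrative)
semantic_models:
  - name: orders
    model: ref('orders')
    defaults:
      agg_time_dimension: ordered_at
    measures:
      - name: median_order_value
        description: Median value of a single order.
        agg: median
        expr: order_total
```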
Suitable for both Multi-tenant and Single-tenant deployment.
 - Note: Single-tenant accounts should contact their account representative for necessary setup and enablement.
-- Have both your production and development environments running [dbt version 1.6 or higher](/docs/dbt-versions/upgrade-core-in-cloud).
+- Have both your production and development environments running [dbt version 1.6 or higher](/docs/dbt-versions/upgrade-dbt-version-in-cloud).
 - Use Snowflake, BigQuery, Databricks, or Redshift.
 - Create a successful run in the environment where you configure the Semantic Layer.
 - **Note:** Semantic Layer currently supports the Deployment environment for querying. (_development querying experience coming soon_)
diff --git a/website/src/components/lifeCycle/index.js b/website/src/components/lifeCycle/index.js
index d4e4eab88d8..d8abc142162 100644
--- a/website/src/components/lifeCycle/index.js
+++ b/website/src/components/lifeCycle/index.js
@@ -19,11 +19,10 @@ const fontColors = {
 };
 
 export default function Lifecycle(props) {
-  if (!props.status || (Array.isArray(props.status) && props.status.length === 0)) {
+  const statuses = props.status?.split(',')
+  if (!props.status || !statuses?.length) {
     return null;
   }
-  // Check if props.status is an array or a single value
-  const statuses = Array.isArray(props.status) ? props.status : [props.status];
 
   return (
     <>
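With the `Lifecycle` change above, `status` is parsed as a single comma-separated string rather than an array prop. A sketch of the usage in an MDX page; the status values are illustrative:

```jsx
{/* Before: statuses passed as an array prop */}
<Lifecycle status={['beta', 'enterprise']} />

{/* After: statuses passed as one comma-separated string */}
<Lifecycle status='beta,enterprise' />
```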
diff --git a/website/static/img/blog/2024-02-13-dbt-explorer/column-level-lineage.png b/website/static/img/blog/2024-02-13-dbt-explorer/column-level-lineage.png
new file mode 100644
index 00000000000..26191d677a1
Binary files /dev/null and b/website/static/img/blog/2024-02-13-dbt-explorer/column-level-lineage.png differ
diff --git a/website/static/img/blog/2024-02-13-dbt-explorer/embedded-metadata-model.png b/website/static/img/blog/2024-02-13-dbt-explorer/embedded-metadata-model.png
new file mode 100644
index 00000000000..8f7cd9c5f08
Binary files /dev/null and b/website/static/img/blog/2024-02-13-dbt-explorer/embedded-metadata-model.png differ
diff --git a/website/static/img/blog/2024-02-13-dbt-explorer/embedded-metadata-source.png b/website/static/img/blog/2024-02-13-dbt-explorer/embedded-metadata-source.png
new file mode 100644
index 00000000000..850a49b478e
Binary files /dev/null and b/website/static/img/blog/2024-02-13-dbt-explorer/embedded-metadata-source.png differ
diff --git a/website/static/img/blog/2024-02-13-dbt-explorer/full-lineage.png b/website/static/img/blog/2024-02-13-dbt-explorer/full-lineage.png
new file mode 100644
index 00000000000..270daf7b74c
Binary files /dev/null and b/website/static/img/blog/2024-02-13-dbt-explorer/full-lineage.png differ
diff --git a/website/static/img/blog/2024-02-13-dbt-explorer/model-execution.png b/website/static/img/blog/2024-02-13-dbt-explorer/model-execution.png
new file mode 100644
index 00000000000..f3cf0e54275
Binary files /dev/null and b/website/static/img/blog/2024-02-13-dbt-explorer/model-execution.png differ
diff --git a/website/static/img/blog/2024-02-13-dbt-explorer/recommendations.png b/website/static/img/blog/2024-02-13-dbt-explorer/recommendations.png
new file mode 100644
index 00000000000..b48594c2a7a
Binary files /dev/null and b/website/static/img/blog/2024-02-13-dbt-explorer/recommendations.png differ
diff --git a/website/static/img/docs/collaborate/dbt-explorer/example-cll.png b/website/static/img/docs/collaborate/dbt-explorer/example-cll.png
new file mode 100644
index 00000000000..82576c27fb3
Binary files /dev/null and b/website/static/img/docs/collaborate/dbt-explorer/example-cll.png differ
diff --git a/website/static/img/docs/collaborate/dbt-explorer/example-parsing-error-pill.png b/website/static/img/docs/collaborate/dbt-explorer/example-parsing-error-pill.png
new file mode 100644
index 00000000000..d27ecdb1a85
Binary files /dev/null and b/website/static/img/docs/collaborate/dbt-explorer/example-parsing-error-pill.png differ
diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/Environment-settings.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/Environment-settings.png
deleted file mode 100644
index 7484ba94461..00000000000
Binary files a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/Environment-settings.png and /dev/null differ
diff --git a/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/example-environment-settings.png b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/example-environment-settings.png
new file mode 100644
index 00000000000..30711071101
Binary files /dev/null and b/website/static/img/docs/dbt-cloud/cloud-configuring-dbt-cloud/choosing-dbt-version/example-environment-settings.png differ
diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/sl-and-gsheets.jpg b/website/static/img/docs/dbt-cloud/semantic-layer/sl-and-gsheets.jpg
new file mode 100644
index 00000000000..92cc79ec753
Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/sl-and-gsheets.jpg differ
diff --git a/website/vercel.json b/website/vercel.json
index 9da721dc112..fd58e3f5b82 100644
--- a/website/vercel.json
+++ b/website/vercel.json
@@ -2,9 +2,14 @@
   "cleanUrls": true,
   "trailingSlash": false,
   "redirects": [
+    {
+      "source": "/docs/dbt-versions/upgrade-core-in-cloud",
+      "destination": "/docs/dbt-versions/upgrade-dbt-version-in-cloud",
+      "permanent": true
+    },
     {
       "source": "/docs/cloud/about-cloud/regions-ip-addresses",
-      "destination": "/docs/cloud/about-cloud/accesss-regions-ip-addresses",
+      "destination": "/docs/cloud/about-cloud/access-regions-ip-addresses",
       "permanent": true
     },
     {
@@ -1674,7 +1679,7 @@
     },
     {
       "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version",
-      "destination": "/docs/dbt-versions/upgrade-core-in-cloud",
+      "destination": "/docs/dbt-versions/upgrade-dbt-version-in-cloud",
       "permanent": true
     },
     {
@@ -2379,7 +2384,7 @@
     },
     {
       "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions",
-      "destination": "/docs/dbt-versions/upgrade-core-in-cloud",
+      "destination": "/docs/dbt-versions/upgrade-dbt-version-in-cloud",
       "permanent": true
     },
     {
@@ -2854,7 +2859,7 @@
     },
     {
       "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version",
-      "destination": "/docs/dbt-versions/upgrade-core-in-cloud",
+      "destination": "/docs/dbt-versions/upgrade-dbt-version-in-cloud",
       "permanent": true
     },
     {