diff --git a/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml b/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml
index f138b9e4e06..037da98dc6f 100644
--- a/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml
+++ b/.github/ISSUE_TEMPLATE/contribute-to-developer-blog.yml
@@ -1,4 +1,4 @@
-name: Contribute to the dbt Developer Blog
+name: Propose a dbt Developer Blog idea
description: >
For proposing a new post on the dbt Developer Blog.
labels: ["content","developer blog"]
diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml
index 5ebef4f88ca..4de2203647f 100644
--- a/.github/workflows/label.yml
+++ b/.github/workflows/label.yml
@@ -2,7 +2,7 @@ name: Add/Remove Labels
on:
pull_request_target:
- types: [ opened, closed ]
+ types: [ opened ]
jobs:
add_new_contributor_label:
@@ -15,24 +15,32 @@ jobs:
- uses: actions/github-script@v6
with:
script: |
- const creator = context.payload.sender.login
+ const creator = context.payload.sender.login;
const opts = github.rest.issues.listForRepo.endpoint.merge({
...context.issue,
creator,
- state: 'all'
- })
- const issues = await github.paginate(opts)
+ state: 'all',
+ });
+
+ const issues = await github.paginate(opts);
+
+ let isAlreadyContributor = false;
+
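+          // A prior PR from this creator (other than the one just opened) means they're already a contributor.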
for (const issue of issues) {
if (issue.number === context.issue.number) {
- continue
+ continue;
}
- if (issue.pull_request) {
- return // creator is already a contributor
+ if (issue.pull_request && issue.user.login === creator) {
+ isAlreadyContributor = true;
+ break;
}
}
- await github.rest.issues.addLabels({
- issue_number: context.issue.number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- labels: ['new contributor']
- })
+
+ if (!isAlreadyContributor) {
+ await github.rest.issues.addLabels({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ labels: ['new contributor'],
+ });
+ }
diff --git a/netlify.toml b/netlify.toml
deleted file mode 100644
index 6ab92757410..00000000000
--- a/netlify.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[build]
- functions = "functions"
diff --git a/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md b/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md
index 944d6fdd3f9..cdfd4da5f5d 100644
--- a/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md
+++ b/website/blog/2020-07-01-how-to-create-near-real-time-models-with-just-dbt-sql.md
@@ -13,6 +13,13 @@ date: 2020-07-01
is_featured: false
---
+:::caution More up-to-date information available
+
+Since this blog post was first published, many data platforms have added support for [materialized views](/blog/announcing-materialized-views), which are a superior way to achieve the goals outlined here. We recommend them over the approach below.
+
+:::
+
Before I dive into how to create this, I have to say this. **You probably don’t need this**. I, along with my other Fishtown colleagues, have spent countless hours working with clients that ask for near-real-time streaming data. However, when we start digging into the project, it is often realized that the use case is not there. There are a variety of reasons why near real-time streaming is not a good fit. Two key ones are:
1. The source data isn’t updating frequently enough.
diff --git a/website/docs/docs/build/hooks-operations.md b/website/docs/docs/build/hooks-operations.md
index effbebb3c37..85378498a36 100644
--- a/website/docs/docs/build/hooks-operations.md
+++ b/website/docs/docs/build/hooks-operations.md
@@ -4,6 +4,8 @@ description: "Read this tutorial to learn how to use hooks and operations when b
id: "hooks-operations"
---
+import OnRunCommands from '/snippets/_onrunstart-onrunend-commands.md';
+
## Related documentation
* [pre-hook & post-hook](/reference/resource-configs/pre-hook-post-hook)
* [on-run-start & on-run-end](/reference/project-configs/on-run-start-on-run-end)
@@ -33,8 +35,8 @@ dbt provides hooks and operations so you can version control and execute these s
Hooks are snippets of SQL that are executed at different times:
* `pre-hook`: executed _before_ a model, seed or snapshot is built.
* `post-hook`: executed _after_ a model, seed or snapshot is built.
- * `on-run-start`: executed at the _start_ of `dbt run`, `dbt test`, `dbt seed` or `dbt snapshot`
- * `on-run-end`: executed at the _end_ of `dbt run`, `dbt test`, `dbt seed` or `dbt snapshot`
+ * `on-run-start`: executed at the _start_ of <OnRunCommands/>
+ * `on-run-end`: executed at the _end_ of <OnRunCommands/>
Hooks are a more-advanced capability that enable you to run custom SQL, and leverage database-specific actions, beyond what dbt makes available out-of-the-box with standard materializations and configurations.
diff --git a/website/docs/docs/build/sl-getting-started.md b/website/docs/docs/build/sl-getting-started.md
index f070bc27538..c0bf59ae0c2 100644
--- a/website/docs/docs/build/sl-getting-started.md
+++ b/website/docs/docs/build/sl-getting-started.md
@@ -5,7 +5,7 @@ description: "Learn how to create your first semantic model and metric."
sidebar_label: Get started with MetricFlow
tags: [Metrics, Semantic Layer]
meta:
- api_name: dbt Semantic Layer API
+ api_name: dbt Semantic Layer APIs
---
import InstallMetricFlow from '/snippets/_sl-install-metricflow.md';
@@ -34,7 +34,7 @@ However, to experience the power of the universal [dbt Semantic Layer](/docs/use
- Have an understanding of key concepts in [MetricFlow](/docs/build/about-metricflow), which powers the revamped dbt Semantic Layer.
- Have both your production and development environments running dbt version 1.6 or higher. Refer to [upgrade in dbt Cloud](/docs/dbt-versions/upgrade-core-in-cloud) for more info.
-- Use Snowflake, BigQuery, Databricks, Redshift, or Postgres (CLI only. dbt Cloud support coming soon).
+- Use Snowflake, BigQuery, Databricks, Redshift, or Postgres (Postgres available in the CLI only, dbt Cloud support coming soon).
- Create a successful run in the environment where you configure the Semantic Layer.
- **Note:** Semantic Layer currently supports the Deployment environment for querying. (_development querying experience coming soon_)
- Set up the [Semantic Layer API](/docs/dbt-cloud-apis/sl-api-overview) in the integrated tool to import metric definitions.
@@ -85,7 +85,7 @@ You can query your metrics in a JDBC-enabled tool or use existing first-class in
You must have a dbt Cloud Team or Enterprise [multi-tenant](/docs/cloud/about-cloud/regions-ip-addresses) deployment, hosted in North America. (Additional region support coming soon)
-- To learn how to use the JDBC API and what tools you can query it with, refer to the {frontMatter.meta.api_name}.
+- To learn how to use the JDBC or GraphQL API and what tools you can query it with, refer to the {frontMatter.meta.api_name}.
* To authenticate, you need to [generate a service token](/docs/dbt-cloud-apis/service-tokens) with Semantic Layer Only and Metadata Only permissions.
* Refer to the [SQL query syntax](/docs/dbt-cloud-apis/sl-jdbc#querying-the-api-for-metric-metadata) to query metrics using the API.
diff --git a/website/docs/docs/cloud/manage-access/set-up-sso-okta.md b/website/docs/docs/cloud/manage-access/set-up-sso-okta.md
index 41381b57eca..5ec70443d1f 100644
--- a/website/docs/docs/cloud/manage-access/set-up-sso-okta.md
+++ b/website/docs/docs/cloud/manage-access/set-up-sso-okta.md
@@ -16,8 +16,6 @@ dbt Cloud Enterprise supports single-sign on via Okta (using SAML). Currently su
* Just-in-time provisioning
This guide outlines the setup process for authenticating to dbt Cloud with Okta.
-If you have any questions during the setup process, please contact support
-(support@getdbt.com) for assistance.
## Configuration in Okta
@@ -63,7 +61,7 @@ Click **Next** to continue.
### Configure SAML Settings
-The SAML Settings page configures how Okta and dbt Cloud communicate. You will want to use an [appropriate Access URL](/docs/cloud/about-cloud/regions-ip-addresses) for your region and plan. If you aren't sure which values you should use, please contact support (support@getdbt.com).
+The SAML Settings page configures how Okta and dbt Cloud communicate. You will want to use an [appropriate Access URL](/docs/cloud/about-cloud/regions-ip-addresses) for your region and plan.
To complete this section, you will need a _login slug_. This slug controls the
URL where users on your account can log into your application via Okta. Login
diff --git a/website/docs/docs/cloud/manage-access/set-up-sso-saml-2.0.md b/website/docs/docs/cloud/manage-access/set-up-sso-saml-2.0.md
index be46e965fe3..db3efdbeb74 100644
--- a/website/docs/docs/cloud/manage-access/set-up-sso-saml-2.0.md
+++ b/website/docs/docs/cloud/manage-access/set-up-sso-saml-2.0.md
@@ -75,7 +75,6 @@ provider to provide group membership information in user attribute called
You may use a restricted group attribute statement to limit the groups set
to dbt Cloud for each authenticated user. For example, if all of your dbt Cloud groups start
with `DBT_CLOUD_...`, you may optionally apply a filter like `Starts With: DBT_CLOUD_`.
-Please contact support if you have any questions.
:::
### Collect integration secrets
diff --git a/website/docs/docs/cloud/secure/about-privatelink.md b/website/docs/docs/cloud/secure/about-privatelink.md
index 77ee8a6af7a..29003f65a21 100644
--- a/website/docs/docs/cloud/secure/about-privatelink.md
+++ b/website/docs/docs/cloud/secure/about-privatelink.md
@@ -15,6 +15,7 @@ dbt Labs has a worldwide network of regional VPCs. These VPCs are specifically u
dbt Cloud supports the following data platforms for use with the PrivateLink feature. Instructions for enabling PrivateLink for the various data platform providers are unique. The following guides will walk you through the necessary steps, including working with [dbt Support](https://docs.getdbt.com/community/resources/getting-help#dbt-cloud-support) to complete the connection in the dbt private network and setting up the endpoint in dbt Cloud.
-- [Redshift](/docs/cloud/secure/redshift-privatelink)
- [Snowflake](/docs/cloud/secure/snowflake-privatelink)
- [Databricks](/docs/cloud/secure/databricks-privatelink)
+- [Redshift](/docs/cloud/secure/redshift-privatelink)
+- [Postgres](/docs/cloud/secure/postgres-privatelink)
diff --git a/website/docs/docs/cloud/secure/ip-restrictions.md b/website/docs/docs/cloud/secure/ip-restrictions.md
index 49a7fe4f267..237de991c02 100644
--- a/website/docs/docs/cloud/secure/ip-restrictions.md
+++ b/website/docs/docs/cloud/secure/ip-restrictions.md
@@ -34,7 +34,7 @@ To add an IP to the allowlist, from the **IP Restrictions** page:
4. Select **Allow**
5. Add the ranges in the CIDR notation
- For example, 1.1.1.1/8
- - You can add multiple ranges followed by commas
+ - You cannot add multiple ranges in the same rule. Instead, create a rule per CIDR range.
6. Click **Save**
Note that simply adding the IP Ranges will not enforce IP restrictions. For more information, see the section “Enabling Restrictions.”
diff --git a/website/docs/docs/cloud/secure/postgres-privatelink.md b/website/docs/docs/cloud/secure/postgres-privatelink.md
new file mode 100644
index 00000000000..482aeb4040d
--- /dev/null
+++ b/website/docs/docs/cloud/secure/postgres-privatelink.md
@@ -0,0 +1,76 @@
+---
+title: "Configure AWS PrivateLink for Postgres"
+id: postgres-privatelink
+description: "Configuring PrivateLink for Postgres"
+sidebar_label: "PrivateLink for Postgres"
+---
+
+A Postgres database, hosted either in AWS or in a properly connected on-prem data center, can be accessed through a private network connection using AWS Interface-type PrivateLink. The type of Target Group connected to the Network Load Balancer (NLB) may vary based on the location and type of Postgres instance being connected, as explained in the following steps.
+
+## Configuring Postgres interface-type PrivateLink
+
+### 1. Provision AWS resources
+
+Creating an Interface VPC PrivateLink connection requires creating multiple AWS resources in the account containing, or connected to, the Postgres instance (a scripted sketch of these resources follows this list):
+
+- **Security Group (AWS hosted only)** — If you are connecting to an existing Postgres instance, this likely already exists; however, you may need to add or modify Security Group rules to accept traffic from the Network Load Balancer (NLB) created for this Endpoint Service.
+- **Target Group** — The Target Group will be attached to the NLB to tell it where to route requests. There are various target types available for NLB Target Groups, so choose the one appropriate for your Postgres setup.
+
+ - Target Type:
+
+ - _[Amazon RDS for PostgreSQL](https://aws.amazon.com/rds/postgresql/)_ - **IP**
+
+      - Find the IP address of your RDS instance using a command line tool such as `nslookup` or `dig +short` with your RDS DNS endpoint
+
+ - _Note_: With RDS Multi-AZ failover capabilities the IP address of your RDS instance can change, at which point your Target Group would need to be updated. See [this AWS blog post](https://aws.amazon.com/blogs/database/access-amazon-rds-across-vpcs-using-aws-privatelink-and-network-load-balancer/) for more details and a possible solution.
+
+ - _On-prem Postgres server_ - **IP**
+
+ - Use the IP address of the on-prem Postgres server linked to AWS through AWS Direct Connect or a Site-to-Site VPN connection
+
+ - _Postgres on EC2_ - **Instance/ASG** (or **IP**)
+
+ - If your Postgres instance is hosted on EC2 the _instance_ Target Group type (or ideally [using the instance type to connect to an auto-scaling group](https://docs.aws.amazon.com/autoscaling/ec2/userguide/attach-load-balancer-asg.html)) can be used to attach the instance without needing a static IP address
+
+ - The IP type can also be used, with the understanding that the IP of the EC2 instance can change if the instance is relaunched for any reason
+
+ - Target Group protocol: **TCP**
+
+- **Network Load Balancer (NLB)** — Requires creating a Listener that attaches to the newly created Target Group for port `5432`
+- **VPC Endpoint Service** — Attach to the newly created NLB.
+ - Acceptance required (optional) — Requires you to [accept our connection request](https://docs.aws.amazon.com/vpc/latest/privatelink/configure-endpoint-service.html#accept-reject-connection-requests) after dbt creates the endpoint.
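+
+For teams that provision these resources with code rather than the console, a rough boto3 sketch is below. The resource names, VPC/subnet IDs, and target IP are placeholders, and it assumes the **IP** target type described above; treat it as an outline, not a complete setup:
+
+```python
+import boto3
+
+elbv2 = boto3.client("elbv2")
+ec2 = boto3.client("ec2")
+
+# Target Group: TCP on port 5432 with the IP target type.
+tg_arn = elbv2.create_target_group(
+    Name="postgres-privatelink-tg",  # placeholder name
+    Protocol="TCP",
+    Port=5432,
+    VpcId="vpc-0123456789abcdef0",  # placeholder VPC
+    TargetType="ip",
+)["TargetGroups"][0]["TargetGroupArn"]
+
+# Register the Postgres server's IP as the target.
+elbv2.register_targets(TargetGroupArn=tg_arn, Targets=[{"Id": "10.0.0.10"}])
+
+# Network Load Balancer with a TCP:5432 listener forwarding to the Target Group.
+nlb_arn = elbv2.create_load_balancer(
+    Name="postgres-privatelink-nlb",
+    Type="network",
+    Scheme="internal",
+    Subnets=["subnet-0123456789abcdef0"],  # placeholder subnet(s)
+)["LoadBalancers"][0]["LoadBalancerArn"]
+
+elbv2.create_listener(
+    LoadBalancerArn=nlb_arn,
+    Protocol="TCP",
+    Port=5432,
+    DefaultActions=[{"Type": "forward", "TargetGroupArn": tg_arn}],
+)
+
+# VPC Endpoint Service attached to the NLB, with acceptance required.
+ec2.create_vpc_endpoint_service_configuration(
+    NetworkLoadBalancerArns=[nlb_arn],
+    AcceptanceRequired=True,
+)
+```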
+
+### 2. Grant dbt AWS account access to the VPC Endpoint Service
+
+On the provisioned VPC endpoint service, click the **Allow principals** tab. Click **Allow principals** to grant access. Enter the ARN of the following IAM role in the appropriate production AWS account and save your changes.
+
+ - Principal: `arn:aws:iam::346425330055:role/MTPL_Admin`
+
+
+
+### 3. Obtain VPC Endpoint Service Name
+
+Once the VPC Endpoint Service is provisioned, you can find the service name in the AWS console by navigating to **VPC** → **Endpoint Services** and selecting the appropriate endpoint service. You can copy the service name field value and include it in your communication to dbt Cloud support.
+
+
+
+### 4. Add the required information to the template below, and submit your request to [dbt Support](https://docs.getdbt.com/community/resources/getting-help#dbt-cloud-support):
+```
+Subject: New Multi-Tenant PrivateLink Request
+- Type: Postgres Interface-type
+- VPC Endpoint Service Name:
+- Postgres server AWS Region (e.g., us-east-1, eu-west-2):
+- dbt Cloud multi-tenant environment (US, EMEA, AU):
+```
+
+dbt Labs will work on your behalf to complete the PrivateLink setup. Please allow 1-2 business days for this process to complete. Support will contact you when the endpoint is available.
+
+## Create Connection in dbt Cloud
+
+Once dbt Cloud support completes the configuration, you can start creating new connections using PrivateLink.
+
+1. Navigate to **Settings** → **Create new project** → select **PostgreSQL**.
+2. You will see two radio buttons: **Public** and **Private**. Select **Private**.
+3. Select the private endpoint from the dropdown (this will automatically populate the hostname/account field).
+4. Configure the remaining data platform details.
+5. Test your connection and save it.
diff --git a/website/docs/docs/cloud/secure/redshift-privatelink.md b/website/docs/docs/cloud/secure/redshift-privatelink.md
index fc0ceeea334..3ed49e7bb34 100644
--- a/website/docs/docs/cloud/secure/redshift-privatelink.md
+++ b/website/docs/docs/cloud/secure/redshift-privatelink.md
@@ -79,7 +79,7 @@ Creating an Interface VPC PrivateLink connection requires creating multiple AWS
On the provisioned VPC endpoint service, click the **Allow principals** tab. Click **Allow principals** to grant access. Enter the ARN of the root user in the appropriate production AWS account and save your changes.
- - Principal: `arn:aws:iam::346425330055:root`
+ - Principal: `arn:aws:iam::346425330055:role/MTPL_Admin`
diff --git a/website/docs/docs/collaborate/git/managed-repository.md b/website/docs/docs/collaborate/git/managed-repository.md
index d7beb38c4f5..db8e9840ccd 100644
--- a/website/docs/docs/collaborate/git/managed-repository.md
+++ b/website/docs/docs/collaborate/git/managed-repository.md
@@ -15,6 +15,6 @@ To set up a project with a managed repository:
6. Click **Create**.
-dbt Cloud will host and manage this repository for you. If in the future you choose to host this repository yourself, you can contact support to have the contents of your repo transferred to you.
+dbt Cloud will host and manage this repository for you. If in the future you choose to host this repository elsewhere, you can export the information from dbt Cloud at any time.
** We do not recommend using a managed repository in a production environment. You will not be able to use git features like pull requests which are part of our recommended version control best practices.
diff --git a/website/docs/docs/collaborate/govern/project-dependencies.md b/website/docs/docs/collaborate/govern/project-dependencies.md
index 158c405e4a7..1dbc967e74e 100644
--- a/website/docs/docs/collaborate/govern/project-dependencies.md
+++ b/website/docs/docs/collaborate/govern/project-dependencies.md
@@ -7,6 +7,12 @@ description: "Reference public models across dbt projects"
:::caution Closed Beta - dbt Cloud Enterprise
"Project" dependencies and cross-project `ref` are features of dbt Cloud Enterprise, currently in Closed Beta. To access these features while they are in beta, please contact your account team at dbt Labs.
+
+**Prerequisites:** In order to add project dependencies and resolve cross-project `ref`, you must:
+- Have the feature enabled (speak to your account team)
+- Use dbt v1.6 for **both** the upstream ("producer") project and the downstream ("consumer") project
+- Have a deployment environment in the upstream ("producer") project [that is set to be your production environment](/docs/deploy/deploy-environments#set-as-production-environment-beta)
+- Have a successful run of the upstream ("producer") project
:::
For a long time, dbt has supported code reuse and extension by installing other projects as [packages](/docs/build/packages). When you install another project as a package, you are pulling in its full source code, and adding it to your own. This enables you to call macros and run models defined in that other project.
diff --git a/website/docs/docs/dbt-cloud-apis/sl-api-overview.md b/website/docs/docs/dbt-cloud-apis/sl-api-overview.md
index efe54cbd833..42416765904 100644
--- a/website/docs/docs/dbt-cloud-apis/sl-api-overview.md
+++ b/website/docs/docs/dbt-cloud-apis/sl-api-overview.md
@@ -1,7 +1,7 @@
---
-title: "Semantic Layer API"
+title: "Semantic Layer APIs"
id: sl-api-overview
-description: "Integrate and query using the Semantic Layer API."
+description: "Integrate and query metrics and dimensions in downstream tools using the Semantic Layer APIs"
tags: [Semantic Layer, API]
hide_table_of_contents: true
---
@@ -36,9 +36,8 @@ product="dbt Semantic Layer"
plan="dbt Cloud Team and Enterprise"
instance="hosted in North America"
/>
-
-
+
diff --git a/website/docs/docs/dbt-cloud-apis/sl-graphql.md b/website/docs/docs/dbt-cloud-apis/sl-graphql.md
--- a/website/docs/docs/dbt-cloud-apis/sl-graphql.md
+++ b/website/docs/docs/dbt-cloud-apis/sl-graphql.md
-Each GQL request also comes with a dbt Cloud environmentId. The API uses both the service token in the header and environmentId for authentication.
+Each GQL request also requires a dbt Cloud `environmentId`. The API uses both the service token in the header and `environmentId` for authentication.
+
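+As a minimal sketch (the endpoint URL, token, and environment ID below are placeholders, and the request shape mirrors the Python example later on this page):
+
+```python
+import requests
+
+# Placeholders: substitute your GraphQL endpoint, service token, and environment ID.
+url = "http://localhost:8000/graphql"
+headers = {"Authorization": "Bearer <service-token>"}  # service token with Semantic Layer permissions
+
+# The environmentId is passed inside the query itself; the token travels in the header.
+query = """
+{
+  metrics(environmentId: 123456) {
+    name
+  }
+}
+"""
+
+response = requests.post(url, json={"query": query}, headers=headers)
+print(response.json())
+```
+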
+### Metadata calls
-### Metric metadata calls
+**Fetch data platform dialect**
-Use the following example calls to provide you with an idea of the types of commands you can use:
+In some cases, it may be useful for your application to know which dialect or data platform the dbt Semantic Layer connection uses internally (for example, if you are building `where` filters from a user interface rather than from user-inputted SQL).
+
+The GraphQL API has an easy way to fetch this with the following query:
+
+```graphql
+{
+ environmentInfo(environmentId: BigInt!) {
+ dialect
+ }
+}
+```
**Fetch available metrics**
```graphql
-metrics(environmentId: Int!): [Metric!]!
+metrics(environmentId: BigInt!): [Metric!]!
```
**Fetch available dimensions for metrics**
```graphql
dimensions(
-environmentId: Int!
-metrics: [String!]!
+ environmentId: BigInt!
+ metrics: [MetricInput!]!
): [Dimension!]!
```
-**Fetch available time granularities given metrics**
+**Fetch available granularities given metrics**
+
+Note: This call for `queryableGranularities` returns only the queryable granularities for metric time, the primary time dimension across all selected metrics.
```graphql
queryableGranularities(
-environmentId: Int!
-metrics: [String!]!
+ environmentId: BigInt!
+ metrics: [MetricInput!]!
): [TimeGranularity!]!
```
-**Fetch available metrics given a set of a dimensions**
+You can also get queryable granularities for all other dimensions using the `dimensions` call:
+
+```graphql
+{
+ dimensions(environmentId: BigInt!, metrics:[{name:"order_total"}]) {
+ name
+ queryableGranularities # --> ["DAY", "WEEK", "MONTH", "QUARTER", "YEAR"]
+ }
+}
+```
+
+You can also access it from the metrics endpoint:
+
+```graphql
+{
+ metrics(environmentId: BigInt!) {
+ name
+ dimensions {
+ name
+ queryableGranularities
+ }
+ }
+}
+```
+
+**Fetch measures**
+
+```graphql
+{
+ measures(environmentId: BigInt!, metrics: [{name:"order_total"}]) {
+ name
+ aggTimeDimension
+ }
+}
+```
+
+`aggTimeDimension` tells you the name of the dimension that maps to `metric_time` for a given measure. You can also query `measures` from the `metrics` endpoint, which allows you to see what dimensions map to `metric_time` for a given metric:
+
+```graphql
+{
+ metrics(environmentId: BigInt!) {
+ measures {
+ name
+ aggTimeDimension
+ }
+ }
+}
+```
+
+**Fetch available metrics given a set of dimensions**
```graphql
metricsForDimensions(
-environmentId: Int!
-dimensions: [String!]!
+ environmentId: BigInt!
+ dimensions: [GroupByInput!]!
): [Metric!]!
```
-**Fetch dimension values for metrics and a given dimension**
+**Create Dimension Values query**
```graphql
-dimensionValues(
-environmentId: Int!
-metrics: [String!]!
-dimension: String!
-```
-### Metric value query parameters
+mutation createDimensionValuesQuery(
+ environmentId: BigInt!
+ metrics: [MetricInput!]
+ groupBy: [GroupByInput!]!
+): CreateDimensionValuesQueryResult!
+```
-The mutation is `createQuery`. The parameters are as follows:
+**Create Metric query**
```graphql
createQuery(
-environmentId: Int!
-metrics: [String!]!
-dimensions: [String!] = null
-limit: Int = null
-startTime: String = null
-endTime: String = null
-where: String = null
-order: [String!] = null
-): String
+ environmentId: BigInt!
+ metrics: [MetricInput!]!
+ groupBy: [GroupByInput!] = null
+ limit: Int = null
+ where: [WhereInput!] = null
+  orderBy: [OrderByInput!] = null
+): CreateQueryResult
+```
+
+```graphql
+MetricInput {
+ name: String!
+}
+
+GroupByInput {
+ name: String!
+ grain: TimeGranularity = null
+}
+
+WhereInput {
+ sql: String!
+}
+
+OrderByInput { # -- pass one and only one of metric or groupBy
+ metric: MetricInput = null
+ groupBy: GroupByInput = null
+ descending: Boolean! = false
+}
+```
+
+**Fetch query result**
+
+```graphql
+query(
+ environmentId: BigInt!
+ queryId: String!
+): QueryResult!
+```
+
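+Putting the pieces together, here is a hedged Python sketch of the create-then-fetch flow. The mutation and query shapes come from the definitions above; the endpoint, IDs, polling cadence, and the `FAILED` terminal status are assumptions:
+
+```python
+import time
+
+import requests
+
+URL = "http://localhost:8000/graphql"  # placeholder endpoint
+HEADERS = {"Authorization": "Bearer <service-token>"}
+
+# Kick off an asynchronous query and capture its queryId.
+create_mutation = """
+mutation {
+  createQuery(
+    environmentId: 123456
+    metrics: [{name: "order_total"}]
+    groupBy: [{name: "metric_time", grain: "month"}]
+  ) {
+    queryId
+  }
+}
+"""
+resp = requests.post(URL, json={"query": create_mutation}, headers=HEADERS)
+query_id = resp.json()["data"]["createQuery"]["queryId"]
+
+# Poll the query endpoint until the run reaches a terminal status.
+result_query = """
+{
+  query(environmentId: 123456, queryId: "%s") {
+    status
+    error
+    arrowResult
+  }
+}
+""" % query_id
+
+while True:
+    result = requests.post(URL, json={"query": result_query}, headers=HEADERS).json()
+    status = result["data"]["query"]["status"]
+    if status in ("SUCCESSFUL", "FAILED"):  # assumed terminal statuses
+        break
+    time.sleep(1)  # assumed cadence; adjust for your workload
+```
+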
+**Metric Types**
+
+```graphql
+Metric {
+ name: String!
+ description: String
+ type: MetricType!
+ typeParams: MetricTypeParams!
+ filter: WhereFilter
+ dimensions: [Dimension!]!
+ queryableGranularities: [TimeGranularity!]!
+}
+```
+
+```
+MetricType = [SIMPLE, RATIO, CUMULATIVE, DERIVED]
+```
+
+**Metric Type parameters**
+
+```graphql
+MetricTypeParams {
+ measure: MetricInputMeasure
+ inputMeasures: [MetricInputMeasure!]!
+ numerator: MetricInput
+ denominator: MetricInput
+ expr: String
+ window: MetricTimeWindow
+ grainToDate: TimeGranularity
+ metrics: [MetricInput!]
+}
```
+
+**Dimension Types**
+
+```graphql
+Dimension {
+ name: String!
+ description: String
+ type: DimensionType!
+ typeParams: DimensionTypeParams
+ isPartition: Boolean!
+ expr: String
+ queryableGranularities: [TimeGranularity!]!
+}
+```
+
+```
+DimensionType = [CATEGORICAL, TIME]
+```
+
+### Create Query examples
+
+The following section provides query examples for the GraphQL API, such as how to query metrics, dimensions, where filters, and more.
+
+**Query two metrics grouped by time**
+
+```graphql
+mutation {
+ createQuery(
+ environmentId: BigInt!
+ metrics: [{name: "food_order_amount"}]
+    groupBy: [{name: "metric_time"}, {name: "customer__customer_type"}]
+ ) {
+ queryId
+ }
+}
+```
+
+**Query with a time grain**
+
+```graphql
+mutation {
+ createQuery(
+ environmentId: BigInt!
+ metrics: [{name: "order_total"}]
+ groupBy: [{name: "metric_time", grain: "month"}]
+ ) {
+ queryId
+ }
+}
+```
+
+Note that when using granularity in the query, the output of a time dimension with a time grain applied always takes the form of the dimension name appended with a double underscore and the granularity level: `{time_dimension_name}__{DAY|WEEK|MONTH|QUARTER|YEAR}`. For example, querying `metric_time` at a `month` grain returns a column named `metric_time__month`. Even if no granularity is specified, a granularity is still appended, defaulting to the lowest available (usually daily for most data sources). We encourage specifying a granularity when using time dimensions so that there are no unexpected results in the output data.
+
+**Query two metrics with a categorical dimension**
+
+```graphql
+mutation {
+ createQuery(
+ environmentId: BigInt!
+ metrics: [{name: "food_order_amount"}, {name: "order_gross_profit"}]
+    groupBy: [{name: "metric_time", grain: "month"}, {name: "customer__customer_type"}]
+ ) {
+ queryId
+ }
+}
+```
+
+**Query with a where filter**
+
+The `where` filter takes a list argument (or a string for a single input). Depending on the object you are filtering, there are a couple of parameters:
+
+- `Dimension()` — Used for any categorical or time dimensions. If used for a time dimension, granularity is required. For example, `Dimension('metric_time').grain('week')` or `Dimension('customer__country')`.
+
+- `Entity()` — Used for entities like primary and foreign keys, such as `Entity('order_id')`.
+
+Note: If you prefer a more strongly typed `where` clause, you can optionally use `TimeDimension()` to separate out categorical dimensions from time ones. The `TimeDimension` input takes the time dimension name and also requires granularity. For example, `TimeDimension('metric_time', 'MONTH')`.
+
+```graphql
+mutation {
+ createQuery(
+ environmentId: BigInt!
+ metrics:[{name: "order_total"}]
+ groupBy:[{name: "customer__customer_type"}, {name: "metric_time", grain: "month"}]
+ where:[{sql: "{{ Dimension('customer__customer_type') }} = 'new'"}, {sql:"{{ Dimension('metric_time').grain('month') }} > '2022-10-01'"}]
+ ) {
+ queryId
+ }
+}
+```
+
+**Query with Order**
+
+```graphql
+mutation {
+ createQuery(
+ environmentId: BigInt!
+ metrics: [{name: "order_total"}]
+ groupBy: [{name: "metric_time", grain: "month"}]
+ orderBy: [{metric: {name: "order_total"}}, {groupBy: {name: "metric_time", grain: "month"}, descending:true}]
+ ) {
+ queryId
+ }
+}
+```
+
+
+**Query with Limit**
+
+```graphql
+mutation {
+ createQuery(
+ environmentId: BigInt!
+ metrics: [{name:"food_order_amount"}, {name: "order_gross_profit"}]
+    groupBy: [{name: "metric_time", grain: "month"}, {name: "customer__customer_type"}]
+ limit: 10
+ ) {
+ queryId
+ }
+}
+```
+
+**Query with Explain**
+
+This takes the same inputs as the `createQuery` mutation.
+
+```graphql
+mutation {
+ compileSql(
+ environmentId: BigInt!
+    metrics: [{name: "food_order_amount"}, {name: "order_gross_profit"}]
+    groupBy: [{name: "metric_time", grain: "month"}, {name: "customer__customer_type"}]
+ ) {
+ sql
+ }
+}
+```
+
+### Output format and pagination
+
+**Output format**
+
+By default, the output is in Arrow format. You can switch to JSON format using the following parameter; however, due to performance limitations, we recommend using the JSON parameter only for testing and validation. The JSON received is a base64 encoded string. To access it, decode it using a base64 decoder. The JSON is created from pandas, which means you can change it back to a dataframe using `pandas.read_json(json, orient="table")`. Or you can work with the data directly using `json["data"]`, and find the table schema using `json["schema"]["fields"]`. Alternatively, you can pass `encoded:false` to the `jsonResult` field to get a raw JSON string directly.
+
+
+```graphql
+{
+  query(environmentId: BigInt!, queryId: String!, pageNum: Int! = 1) {
+ sql
+ status
+ error
+ totalPages
+ arrowResult
+ jsonResult(orient: PandasJsonOrient! = TABLE, encoded: Boolean! = true)
+ }
+}
+```
+
+The results default to the `table` orientation, but you can change it to any [pandas](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html)-supported value.
+
+**Pagination**
+
+By default, we return 1024 rows per page. If your result set exceeds this, you need to increase the page number using the `pageNum` option.
+
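+For the JSON path, here is a short sketch of the decoding described above. The helper name is an assumption, and `result` stands in for the parsed GraphQL response of a query that requested `jsonResult`:
+
+```python
+import base64
+import json
+
+import pandas as pd
+
+def decode_json_result(result: dict) -> pd.DataFrame:
+    """Decode a base64-encoded jsonResult payload (the default, encoded: true)."""
+    encoded = result["data"]["query"]["jsonResult"]
+    decoded = base64.b64decode(encoded).decode("utf-8")
+    # "table" matches the default TABLE orientation.
+    return pd.read_json(decoded, orient="table")
+
+# To work with the raw structure instead:
+#   payload = json.loads(decoded)
+#   rows, fields = payload["data"], payload["schema"]["fields"]
+# For result sets over 1024 rows, re-issue the query with an incremented pageNum.
+```
+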
+### Run a Python query
+
+The `arrowResult` in the GraphQL query response is a byte dump, which isn't visually useful. You can convert this byte data into an Arrow table using any Arrow-supported language. Refer to the following Python example explaining how to query and decode the arrow result:
+
+
+```python
+import base64
+
+import pyarrow as pa
+import requests
+
+headers = {"Authorization":"Bearer "}
+query_result_request = """
+{
+ query(environmentId: 70, queryId: "12345678") {
+ sql
+ status
+ error
+ arrowResult
+ }
+}
+"""
+
+gql_response = requests.post(
+ "http://localhost:8000/graphql",
+ json={"query": query_result_request},
+ headers=headers,
+)
+
+"""
+gql_response.json() =>
+{
+ "data": {
+ "query": {
+ "sql": "SELECT\n ordered_at AS metric_time__day\n , SUM(order_total) AS order_total\nFROM semantic_layer.orders orders_src_1\nGROUP BY\n ordered_at",
+ "status": "SUCCESSFUL",
+ "error": null,
+ "arrowResult": "arrow-byte-data"
+ }
+ }
+}
+"""
+
+def to_arrow_table(byte_string: str) -> pa.Table:
+ """Get a raw base64 string and convert to an Arrow Table."""
+    with pa.ipc.open_stream(base64.b64decode(byte_string)) as reader:
+ return pa.Table.from_batches(reader, reader.schema)
+
+
+arrow_table = to_arrow_table(gql_response.json()["data"]["query"]["arrowResult"])
+
+# Perform whatever functionality is available, like convert to a pandas table.
+print(arrow_table.to_pandas())
+"""
+order_total ordered_at
+ 3 2023-08-07
+ 112 2023-08-08
+ 12 2023-08-09
+ 5123 2023-08-10
+"""
+```
diff --git a/website/docs/docs/dbt-cloud-apis/sl-jdbc.md b/website/docs/docs/dbt-cloud-apis/sl-jdbc.md
index c238dcad680..02d26229794 100644
--- a/website/docs/docs/dbt-cloud-apis/sl-jdbc.md
+++ b/website/docs/docs/dbt-cloud-apis/sl-jdbc.md
@@ -30,6 +30,8 @@ dbt Labs partners can use the JDBC API to build integrations in their tools with
If you are a dbt user or partner with access to dbt Cloud and the [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-sl), you can [setup](/docs/use-dbt-semantic-layer/setup-sl) and test this API with data from your own instance by configuring the Semantic Layer and obtaining the right JDBC connection parameters described in this document.
+You *may* be able to use our JDBC API with tools that do not have an official integration with the dbt Semantic Layer. If your tool allows you to write SQL and either supports a generic JDBC driver option (such as DataGrip) or supports Dremio and uses ArrowFlightSQL driver version 12.0.0 or higher, you can access the Semantic Layer API.
+
Refer to [Get started with the dbt Semantic Layer](/docs/use-dbt-semantic-layer/quickstart-sl) for more info.
## Authentication
diff --git a/website/docs/docs/dbt-versions/release-notes/04-Sept-2023/ci-updates-phase2-rn.md b/website/docs/docs/dbt-versions/release-notes/04-Sept-2023/ci-updates-phase2-rn.md
index fefa07e6d6c..fd2d163b748 100644
--- a/website/docs/docs/dbt-versions/release-notes/04-Sept-2023/ci-updates-phase2-rn.md
+++ b/website/docs/docs/dbt-versions/release-notes/04-Sept-2023/ci-updates-phase2-rn.md
@@ -1,19 +1,19 @@
---
title: "Update: Improvements to dbt Cloud continuous integration"
-description: "September 2023: dbt Cloud now has two types of jobs — deploy jobs and CI jobs — with streamlined setup and improved efficiency. "
+description: "September 2023: dbt Cloud now has two types of jobs -- deploy jobs and CI jobs -- with streamlined setup and improved efficiency. "
sidebar_label: "Update: Improvements to dbt jobs"
tags: [Sept-2023, CI]
date: 2023-09-11
sidebar_position: 10
---
-dbt Cloud now has two distinct job types: [deploy jobs](/docs/deploy/deploy-jobs) for building production data assets, and [CI jobs](/docs/deploy/ci-jobs) for checking code changes. These jobs perform fundamentally different tasks so dbt Labs improved the setup experience with better defaults for each.
+dbt Cloud now has two distinct job types: [deploy jobs](/docs/deploy/deploy-jobs) for building production data assets, and [continuous integration (CI) jobs](/docs/deploy/ci-jobs) for checking code changes. These jobs perform fundamentally different tasks so dbt Labs improved the setup experience with better defaults for each.
With two types of jobs, instead of one generic type, we can better guide you through the setup flow. Best practices are built into the default settings so you can go from curious to being set up in seconds.
-
+
-And, we now have more efficient state comparisons on CI checks: never waste a build or test on code that hasn’t been changed. We now diff between the Git PR code and what’s running in production more efficiently with the introduction of deferral to an environment versus a job. To learn more, refer to [Continuous integration in dbt Cloud](/docs/deploy/continuous-integration) and [Get started with continuous integration tests](/guides/orchestration/set-up-ci/overview).
+We also now have more efficient state comparisons on CI checks: never waste a build or test on code that hasn’t changed. With the introduction of deferral to an environment rather than a job, we diff between the Git pull request (PR) code and what’s running in production more efficiently. To learn more, refer to [Continuous integration in dbt Cloud](/docs/deploy/continuous-integration).
Below is a comparison table that describes how deploy jobs and CI jobs behave differently:
@@ -29,4 +29,14 @@ Below is a comparison table that describes how deploy jobs and CI jobs behave di
## What you need to update
-If you previously set up a job using the [Create Job](/dbt-cloud/api-v2#/operations/Create%20Job) API endpoint before September 11, 2023, you must re-create the job as described in [Trigger a CI job with the API](/docs/deploy/ci-jobs#trigger-a-ci-job-with-the-api). This is because you must set the `job_type` to be `ci`.
\ No newline at end of file
+- If you want to set up a CI environment for your jobs, dbt Labs recommends that you create your CI job in a dedicated [deployment environment](/docs/deploy/deploy-environments#create-a-deployment-environment) that's connected to a staging database. To learn more about these environment best practices, refer to the guide [Get started with continuous integration tests](/guides/orchestration/set-up-ci/overview).
+
+- If you had set up a CI job before October 2, 2023, the job might've been misclassified as a deploy job with this update. Below describes how to fix the job type:
+
+ If you used the [Create Job](/dbt-cloud/api-v2#/operations/Create%20Job) API endpoint but didn't set `"triggers":triggers.git_provider_webhook`, the job was misclassified as a deploy job and you must re-create it as described in [Trigger a CI job with the API](/docs/deploy/ci-jobs#trigger-a-ci-job-with-the-api).
+
+ If you used the dbt Cloud UI but didn't enable the **Run on Pull Requests** option that was in the **Continuous Integration** (CI) tab, the job was misclassified as a deploy job and you must re-create it as described in [Set up CI jobs](/docs/deploy/ci-jobs#set-up-ci-jobs).
+
+   To check the job type, review your CI jobs in dbt Cloud's [Run History](/docs/deploy/run-visibility#run-history) and look for the **CI Job** tag below the job name. If the tag is missing, the job was misclassified and you need to re-create it.
+
+
diff --git a/website/docs/docs/deploy/continuous-integration.md b/website/docs/docs/deploy/continuous-integration.md
index 23ed37afaa0..cc856f97f22 100644
--- a/website/docs/docs/deploy/continuous-integration.md
+++ b/website/docs/docs/deploy/continuous-integration.md
@@ -50,8 +50,3 @@ When you push a new commit to a PR, dbt Cloud enqueues a new CI run for the late
-### Run slot treatment
-
-Your CI runs don't consume run slots so a CI check will never block a production run.
-
-
diff --git a/website/docs/docs/use-dbt-semantic-layer/avail-sl-integrations.md b/website/docs/docs/use-dbt-semantic-layer/avail-sl-integrations.md
index 27cb83977de..b084dedc305 100644
--- a/website/docs/docs/use-dbt-semantic-layer/avail-sl-integrations.md
+++ b/website/docs/docs/use-dbt-semantic-layer/avail-sl-integrations.md
@@ -5,7 +5,7 @@ description: "Discover the diverse range of partners that seamlessly integrate w
tags: [Semantic Layer]
sidebar_label: "Available integrations"
meta:
- api_name: dbt Semantic Layer API
+ api_name: dbt Semantic Layer APIs
---
@@ -17,7 +17,8 @@ import NewSLChanges from '/snippets/_new-sl-changes.md';
There are a number of data applications that seamlessly integrate with the dbt Semantic Layer, powered by MetricFlow, from business intelligence tools to notebooks, spreadsheets, data catalogs, and more. These integrations allow you to query and unlock valuable insights from your data ecosystem.
-Query dbt metrics with external integrations using the sophisticated {frontMatter.meta.api_name}. The API enables you to query metrics, avoid duplicative coding, optimize your development workflow, ensure data governance for company metrics, and guarantee consistency for data consumers.
+Use the [dbt Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview) to simplify metric queries, optimize your development workflow, and reduce duplicative coding. This approach also ensures data governance and consistency for data consumers.
+
@@ -25,10 +26,14 @@ import AvailIntegrations from '/snippets/_sl-partner-links.md';
+### Custom integration
+
+You can create custom integrations using different languages and tools. We support connecting with JDBC, ADBC, and a GraphQL API. For more info, check out [our examples on GitHub](https://github.com/dbt-labs/example-semantic-layer-clients/).
+
## Related docs
-- {frontMatter.meta.api_name} to learn how to integrate with the JDBC to query your metrics in downstream tools.
-- [dbt Semantic Layer API query syntax](/docs/dbt-cloud-apis/sl-jdbc#querying-the-api-for-metric-metadata)
+- {frontMatter.meta.api_name} to learn how to integrate with JDBC and GraphQL to query your metrics in downstream tools.
+- [dbt Semantic Layer APIs query syntax](/docs/dbt-cloud-apis/sl-jdbc#querying-the-api-for-metric-metadata)
diff --git a/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md b/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md
index 8d073297f48..76753b41ffa 100644
--- a/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md
+++ b/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md
@@ -59,7 +59,7 @@ instance="hosted in North America"
icon="dbt-bit"/>
diff --git a/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md b/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md
index 542ab4896bb..3bbc11cea3f 100644
--- a/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md
+++ b/website/docs/docs/use-dbt-semantic-layer/quickstart-sl.md
@@ -5,7 +5,7 @@ description: "Use this guide to build and define metrics, set up the dbt Semanti
sidebar_label: "Get started with the dbt Semantic Layer"
tags: [Semantic Layer]
meta:
- api_name: dbt Semantic Layer API
+ api_name: dbt Semantic Layer APIs
---
@@ -92,10 +92,10 @@ You can query your metrics in a JDBC-enabled tool or use existing first-class in
You must have a dbt Cloud Team or Enterprise [multi-tenant](/docs/cloud/about-cloud/regions-ip-addresses) deployment, hosted in North America (Additional region support coming soon).
-- To learn how to use the JDBC API and what tools you can query it with, refer to the {frontMatter.meta.api_name}.
+- To learn how to use the JDBC or GraphQL API and what tools you can query it with, refer to the {frontMatter.meta.api_name}.
* To authenticate, you need to [generate a service token](/docs/dbt-cloud-apis/service-tokens) with Semantic Layer Only and Metadata Only permissions.
- * Refer to the [SQL query syntax](/docs/dbt-cloud-apis/sl-jdbc#querying-the-api-for-metric-metadata) to query metrics using the API.
+ * Refer to the [SQL query syntax](/docs/dbt-cloud-apis/sl-jdbc#querying-the-api-for-metric-metadata) to query metrics using the APIs.
- To learn more about the sophisticated integrations that connect to the dbt Semantic Layer, refer to [Available integrations](/docs/use-dbt-semantic-layer/avail-sl-integrations) for more info.
diff --git a/website/docs/docs/verified-adapters.md b/website/docs/docs/verified-adapters.md
index 8ec0c700ea4..a2d28a612d6 100644
--- a/website/docs/docs/verified-adapters.md
+++ b/website/docs/docs/verified-adapters.md
@@ -1,10 +1,11 @@
---
title: "Verified adapters"
id: "verified-adapters"
+hide_table_of_contents: true
---
-The dbt Labs has a rigorous verified adapter program which provides reassurance to users about which adapters can be trusted to use in production, has been tested, and is actively maintained and updated. The process covers aspects of development, documentation, user experience, and maintenance.
+dbt Labs has a rigorous verified adapter program that provides reassurance about which adapters can be trusted in production, have been tested, and are actively maintained and updated. The process covers development, documentation, user experience, and maintenance.
These adapters then earn a "Verified" status so that users can have a certain level of trust and expectation when they use them. The adapters also have maintainers and we recommend using the adapter's verification status to determine its quality and health.
@@ -12,7 +13,7 @@ The verification process serves as the on-ramp to integration with dbt Cloud. As
To learn more, see [Verifying a new adapter](/guides/dbt-ecosystem/adapter-development/7-verifying-a-new-adapter).
-Here's the list of the verified data platforms that can connect to dbt and its latest version.
+Here are the verified data platforms that connect to dbt, along with each adapter's latest version.
import AdaptersVerified from '/snippets/_adapters-verified.md';
diff --git a/website/docs/guides/dbt-ecosystem/databricks-guides/productionizing-your-dbt-databricks-project.md b/website/docs/guides/dbt-ecosystem/databricks-guides/productionizing-your-dbt-databricks-project.md
index 2f052ae47d2..a3b4be5a051 100644
--- a/website/docs/guides/dbt-ecosystem/databricks-guides/productionizing-your-dbt-databricks-project.md
+++ b/website/docs/guides/dbt-ecosystem/databricks-guides/productionizing-your-dbt-databricks-project.md
@@ -121,7 +121,6 @@ The five key steps for troubleshooting dbt Cloud issues are:
2. Inspect the problematic file and look for an immediate fix.
3. Isolate the problem by running one model at a time in the IDE or undoing the code that caused the issue.
4. Check for problems in compiled files and logs.
-5. Seek help from the [dbt Cloud support team](/docs/dbt-support) if needed.
Consult the [Debugging errors documentation](/guides/best-practices/debugging-errors) for a comprehensive list of error types and diagnostic methods.
diff --git a/website/docs/guides/dbt-ecosystem/sl-partner-integration-guide.md b/website/docs/guides/dbt-ecosystem/sl-partner-integration-guide.md
index 39e93987b20..68037bfd0cd 100644
--- a/website/docs/guides/dbt-ecosystem/sl-partner-integration-guide.md
+++ b/website/docs/guides/dbt-ecosystem/sl-partner-integration-guide.md
@@ -114,7 +114,7 @@ For better analysis, it's best to have the context of the metrics close to where
These are recommendations on how to evolve a Semantic Layer integration and not a strict runbook.
**Stage 1 - The basic**
-* Supporting and using the new [JDBC](/docs/dbt-cloud-apis/sl-jdbc) is the first step. Refer to the [dbt Semantic Layer API](/docs/dbt-cloud-apis/sl-api-overview) for more technical details.
+* Supporting and using [JDBC](/docs/dbt-cloud-apis/sl-jdbc) or [GraphQL](/docs/dbt-cloud-apis/sl-graphql) is the first step. Refer to the [dbt Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview) for more technical details.
**Stage 2 - More discoverability and basic querying**
* Support listing metrics defined in the project
diff --git a/website/docs/guides/migration/sl-migration.md b/website/docs/guides/migration/sl-migration.md
index baa7ae4a567..c9def4537a3 100644
--- a/website/docs/guides/migration/sl-migration.md
+++ b/website/docs/guides/migration/sl-migration.md
@@ -77,10 +77,7 @@ Now that your Semantic Layer is set up, you will need to update any downstream i
### Migration guide for Hex
-:::important Hex integration coming soon
-
-Hex’s Semantic Layer integration will be available for use in the coming weeks. This section will have updated instructions once the integration is available.
-:::
+To learn more about integrating with Hex, check out their [documentation](https://learn.hex.tech/docs/connect-to-data/data-connections/dbt-integration#dbt-semantic-layer-integration). Additionally, refer to [dbt Semantic Layer cells](https://learn.hex.tech/docs/logic-cell-types/transform-cells/dbt-metrics-cells) to set up SQL cells in Hex.
1. Set up a new connection for the Semantic Layer for your account. Something to note is that your old connection will still work. The following Loom video guides you in setting up your Semantic Layer with Hex:
diff --git a/website/docs/guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud.md b/website/docs/guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud.md
index a377554c317..d453106eead 100644
--- a/website/docs/guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud.md
+++ b/website/docs/guides/orchestration/airflow-and-dbt-cloud/1-airflow-and-dbt-cloud.md
@@ -15,17 +15,17 @@ In some cases, [Airflow](https://airflow.apache.org/) may be the preferred orche
### Airflow + dbt Core
-There are so many great examples from Gitlab through their open source data engineering work. Example: [here](https://gitlab.com/gitlab-data/analytics/-/blob/master/dags/transformation/dbt_snowplow_backfill.py). This is especially appropriate if you are well-versed in Kubernetes, CI/CD, and docker task management when building your airflow pipelines. If this is you and your team, you’re in good hands reading through more details: [here](https://about.gitlab.com/handbook/business-technology/data-team/platform/infrastructure/#airflow) and [here](https://about.gitlab.com/handbook/business-technology/data-team/platform/dbt-guide/)
+There are [so many great examples](https://gitlab.com/gitlab-data/analytics/-/blob/master/dags/transformation/dbt_snowplow_backfill.py) from GitLab through their open source data engineering work. This approach is especially appropriate if you are well-versed in Kubernetes, CI/CD, and Docker task management when building your Airflow pipelines. If this is you and your team, you’re in good hands reading through more details [here](https://about.gitlab.com/handbook/business-technology/data-team/platform/infrastructure/#airflow) and [here](https://about.gitlab.com/handbook/business-technology/data-team/platform/dbt-guide/).
### Airflow + dbt Cloud API w/Custom Scripts
-This has served as a bridge until the fabled Astronomer + dbt Labs-built dbt Cloud provider became generally available: [here](https://registry.astronomer.io/providers/dbt-cloud?type=Sensors&utm_campaign=Monthly%20Product%20Updates&utm_medium=email&_hsmi=208603877&utm_content=208603877&utm_source=hs_email)
+This has served as a bridge until the fabled Astronomer + dbt Labs-built dbt Cloud provider became generally available [here](https://registry.astronomer.io/providers/dbt-cloud?type=Sensors&utm_campaign=Monthly%20Product%20Updates&utm_medium=email&_hsmi=208603877&utm_content=208603877&utm_source=hs_email).
There are many different permutations of this over time:
-- [Custom Python Scripts](https://github.com/sungchun12/airflow-dbt-cloud/blob/main/archive/dbt_cloud_example.py): This is an airflow DAG based on custom python API utilities [here](https://github.com/sungchun12/airflow-dbt-cloud/blob/main/archive/dbt_cloud_utils.py)
+- [Custom Python Scripts](https://github.com/sungchun12/airflow-dbt-cloud/blob/main/archive/dbt_cloud_example.py): This is an Airflow DAG based on [custom Python API utilities](https://github.com/sungchun12/airflow-dbt-cloud/blob/main/archive/dbt_cloud_utils.py)
- [Make API requests directly through the BashOperator based on the docs](https://docs.getdbt.com/dbt-cloud/api-v2-legacy#operation/triggerRun): You can make cURL requests to invoke dbt Cloud to do what you want
-- [Other ways to run dbt in airflow](/docs/deploy/deployments#airflow): Official dbt Docs on how teams are running dbt in airflow
+- For more options, check out the [official dbt Docs](/docs/deploy/deployments#airflow) on the various ways teams are running dbt in Airflow
## This guide's process
diff --git a/website/docs/reference/global-configs/logs.md b/website/docs/reference/global-configs/logs.md
index f5f1b3f814b..8c819193fc6 100644
--- a/website/docs/reference/global-configs/logs.md
+++ b/website/docs/reference/global-configs/logs.md
@@ -14,6 +14,9 @@ The `LOG_FORMAT` config specifies how dbt's logs should be formatted. If the val
dbt --log-format json run
{"code": "A001", "data": {"v": "=1.0.0"}, "invocation_id": "1193e449-4b7a-4eb1-8e8e-047a8b3b7973", "level": "info", "log_version": 1, "msg": "Running with dbt=1.0.0", "node_info": {}, "pid": 35098, "thread_name": "MainThread", "ts": "2021-12-03T10:46:59.928217Z", "type": "log_line"}
```
+
+
+
To set the `LOG_FORMAT_FILE` type output for the file without impacting the console log format, use the `log-format-file` flag.
@@ -37,8 +40,6 @@ See [structured logging](/reference/events-logging#structured-logging) for more
:::
-
-
### Log Level
@@ -124,7 +125,16 @@ dbt --quiet run
### Color
-You can set the color preferences for the file logs only using the `--use-colors-file / --no-use-colors-file` flags.
+You can set the color preferences for file logs only, either within `profiles.yml` or by using the `--use-colors-file / --no-use-colors-file` flags.
+
+
+
+```yaml
+config:
+ use_colors_file: False
+```
+
+
```text
dbt --use-colors-file run
diff --git a/website/docs/reference/global-configs/print-output.md b/website/docs/reference/global-configs/print-output.md
index 83280677229..112b92b546f 100644
--- a/website/docs/reference/global-configs/print-output.md
+++ b/website/docs/reference/global-configs/print-output.md
@@ -74,13 +74,24 @@ config:
use_colors: False
```
+
+
```text
dbt --use-colors run
dbt --no-use-colors run
```
-You can set the color preferences for the file logs only using the `--use-colors-file / --no-use-colors-file` flags.
+You can set the color preferences for file logs only, either within `profiles.yml` or by using the `--use-colors-file / --no-use-colors-file` flags.
+
+
+
+```yaml
+config:
+ use_colors_file: False
+```
+
+
```text
dbt --use-colors-file run
@@ -88,5 +99,3 @@ dbt --no-use-colors-file run
```
-
-
\ No newline at end of file
diff --git a/website/docs/reference/node-selection/methods.md b/website/docs/reference/node-selection/methods.md
index e318a0b9f4a..2647f3416a3 100644
--- a/website/docs/reference/node-selection/methods.md
+++ b/website/docs/reference/node-selection/methods.md
@@ -47,13 +47,19 @@ The `source` method is used to select models that select from a specified [sourc
```
### The "resource_type" method
-Use the `resource_type` method to select nodes of a particular type (`model`, `source`, `exposure`, etc). This is similar to the `--resource-type` flag used by the [`dbt ls` command](/reference/commands/list).
+Use the `resource_type` method to select nodes of a particular type (`model`, `test`, `exposure`, and so on). This is similar to the `--resource-type` flag used by the [`dbt ls` command](/reference/commands/list).
```bash
$ dbt build --select resource_type:exposure # build all resources upstream of exposures
$ dbt list --select resource_type:test # list all tests in your project
```
+Note: This method doesn't work for sources, so use the [`--resource-type`](/reference/commands/list) option of the list command instead:
+
+ ```bash
+ $ dbt list --resource-type source
+ ```
+
### The "path" method
The `path` method is used to select models/sources defined at or under a specific path.
Model definitions are in SQL/Python files (not YAML), and source definitions are in YAML files.
diff --git a/website/docs/reference/project-configs/on-run-start-on-run-end.md b/website/docs/reference/project-configs/on-run-start-on-run-end.md
index 1ed8c570dd0..e1a3d7b761a 100644
--- a/website/docs/reference/project-configs/on-run-start-on-run-end.md
+++ b/website/docs/reference/project-configs/on-run-start-on-run-end.md
@@ -4,6 +4,8 @@ description: "Read this guide to understand the on-run-start and on-run-end conf
datatype: sql-statement | [sql-statement]
---
+import OnRunCommands from '/snippets/_onrunstart-onrunend-commands.md';
+
```yml
@@ -15,14 +17,8 @@ on-run-end: sql-statement | [sql-statement]
## Definition
-A SQL statement (or list of SQL statements) to be run at the start, or end, of the following commands:
-- `dbt run`
-- `dbt test`
-- `dbt seed`
-- `dbt snapshot`
-- `dbt build`
-- `dbt compile`
-- `dbt docs generate`
+
+A SQL statement (or list of SQL statements) to be run at the start or end of the following commands:
+
+<OnRunCommands/>
`on-run-start` and `on-run-end` hooks can also call macros that return SQL statements
diff --git a/website/docs/reference/resource-configs/docs.md b/website/docs/reference/resource-configs/docs.md
index f6b633193ea..0ccd21d7504 100644
--- a/website/docs/reference/resource-configs/docs.md
+++ b/website/docs/reference/resource-configs/docs.md
@@ -28,6 +28,7 @@ models:
- name: model_name
docs:
show: true | false
+ node_color: "black"
```
@@ -113,13 +114,13 @@ macros:
```
-
+Also refer to [macro properties](/reference/macro-properties).
## Definition
-The docs field can be used to provide documentation-specific configuration to models. The only currently supported docs attribute is shown, which controls whether or not models are shown in the auto-generated documentation website.
+The docs field can be used to provide documentation-specific configuration to models. It supports the docs attribute `show`, which controls whether or not models are shown in the auto-generated documentation website. It also supports `node_color` for some node types.
**Note:** hidden models will still appear in the dbt DAG visualization but will be identified as "hidden.”
@@ -167,7 +168,7 @@ models:
## Custom node colors
-The `docs` attribute now supports `node_color` to customize the node color in the DAG within dbt docs. You can define node colors in the files below and apply overrides where needed.
+The `docs` attribute now supports `node_color` to customize the display color of some node types in the DAG within dbt docs. You can define node colors in the files below and apply overrides where needed.
`node_color` hiearchy:
@@ -176,7 +177,7 @@ The `docs` attribute now supports `node_color` to customize the node color in th
## Examples
-Add custom node colors to models within subdirectories based on hex codes or a plain color name.
+Add custom `node_color` values to models that support it, within subdirectories, using hex codes or plain color names.
![Example](../../../../website/static/img/node_color_example.png)
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js
index d4115a700cc..0cc6299ed39 100644
--- a/website/docusaurus.config.js
+++ b/website/docusaurus.config.js
@@ -47,9 +47,6 @@ var siteSettings = {
onBrokenLinks: "throw",
onBrokenMarkdownLinks: "throw",
trailingSlash: false,
- customFields: {
- isVercel: process.env.REACT_APP_VERCEL
- },
themeConfig: {
docs:{
sidebar: {
diff --git a/website/functions/image-cache-wrapper.js b/website/functions/image-cache-wrapper.js
index 84f85c12a17..aad2ffff200 100644
--- a/website/functions/image-cache-wrapper.js
+++ b/website/functions/image-cache-wrapper.js
@@ -1,16 +1,10 @@
// This function is used to break the cache on images
// preventing stale or broken images from being served
-import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
-
const CACHE_VERSION = '2'
export default function imageCacheWrapper(src) {
- const { siteConfig: {customFields} } = useDocusaurusContext();
-
- const cacheParam = customFields?.isVercel === '1'
- ? `?v=${CACHE_VERSION}`
- : ``
+ const cacheParam = `?v=${CACHE_VERSION}`
return (
src + cacheParam
diff --git a/website/sidebars.js b/website/sidebars.js
index af9482a8ddf..8b162f67af3 100644
--- a/website/sidebars.js
+++ b/website/sidebars.js
@@ -134,8 +134,9 @@ const sidebarSettings = {
items: [
"docs/cloud/secure/about-privatelink",
"docs/cloud/secure/snowflake-privatelink",
- "docs/cloud/secure/redshift-privatelink",
"docs/cloud/secure/databricks-privatelink",
+ "docs/cloud/secure/redshift-privatelink",
+ "docs/cloud/secure/postgres-privatelink",
"docs/cloud/secure/ip-restrictions",
],
}, // PrivateLink
@@ -516,6 +517,7 @@ const sidebarSettings = {
link: { type: "doc", id: "docs/dbt-cloud-apis/sl-api-overview" },
items: [
"docs/dbt-cloud-apis/sl-jdbc",
+ "docs/dbt-cloud-apis/sl-graphql",
"docs/dbt-cloud-apis/sl-manifest",
],
},
diff --git a/website/snippets/_new-sl-changes.md b/website/snippets/_new-sl-changes.md
index fa7c7abf743..6eca327001a 100644
--- a/website/snippets/_new-sl-changes.md
+++ b/website/snippets/_new-sl-changes.md
@@ -3,6 +3,6 @@
The dbt Semantic Layer has been re-released with [significant improvements](https://www.getdbt.com/blog/dbt-semantic-layer-whats-next/), making it more efficient to define and query metrics.
-The new version is available in [public beta](/docs/dbt-versions/release-notes/Aug-2023/sl-revamp-beta#public-beta) and introduces [MetricFlow](/docs/build/about-metricflow), an essential component. It also includes new semantic elements, better governance, improved efficiency, easier data access, and new Semantic Layer API.
+The new version is available in [public beta](/docs/dbt-versions/release-notes/Aug-2023/sl-revamp-beta#public-beta) and introduces [MetricFlow](/docs/build/about-metricflow), an essential component. It also includes new semantic elements, better governance, improved efficiency, easier data access, and new dbt Semantic Layer APIs.
:::
diff --git a/website/snippets/_new-sl-setup.md b/website/snippets/_new-sl-setup.md
index 9f1fcef0fb6..b802db9c5ae 100644
--- a/website/snippets/_new-sl-setup.md
+++ b/website/snippets/_new-sl-setup.md
@@ -25,7 +25,7 @@ If you're using the legacy Semantic Layer, we **highly** recommend you [upgrade
5. Select the deployment environment you want for the Semantic Layer and click **Save**.
-6. After saving it, you'll be provided with the connection information that allows you to connect to downstream tools. If your tool supports JDBC, save the JDBC URL or individual components (like environment id and host).
+6. After saving it, you'll be provided with the connection information that allows you to connect to downstream tools. If your tool supports JDBC, save the JDBC URL or individual components (like environment ID and host). If it uses the GraphQL API, save the GraphQL API host information instead.
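+
+For reference, a dbt Semantic Layer JDBC URL looks roughly like the following (the host and parameter values are placeholders that vary by account and region):
+
+```
+jdbc:arrow-flight-sql://semantic-layer.cloud.getdbt.com:443?environmentId=<environment-id>&token=<service-token>
+```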
diff --git a/website/snippets/_onrunstart-onrunend-commands.md b/website/snippets/_onrunstart-onrunend-commands.md
new file mode 100644
index 00000000000..68d693ce426
--- /dev/null
+++ b/website/snippets/_onrunstart-onrunend-commands.md
@@ -0,0 +1 @@
+`dbt build`, `dbt compile`, `dbt docs generate`, `dbt run`, `dbt seed`, `dbt snapshot`, or `dbt test`.
diff --git a/website/snippets/_sl-partner-links.md b/website/snippets/_sl-partner-links.md
index 3e4173a6ae3..e9cc6af3564 100644
--- a/website/snippets/_sl-partner-links.md
+++ b/website/snippets/_sl-partner-links.md
@@ -1,10 +1,10 @@
-
+
The dbt Semantic Layer integrations are capable of querying dbt metrics, importing definitions, surfacing the underlying data in partner tools, and more. The following tools integrate with the dbt Semantic Layer:
-1. **Mode** — Refer to the [Mode docs](https://mode.com/help/articles/supported-databases/#dbt-semantic-layer) for info on how to integrate with Mode.
-1. **Hex** — Hex’s Semantic Layer integration will be available in the coming weeks. Until then, refer to [this Loom video](https://www.loom.com/share/752e85aabfbf4fa585008a5598f3517a) for more info.
-1. **Google Sheets** — Google Sheets integration coming soon.
-1. **Tools that allows you to write SQL** — They must meet one of the two criteria:
+1. **Mode** — To learn more about integrating with Mode, check out their [documentation](https://mode.com/help/articles/supported-databases/#dbt-semantic-layer).
+2. **Hex** — To learn more about integrating with Hex, check out their [documentation](https://learn.hex.tech/docs/connect-to-data/data-connections/dbt-integration#dbt-semantic-layer-integration). Additionally, refer to [dbt Semantic Layer cells](https://learn.hex.tech/docs/logic-cell-types/transform-cells/dbt-metrics-cells) to set up SQL cells in Hex.
+3. **Google Sheets** — Google Sheets integration coming soon.
+4. **Tools that allow you to write SQL** — They must meet one of these two criteria:
* Supports a generic JDBC driver option (such as DataGrip) or
* Supports Dremio and uses ArrowFlightSQL driver version 12.0.0 or higher.
diff --git a/website/snippets/_sl-plan-info.md b/website/snippets/_sl-plan-info.md
index 20ec4b5dd44..5fba18de6bb 100644
--- a/website/snippets/_sl-plan-info.md
+++ b/website/snippets/_sl-plan-info.md
@@ -1 +1,2 @@
-To define and query metrics with the {props.product}, you must be on a {props.plan} multi-tenant plan, {props.instance} (Additional region support coming soon). The re-released dbt Semantic Layer is available on dbt v1.6 or higher. dbt Core users can use the MetricFlow CLI to define metrics in their local project, but won't be able dynamically query them with integrated tools.
+To define and query metrics with the {props.product}, you must be on a {props.plan} multi-tenant plan, {props.instance} (Additional region support coming soon). The re-released dbt Semantic Layer is available on dbt v1.6 or higher. dbt Core users can use the MetricFlow CLI to define metrics in their local project, but won't be able to dynamically query them with integrated tools.
+
diff --git a/website/snippets/_sl-test-and-query-metrics.md b/website/snippets/_sl-test-and-query-metrics.md
index 323ba2d83ad..b250fac4f31 100644
--- a/website/snippets/_sl-test-and-query-metrics.md
+++ b/website/snippets/_sl-test-and-query-metrics.md
@@ -2,7 +2,7 @@
Support for testing or querying metrics in the dbt Cloud IDE is not available in the current beta but is coming soon.
-You can use the **Preview** or **Compile** buttons in the IDE to run semantic validations and make sure your metrics are defined. You can [dynamically query metrics](#connect-and-query-api) with integrated tools on a dbt Cloud [Team or Enterprise](https://www.getdbt.com/pricing/) plan using the [Semantic Layer API](/docs/dbt-cloud-apis/sl-api-overview).
+You can use the **Preview** or **Compile** buttons in the IDE to run semantic validations and make sure your metrics are defined. You can [dynamically query metrics](#connect-and-query-api) with integrated tools on a dbt Cloud [Team or Enterprise](https://www.getdbt.com/pricing/) plan using the [dbt Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview).
Currently, you can define and test metrics using the MetricFlow CLI. dbt Cloud IDE support is coming soon. Alternatively, you can test using SQL client tools like DataGrip, DBeaver, or RazorSQL.
@@ -28,4 +28,4 @@ MetricFlow needs a `semantic_manifest.json` in order to build a semantic graph.
5. Run `mf validate-configs` to run validation on your semantic models and metrics.
6. Commit and merge the code changes that contain the metric definitions.
-To streamline your metric querying process, you can connect to the [dbt Semantic Layer API](/docs/dbt-cloud-apis/sl-api-overview) to access your metrics programmatically. For SQL syntax, refer to [Querying the API for metric metadata](/docs/dbt-cloud-apis/sl-jdbc#querying-the-api-for-metric-metadata) to query metrics using the API.
+To streamline your metric querying process, you can connect to the [dbt Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview) to access your metrics programmatically. For SQL syntax, refer to [Querying the API for metric metadata](/docs/dbt-cloud-apis/sl-jdbc#querying-the-api-for-metric-metadata) to query metrics using the API.
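+
+For example, a minimal MetricFlow CLI session (assuming your project defines a metric named `order_total`; swap in one of your own metrics):
+
+```bash
+mf validate-configs                                     # validate semantic models and metrics
+mf query --metrics order_total --group-by metric_time   # test-query a defined metric
+```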
diff --git a/website/src/components/discourse/index.js b/website/src/components/discourse/index.js
index 759903a175f..97ef08a5272 100644
--- a/website/src/components/discourse/index.js
+++ b/website/src/components/discourse/index.js
@@ -1,7 +1,6 @@
import React, { useState, useEffect } from 'react'
import axios from 'axios'
import feedStyles from './styles.module.css';
-import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
// Bare component with no default props set
export const DiscourseFeed = ({
@@ -25,8 +24,6 @@ export const DiscourseFeed = ({
styles = {}
}) => {
- const { siteConfig: {customFields} } = useDocusaurusContext();
-
const [topics, setTopics] = useState([])
const [loading, setLoading] = useState(true)
const [isError, setIsError] = useState(false)
@@ -42,9 +39,7 @@ export const DiscourseFeed = ({
setIsError(false)
// Build function endpoint
- const endpoint = customFields?.isVercel === '1'
- ? `/api/get-discourse-topics`
- : `/.netlify/functions/get-discourse-topics`
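+ // The Netlify function endpoint was removed, so the /api route is used unconditionally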
+ const endpoint = `/api/get-discourse-topics`
// If 'after' prop not passed in, set relative after date
let afterDate = after
diff --git a/website/src/components/discourseBlogComments/index.js b/website/src/components/discourseBlogComments/index.js
index 091f1047cb7..7684269f92a 100644
--- a/website/src/components/discourseBlogComments/index.js
+++ b/website/src/components/discourseBlogComments/index.js
@@ -2,12 +2,9 @@ import React, { useState, useEffect } from 'react'
import styles from './styles.module.css'
import axios from 'axios'
import sanitizeHtml from 'sanitize-html';
-import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
export const DiscourseBlogComments = ({title,slug}) => {
- const { siteConfig: {customFields} } = useDocusaurusContext();
-
const DISCOURSE_TOPIC_ENDPOINT = `https://discourse.getdbt.com/t/`
const commentsToLoad = 6
@@ -31,9 +28,7 @@ export const DiscourseBlogComments = ({title,slug}) => {
const fetchData = async () => {
try {
- const endpoint = customFields?.isVercel === '1'
- ? `/api/get-discourse-comments?title=${title}&slug=${slug}`
- : `/.netlify/functions/get-discourse-comments?title=${title}&slug=${slug}`
+ const endpoint = `/api/get-discourse-comments?title=${title}&slug=${slug}`
const { data } = await axios.get(endpoint)
diff --git a/website/src/components/lineage/index.js b/website/src/components/lineage/index.js
index eb59178369d..6c22e2bae99 100644
--- a/website/src/components/lineage/index.js
+++ b/website/src/components/lineage/index.js
@@ -5,11 +5,11 @@ let Dag = null;
try {
/** As a private package, not every developer will have access to this repo. */
- const DagImport = require('@dbt-labs/react-dbt-dag');
- require('@dbt-labs/react-dbt-dag/dag.css');
- require('@dbt-labs/react-dbt-dag/dag.standalone.css');
+ // const DagImport = require('@dbt-labs/react-dbt-dag');
+ // require('@dbt-labs/react-dbt-dag/dag.css');
+ // require('@dbt-labs/react-dbt-dag/dag.standalone.css');
- Dag = DagImport.Dag;
+ // Dag = DagImport.Dag;
} catch (err) {
/**
* react-dbt-dag is a private repo. Not all developers of the
diff --git a/website/static/_headers b/website/static/_headers
deleted file mode 100644
index f6b636c5158..00000000000
--- a/website/static/_headers
+++ /dev/null
@@ -1,5 +0,0 @@
-/*
- Strict-Transport-Security: max-age=63072000; includeSubDomains; preload
- Content-Security-Policy: object-src 'none'; frame-ancestors 'none';
- X-Content-Type-Options: nosniff
- X-XSS-Protection: 1; mode=block
diff --git a/website/static/_redirects b/website/static/_redirects
deleted file mode 100644
index cc1ad567682..00000000000
--- a/website/static/_redirects
+++ /dev/null
@@ -1,916 +0,0 @@
-# Deprecating Discovery API (FKA Metadata API) legacy endpoints
-
-/docs/dbt-cloud-apis/discovery-schema-model /docs/dbt-cloud-apis/discovery-schema-job-model 301
-/docs/dbt-cloud-apis/discovery-schema-models /docs/dbt-cloud-apis/discovery-schema-job-models 301
-/docs/dbt-cloud-apis/discovery-schema-modelByEnv /docs/dbt-cloud-apis/discovery-schema-environment-applied-modelHistoricalRuns 301
-/docs/dbt-cloud-apis/discovery-schema-metric /docs/dbt-cloud-apis/discovery-schema-job-metric 301
-/docs/dbt-cloud-apis/discovery-schema-metrics /docs/dbt-cloud-apis/discovery-schema-job-metrics 301
-/docs/dbt-cloud-apis/discovery-schema-source /docs/dbt-cloud-apis/discovery-schema-job-source 301
-/docs/dbt-cloud-apis/discovery-schema-sources /docs/dbt-cloud-apis/discovery-schema-job-sources 301
-/docs/dbt-cloud-apis/discovery-schema-seed /docs/dbt-cloud-apis/discovery-schema-job-seed 301
-/docs/dbt-cloud-apis/discovery-schema-seeds /docs/dbt-cloud-apis/discovery-schema-job-seeds 301
-/docs/dbt-cloud-apis/discovery-schema-snapshots /docs/dbt-cloud-apis/discovery-schema-job-snapshots 301
-/docs/dbt-cloud-apis/discovery-schema-test /docs/dbt-cloud-apis/discovery-schema-job-test 301
-/docs/dbt-cloud-apis/discovery-schema-tests /docs/dbt-cloud-apis/discovery-schema-job-tests 301
-/docs/dbt-cloud-apis/discovery-schema-exposure /docs/dbt-cloud-apis/discovery-schema-job-exposure 301
-/docs/dbt-cloud-apis/discovery-schema-exposures /docs/dbt-cloud-apis/discovery-schema-job-exposures 301
-
-# deploy redirects
-
-/docs/deploy/job-triggers /docs/deploy/deploy-jobs 301
-/docs/deploy/job-settings /docs/deploy/deploy-jobs 301
-/docs/deploy/dbt-cloud-job /docs/deploy/deploy-jobs 301
-/faqs/environments/beta-release /docs/dbt-versions/product-lifecycles 301
-
-/docs/deploy/slim-ci-jobs /docs/deploy/ci-jobs 301
-
-## semantic layer
-
-https://docs.getdbt.com/blog/understanding-the-components-of-the-dbt-semantic-layer /docs/use-dbt-semantic-layer/dbt-sl?version=1.6 301
-https://docs.getdbt.com/blog/how-to-design-and-structure-metrics /docs/use-dbt-semantic-layer/dbt-sl?version=1.6 301
-/guides/dbt-ecosystem/sl-partner-integration-guide /docs/use-dbt-semantic-layer/avail-sl-integrations 301
-/docs/use-dbt-semantic-layer/dbt-semantic-layer /docs/use-dbt-semantic-layer/dbt-sl 301
-/docs/use-dbt-semantic-layer/set-up-semantic-layer /docs/use-dbt-semantic-layer/setup-sl 301
-/docs/use-dbt-semantic-layer/setup-dbt-semantic-layer /docs/use-dbt-semantic-layer/setup-sl 301
-/docs/use-dbt-semantic-layer/quickstart-semantic-layer /docs/use-dbt-semantic-layer/quickstart-sl 301
-
-## refocus deploy page
-/docs/collaborate/environments/environments-in-dbt /docs/environments-in-dbt 301
-/docs/collaborate/environments/dbt-cloud-environments /docs/deploy/dbt-cloud-environments 301
-/docs/collaborate/environments/dbt-core-environments /docs/core/dbt-core-environments 301
-
-/docs/cloud/manage-access/licenses-and-groups /docs/cloud/manage-access/about-user-access 301
-
-/docs/deploy/cloud-ci-job /docs/deploy/continuous-integration 301
-
-## quickstarts redirect again
-
-/docs/quickstarts/dbt-cloud/bigquery /quickstarts/bigquery 301
-/docs/quickstarts/dbt-cloud/databricks /quickstarts/databricks 301
-/docs/quickstarts/dbt-cloud/redshift /quickstarts/redshift 301
-/docs/quickstarts/dbt-cloud/snowflake /quickstarts/snowflake 301
-/docs/quickstarts/dbt-cloud/starburst-galaxy /quickstarts/starburst-galaxy 301
-/docs/quickstarts/dbt-core/codespace /quickstarts/codespace 301
-/docs/quickstarts/dbt-core/manual-install /quickstarts/manual-install 301
-
-/docs/deploy/project-state /reference/node-selection/syntax 301
-/reference/global-configs /reference/global-configs/about-global-configs 301
-
-/docs/quickstarts/overview /quickstarts 301
-
-## supported data platform
-
-/docs/supported-data-platforms#verified-adapters /docs/supported-data-platforms 301
-/docs/supported-data-platforms#community-adapters /docs/community-adapters 301
-/docs/supported-data-platforms#adapter-installation /docs/connect-adapters 301
-/docs/supported-data-platforms#adapter-taxonomy /docs/supported-data-platforms 301
-/docs/supported-data-platforms#verified-by-dbt-labs /docs/supported-data-platforms 301
-/docs/supported-data-platforms#maintainers /docs/connect-adapters#maintainers 301
-/docs/supported-data-platforms#contributing-to-dbt-core-adapters /docs/contribute-core-adapters 301
-/docs/supported-data-platforms#contributing-to-a-pre-existing-adapter /docs/contribute-core-adapters#contribute-to-a-pre-existing-adapter 301
-/docs/supported-data-platforms#creating-a-new-adapter /docs/contribute-core-adapters#create-a-new-adapter 301
-
-## dbt core setup changes
-
-/docs/core/connection-profiles /docs/core/connect-data-platform/connection-profiles 301
-/reference/warehouse-setups/bigquery-setup /docs/core/connect-data-platform/bigquery-setup 301
-/reference/warehouse-setups/postgres-setup /docs/core/connect-data-platform/postgres-setup 301
-/reference/warehouse-setups/redshift-setup /docs/core/connect-data-platform/redshift-setup 301
-/reference/warehouse-setups/snowflake-setup /docs/core/connect-data-platform/snowflake-setup 301
-/reference/warehouse-setups/mssql-setup /docs/core/connect-data-platform/mssql-setup 301
-/reference/warehouse-setups/trino-setup /docs/core/connect-data-platform/trino-setup 301
-/reference/warehouse-setups/singlestore-setup /docs/core/connect-data-platform/singlestore-setup 301
-/reference/warehouse-setups/spark-setup /docs/core/connect-data-platform/spark-setup 301
-/reference/warehouse-setups/databricks-setup /docs/core/connect-data-platform/databricks-setup 301
-/reference/warehouse-setups/hive-setup /docs/core/connect-data-platform/hive-setup 301
-/reference/warehouse-setups/exasol-setup /docs/core/connect-data-platform/exasol-setup 301
-/reference/warehouse-setups/oracle-setup /docs/core/connect-data-platform/oracle-setup 301
-/reference/warehouse-setups/azuresynapse-setup /docs/core/connect-data-platform/azuresynapse-setup 301
-/reference/warehouse-setups/dremio-setup /docs/core/connect-data-platform/dremio-setup 301
-/reference/warehouse-setups/clickhouse-setup /docs/core/connect-data-platform/clickhouse-setup 301
-/reference/warehouse-setups/materialize-setup /docs/core/connect-data-platform/materialize-setup 301
-/reference/warehouse-setups/rockset-setup /docs/core/connect-data-platform/rockset-setup 301
-/reference/warehouse-setups/firebolt-setup /docs/core/connect-data-platform/firebolt-setup 301
-/reference/warehouse-setups/teradata-setup /docs/core/connect-data-platform/teradata-setup 301
-/reference/warehouse-setups/athena-setup /docs/core/connect-data-platform/athena-setup 301
-/reference/warehouse-setups/vertica-setup /docs/core/connect-data-platform/vertica-setup 301
-/reference/warehouse-setups/tidb-setup /docs/core/connect-data-platform/tidb-setup 301
-/reference/warehouse-setups/glue-setup /docs/core/connect-data-platform/glue-setup 301
-/reference/warehouse-setups/mindsdb-setup /docs/core/connect-data-platform/mindsdb-setup 301
-/reference/warehouse-setups/greenplum-setup /docs/core/connect-data-platform/greenplum-setup 301
-/reference/warehouse-setups/impala-setup /docs/core/connect-data-platform/impala-setup 301
-/reference/warehouse-setups/layer-setup /docs/core/connect-data-platform/layer-setup 301
-/reference/warehouse-setups/iomete-setup /docs/core/connect-data-platform/iomete-setup 301
-/reference/warehouse-setups/duckdb-setup /docs/core/connect-data-platform/duckdb-setup 301
-/reference/warehouse-setups/sqlite-setup /docs/core/connect-data-platform/sqlite-setup 301
-/reference/warehouse-setups/mysql-setup /docs/core/connect-data-platform/mysql-setup 301
-/reference/warehouse-setups/ibmdb2-setup /docs/core/connect-data-platform/ibmdb2-setup 301
-/reference/warehouse-setups/alloydb-setup /docs/core/connect-data-platform/alloydb-setup 301
-/reference/warehouse-setups/doris-setup /docs/core/connect-data-platform/doris-setup 301
-/reference/warehouse-setups/infer-setup /docs/core/connect-data-platform/infer-setup 301
-/reference/warehouse-setups/databend-setup /docs/core/connect-data-platform/databend-setup 301
-/reference/warehouse-setups/fal-setup /docs/core/connect-data-platform/fal-setup 301
-/reference/warehouse-setups/decodable-setup /docs/core/connect-data-platform/decodable-setup
-/reference/warehouse-setups/upsolver-setup /docs/core/connect-data-platform/upsolver-setup 301
-
-# Discovery redirect
-/docs/dbt-cloud-apis/metadata-schema-source /docs/dbt-cloud-apis/discovery-schema-source 301
-/docs/dbt-cloud-apis/metadata-schema-sources /docs/dbt-cloud-apis/discovery-schema-sources 301
-/docs/dbt-cloud-apis/metadata-schema-test /docs/dbt-cloud-apis/discovery-schema-test 301
-/docs/dbt-cloud-apis/metadata-schema-tests /docs/dbt-cloud-apis/discovery-schema-tests 301
-/docs/dbt-cloud-apis/metadata-schema-seed /docs/dbt-cloud-apis/discovery-schema-seed 301
-/docs/dbt-cloud-apis/metadata-schema-seeds /docs/dbt-cloud-apis/discovery-schema-seeds 301
-/docs/dbt-cloud-apis/metadata-schema-snapshots /docs/dbt-cloud-apis/discovery-schema-snapshots 301
-/docs/dbt-cloud-apis/metadata-schema-model /docs/dbt-cloud-apis/discovery-schema-model 301
-/docs/dbt-cloud-apis/metadata-schema-models /docs/dbt-cloud-apis/discovery-schema-models 301
-/docs/dbt-cloud-apis/metadata-schema-modelByEnv /docs/dbt-cloud-apis/discovery-schema-modelByEnv 301
-/docs/dbt-cloud-apis/metadata-schema-metrics /docs/dbt-cloud-apis/discovery-schema-metrics 301
-/docs/dbt-cloud-apis/metadata-schema-metric /docs/dbt-cloud-apis/discovery-schema-metric 301
-/docs/dbt-cloud-apis/metadata-schema-exposures /docs/dbt-cloud-apis/discovery-schema-exposures 301
-/docs/dbt-cloud-apis/metadata-schema-exposure /docs/dbt-cloud-apis/discovery-schema-exposure 301
-/docs/dbt-cloud-apis/metadata-use-case-guides /docs/dbt-cloud-apis/discovery-use-cases-and-examples 301
-/docs/dbt-cloud-apis/metadata-api /docs/dbt-cloud-apis/discovery-api 301
-/docs/dbt-cloud-apis/metadata-querying /docs/dbt-cloud-apis/discovery-querying 301
-
-/docs/core/connection-profiles#understanding-threads /docs/running-a-dbt-project/using-threads 301
-
-# PrivateLink to Secure redirects
-/docs/cloud/privatelink/about-privatelink /docs/cloud/secure/about-privatelink 301
-/docs/cloud/privatelink/snowflake-privatelink /docs/cloud/secure/about-privatelink 301
-/docs/cloud/privatelink/redshift-privatelink /docs/cloud/secure/about-privatelink 301
-/docs/cloud/privatelink/databricks-privatelink /docs/cloud/secure/about-privatelink 301
-/docs/cloud/privatelink/ip-restrictions /docs/cloud/secure/about-privatelink 301
-
-/docs/deploy/dbt-cloud-job#create-and-schedule-jobs /docs/deploy/dbt-cloud-job#create-and-schedule-jobs 301
-/docs/cloud/dbt-cloud-tips /docs/cloud/dbt-cloud-ide/dbt-cloud-tips 301
-/docs/cloud/develop-in-the-cloud /docs/cloud/dbt-cloud-ide/develop-in-the-cloud 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab /docs/deploy/dbt-cloud-job#model-timing 301
-
-/docs/quickstarts/dbt-core/quickstart /quickstarts/manual-install 301
-docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab /docs/deploy/dbt-cloud-job#model-timing 301
-
-/docs/dbt-versions/release-notes/January-2022/model-timing-more /docs/deploy/dbt-cloud-job#model-timing 301
-/docs/deploy/deployments#dbt-cloud /docs/deploy/dbt-cloud-job 301
-/docs/deploy/deployments#airflow /docs/deploy/deployment-tools 301
-/docs/deploy/deployments#prefect /docs/deploy/deployment-tools 301
-/docs/deploy/deployments#run-dbt-in-production /docs/deploy/deployments 301
-/docs/deploy/deployments#on-prefect-2 /docs/deploy/deployment-tools 301
-/docs/deploy/deployments#on-prefect-1 /docs/deploy/deployment-tools 301
-/docs/deploy/deployments#dagster /docs/deploy/deployment-tools 301
-/docs/deploy/deployments#automation-servers /docs/deploy/deployment-tools 301
-/docs/deploy/deployments#cron /docs/deploy/deployment-tools 301
-
-# New Cloud directory redirects
-/docs/collaborate/manage-access/enterprise-permissions#permission-sets /docs/cloud/manage-access/enterprise-permissions#permission-sets 301
-/docs/get-started/privatelink/about-privatelink /docs/cloud/privatelink/about-privatelink 301
-/docs/get-started/privatelink/snowflake-privatelink /docs/cloud/privatelink/snowflake-privatelink 301
-/docs/get-started/privatelink/redshift-privatelink /docs/cloud/privatelink/redshift-privatelink 301
-/docs/get-started/privatelink/databricks-privatelink /docs/cloud/privatelink/databricks-privatelink 301
-/docs/get-started/dbt-cloud-features /docs/cloud/about-cloud/dbt-cloud-features 301
-/docs/deploy/regions-ip-addresses /docs/cloud/about-cloud/regions-ip-addresses 301
-/docs/deploy/architecture /docs/cloud/about-cloud/architecture 301
-/docs/deploy/single-tenant /docs/cloud/about-cloud/tenancy 301
-/docs/deploy/multi-tenant /docs/cloud/about-cloud/tenancy 301
-/docs/cloud/manage-access/about-access /docs/cloud/manage-access/about-user-access 301
-/docs/collaborate/git/connect-github /docs/cloud/git/connect-github 301
-/docs/collaborate/git/connect-gitlab /docs/cloud/git/connect-gitlab 301
-/docs/collaborate/git/connect-azure-devops /docs/cloud/git/connect-azure-devops 301
-/docs/collaborate/git/setup-azure /docs/cloud/git/setup-azure 301
-/docs/collaborate/git/authenticate-azure /docs/cloud/git/authenticate-azure 301
-/docs/collaborate/git/import-a-project-by-git-url /docs/cloud/git/import-a-project-by-git-url 301
-/docs/collaborate/publish/about-publishing-models /docs/collaborate/govern/about-model-governance 301
-/docs/collaborate/publish/model-contracts /docs/collaborate/govern/model-contracts 301
-/docs/collaborate/publish/model-access /docs/collaborate/govern/model-access 301
-/docs/collaborate/publish/model-versions /docs/collaborate/govern/model-versions 301
-/docs/collaborate/manage-access/about-access /docs/cloud/manage-access/about-user-access 301
-/docs/collaborate/manage-access/seats-and-users /docs/cloud/manage-access/seats-and-users 301
-/docs/collaborate/manage-access/self-service-permissions /docs/cloud/manage-access/self-service-permissions 301
-/docs/collaborate/manage-access/enterprise-permissions /docs/cloud/manage-access/enterprise-permissions 301
-/docs/collaborate/manage-access/sso-overview /docs/cloud/manage-access/sso-overview 301
-/docs/collaborate/manage-access/set-up-sso-saml-2.0 /docs/cloud/manage-access/set-up-sso-saml-2.0 301
-/docs/collaborate/manage-access/set-up-sso-okta /docs/cloud/manage-access/set-up-sso-okta 301
-/docs/collaborate/manage-access/set-up-sso-google-workspace /docs/cloud/manage-access/set-up-sso-google-workspace 301
-/docs/collaborate/manage-access/set-up-sso-azure-active-directory /docs/cloud/manage-access/set-up-sso-azure-active-directory 301
-/docs/collaborate/manage-access/set-up-snowflake-oauth /docs/cloud/manage-access/set-up-snowflake-oauth 301
-/docs/collaborate/manage-access/set-up-bigquery-oauth /docs/cloud/manage-access/set-up-bigquery-oauth 301
-/docs/collaborate/manage-access/audit-log /docs/cloud/manage-access/audit-log 301
-/docs/get-started/develop-in-the-cloud /docs/cloud/develop-in-the-cloud 301
-/docs/get-started/dbt-cloud-tips /docs/cloud/dbt-cloud-tips 301
-/docs/get-started/installation /docs/core/installation 301
-/docs/get-started/about-the-cli /docs/core/about-the-cli 301
-/docs/get-started/homebrew-install /docs/core/homebrew-install 301
-/docs/get-started/pip-install /docs/core/pip-install 301
-/docs/get-started/docker-install /docs/core/docker-install 301
-/docs/get-started/source-install /docs/core/source-install 301
-/docs/get-started/connection-profiles /docs/core/connection-profiles 301
-/docs/get-started/run-your-dbt-projects /docs/running-a-dbt-project/run-your-dbt-projects 301
-/docs/get-started/learning-more/refactoring-legacy-sql /guides/migration/tools/refactoring-legacy-sql 301
-/docs/get-started/learning-more/using-jinja /guides/advanced/using-jinja 301
-
-# Quickstart redirects
-
-/docs/get-started/getting-started/set-up-dbt-cloud /quickstarts 301
-/docs/get-started/getting-started/getting-set-up/setting-up-snowflake /docs/quickstarts/dbt-cloud/snowflake 301
-/docs/get-started/getting-started/getting-set-up/setting-up-redshift /docs/quickstarts/dbt-cloud/redshift 301
-/docs/get-started/getting-started/getting-set-up/setting-up-databricks /quickstarts/databricks 301
-/docs/get-started/getting-started/getting-set-up/setting-up-bigquery /docs/quickstarts/dbt-cloud/bigquery 301
-/docs/get-started/getting-started/getting-set-up/setting-up-databricks /quickstarts/databricks 301
-/docs/get-started/getting-started/getting-set-up/setting-up-redshift /docs/quickstarts/dbt-cloud/redshift 301
-/docs/get-started/getting-started/getting-set-up/setting-up-snowflake /docs/quickstarts/dbt-cloud/snowflake 301
-/docs/get-started/getting-started/building-your-first-project/schedule-a-job /quickstarts/bigquery 301
-/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project /docs/quickstarts/dbt-cloud/bigquery#add-tests-to-your-models 301
-/docs/get-started/getting-started/building-your-first-project/build-your-first-models /quickstarts/bigquery?step=8 301
-/docs/get-started/getting-started/overview /quickstarts 301
-/docs/get-started/getting-started-dbt-core /docs/quickstarts/dbt-core/quickstart 301
-
-/docs/get-started/develop-in-the-cloud#set-up-environments /docs/get-started/develop-in-the-cloud 301
-/docs/get-started/develop-in-the-cloud#developer-credentials /docs/get-started/develop-in-the-cloud 301
-/docs/getting-started/develop-in-the-cloud#setting-up-developer-credentials /docs/get-started/develop-in-the-cloud 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-redshift-and-postgres /docs/get-started/connect-your-database#connecting-to-postgres-redshift-and-alloydb 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-snowflake /docs/get-started/connect-your-database#connecting-to-snowflake 301
-/docs/get-started/connect-your-database#connecting-to-snowflake /docs/cloud/connect-data-platform/connect-snowflake 301
-/docs/get-started/connect-your-database#connecting-to-postgres-redshift-and-alloydb /cloud/connect-data-platform/connect-redshift-postgresql-alloydb 301
-/docs/cloud/connect-data-platform/connect-your-database /docs/cloud/connect-data-platform/about-connections 301
-/faqs/connecting-to-two-dbs-not-allowed /faqs/warehouse/connecting-to-two-dbs-not-allowed 301
-/docs/dbt-cloud/cloud-ide/ide-beta /docs/get-started/develop-in-the-cloud 301
-
-# Adding new path for quickstarts
-
-/docs/quickstarts/dbt-cloud/bigquery /quickstarts/bigquery 301
-/quickstarts/databricks /quickstarts/databricks 301
-/docs/quickstarts/dbt-cloud/redshift /quickstarts/redshift 301
-/docs/quickstarts/dbt-cloud/snowflake /quickstarts/snowflake 301
-/quickstarts/starburst-galaxy /quickstarts/starburst-galaxy 301
-/quickstarts/codespace /quickstarts/codespace 301
-/quickstarts/manual-install /quickstarts/manual-install 301
-
-## dbt cloud feature page changes
-
-/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab /docs/get-started/dbt-cloud-features#model-timing-dashboard 301
-/docs/dbt-cloud /docs/get-started/getting-started/set-up-dbt-cloud
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version /docs/dbt-versions/upgrade-core-in-cloud 301
-/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/get-started/develop-in-the-cloud 301
-/docs/dbt-cloud/cloud-overview /docs/get-started/getting-started/set-up-dbt-cloud 301
-/docs/dbt-cloud/using-dbt-cloud/artifacts /docs/deploy/artifacts 301
-
-/docs/building-a-dbt-project/building-models/python-models /docs/build/python-models 301
-/docs/deploy/regions /docs/deploy/regions-ip-addresses 301
-
-## adapter redirects using diff formats
-
-/advanced/adapter-development/1-what-are-adapters /guides/dbt-ecosystem/adapter-development/1-what-are-adapters 301!
-/advanced/adapter-development/2-prerequisites-for-a-new-adapter /guides/dbt-ecosystem/adapter-development/2-prerequisites-for-a-new-adapter 301!
-/advanced/adapter-development/3-building-a-new-adapter /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301!
-/advanced/adapter-development/4-testing-a-new-adapter /guides/dbt-ecosystem/adapter-development/4-testing-a-new-adapter 301!
-/advanced/adapter-development/5-documenting-a-new-adapter /guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter 301!
-/advanced/adapter-development/6-promoting-a-new-adapter /guides/dbt-ecosystem/adapter-development/6-promoting-a-new-adapter 301!
-/advanced/adapter-development/7-verifying-a-new-adapter /guides/dbt-ecosystem/adapter-development/7-verifying-a-new-adapter 301!
-/guides/advanced/adapter-development/1-what-are-adapters /guides/dbt-ecosystem/adapter-development/1-what-are-adapters 301!
-/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter /guides/dbt-ecosystem/adapter-development/2-prerequisites-for-a-new-adapter 301!
-/guides/advanced/adapter-development/3-building-a-new-adapter /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301!
-/guides/advanced/adapter-development/4-testing-a-new-adapter /guides/dbt-ecosystem/adapter-development/4-testing-a-new-adapter 301!
-/guides/advanced/adapter-development/5-documenting-a-new-adapter /guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter 301!
-/guides/advanced/adapter-development/6-promoting-a-new-adapter /guides/dbt-ecosystem/adapter-development/6-promoting-a-new-adapter 301!
-/guides/advanced/adapter-development/7-verifying-a-new-adapter /guides/dbt-ecosystem/adapter-development/7-verifying-a-new-adapter 301!
-
-/guides/legacy/debugging-errors /guides/best-practices/debugging-errors 301!
-/guides/legacy/writing-custom-generic-tests /guides/best-practices/writing-custom-generic-tests 301!
-/guides/legacy/creating-new-materializations /guides/advanced/creating-new-materializations 301!
-
-# add new redirects to the top because they will override later ones
-
-# getting started guide
-
-/guides/getting-started /docs/get-started/getting-started/overview 301
-/docs/get-started/getting-started/building-your-first-project /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301
-/docs/get-started/getting-started/create-a-project /docs/get-started/getting-started/set-up-dbt-cloud 301
-/guides/getting-started/building-your-first-project /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301
-
-/guides/getting-started/building-your-first-project/build-your-first-models /docs/get-started/getting-started/building-your-first-project/build-your-first-models 301
-/guides/getting-started/building-your-first-project/schedule-a-job /docs/get-started/getting-started/building-your-first-project/schedule-a-job 301
-/guides/getting-started/building-your-first-project/test-and-document-your-project /docs/get-started/getting-started/building-your-first-project/test-and-document-your-project 301
-/guides/getting-started/create-a-project /docs/get-started/getting-started/building-your-first-project/build-your-first-models301
-/guides/getting-started/getting-set-up /docs/get-started/getting-started/set-up-dbt-cloud 301
-/guides/getting-started/getting-set-up/setting-up-bigquery /docs/get-started/getting-started/getting-set-up/setting-up-bigquery 301
-/guides/getting-started/getting-set-up/setting-up-databricks /docs/get-started/getting-started/getting-set-up/setting-up-databricks 301
-/guides/getting-started/getting-set-up/setting-up-redshift /docs/get-started/getting-started/getting-set-up/setting-up-redshift 301
-/guides/getting-started/getting-set-up/setting-up-snowflake /docs/get-started/getting-started/getting-set-up/setting-up-snowflake 301
-/guides/getting-started/getting-started /docs/get-started/getting-started/set-up-dbt-cloud 301
-/guides/getting-started/learning-more /docs/get-started/getting-started-dbt-core 301
-/guides/getting-started/learning-more/getting-started-dbt-core /docs/get-started/getting-started-dbt-core 301
-/guides/getting-started/learning-more/refactoring-legacy-sql /docs/get-started/learning-more/refactoring-legacy-sql 301
-/guides/getting-started/learning-more/using-jinja /docs/get-started/learning-more/using-jinja 301
-/docs/dbt-cloud/cloud-quickstart /docs/get-started/getting-started/set-up-dbt-cloud 301
-/docs/cloud-quickstart /docs/dbt-cloud/cloud-quickstart 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud /docs/get-started/getting-started/set-up-dbt-cloud 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database /docs/cloud/connect-data-platform/about-connections 301
-/docs/get-started/connect-your-database /docs/cloud/connect-data-platform/about-connections 301
-/docs/running-a-dbt-project/profile /docs/get-started/connection-profiles 301
-
-# other redirects
-
-/guides/best-practices/materializations/guides/best-practices/materializations/1-overview /guides/best-practices/materializations/1-guide-overview
-
-/docs/deploy/understanding-state /docs/deploy/about-state 301!
-/guides/legacy/understanding-state /docs/deploy/about-state 301!
-/guides/migration/versions/Older%20versions/understanding-state /docs/deploy/about-state 301!
-
-/docs/collaborate/git/resolve-merge-conflicts /docs/collaborate/git/merge-conflicts 301
-/docs/collaborate/environments /docs/collaborate/environments/environments-in-dbt 301
-/docs/running-a-dbt-project/running-dbt-in-production /docs/deploy/deployments 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications /docs/deploy/job-notifications 301
-/docs/dbt-cloud/using-dbt-cloud /docs/develop/develop-in-the-cloud 301
-/docs/dbt-cloud/january-2020-pricing-updates https://www.getdbt.com/pricing/ 301
-/docs/dbt-cloud/dbt-cloud-enterprise https://www.getdbt.com/pricing/ 301
-/docs/building-a-dbt-project/archival /docs/build/snapshots 301
-/docs/about/license /community/resources/contributor-license-agreements 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository /docs/collaborate/git/managed-repository 301
-/docs/dbt-cloud/release-notes /docs/dbt-versions/dbt-cloud-release-notes 301
-/docs/dbt-cloud/dbt-cloud-enterprise/audit-log /docs/collaborate/manage-access/audit-log 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth /docs/collaborate/manage-access/set-up-bigquery-oauth 301
-/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth /docs/collaborate/manage-access/set-up-snowflake-oauth 301
-/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta /docs/collaborate/manage-access/set-up-sso-okta 301
-/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory /docs/collaborate/manage-access/set-up-sso-azure-active-directory 301
-/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite /docs/collaborate/manage-access/set-up-sso-google-workspace 301
-/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0 /docs/collaborate/manage-access/set-up-sso-saml-2.0 301
-/docs/dbt-cloud/dbt-cloud-enterprise/sso-overview /docs/collaborate/manage-access/sso-overview 301
-/docs/dbt-cloud/access-control/enterprise-permissions /docs/collaborate/manage-access/enterprise-permissions 301
-/docs/dbt-cloud/access-control/self-service-permissions /docs/collaborate/manage-access/self-service-permissions 301
-/docs/dbt-cloud/access-control/cloud-seats-and-users /docs/collaborate/manage-access/seats-and-users 301
-/docs/dbt-cloud/access-control/access-control-overview /docs/collaborate/manage-access/about-access 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation /docs/collaborate/build-and-view-your-docs 301
-/docs/building-a-dbt-project/documentation /docs/collaborate/documentation 301
-/docs/building-a-dbt-project/managing-environments /docs/collaborate/environments/environments-in-dbt 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url /docs/collaborate/git/import-a-project-by-git-url 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure /docs/collaborate/git/authenticate-azure 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure /docs/collaborate/git/setup-azure 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops /docs/collaborate/git/connect-azure-devops 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab /docs/collaborate/git/connect-gitlab 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application /docs/collaborate/git/connect-github 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/setting-up / 301
-/docs/dbt-cloud/cloud-ide/handling-merge-conflicts /docs/collaborate/git/resolve-merge-conflicts 301
-/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/collaborate/cloud-build-and-view-your-docs 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories /docs/collaborate/git/pr-template 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration /docs/deploy/cloud-ci-job 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles /docs/deploy/dashboard-status-tiles 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness /docs/deploy/source-freshness 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-notifications /docs/deploy/job-notifications 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule /docs/deploy/job-triggers 301
-/docs/dbt-cloud/deployments/airgapped-deployment /docs/deploy/airgapped-deployment 301
-/docs/dbt-cloud/deployments/single-tenant-deployment /docs/deploy/single-tenant 301
-/docs/dbt-cloud/deployments/multi-tenant-deployment /docs/deploy/multi-tenant 301
-/docs/dbt-cloud/deployments/deployment-architecture /docs/deploy/architecture 301
-/docs/dbt-cloud/deployments/deployment-overview /docs/deploy/deployments 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name /docs/build/custom-target-names 301
-/docs/building-a-dbt-project/building-models/using-custom-aliases /docs/build/custom-aliases 301
-/docs/building-a-dbt-project/building-models/using-custom-databases /docs/build/custom-databases 301
-/docs/building-a-dbt-project/building-models/using-custom-schemas /docs/build/custom-schemas 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposures /docs/dbt-cloud-apis/metadata-schema-exposures 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposure /docs/dbt-cloud-apis/metadata-schema-exposure 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-tests /docs/dbt-cloud-apis/metadata-schema-tests 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-test /docs/dbt-cloud-apis/metadata-schema-test 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-snapshots /docs/dbt-cloud-apis/metadata-schema-snapshots 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seeds /docs/dbt-cloud-apis/metadata-schema-seeds 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seed /docs/dbt-cloud-apis/metadata-schema-seed 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-sources /docs/dbt-cloud-apis/metadata-schema-sources 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-source /docs/dbt-cloud-apis/metadata-schema-source 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metrics /docs/dbt-cloud-apis/metadata-schema-metrics 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metric /docs/dbt-cloud-apis/metadata-schema-metric 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-modelByEnv /docs/dbt-cloud-apis/metadata-schema-modelByEnv 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-models /docs/dbt-cloud-apis/metadata-schema-models 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model /docs/dbt-cloud-apis/metadata-schema-model 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying /docs/dbt-cloud-apis/metadata-querying 301
-/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview /docs/dbt-cloud-apis/metadata-api 301
-/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api /docs/dbt-cloud-apis/admin-cloud-api 301
-/docs/dbt-cloud/dbt-cloud-api/service-tokens /docs/dbt-cloud-apis/service-tokens 301
-/docs/dbt-cloud/dbt-cloud-api/user-tokens /docs/dbt-cloud-apis/user-tokens 301
-/docs/dbt-cloud/dbt-cloud-api/cloud-apis /docs/dbt-cloud-apis/overview 301
-/docs/building-a-dbt-project/hooks-operations /docs/build/hooks-operations 301
-/docs/building-a-dbt-project/analyses /docs/build/analyses 301
-/docs/building-a-dbt-project/package-management /docs/build/packages 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables /docs/build/environment-variables 301
-/docs/building-a-dbt-project/building-models/using-variables /docs/build/project-variables 301
-/docs/building-a-dbt-project/jinja-macros /docs/build/jinja-macros 301
-/docs/building-a-dbt-project/building-models/configuring-incremental-models /docs/build/incremental-models 301
-/docs/building-a-dbt-project/building-models/materializations /docs/build/materializations 301
-/docs/building-a-dbt-project/tests /docs/build/tests 301
-/docs/building-a-dbt-project/metrics /docs/build/metrics 301
-/docs/building-a-dbt-project/exposures /docs/build/exposures 301
-/docs/building-a-dbt-project/snapshots /docs/build/snapshots 301
-/docs/building-a-dbt-project/seeds /docs/build/seeds 301
-/docs/building-a-dbt-project/building-models /docs/build/sql-models 301
-/docs/building-a-dbt-project/using-sources /docs/build/sources 301
-/docs/building-a-dbt-project/projects /docs/build/projects 301
-/docs/building-a-dbt-project/building-models/python-models /docs/build/python-models 301
-/docs/building-a-dbt-project/macros /docs/guides/building-packages 301
-/docs/building-a-dbt-project/setting-up /docs/guides/building-packages 301
-/docs/building-a-dbt-project/dbt-jinja-functions /docs/guides/building-packages 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions /docs/dbt-versions/upgrade-core-in-cloud 301
-/docs/core-versions /docs/dbt-versions/core 301
-/docs/dbt-cloud/cloud-dbt-cloud-support /docs/dbt-support 301
-/docs/about/viewpoint /community/resources/viewpoint 301
-/docs/viewpoint /community/resources/viewpoint 301
-/dbt-cli/configure-your-profile /docs/get-started/connection-profiles 301
-/docs/running-a-dbt-project/using-the-cli /docs/get-started/about-the-cli 301
-/dbt-cli/install/from-source /docs/get-started/source-install 301
-/dbt-cli/install/docker /docs/get-started/docker-install 301
-/dbt-cli/install/pip /docs/get-started/pip-install 301
-/dbt-cli/install/homebrew /docs/get-started/homebrew-install 301
-/dbt-cli/install/overview /docs/get-started/installation 301
-/docs/dbt-cloud/cloud-ide/the-dbt-ide /docs/get-started/dbt-cloud-features 301
-/useful*components https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/adding-page-components.md 301
-/guides/legacy/managing-environments /docs/building-a-dbt-project/managing-environments 301
-/docs/running-a-dbt-project/dbt-api /docs/introduction 301
-/img/docs/dbt-cloud/dbt-cloud-enterprise/icon.png https://www.getdbt.com/ui/img/dbt-icon.png 301!
-/dbt-cli/installation-guides/centos /docs/get-started/installation 301
-/dbt-cli/installation-guides/centos /docs/get-started/installation 301
-/dbt-cli/installation-guides/install-from-source /dbt-cli/install/from-source 301
-/dbt-cli/installation-guides/macos /docs/get-started/installation 301
-/dbt-cli/installation-guides/ubuntu-debian /docs/get-started/installation 301
-/dbt-cli/installation-guides/windows /docs/get-started/installation 301
-/dbt-cli/installation /docs/get-started/installation 301
-/dbt-jinja-functions /reference/dbt-jinja-functions 301
-/docs /docs/introduction 301
-/docs/adapter /docs/writing-code-in-dbt/jinja-context/adapter 301
-/docs/analyses /docs/building-a-dbt-project/analyses 301
-/docs/api-variable /docs/writing-code-in-dbt/api-variable 301
-/docs/archival /docs/building-a-dbt-project/archival 301
-/docs/artifacts /docs/dbt-cloud/using-dbt-cloud/artifacts 301
-/docs/bigquery-configs /reference/resource-configs/bigquery-configs 301
-/reference/resource-properties/docs /reference/resource-configs/docs 301
-/reference/resource-properties/latest-version /reference/resource-properties/latest_version 301
-/docs/building-a-dbt-project/building-models/bigquery-configs /reference/resource-configs/bigquery-configs 301
-/docs/building-a-dbt-project/building-models/configuring-models /reference/model-configs
-/docs/building-a-dbt-project/building-models/enable-and-disable-models /reference/resource-configs/enabled 301
-/docs/building-a-dbt-project/building-models/redshift-configs /reference/resource-configs/redshift-configs 301
-/docs/building-a-dbt-project/building-models/snowflake-configs /reference/resource-configs/snowflake-configs 301
-/docs/building-a-dbt-project/building-models/spark-configs /reference/resource-configs/spark-configs 301
-/docs/building-a-dbt-project/building-models/tags /reference/resource-configs/tags 301
-/docs/building-a-dbt-project/building-models/using-sql-headers /reference/resource-configs/sql_header 301
-/docs/building-a-dbt-project/dbt-projects /docs/building-a-dbt-project/projects 301
-/docs/building-a-dbt-project/dbt-projects/configuring-query-comments /reference/project-configs/query-comment 301
-/docs/building-a-dbt-project/dbt-projects/configuring-quoting /reference/project-configs/quoting 301
-/docs/building-a-dbt-project/dbt-projects/creating-a-project /docs/building-a-dbt-project/projects#creating-a-dbt-project 301
-/docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions /reference/project-configs/require-dbt-version 301
-/docs/building-a-dbt-project/dbt-projects/use-an-existing-project /docs/building-a-dbt-project/projects#using-an-existing-project 301
-/docs/building-a-dbt-project/hooks /docs/building-a-dbt-project/hooks-operations 301
-/docs/building-a-dbt-project/testing-and-documentation /docs/building-a-dbt-project/tests 301
-/docs/building-a-dbt-project/testing-and-documentation/documentation /docs/building-a-dbt-project/testing-and-documentation/documentation 301
-/docs/building-a-dbt-project/testing-and-documentation/documentation-website /docs/building-a-dbt-project/testing-and-documentation/documentation 301
-/docs/building-a-dbt-project/testing-and-documentation/schemayml-files /reference/declaring-properties 301
-/docs/building-a-dbt-project/testing-and-documentation/testing /docs/building-a-dbt-project/tests 301
-/docs/building-a-dbt-project/using-operations /docs/building-a-dbt-project/hooks-operations 301
-/docs/building-models /docs/building-a-dbt-project/building-models 301
-/docs/building-packages /guides/legacy/building-packages 301
-/docs/centos /dbt-cli/installation 301
-/docs/clean /reference/commands/clean 301
-/docs/cloud-choosing-a-dbt-version /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 301
-/docs/cloud-configuring-dbt-cloud /docs/dbt-cloud/cloud-configuring-dbt-cloud 301
-/docs/cloud-enabling-continuous-integration-with-github /docs/deploy/cloud-ci-job 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github /docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github/ /docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration 301
-/docs/cloud-generating-documentation /docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation 301
-/docs/cloud-import-a-project-by-git-url /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url 301
-/docs/cloud-installing-the-github-application /docs/cloud/git/connect-github 301
-/docs/cloud-managing-permissions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-managing-permissions 301
-/docs/cloud-overview /docs/dbt-cloud/cloud-overview 301
-/docs/cloud-seats-and-users /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-seats-and-users 301
-/docs/cloud-setting-a-custom-target-name /docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name 301
-/docs/cloud-snapshotting-source-freshness /docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness 301
-/docs/cloud-supported-dbt-versions /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version 301
-/docs/cloud-using-a-custom-cron-schedule /docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule 301
-/docs/cloud-using-a-managed-repository /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository 301
-/docs/cmd-docs /reference/commands/cmd-docs 301
-/docs/command-line-interface /reference/dbt-commands 301
-/docs/compile /reference/commands/compile 301
-/docs/config /docs/writing-code-in-dbt/jinja-context/config 301
-/docs/configure-your-profile /dbt-cli/configure-your-profile 301
-/docs/configuring-incremental-models /docs/building-a-dbt-project/building-models/configuring-incremental-models 301
-/docs/configuring-models /reference/model-configs 301
-/docs/configuring-query-comments /docs/building-a-dbt-project/dbt-projects/configuring-query-comments 301
-/docs/configuring-quoting /docs/building-a-dbt-project/dbt-projects/configuring-quoting 301
-/docs/configuring-resources-from-the-project-file /docs/building-a-dbt-project/dbt-projects/configuring-resources-from-the-project-file 301
-/docs/connecting-your-database /docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database 301
-/docs/contributor-license-agreements /docs/contributing/contributor-license-agreements 301
-/docs/creating-a-project /docs/building-a-dbt-project/dbt-projects/creating-a-project 301
-/docs/creating-new-materializations /guides/legacy/creating-new-materializations 301
-/docs/creating-date-partitioned-tables /docs/guides/database-specific-guides/creating-date-partitioned-tables 301
-/docs/custom-schema-tests /guides/legacy/writing-custom-generic-tests 301
-/docs/database-specific-guides / 301
-/docs/dbt-api /docs/running-a-dbt-project/dbt-api 301
-/docs/dbt-cloud-enterprise /docs/dbt-cloud/dbt-cloud-enterprise 301
-/docs/dbt-cloud/cloud-configuring-repositories /docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories 301
-/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version /docs/dbt-versions/upgrade-core-in-cloud 301
-/docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions/ /docs/dbt-cloud/access-control/enterprise-permissions 301
-/docs/dbt-cloud/on-premises/architecture /dbt-cloud/on-premises/dependencies 301
-/docs/dbt-projects /docs/building-a-dbt-project/dbt-projects 301
-/docs/dbt_projectyml-file /docs/building-a-dbt-project/dbt-projects/dbt_projectyml-file 301
-/docs/debug /reference/commands/debug 301
-/docs/debug-method /docs/writing-code-in-dbt/jinja-context/debug-method 301
-/docs/deps /reference/commands/deps 301
-/docs/doc /docs/writing-code-in-dbt/jinja-context/doc 301
-/docs/documentation /docs/building-a-dbt-project/documentation 301
-/docs/documentation-website /docs/building-a-dbt-project/documentation 301
-/docs/dont-nest-your-curlies /docs/building-a-dbt-project/dont-nest-your-curlies 301
-/docs/enable-and-disable-models /reference/resource-configs/enabled 301
-/docs/enterprise-permissions /docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions 301
-/docs/env_var /docs/writing-code-in-dbt/jinja-context/env_var 301
-/docs/exceptions /docs/writing-code-in-dbt/jinja-context/exceptions 301
-/docs/execute /docs/writing-code-in-dbt/jinja-context/execute 301
-/docs/exit-codes /reference/exit-codes 301
-/docs/flags /docs/writing-code-in-dbt/jinja-context/flags 301
-/docs/fromjson /docs/writing-code-in-dbt/jinja-context/fromjson 301
-/docs/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 301
-/docs/global-cli-flags /reference/global-cli-flags 301
-/docs/graph /docs/writing-code-in-dbt/jinja-context/graph 301
-/docs/guides/building-packages /guides/legacy/building-packages 301
-/docs/guides/creating-new-materializations /guides/legacy/creating-new-materializations 301
-/docs/guides/debugging-errors /guides/legacy/debugging-errors 301
-/docs/guides/debugging-schema-names /guides/legacy/debugging-schema-names 301
-/docs/guides/getting-help /guides/legacy/getting-help 301
-/docs/guides/managing-environments /guides/legacy/managing-environments 301
-/docs/guides/navigating-the-docs /guides/legacy/navigating-the-docs 301
-/docs/guides/understanding-state /guides/legacy/understanding-state 301
-/docs/guides/videos /guides/legacy/videos 301
-/docs/guides/writing-custom-generic-tests /guides/legacy/writing-custom-generic-tests 301
-/docs/guides/writing-custom-schema-tests /guides/legacy/writing-custom-generic-tests 301
-/docs/guides/best-practices#choose-your-materializations-wisely /guides/legacy/best-practices#choose-your-materializations-wisely 301
-/docs/guides/best-practices#version-control-your-dbt-project /guides/legacy/best-practices#version-control-your-dbt-project 301
-/docs/best-practices /guides/legacy/best-practices 301
-/docs/guides/best-practices /guides/best-practices 301
-/docs/hooks /docs/building-a-dbt-project/hooks-operations 301
-/docs/init /reference/commands/init 301
-/docs/install-from-source /dbt-cli/installation 301
-/docs/installation /docs/core/installation 301
-/docs/invocation_id /docs/writing-code-in-dbt/jinja-context/invocation_id 301
-/docs/jinja-context /docs/writing-code-in-dbt/jinja-context 301
-/docs/license /docs/about/license 301
-/docs/list /reference/commands/list 301
-/docs/log /docs/writing-code-in-dbt/jinja-context/log 301
-/docs/macos /dbt-cli/installation 301
-/docs/macros /guides/legacy/building-packages 301
-/docs/maintaining-multiple-environments-with-dbt / 301
-/docs/managing-environments /guides/legacy/managing-environments 301
-/docs/materializations /docs/building-a-dbt-project/building-models/materializations 301
-/docs/model-selection-syntax /reference/node-selection/syntax 301
-/docs/modules /docs/writing-code-in-dbt/jinja-context/modules 301
-/docs/on-run-end-context /docs/writing-code-in-dbt/jinja-context/on-run-end-context 301
-/docs/overview /docs/introduction 301
-/docs/performance-optimization / 301
-/docs/package-management /docs/building-a-dbt-project/package-management 301
-/docs/profile-bigquery /reference/warehouse-profiles/bigquery-profile 301
-/docs/profile-mssql /reference/warehouse-profiles/mssql-profile 301
-/docs/profile-postgres /reference/warehouse-profiles/postgres-profile 301
-/docs/profile-presto /reference/warehouse-profiles/presto-profile 301
-/docs/profile-redshift /reference/warehouse-profiles/redshift-profile 301
-/docs/profile-snowflake /reference/warehouse-profiles/snowflake-profile 301
-/docs/profile-spark /reference/warehouse-profiles/spark-profile 301
-/docs/redshift-configs /reference/resource-configs/redshift-configs 301
-/docs/spark-configs /reference/resource-configs/spark-configs 301
-/docs/redshift-v2 /reference/warehouse-profiles/redshift-profile 301
-/docs/ref /docs/writing-code-in-dbt/jinja-context/ref 301
-/docs/requiring-specific-dbt-versions /docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions 301
-/docs/requiring-dbt-versions / 301
-/docs/return /docs/writing-code-in-dbt/jinja-context/return 301
-/docs/rpc /reference/commands/rpc 301
-/docs/run /reference/commands/run 301
-/docs/run-operation /reference/commands/run-operation 301
-/docs/run_query /docs/writing-code-in-dbt/jinja-context/run_query 301
-/docs/run_started_at /docs/writing-code-in-dbt/jinja-context/run_started_at 301
-/docs/running-a-dbt-project/command-line-interface /reference/dbt-commands 301
-/docs/running-a-dbt-project/command-line-interface/clean /reference/commands/clean 301
-/docs/running-a-dbt-project/command-line-interface/cmd-docs /reference/commands/cmd-docs 301
-/docs/running-a-dbt-project/command-line-interface/compile /reference/commands/compile 301
-/docs/running-a-dbt-project/command-line-interface/debug /reference/commands/debug 301
-/docs/running-a-dbt-project/command-line-interface/deps /reference/commands/deps 301
-/docs/running-a-dbt-project/command-line-interface/exit-codes /reference/exit-codes 301
-/docs/running-a-dbt-project/command-line-interface/global-cli-flags /reference/global-cli-flags 301
-/docs/running-a-dbt-project/command-line-interface/init /reference/commands/init 301
-/docs/running-a-dbt-project/command-line-interface/list /reference/commands/list 301
-/docs/running-a-dbt-project/command-line-interface/model-selection-syntax /reference/model-selection-syntax 301
-/docs/running-a-dbt-project/command-line-interface/rpc /reference/commands/rpc 301
-/docs/running-a-dbt-project/command-line-interface/run /reference/commands/run 301
-/docs/running-a-dbt-project/command-line-interface/run-operation /reference/commands/run-operation 301
-/docs/running-a-dbt-project/command-line-interface/seed /reference/commands/seed 301
-/docs/running-a-dbt-project/command-line-interface/snapshot /reference/commands/snapshot 301
-/docs/running-a-dbt-project/command-line-interface/source /reference/commands/source 301
-/docs/running-a-dbt-project/command-line-interface/test /reference/commands/test 301
-/docs/running-a-dbt-project/command-line-interface/version /reference/global-cli-flags#version 301
-/docs/running-a-dbt-project/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 301
-/docs/running-a-dbt-project/using-the-command-line-interface/centos /dbt-cli/installation-guides/centos 301
-/docs/running-a-dbt-project/using-the-command-line-interface/configure-your-profile /dbt-cli/configure-your-profile 301
-/docs/running-a-dbt-project/using-the-command-line-interface/install-from-source /dbt-cli/installation-guides/install-from-source 301
-/docs/running-a-dbt-project/using-the-command-line-interface/installation /dbt-cli/installation 301
-/docs/running-a-dbt-project/using-the-command-line-interface/macos /dbt-cli/installation-guides/macos 301
-/docs/running-a-dbt-project/using-the-command-line-interface/ubuntu-debian /dbt-cli/installation-guides/ubuntu-debian 301
-/docs/running-a-dbt-project/using-the-command-line-interface/windows /dbt-cli/installation-guides/windows 301
-/docs/running-dbt-in-production /docs/running-a-dbt-project/running-dbt-in-production 301
-/docs/schema /docs/writing-code-in-dbt/jinja-context/schema 301
-/docs/schemas /docs/writing-code-in-dbt/jinja-context/schemas 301
-/docs/schemayml-files /reference/declaring-properties 301
-/docs/seed /reference/commands/seed 301
-/docs/seeds /docs/building-a-dbt-project/seeds 301
-/docs/setting-up-enterprise-sso-with-azure-active-directory /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory 301
-/docs/setting-up-snowflake-sso /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth 301
-/docs/setting-up-sso-with-google-gsuite /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite 301
-/docs/setting-up-sso-with-okta /docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta 301
-/docs/snapshot /reference/commands/snapshot 301
-/docs/snapshots /docs/building-a-dbt-project/snapshots 301
-/docs/snowflake-configs /reference/resource-configs/snowflake-configs 301
-/docs/source /reference/commands/source 301
-/docs/statement-blocks /docs/writing-code-in-dbt/jinja-context/statement-blocks 301
-/docs/supported-databases/profile-bigquery /reference/bigquery-profile 301
-/docs/supported-databases/profile-mssql /reference/mssql-profile 301
-/docs/supported-databases/profile-postgres /reference/postgres-profile 301
-/docs/supported-databases/profile-presto /reference/presto-profile 301
-/docs/supported-databases/profile-redshift /reference/redshift-profile 301
-/docs/supported-databases/profile-snowflake /reference/snowflake-profile 301
-/docs/supported-databases/profile-spark /reference/spark-profile 301
-/docs/tags /reference/resource-configs/tags 301
-/docs/target /docs/writing-code-in-dbt/jinja-context/target 301
-/docs/test /reference/commands/test 301
-/docs/testing /docs/building-a-dbt-project/tests 301
-/docs/testing-and-documentation /docs/building-a-dbt-project/tests 301
-/docs/the-dbt-ide /docs/cloud/about-cloud/dbt-cloud-features 301
-/docs/this /docs/writing-code-in-dbt/jinja-context/this 301
-/docs/tojson /docs/writing-code-in-dbt/jinja-context/tojson 301
-/docs/ubuntu-debian /dbt-cli/installation 301
-/docs/use-an-existing-project /docs/building-a-dbt-project/dbt-projects/use-an-existing-project 301
-/docs/using-custom-aliases /docs/building-a-dbt-project/building-models/using-custom-aliases 301
-/docs/using-custom-database /docs/building-a-dbt-project/building-models/using-custom-databases 301
-/docs/using-custom-schemas /docs/building-a-dbt-project/building-models/using-custom-schemas 301
-/docs/using-dbt-cloud /docs/dbt-cloud/using-dbt-cloud 301
-/docs/using-jinja /guides/getting-started/learning-more/using-jinja 301
-/docs/using-operations /docs/building-a-dbt-project/hooks-operations 301
-/docs/using-sources /docs/building-a-dbt-project/using-sources 301
-/docs/using-sql-headers /reference/resource-configs/sql_header 301
-/docs/using-the-command-line-interface /docs/running-a-dbt-project/using-the-cli 301
-/docs/using-the-dbt-ide /docs/running-a-dbt-project/using-the-dbt-ide 301
-/docs/using-variables /docs/building-a-dbt-project/building-models/using-variables 301
-/docs/var /docs/writing-code-in-dbt/jinja-context/var 301
-/docs/version /reference/global-cli-flags#version 301
-/docs/videos /guides/legacy/videos 301
-/docs/warehouse-specific-configurations / 301
-/docs/windows /dbt-cli/installation 301
-/docs/writing-code-in-dbt/api-variable / 301
-/docs/writing-code-in-dbt/class-reference /reference/dbt-classes 301
-/docs/writing-code-in-dbt/extending-dbts-programming-environment/creating-new-materializations /guides/legacy/creating-new-materializations 301
-/docs/writing-code-in-dbt/extending-dbts-programming-environment/custom-schema-tests /guides/legacy/writing-custom-schema-tests 301
-/docs/writing-code-in-dbt/getting-started-with-jinja /docs/building-a-dbt-project/jinja-macros 301
-/docs/writing-code-in-dbt/jinja-context/adapter /reference/dbt-jinja-functions/adapter 301
-/docs/writing-code-in-dbt/jinja-context/as_text /reference/dbt-jinja-functions/as_text 301
-/docs/writing-code-in-dbt/jinja-context/builtins /reference/dbt-jinja-functions/builtins 301
-/docs/writing-code-in-dbt/jinja-context/config /reference/dbt-jinja-functions/config 301
-/docs/writing-code-in-dbt/jinja-context/dbt-project-yml-context /reference/dbt-jinja-functions/dbt-project-yml-context 301
-/docs/writing-code-in-dbt/jinja-context/dbt_version /reference/dbt-jinja-functions/dbt_version 301
-/docs/writing-code-in-dbt/jinja-context/debug-method /reference/dbt-jinja-functions/debug-method 301
-/docs/writing-code-in-dbt/jinja-context/doc /reference/dbt-jinja-functions/doc 301
-/docs/writing-code-in-dbt/jinja-context/env_var /reference/dbt-jinja-functions/env_var 301
-/docs/writing-code-in-dbt/jinja-context/exceptions /reference/dbt-jinja-functions/exceptions 301
-/docs/writing-code-in-dbt/jinja-context/execute /reference/dbt-jinja-functions/execute 301
-/docs/writing-code-in-dbt/jinja-context/flags /reference/dbt-jinja-functions/flags 301
-/docs/writing-code-in-dbt/jinja-context/fromjson /reference/dbt-jinja-functions/fromjson 301
-/docs/writing-code-in-dbt/jinja-context/fromyaml /reference/dbt-jinja-functions/fromyaml 301
-/docs/writing-code-in-dbt/jinja-context/graph /reference/dbt-jinja-functions/graph 301
-/docs/writing-code-in-dbt/jinja-context/invocation_id /reference/dbt-jinja-functions/invocation_id 301
-/docs/writing-code-in-dbt/jinja-context/log /reference/dbt-jinja-functions/log 301
-/docs/writing-code-in-dbt/jinja-context/modules /reference/dbt-jinja-functions/modules 301
-/docs/writing-code-in-dbt/jinja-context/on-run-end-context /reference/dbt-jinja-functions/on-run-end-context 301
-/docs/writing-code-in-dbt/jinja-context/profiles-yml-context /reference/dbt-jinja-functions/profiles-yml-context 301
-/docs/writing-code-in-dbt/jinja-context/project_name /reference/dbt-jinja-functions/project_name 301
-/docs/writing-code-in-dbt/jinja-context/ref /reference/dbt-jinja-functions/ref 301
-/docs/writing-code-in-dbt/jinja-context/return /reference/dbt-jinja-functions/return 301
-/docs/writing-code-in-dbt/jinja-context/run_query /reference/dbt-jinja-functions/run_query 301
-/docs/writing-code-in-dbt/jinja-context/run_started_at /reference/dbt-jinja-functions/run_started_at 301
-/docs/writing-code-in-dbt/jinja-context/schema /reference/dbt-jinja-functions/schema 301
-/docs/writing-code-in-dbt/jinja-context/schemas /reference/dbt-jinja-functions/schemas 301
-/docs/writing-code-in-dbt/jinja-context/source /reference/dbt-jinja-functions/source 301
-/docs/writing-code-in-dbt/jinja-context/statement-blocks /reference/dbt-jinja-functions/statement-blocks 301
-/docs/writing-code-in-dbt/jinja-context/target /reference/dbt-jinja-functions/target 301
-/docs/writing-code-in-dbt/jinja-context/this /reference/dbt-jinja-functions/this 301
-/docs/writing-code-in-dbt/jinja-context/tojson /reference/dbt-jinja-functions/tojson 301
-/docs/writing-code-in-dbt/jinja-context/toyaml /reference/dbt-jinja-functions/toyaml 301
-/docs/writing-code-in-dbt/jinja-context/var /reference/dbt-jinja-functions/var 301
-/docs/writing-code-in-dbt/macros /docs/building-a-dbt-project/jinja-macros 301
-/docs/writing-code-in-dbt/using-jinja /guides/getting-started/learning-more/using-jinja 301
-/faqs/getting-help/ /guides/legacy/getting-help 301
-/migration-guide/upgrading-to-0-17-0 /guides/migration/versions 301
-/migration-guide/upgrading-to-0-18-0 /guides/migration/versions 301
-/reference / 301
-/reference/accounts /dbt-cloud/api 301
-/reference/api /dbt-cloud/api 301
-/reference/bigquery-profile /reference/warehouse-profiles/bigquery-profile 301
-/reference/connections /dbt-cloud/api 301
-/reference/data-test-configs /reference/test-configs 301
-/reference/declaring-properties /reference/configs-and-properties 301
-/reference/dbt-artifacts /reference/artifacts/dbt-artifacts 301
-/reference/environments /dbt-cloud/api 301
-/reference/events /reference/events-logging 301
-/reference/jobs /dbt-cloud/api 301
-/reference/model-selection-syntax /reference/node-selection/syntax 301
-/reference/project-configs/on-run-end /reference/project-configs/on-run-start-on-run-end 301
-/reference/project-configs/on-run-start /reference/project-configs/on-run-start-on-run-end 301
-/reference/repositories /dbt-cloud/api 301
-/reference/resource-configs/post-hook /reference/resource-configs/pre-hook-post-hook 301
-/reference/resource-configs/pre-hook /reference/resource-configs/pre-hook-post-hook 301
-/reference/resource-properties/tags /reference/resource-configs/tags 301
-/reference/resource-properties/meta /reference/resource-configs/meta 301
-/reference/runs /dbt-cloud/api 301
-/reference/using-the-dbt-cloud-api /dbt-cloud/api 301
-https://tutorial.getdbt.com/* https://docs.getdbt.com/:splat 301!
-/reference/model-selection-syntax/#test-selection-examples /reference/node-selection/test-selection-examples 301
-/docs/building-a-dbt-project/building-models/using-custom-database /docs/building-a-dbt-project/building-models/using-custom-databases 301
-/dbt-cloud/api /dbt-cloud/api-v2 301
-/dbt-cloud/api-v2-old /dbt-cloud/api-v2-legacy 301
-/dbt-cloud/api-v4 /docs/dbt-cloud-apis/admin-cloud-api 301
-/reference/project-configs/source-paths /reference/project-configs/model-paths 301
-/reference/project-configs/data-paths /reference/project-configs/seed-paths 301
-/reference/project-configs/modules-paths /reference/project-configs/packages-install-path 301
-/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications /docs/dbt-cloud/using-dbt-cloud/cloud-notifications 301
-/reference/warehouse-profiles/presto-profile /reference/profiles.yml 301
-/setting-up /guides/getting-started/getting-set-up/setting-up-bigquery 301
-/tutorial/setting-up /quickstarts 301
-/tutorial/test-and-document-your-project /guides/getting-started/building-your-first-project/test-and-document-your-project 301
-/tutorial/build-your-first-models /guides/getting-started/building-your-first-project/build-your-first-models 301
-/tutorial/deploy-your-project /guides/getting-started/building-your-first-project/schedule-a-job 301
-/tutorial/using-jinja /guides/getting-started/learning-more/using-jinja 301
-/tutorial/2b-create-a-project-dbt-cli /guides/getting-started/learning-more/getting-started-dbt-core 301
-/tutorial/create-a-project-dbt-cli /guides/getting-started/learning-more/getting-started-dbt-core 301
-/tutorial/2a-create-a-project-dbt-cloud /guides/getting-started 301
-/tutorial/create-a-project-dbt-cloud /guides/getting-started 301
-/tutorial/getting-started /guides/getting-started 301
-/docs/dbt-cloud/cloud-changelog /docs/dbt-cloud/release-notes 301
-/faqs/all /docs/faqs 301!
-/faqs/* /docs/faqs/:splat 301
-/faqs/dbt-jinja-functions /reference/dbt-jinja-functions 301
-/tutorial/learning-more/* /guides/getting-started/learning-more/:splat 301
-/tutorial/getting-set-up/* /guides/getting-started/getting-set-up/:splat 301
-/tutorial/building-your-first-project/* /guides/getting-started/building-your-first-project/:splat 301
-/tutorial/refactoring-legacy-sql /guides/migration/tools/refactoring-legacy-sql 301
-/blog/change-data-capture-metrics /blog/change-data-capture 301
-/blog/intelligent-slim-ci /docs/deploy/continuous-integration 301
-/blog/model-timing-tab /blog/how-we-shaved-90-minutes-off-model 301
-/reference/warehouse-setups/resource-configs/materialize-configs/indexes /reference/resource-configs/materialize-configs#indexes 301
-/docs/build/building-models /docs/build/models 301
-/docs/build/bigquery-profile /reference/resource-configs/bigquery-configs 301
-/reference/warehouse-profiles/bigquery-setup /reference/warehouse-setups/bigquery-setup 301
-/date-trunc-sql /blog/date-trunc-sql 301
-/docs/using-hooks / 301
-/blog/how-we-structure-our-dbt-projects /guides/best-practices/how-we-structure/1-guide-overview 301
-
-/data-testing-why-you-need-it-and-how-to-get-started https://www.getdbt.com/blog/data-quality-testing/ 301
-
-# supported data platforms page
-
-/docs/profile /docs/supported-data-platforms 301
-/docs/available-adapters /docs/supported-data-platforms 301
-/docs/supported-databases /docs/supported-data-platforms 301
-
-# migration and legacy guides
-
-/docs/guides/migration-guide/upgrading-to-0-14-0 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-to-0-15-0 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-to-0-16-0 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-to-0-17-0 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-to-0-18-0 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-to-0-19-0 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-from-0-10-to-0-11 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-to-014 /guides/migration/versions 301
-/docs/upgrading-to-014 /guides/migration/versions 301
-/docs/upgrading-to-0-14-1 /guides/migration/versions 301
-/docs/upgrading-to-0-16-0 /guides/migration/versions 301
-/docs/guides/migration-guide/upgrading-to-0-20-0 /guides/migration/versions/upgrading-to-v0.20 301
-/docs/guides/migration-guide/upgrading-to-0-21-0 /guides/migration/versions/upgrading-to-v0.21 301
-/docs/guides/migration-guide/upgrading-to-1-0-0 /guides/migration/versions/upgrading-to-v1.0 301
-/docs/guides/migration-guide/upgrading-to-v1.0 /guides/migration/versions/upgrading-to-v1.0 301
-/docs/guides/getting-help /guides/legacy/getting-help 301
-/docs/guides/migration-guide/* /guides/migration/versions/:splat 301!
-/docs/guides/* /guides/legacy/:splat 301!
-
-/guides/best-practices/environment-setup/1-env-guide-overview /guides/orchestration/set-up-ci/overview 301
-/guides/best-practices/environment-setup/2-one-deployment-environment /guides/orchestration/set-up-ci/in-15-minutes 301
-/guides/best-practices/environment-setup/3-many-deployment-environments /guides/orchestration/set-up-ci/multiple-environments 301
-
-# adapter development docs
-
-/docs/contributing/what-are-adapters /guides/advanced/adapter-development/1-what-are-adapters 301
-/docs/contributing/adapter-development/1-what-are-adapters /guides/advanced/adapter-development/1-what-are-adapters 301
-/docs/contributing/prerequisites-for-a-new-adapter /guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter 301
-
-/docs/contributing/adapter-development/2-prerequisites-for-a-new-adapter /guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter 301
-/docs/contributing/building-a-new-adapter /guides/advanced/adapter-development/3-building-a-new-adapter 301
-
-/docs/contributing/adapter-development/3-building-a-new-adapter /guides/advanced/adapter-development/3-building-a-new-adapter 301
-
-/v0.13/docs/building-a-new-adapter /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301
-/docs/building-a-new-adapter /guides/advanced/adapter-development/3-building-a-new-adapter 301
-
-/docs/contributing/testing-a-new-adapter /guides/advanced/adapter-development/4-testing-a-new-adapter 301
-/docs/contributing/adapter-development/4-testing-a-new-adapter /guides/advanced/adapter-development/4-testing-a-new-adapter 301
-
-/docs/contributing/documenting-a-new-adapter /guides/advanced/adapter-development/5-documenting-a-new-adapter 301
-/docs/contributing/adapter-development/5-documenting-a-new-adapter /guides/advanced/adapter-development/5-documenting-a-new-adapter 301
-
-/docs/contributing/promoting-a-new-adapter /guides/advanced/adapter-development/6-promoting-a-new-adapter 301
-/docs/contributing/adapter-development/6-promoting-a-new-adapter /guides/advanced/adapter-development/6-promoting-a-new-adapter 301
-
-/docs/contributing/verifying-a-new-adapter /guides/advanced/adapter-development/7-verifying-a-new-adapter 301
-/docs/contributing/adapter-development/7-verifying-a-new-adapter /guides/advanced/adapter-development/7-verifying-a-new-adapter 301
-
-/docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer /docs/use-dbt-semantic-layer/dbt-semantic-layer 301!
-/reference/warehouse-profiles/impala-profile /reference/warehouse-setups/impala-setup 301
-/reference/warehouse-profiles/exasol-profile /reference/warehouse-setups/exasol-setup 301
-/reference/warehouse-profiles/layer-profile /reference/warehouse-setups/layer-setup 301
-/reference/warehouse-profiles/postgres-profile /reference/warehouse-setups/postgres-setup 301
-/reference/warehouse-profiles/greenplum-profile /reference/warehouse-setups/greenplum-setup 301
-/reference/warehouse-profiles/alloydb-profile /reference/warehouse-setups/alloydb-setup 301
-/reference/warehouse-profiles/azuresynapse-profile /reference/warehouse-setups/azuresynapse-setup 301
-/reference/warehouse-profiles/snowflake-profile /reference/warehouse-setups/snowflake-setup 301
-/reference/warehouse-profiles/rockset-profile /reference/warehouse-setups/rockset-setup 301
-/reference/warehouse-profiles/trino-profile /reference/warehouse-setups/trino-setup 301
-/reference/warehouse-profiles/glue-profile /reference/warehouse-setups/glue-setup 301
-/reference/warehouse-profiles/duckdb-profile /reference/warehouse-setups/duckdb-setup 301
-/reference/warehouse-profiles/vertica-profile /reference/warehouse-setups/vertica-setup 301
-/reference/warehouse-profiles/clickhouse-profile /reference/warehouse-setups/clickhouse-setup 301
-/reference/warehouse-profiles/athena-profile /reference/warehouse-setups/athena-setup 301
-/reference/warehouse-profiles/iomete-profile /reference/warehouse-setups/iomete-setup 301
-/reference/warehouse-profiles/mssql-profile /reference/warehouse-setups/mssql-setup 301
-/reference/warehouse-profiles/tidb-profile /reference/warehouse-setups/tidb-setup 301
-/reference/warehouse-profiles/materialize-profile /reference/warehouse-setups/materialize-setup 301
-/reference/warehouse-profiles/redshift-profile /reference/warehouse-setups/redshift-setup 301
-/reference/warehouse-profiles/databricks-profile /reference/warehouse-setups/databricks-setup 301
-/reference/warehouse-profiles/bigquery-profile /reference/warehouse-setups/bigquery-setup 301
-/reference/warehouse-profiles/dremio-profile /reference/warehouse-setups/dremio-setup 301
-/reference/warehouse-profiles/oracle-profile /reference/warehouse-setups/oracle-setup 301
-/reference/warehouse-profiles/teradata-profile /reference/warehouse-setups/teradata-setup 301
-/reference/warehouse-profiles/singlestore-profile /reference/warehouse-setups/singlestore-setup 301
-/reference/warehouse-profiles/sqlite-profile /reference/warehouse-setups/sqlite-setup 301
-/reference/warehouse-profiles/spark-profile /reference/warehouse-setups/spark-setup 301
-/reference/warehouse-profiles/mindsdb-profile /reference/warehouse-setups/mindsdb-setup 301
-/reference/warehouse-profiles/ibmdb2-profile /reference/warehouse-setups/ibmdb2-setup 301
-/reference/warehouse-profiles/firebolt-profile /reference/warehouse-setups/firebolt-setup 301
-/reference/warehouse-profiles/mysql-profile /reference/warehouse-setups/mysql-setup 301
-/reference/warehouse-profiles/hive-profile /reference/warehouse-setups/hive-setup 301
-/reference/using-sources /docs/build/sources 301
-
-# ide ia redirects
-
-/docs/dbt-cloud/cloud-ide/the-dbt-ide /docs/getting-started/dbt-cloud-features 301!
-/docs/dbt-cloud/cloud-ide/handling-merge-conflicts /docs/collaborate/git/resolve-merge-conflicts 301!
-/dbt-cloud/cloud-ide/viewing-docs-in-the-ide /docs/getting-started/develop-in-the-cloud 301!
-/docs/dbt-cloud/cloud-ide/ide-beta /docs/getting-started/develop-in-the-cloud 301!
-/docs/running-a-dbt-project/using-the-dbt-ide /docs/getting-started/develop-in-the-cloud 301!
-/dbt-cloud/cloud-ide/the-ide-git-button /docs/collaborate/git/version-control-basics 301!
-/docs/building-a-dbt-project/setting-up /guides/legacy/building-packages 301!
-/docs/building-a-dbt-project/dbt-jinja-functions /reference/dbt-jinja-functions 301!
-
-# Community docs
-
-/docs/contributing/long-lived-discussions-guidelines /community/resources/forum-guidelines 301
-/docs/guides/legacy/navigating-the-docs.md /community/contribute 301
-/community/writing-on-discourse/ /community/contributing/contributing-online-community 301
-/community/contributing/ /community/contribute 301
-/docs/contributing/contributor-license-agreements /community/resources/contributor-license-agreements 301
-/community/maintaining-a-channel /community/resources/maintaining-a-channel 301
-/docs/contributing/oss-expectations /community/resources/oss-expectations 301
-/docs/slack-rules-of-the-road /community/resources/community-rules-of-the-road 301
-/docs/contributing/slack-rules-of-the-road /community/resources/community-rules-of-the-road 301
-/community/resources/slack-rules-of-the-road /community/resources/community-rules-of-the-road 301
-/blog/getting-started-with-the-dbt-semantic-layer /blog/understanding-the-components-of-the-dbt-semantic-layer 301!
-/docs/getting-started/develop-in-the-cloud#creating-a-development-environment /docs/get-started/develop-in-the-cloud#set-up-and-access-the-cloud-ide 301
-/docs/cloud-developer-ide /docs/build/custom-target-names#dbt-cloud-ide 301
-/website/docs/docs/contributing/building-a-new-adapter.md /guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter 301
-/guides/legacy/getting-help /community/resources/getting-help 301
-
-# Blog docs
-
-/blog/tags/release-notes /docs/dbt-versions/dbt-cloud-release-notes 301
-
-# Faq docs
-
-/faqs/dbt-jinja-functions /reference/dbt-jinja-functions 301
-
-/website/docs/docs/contributing/documenting-a-new-adapter.md /guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter 301
-
-/docs/docs/contributing/documenting-a-new-adapter /docs/contributing/documenting-a-new-adapter 301
-
-
-/v0.8/reference / 301
-/v0.10/reference / 301
-/v0.12/reference / 301
-/v0.13/reference / 301
-/v0.13/docs/requiring-dbt-versions / 301
-/v0.14/docs/cloud-developer-ide / 301
-/v0.15/docs/cloud-import-a-project-by-git-url /docs/cloud/git/import-a-project-by-git-url 301
-
-/v0.15/docs/configure-your-profile /docs/core/connection-profiles 301
-
-# Removing on premise Cloud content
-/docs/dbt-cloud/on-premises/dependencies /docs/deploy/single-tenant 301
-/docs/dbt-cloud/on-premises/faqs /docs/deploy/single-tenant 301
-/docs/dbt-cloud/on-premises/index /docs/deploy/single-tenant 301
-/docs/dbt-cloud/on-premises/installation /docs/deploy/single-tenant 301
-/docs/dbt-cloud/on-premises/prerequisites /docs/deploy/single-tenant 301
-/docs/dbt-cloud/on-premises/setup /docs/deploy/single-tenant 301
-/docs/dbt-cloud/on-premises/system-requirements /docs/deploy/single-tenant 301
-/docs/dbt-cloud/on-premises/upgrading-kots /docs/deploy/single-tenant 301
diff --git a/website/static/img/docs/release-notes/ci-job-tag.png b/website/static/img/docs/release-notes/ci-job-tag.png
new file mode 100644
index 00000000000..02f2cdc895c
Binary files /dev/null and b/website/static/img/docs/release-notes/ci-job-tag.png differ