diff --git a/contributing/single-sourcing-content.md b/contributing/single-sourcing-content.md index 5b87d494c94..7c345a6631a 100644 --- a/contributing/single-sourcing-content.md +++ b/contributing/single-sourcing-content.md @@ -90,7 +90,7 @@ This component can be added directly to a markdown file in a similar way as othe Both properties can be used together to set a range where the content should show. In the example below, this content will only show if the selected version is between **0.21** and **1.0**: ```markdown - + Versioned content here diff --git a/website/api/get-discourse-comments.js b/website/api/get-discourse-comments.js new file mode 100644 index 00000000000..5ac59cfe5f2 --- /dev/null +++ b/website/api/get-discourse-comments.js @@ -0,0 +1,169 @@ +const axios = require('axios') +require("dotenv").config(); + +const { DISCOURSE_DEVBLOG_API_KEY , DISCOURSE_USER_SYSTEM } = process.env +const DEVBLOG_PROD_URL = 'https://docs.getdbt.com/blog/' +const DEV_ENV = 'dev-' +const PREVIEW_ENV = 'deploy-preview-' + +// Set API endpoint and headers +let discourse_endpoint = `https://discourse.getdbt.com` +let headers = { + 'Accept': 'application/json', + 'Api-Key': DISCOURSE_DEVBLOG_API_KEY, + 'Api-Username': DISCOURSE_USER_SYSTEM, +} + +async function getDiscourseComments(request, response) { + let topicId, comments, DISCOURSE_TOPIC_ID; + + const blogUrl = await getBlogUrl(request) + + if (blogUrl === DEVBLOG_PROD_URL) { + DISCOURSE_TOPIC_ID = 21 + } else { + DISCOURSE_TOPIC_ID = 2 + } + + try { + const env = + blogUrl === DEVBLOG_PROD_URL + ? "" + : blogUrl.includes("localhost") + ? DEV_ENV + : PREVIEW_ENV; + const postTitle = `${env}${request.query.title}`; + const postSlug = request.query.slug; + const cleanSlug = cleanUrl(request.query.slug); + const externalId = truncateString(`${env}${cleanSlug}`); + + console.table({ + blogUrl, + postTitle, + postSlug, + cleanSlug, + externalId, + }); + + + if (!postSlug) throw new Error("Unable to query Discourse API. Error reading slug."); + + topicId = await searchDiscourseExternalId(externalId); + + // First check if the dev blog post exists in Discourse + // Get the comments if it does + if (typeof topicId === "number") { + comments = await getDiscourseTopicbyID(topicId); + } else { + // If the dev blog post does not exist in Discourse + // Create a new topic and get the comments + topicId = await createDiscourseTopic(postTitle, externalId, cleanSlug, blogUrl, DISCOURSE_TOPIC_ID); + if (typeof topicId === "number") { + comments = await getDiscourseTopicbyID(topicId); + comments.shift(); + comments = { topicId, comments }; + + return await response.status(200).json(comments); + } else { + console.log("Unable to create Discourse topic TopicID is not a number."); + return await response.status(500).json({ error: "Unable to create Discourse topic TopicID is not a number." }); + } + } + + comments.shift(); + comments = { topicId, comments }; + + return await response.status(200).json(comments); + } catch (err) { + console.log("err on getDiscourseComments", err); + return await response.status(500).json({ error: "Unable to get topics from Discourse." 
}); + } +} + +async function createDiscourseTopic(title, externalId, slug, blogUrl, DISCOURSE_TOPIC_ID) { + console.log(`Creating a new topic in Discourse - ${title}`) + try { + const response = await axios.post(`${discourse_endpoint}/posts`, { + title: title, + raw: `This is a companion discussion topic for the original entry at ${blogUrl}${slug}`, + category: DISCOURSE_TOPIC_ID, + embed_url: `${blogUrl}${slug}`, + external_id: externalId, + tags: ['devblog'], + visible: false + }, { headers }) + + let topicId = await response.data.topic_id + + console.log('Topic successfully created with topic_id', topicId) + + return topicId + + } catch(err) { + console.log('err on createDiscourseTopic', err) + return err + } +} + +async function getDiscourseTopicbyID(topicId) { + console.log(`Topic found setting topic id - ${topicId}`) + try { + let response = await axios.get(`${discourse_endpoint}/t/${topicId}.json`, { headers }) + let { data } = await response + let post_stream = data.post_stream + let post_count = data.posts_count + + // If there is more than one comment make the topic visibile in Discourse + if (post_count > 1 && data.visible === false) { + console.log(`Topic has more than one comment. Changing visibility to visible.`) + await axios.put(`${discourse_endpoint}/t/${topicId}`, { + visible: true + }, { headers }) + } + + // Filter only 'regular' posts in Discourse. (e.g. not moderator actions, small_actions, whispers) + post_stream.posts = post_stream.posts.filter(post => post.post_type === 1) + + return post_stream.posts + } catch(err) { + console.log('err on getDiscourseTopicbyID', err) + return err + } +} + +async function searchDiscourseExternalId(externalId) { + console.log(`Searching for external_id in Discourse - ${externalId}`); + try { + const data = await axios.get(`${discourse_endpoint}/t/external_id/${externalId}.json`, { headers }); + return data.data.id; + } catch (err) { + if (err.response.status === 404) { + console.log("No topics found in Discourse."); + return null; + } + console.log("Unable to search Discourse for external_id.", err); + return err; + } +} + + +// Truncate external_id to 50 characters per Discourse API requirements +function truncateString(str) { + if (str.length <= 50) { + return str + } + return str.slice(0, 50) +} + +// Remove query params and hash from URL to prevent duplicate topics +function cleanUrl(url) { + return url.split("?")[0].split("#")[0]; +} + +// Create a function to get the host name from the request and add /blog/ to the end +async function getBlogUrl(req) { + const host = req.headers.host + return `https://${host}/blog/` +} + +module.exports = getDiscourseComments; diff --git a/website/api/get-discourse-topics.js b/website/api/get-discourse-topics.js new file mode 100644 index 00000000000..90d6e5af80e --- /dev/null +++ b/website/api/get-discourse-topics.js @@ -0,0 +1,136 @@ +const axios = require('axios') + +async function getDiscourseTopics(request, response) { + const { DISCOURSE_API_KEY , DISCOURSE_USER } = process.env + + const body = request.body + + try { + // Set API endpoint and headers + let discourse_endpoint = `https://discourse.getdbt.com` + let headers = { + 'Accept': 'application/json', + 'Api-Key': DISCOURSE_API_KEY, + 'Api-Username': DISCOURSE_USER, + } + + const query = buildQueryString(body) + if(!query) throw new Error('Unable to build query string.') + + // Get topics from Discourse + let { data: { posts, topics } } = await axios.get(`${discourse_endpoint}/search?q=${query}`, { headers }) + + // Return empty 
array if no topics found for search query + // 200 status is used to prevent triggering Datadog alerts + if(!topics || topics?.length <= 0) { + // Log message with encoded query and end function + console.log('Unable to get results from api request.') + console.log(`Search query: ${query}`) + return await response.status(200).json([]) + } + + // Set author and like_count for topics if not querying by specific term + let allTopics = topics + if(!body?.term) { + allTopics = topics.reduce((topicsArr, topic) => { + // Get first post in topic + const firstTopicPost = posts?.find(post => + post?.post_number === 1 && + post?.topic_id === topic?.id + ) + // If post found + // Get username + if(firstTopicPost?.username) { + topic.author = firstTopicPost.username + } + // Get like count + if(firstTopicPost?.like_count) { + topic.like_count = firstTopicPost.like_count + } + + if(firstTopicPost?.blurb) { + topic.blurb = firstTopicPost.blurb + } + + // Push updated topic to array + topicsArr.push(topic) + + return topicsArr + }, []) + } + + // Return topics + //return await returnResponse(200, allTopics) + return await response.status(200).json(allTopics) + } catch(err) { + // Log and return the error + console.log('err', err) + return await response.status(500).json({ error: 'Unable to get topics from Discourse.'}) + } +} + +function buildQueryString(body) { + if(!body) return null + + // start with empty query string + let query = '' + + // check param and apply to query if set + for (const [key, value] of Object.entries(body)) { + // validate categories + // if valid, add to query string + if(validateItem({ key, value })) { + if(key === 'category') { + query += `#${value} ` + } else if(key === 'inString') { + query += `in:${value}` + } else if(key === 'status' && Array.isArray(value)) { + value?.map(item => { + query += `${key}:${item} ` + }) + } else { + query += `${key}:${value} ` + } + } + } + + if(query) { + const encodedQuery = encodeURIComponent(query) + return encodedQuery + } +} + +function validateItem({ key, value }) { + // predefined Discourse values + // https://docs.discourse.org/#tag/Search/operation/search + const inStringValues = ['title', 'first', 'pinned', 'wiki'] + const orderValues = ['latest', 'likes', 'views', 'latest_topic'] + const statusValues = ['open', 'closed', 'public', 'archived', 'noreplies', 'single_user', 'solved', 'unsolved'] + + // validate keys + if(key === 'inString') { + return inStringValues.includes(value) + ? true + : false + } else if(key === 'order') { + return orderValues.includes(value) + ? true + : false + } else if(key === 'status') { + if(Array.isArray(value)) { + let isValid = true + value?.map(item => { + if(!statusValues.includes(item)) isValid = false + }) + return isValid + } else { + return statusValues.includes(value) + ? 
true + : false + } + } else { + return true + } +} + +module.exports = getDiscourseTopics diff --git a/website/dbt-versions.js b/website/dbt-versions.js index a59822101e9..655d4f02b7b 100644 --- a/website/dbt-versions.js +++ b/website/dbt-versions.js @@ -23,10 +23,6 @@ exports.versions = [ version: "1.1", EOLDate: "2023-04-28", }, - { - version: "1.0", - EOLDate: "2022-12-03" - }, ] exports.versionedPages = [ diff --git a/website/docs/docs/build/derived-metrics.md b/website/docs/docs/build/derived-metrics.md index bef7346b353..2ad1c3e368c 100644 --- a/website/docs/docs/build/derived-metrics.md +++ b/website/docs/docs/build/derived-metrics.md @@ -21,7 +21,7 @@ In MetricFlow, derived metrics are metrics created by defining an expression usi | `metrics` | The list of metrics used in the derived metrics. | Required | | `alias` | Optional alias for the metric that you can use in the expr. | Optional | | `filter` | Optional filter to apply to the metric. | Optional | -| `offset_window` | Set the period for the offset window, such as 1 month. This will return the value of the metric one month from the metric time. This can't be used with `offset_to_grain`. | Required | +| `offset_window` | Set the period for the offset window, such as 1 month. This will return the value of the metric one month from the metric time. | Required | The following displays the complete specification for derived metrics, along with an example. diff --git a/website/docs/docs/build/incremental-models.md b/website/docs/docs/build/incremental-models.md index 89115652a9c..d3c3f25890b 100644 --- a/website/docs/docs/build/incremental-models.md +++ b/website/docs/docs/build/incremental-models.md @@ -79,12 +79,6 @@ A `unique_key` enables updating existing rows instead of just appending new rows Not specifying a `unique_key` will result in append-only behavior, which means dbt inserts all rows returned by the model's SQL into the preexisting target table without regard for whether the rows represent duplicates. - - -The optional `unique_key` parameter specifies a field that can uniquely identify each row within your model. You can define `unique_key` in a configuration block at the top of your model. If your model doesn't contain a single field that is unique, but rather a combination of columns, we recommend that you create a single column that can serve as a unique identifier (by concatenating and hashing those columns), and pass it into your model's configuration. - - - The optional `unique_key` parameter specifies a field (or combination of fields) that define the grain of your model. That is, the field(s) identify a single unique row. You can define `unique_key` in a configuration block at the top of your model, and it can be a single column name or a list of column names. diff --git a/website/docs/docs/build/packages.md b/website/docs/docs/build/packages.md index d4cebc7a6f0..97e8784416e 100644 --- a/website/docs/docs/build/packages.md +++ b/website/docs/docs/build/packages.md @@ -284,18 +284,35 @@ packages: ### Local packages -Packages that you have stored locally can be installed by specifying the path to the project, like so: +A "local" package is a dbt project accessible from your local file system. You can install it by specifying the project's path. It works best when you nest the project within a subdirectory relative to your current project's directory. 
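For example, here is a minimal sketch of the nested layout (the `analytics` and `shared_utils` names are hypothetical, invented for illustration):

```yaml
# Hypothetical directory layout:
#   analytics/
#   ├── dbt_project.yml       # the main project
#   ├── packages.yml
#   └── shared_utils/         # a second dbt project, nested as a subdirectory
#       └── dbt_project.yml
#
# analytics/packages.yml installs the nested project by its relative path:
packages:
  - local: shared_utils
```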
```yaml
packages:
-  - local: /opt/dbt/redshift # use a local path
+  - local: relative/path/to/subdirectory
```

-Local packages should only be used for specific situations, for example, when testing local changes to a package.
+Other patterns may work in some cases, but not always. For example, if you install this project as a package elsewhere, or try running it on a different system, there's no guarantee that the relative and absolute paths will yield the same results.
+
+
+```yaml
+packages:
+  # not recommended - support for these patterns varies
+  - local: /../../redshift # relative path to a parent directory
+  - local: /opt/dbt/redshift # absolute path on the system
+```
+
+
+There are a few specific use cases where we recommend using a "local" package:
+1. **Monorepo** — When you have multiple projects, each nested in a subdirectory, within a monorepo. "Local" packages allow you to combine projects for coordinated development and deployment.
+2. **Testing changes** — To test changes in one project or package within the context of a downstream project or package that uses it. By temporarily switching the installation to a "local" package, you can make changes to the former and immediately test them in the latter for quicker iteration. This is similar to [editable installs](https://pip.pypa.io/en/stable/topics/local-project-installs/) in Python.
+3. **Nested project** — When you have a nested project that defines fixtures and tests for a project of utility macros, like [the integration tests within the `dbt-utils` package](https://github.com/dbt-labs/dbt-utils/tree/main/integration_tests).
+

## What packages are available?

Check out [dbt Hub](https://hub.getdbt.com) to see the library of published dbt packages!

diff --git a/website/docs/docs/build/python-models.md b/website/docs/docs/build/python-models.md
index 12825648501..bff65362d06 100644
--- a/website/docs/docs/build/python-models.md
+++ b/website/docs/docs/build/python-models.md
@@ -16,11 +16,15 @@ We encourage you to:

dbt Python (`dbt-py`) models can help you solve use cases that can't be solved with SQL. You can perform analyses using tools available in the open-source Python ecosystem, including state-of-the-art packages for data science and statistics. Before, you would have needed separate infrastructure and orchestration to run Python transformations in production. Python transformations defined in dbt are models in your project with all the same capabilities around testing, documentation, and lineage.

+
Python models are supported in dbt Core 1.3 and higher. Learn more about [upgrading your version in dbt Cloud](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions) and [upgrading dbt Core versions](https://docs.getdbt.com/docs/core-versions#upgrading-to-new-patch-versions).

To read more about Python models, change the [docs version to 1.3](/docs/build/python-models?version=1.3) (or higher) in the menu bar.
+
+
@@ -711,3 +715,5 @@ You can also install packages at cluster creation time by [defining cluster prop
+
+
\ No newline at end of file

diff --git a/website/docs/docs/building-a-dbt-project/building-models/python-models.md b/website/docs/docs/building-a-dbt-project/building-models/python-models.md
deleted file mode 100644
index 1aab8ac7a92..00000000000
--- a/website/docs/docs/building-a-dbt-project/building-models/python-models.md
+++ /dev/null
@@ -1,719 +0,0 @@
----
-title: "Python models"
----
-
-:::info Brand new!
-
-dbt Core v1.3 included first-ever support for Python models.
Note that only [specific data platforms](#specific-data-platforms) support dbt-py models. - -We encourage you to: -- Read [the original discussion](https://github.com/dbt-labs/dbt-core/discussions/5261) that proposed this feature. -- Contribute to [best practices for developing Python models in dbt](https://discourse.getdbt.com/t/dbt-python-model-dbt-py-best-practices/5204 ). -- Weigh in on [next steps for Python models, beyond v1.3](https://github.com/dbt-labs/dbt-core/discussions/5742). -- Join the **#dbt-core-python-models** channel in the [dbt Community Slack](https://www.getdbt.com/community/join-the-community/). - -Below, you'll see sections entitled "❓ **Our questions**." We are excited to have released a first narrow set of functionality in v1.3, which will solve real use cases. We also know this is a first step into a much wider field of possibility. We don't pretend to have all the answers. We're excited to keep developing our opinionated recommendations and next steps for product development—and we want your help. Comment in the GitHub discussions; leave thoughts in Slack; bring up dbt + Python in casual conversation with colleagues and friends. -::: - -## About Python models in dbt - -dbt Python ("dbt-py") models will help you solve use cases that can't be solved with SQL. You can perform analyses using tools available in the open source Python ecosystem, including state-of-the-art packages for data science and statistics. Before, you would have needed separate infrastructure and orchestration to run Python transformations in production. By defining your Python transformations in dbt, they're just models in your project, with all the same capabilities around testing, documentation, and lineage. - - - -Python models are supported in dbt Core 1.3 and above. Learn more about [upgrading your version in dbt Cloud](https://docs.getdbt.com/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions) and [upgrading dbt Core versions](https://docs.getdbt.com/docs/core-versions#upgrading-to-new-patch-versions). - -To read more about Python models, change the docs version to 1.3 or higher in the menu above. - - - - - - - - -```python -import ... - -def model(dbt, session): - - my_sql_model_df = dbt.ref("my_sql_model") - - final_df = ... # stuff you can't write in SQL! - - return final_df -``` - - - - - -```yml -version: 2 - -models: - - name: my_python_model - - # Document within the same codebase - description: My transformation written in Python - - # Configure in ways that feel intuitive and familiar - config: - materialized: table - tags: ['python'] - - # Test the results of my Python transformation - columns: - - name: id - # Standard validation for 'grain' of Python results - tests: - - unique - - not_null - tests: - # Write your own validation logic (in SQL) for Python results - - [custom_generic_test](writing-custom-generic-tests) -``` - - - - - - -The prerequisites for dbt Python models include using an adapter for a data platform that supports a fully featured Python runtime. In a dbt Python model, all Python code is executed remotely on the platform. None of it is run by dbt locally. We believe in clearly separating _model definition_ from _model execution_. In this and many other ways, you'll find that dbt's approach to Python models mirrors its longstanding approach to modeling data in SQL. - -We've written this guide assuming that you have some familiarity with dbt. 
If you've never before written a dbt model, we encourage you to start by first reading [dbt Models](/docs/build/models). Throughout, we'll be drawing connections between Python models and SQL models, as well as making clear their differences. - -### What is a Python model? - -A dbt Python model is a function that reads in dbt sources or other models, applies a series of transformations, and returns a transformed dataset. DataFrame operations define the starting points, the end state, and each step along the way. - -This is similar to the role of CTEs in dbt SQL models. We use CTEs to pull in upstream datasets, define (and name) a series of meaningful transformations, and end with a final `select` statement. You can run the compiled version of a dbt SQL model to see the data included in the resulting view or table. When you `dbt run`, dbt wraps that query in `create view`, `create table`, or more complex DDL to save its results in the database. - -Instead of a final `select` statement, each Python model returns a final DataFrame. Each DataFrame operation is "lazily evaluated." In development, you can preview its data, using methods like `.show()` or `.head()`. When you run a Python model, the full result of the final DataFrame will be saved as a table in your data warehouse. - -dbt Python models have access to almost all of the same configuration options as SQL models. You can test them, document them, add `tags` and `meta` properties to them, grant access to their results to other users, and so on. You can select them by their name, their file path, their configurations, whether they are upstream or downstream of another model, or whether they have been modified compared to a previous project state. - -### Defining a Python model - -Each Python model lives in a `.py` file in your `models/` folder. It defines a function named **`model()`**, which takes two parameters: -- **`dbt`**: A class compiled by dbt Core, unique to each model, enables you to run your Python code in the context of your dbt project and DAG. -- **`session`**: A class representing your data platform’s connection to the Python backend. The session is needed to read in tables as DataFrames, and to write DataFrames back to tables. In PySpark, by convention, the `SparkSession` is named `spark`, and available globally. For consistency across platforms, we always pass it into the `model` function as an explicit argument called `session`. - -The `model()` function must return a single DataFrame. On Snowpark (Snowflake), this can be a Snowpark or pandas DataFrame. Via PySpark (Databricks + BigQuery), this can be a Spark, pandas, or pandas-on-Spark DataFrame. For more about choosing between pandas and native DataFrames, see [DataFrame API + syntax](#dataframe-api--syntax). - -When you `dbt run --select python_model`, dbt will prepare and pass in both arguments (`dbt` and `session`). All you have to do is define the function. This is how every single Python model should look: - - - -```python -def model(dbt, session): - - ... - - return final_df -``` - - - - -### Referencing other models - -Python models participate fully in dbt's directed acyclic graph (DAG) of transformations. Use the `dbt.ref()` method within a Python model to read in data from other models (SQL or Python). If you want to read directly from a raw source table, use `dbt.source()`. These methods return DataFrames pointing to the upstream source, model, seed, or snapshot. 
- - - -```python -def model(dbt, session): - - # DataFrame representing an upstream model - upstream_model = dbt.ref("upstream_model_name") - - # DataFrame representing an upstream source - upstream_source = dbt.source("upstream_source_name", "table_name") - - ... -``` - - - -Of course, you can `ref()` your Python model in downstream SQL models, too: - - - -```sql -with upstream_python_model as ( - - select * from {{ ref('my_python_model') }} - -), - -... -``` - - - -### Configuring Python models - -Just like SQL models, there are three ways to configure Python models: -1. In `dbt_project.yml`, where you can configure many models at once -2. In a dedicated `.yml` file, within the `models/` directory -3. Within the model's `.py` file, using the `dbt.config()` method - -Calling the `dbt.config()` method will set configurations for your model right within your `.py` file, similar to the `{{ config() }}` macro in `.sql` model files: - - - -```python -def model(dbt, session): - - # setting configuration - dbt.config(materialized="table") -``` - - - -There's a limit to how fancy you can get with the `dbt.config()` method. It accepts _only_ literal values (strings, booleans, and numeric types). Passing another function or a more complex data structure is not possible. The reason is that dbt statically analyzes the arguments to `config()` while parsing your model without executing your Python code. If you need to set a more complex configuration, we recommend you define it using the [`config` property](resource-properties/config) in a YAML file. - -#### Accessing project context - -dbt Python models don't use Jinja to render compiled code. Python models have limited access to global project contexts compared to SQL models. That context is made available from the `dbt` class, passed in as an argument to the `model()` function. - -Out of the box, the `dbt` class supports: -- Returning DataFrames referencing the locations of other resources: `dbt.ref()` + `dbt.source()` -- Accessing the database location of the current model: `dbt.this()` (also: `dbt.this.database`, `.schema`, `.identifier`) -- Determining if the current model's run is incremental: `dbt.is_incremental` - -It is possible to extend this context by "getting" them via `dbt.config.get()` after they are configured in the [model's config](/reference/model-configs). This includes inputs such as `var`, `env_var`, and `target`. If you want to use those values to power conditional logic in your model, we require setting them through a dedicated `.yml` file config: - - - -```yml -version: 2 - -models: - - name: my_python_model - config: - materialized: table - target_name: "{{ target.name }}" - specific_var: "{{ var('SPECIFIC_VAR') }}" - specific_env_var: "{{ env_var('SPECIFIC_ENV_VAR') }}" -``` - - - -Then, within the model's Python code, use the `dbt.config.get()` function to _access_ values of configurations that have been set: - - - -```python -def model(dbt, session): - target_name = dbt.config.get("target_name") - specific_var = dbt.config.get("specific_var") - specific_env_var = dbt.config.get("specific_env_var") - - orders_df = dbt.ref("fct_orders") - - # limit data in dev - if target_name == "dev": - orders_df = orders_df.limit(500) -``` - - - -### Materializations - -Python models support two materializations: -- `table` -- `incremental` - -Incremental Python models support all the same [incremental strategies](/docs/build/incremental-models#about-incremental_strategy) as their SQL counterparts. 
The specific strategies supported depend on your adapter. - -Python models can't be materialized as `view` or `ephemeral`. Python isn't supported for non-model resource types (like tests and snapshots). - -For incremental models, like SQL models, you will need to filter incoming tables to only new rows of data: - - - -
- - - -```python -import snowflake.snowpark.functions as F - -def model(dbt, session): - dbt.config( - materialized = "incremental", - unique_key = "id", - ) - df = dbt.ref("upstream_table") - - if dbt.is_incremental: - - # only new rows compared to max in current table - max_from_this = f"select max(updated_at) from {dbt.this}" - df = df.filter(df.updated_at > session.sql(max_from_this).collect()[0][0]) - - # or only rows from the past 3 days - df = df.filter(df.updated_at >= F.dateadd("day", F.lit(-3), F.current_timestamp())) - - ... - - return df -``` - - - -
- -
- - - -```python -import pyspark.sql.functions as F - -def model(dbt, session): - dbt.config( - materialized = "incremental", - unique_key = "id", - ) - df = dbt.ref("upstream_table") - - if dbt.is_incremental: - - # only new rows compared to max in current table - max_from_this = f"select max(updated_at) from {dbt.this}" - df = df.filter(df.updated_at > session.sql(max_from_this).collect()[0][0]) - - # or only rows from the past 3 days - df = df.filter(df.updated_at >= F.date_add(F.current_timestamp(), F.lit(-3))) - - ... - - return df -``` - - - -
- -
- -**Note:** Incremental models are supported on BigQuery/Dataproc for the `merge` incremental strategy. The `insert_overwrite` strategy is not yet supported. - -## Python-specific functionality - -### Defining functions - -In addition to defining a `model` function, the Python model can import other functions or define its own. Here's an example, on Snowpark, defining a custom `add_one` function: - - - -```python -def add_one(x): - return x + 1 - -def model(dbt, session): - dbt.config(materialized="table") - temps_df = dbt.ref("temperatures") - - # warm things up just a little - df = temps_df.withColumn("degree_plus_one", add_one(temps_df["degree"])) - return df -``` - - - -At present, Python functions defined in one dbt model can't be imported and reused in other models. See the ["Code reuse"](#code-reuse) section for the potential patterns we're considering. - -### Using PyPI packages - -You can also define functions that depend on third-party packages, so long as those packages are installed and available to the Python runtime on your data platform. See notes on "Installing Packages" for [specific data warehouses](#specific-data-warehouses). - -In this example, we use the `holidays` package to determine if a given date is a holiday in France. For simplicity and consistency across platforms, the code below uses the pandas API. The exact syntax, and the need to refactor for multi-node processing, still varies. - - - -
- - - -```python -import holidays - -def is_holiday(date_col): - # Chez Jaffle - french_holidays = holidays.France() - is_holiday = (date_col in french_holidays) - return is_holiday - -def model(dbt, session): - dbt.config( - materialized = "table", - packages = ["holidays"] - ) - - orders_df = dbt.ref("stg_orders") - - df = orders_df.to_pandas() - - # apply our function - # (columns need to be in uppercase on Snowpark) - df["IS_HOLIDAY"] = df["ORDER_DATE"].apply(is_holiday) - - # return final dataset (Pandas DataFrame) - return df -``` - - - -
- -
- - - -```python -import holidays - -def is_holiday(date_col): - # Chez Jaffle - french_holidays = holidays.France() - is_holiday = (date_col in french_holidays) - return is_holiday - -def model(dbt, session): - dbt.config( - materialized = "table", - packages = ["holidays"] - ) - - orders_df = dbt.ref("stg_orders") - - df = orders_df.to_pandas_on_spark() # Spark 3.2+ - # df = orders_df.toPandas() in earlier versions - - # apply our function - df["is_holiday"] = df["order_date"].apply(is_holiday) - - # convert back to PySpark - df = df.to_spark() # Spark 3.2+ - # df = session.createDataFrame(df) in earlier versions - - # return final dataset (PySpark DataFrame) - return df -``` - - - -
- -
- -#### Configuring packages - -We encourage you to explicitly configure required packages and versions so dbt can track them in project metadata. This configuration is required for the implementation on some platforms. If you need specific versions of packages, specify them. - - - -```python -def model(dbt, session): - dbt.config( - packages = ["numpy==1.23.1", "scikit-learn"] - ) -``` - - - - - -```yml -version: 2 - -models: - - name: my_python_model - config: - packages: - - "numpy==1.23.1" - - scikit-learn -``` - - - -#### UDFs - -You can use the `@udf` decorator or `udf` function to define an "anonymous" function and call it within your `model` function's DataFrame transformation. This is a typical pattern for applying more complex functions as DataFrame operations, especially if those functions require inputs from third-party packages. -- [Snowpark Python: Creating UDFs](https://docs.snowflake.com/en/developer-guide/snowpark/python/creating-udfs.html) -- [PySpark functions: udf](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.udf.html) - - - -
- - - -```python -import snowflake.snowpark.types as T -import snowflake.snowpark.functions as F -import numpy - -def register_udf_add_random(): - add_random = F.udf( - # use 'lambda' syntax, for simple functional behavior - lambda x: x + numpy.random.normal(), - return_type=T.FloatType(), - input_types=[T.FloatType()] - ) - return add_random - -def model(dbt, session): - - dbt.config( - materialized = "table", - packages = ["numpy"] - ) - - temps_df = dbt.ref("temperatures") - - add_random = register_udf_add_random() - - # warm things up, who knows by how much - df = temps_df.withColumn("degree_plus_random", add_random("degree")) - return df -``` - - - -**Note:** Due to a Snowpark limitation, it is not currently possible to register complex named UDFs within stored procedures, and therefore dbt Python models. We are looking to add native support for Python UDFs as a project/DAG resource type in a future release. For the time being, if you want to create a "vectorized" Python UDF via the Batch API, we recommend either: -- Writing [`create function`](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-batch.html) inside a SQL macro, to run as a hook or run-operation -- [Registering from a staged file](https://docs.snowflake.com/ko/developer-guide/snowpark/reference/python/_autosummary/snowflake.snowpark.udf.html#snowflake.snowpark.udf.UDFRegistration.register_from_file) within your Python model code - -
- -
-
-
-```python
-import pyspark.sql.types as T
-import pyspark.sql.functions as F
-import numpy
-
-# use a 'decorator' for more readable code
-@F.udf(returnType=T.DoubleType())
-def add_random(x):
-    random_number = numpy.random.normal()
-    return x + random_number
-
-def model(dbt, session):
-    dbt.config(
-        materialized = "table",
-        packages = ["numpy"]
-    )
-
-    temps_df = dbt.ref("temperatures")
-
-    # warm things up, who knows by how much
-    df = temps_df.withColumn("degree_plus_random", add_random("degree"))
-    return df
-```
-
- -
- -#### Code reuse - -Currently, you cannot import or reuse Python functions defined in one dbt model, in other models. This is something we'd like dbt to support. There are two patterns we're considering: -1. Creating and registering **"named" UDFs**. This process is different across data platforms and has some performance limitations. (Snowpark does support ["vectorized" UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-batch.html): pandas-like functions that you can execute in parallel.) -2. Using **private Python packages**. In addition to importing reusable functions from public PyPI packages, many data platforms support uploading custom Python assets and registering them as packages. The upload process looks different across platforms, but your code’s actual `import` looks the same. - -:::note ❓ Our questions - -- Should dbt have a role in abstracting over UDFs? Should dbt support a new type of DAG node, `function`? Would the primary use case be code reuse across Python models or defining Python-language functions that can be called from SQL models? -- How can dbt help users when uploading or initializing private Python assets? Is this a new form of `dbt deps`? -- How can dbt support users who want to test custom functions? If defined as UDFs: "unit testing" in the database? If "pure" functions in packages: encourage adoption of `pytest`? - -💬 Discussion: ["Python models: package, artifact/object storage, and UDF management in dbt"](https://github.com/dbt-labs/dbt-core/discussions/5741) -::: - -### DataFrame API and syntax - -Over the past decade, most people writing data transformations in Python have adopted DataFrame as their common abstraction. dbt follows this convention by returning `ref()` and `source()` as DataFrames, and it expects all Python models to return a DataFrame. - -A DataFrame is a two-dimensional data structure (rows and columns). It supports convenient methods for transforming that data, creating new columns from calculations performed on existing columns. It also offers convenient ways for previewing data while developing locally or in a notebook. - -That's about where the agreement ends. There are numerous frameworks with their own syntaxes and APIs for DataFrames. The [pandas](https://pandas.pydata.org/docs/) library offered one of the original DataFrame APIs, and its syntax is the most common to learn for new data professionals. Most newer DataFrame APIs are compatible with pandas-style syntax, though few can offer perfect interoperability. This is true for Snowpark and PySpark, which have their own DataFrame APIs. - -When developing a Python model, you will find yourself asking these questions: - -**Why pandas?** It's the most common API for DataFrames. It makes it easy to explore sampled data and develop transformations locally. You can “promote” your code as-is into dbt models and run it in production for small datasets. - -**Why _not_ pandas?** Performance. pandas runs "single-node" transformations, which cannot benefit from the parallelism and distributed computing offered by modern data warehouses. This quickly becomes a problem as you operate on larger datasets. Some data platforms support optimizations for code written using pandas' DataFrame API, preventing the need for major refactors. For example, ["pandas on PySpark"](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart_ps.html) offers support for 95% of pandas functionality, using the same API while still leveraging parallel processing. 
- -:::note ❓ Our questions -- When developing a new dbt Python model, should we recommend pandas-style syntax for rapid iteration and then refactor? -- Which open source libraries provide compelling abstractions across different data engines and vendor-specific APIs? -- Should dbt attempt to play a longer-term role in standardizing across them? - -💬 Discussion: ["Python models: the pandas problem (and a possible solution)"](https://github.com/dbt-labs/dbt-core/discussions/5738) -::: - -### Limitations - -Python models have capabilities that SQL models do not. They also have some drawbacks compared to SQL models: - -- **Time and cost.** Python models are slower to run than SQL models, and the cloud resources that run them can be more expensive. Running Python requires more general-purpose compute. That compute might sometimes live on a separate service or architecture from your SQL models. **However:** We believe that deploying Python models via dbt—with unified lineage, testing, and documentation—is, from a human standpoint, **dramatically** faster and cheaper. By comparison, spinning up separate infrastructure to orchestrate Python transformations in production and different tooling to integrate with dbt is much more time-consuming and expensive. -- **Syntax differences** are even more pronounced. Over the years, dbt has done a lot, via dispatch patterns and packages such as `dbt_utils`, to abstract over differences in SQL dialects across popular data warehouses. Python offers a **much** wider field of play. If there are five ways to do something in SQL, there are 500 ways to write it in Python, all with varying performance and adherence to standards. Those options can be overwhelming. As the maintainers of dbt, we will be learning from state-of-the-art projects tackling this problem and sharing guidance as we develop it. -- **These capabilities are very new.** As data warehouses develop new features, we expect them to offer cheaper, faster, and more intuitive mechanisms for deploying Python transformations. **We reserve the right to change the underlying implementation for executing Python models in future releases.** Our commitment to you is around the code in your model `.py` files, following the documented capabilities and guidance we're providing here. - -As a general rule, if there's a transformation you could write equally well in SQL or Python, we believe that well-written SQL is preferable: it's more accessible to a greater number of colleagues, and it's easier to write code that's performant at scale. If there's a transformation you _can't_ write in SQL, or where ten lines of elegant and well-annotated Python could save you 1000 lines of hard-to-read Jinja-SQL, Python is the way to go. - -## Specific data platforms - -In their initial launch, Python models are supported on three of the most popular data platforms: Snowflake, Databricks, and BigQuery/GCP (via Dataproc). Both Databricks and GCP's Dataproc use PySpark as the processing framework. Snowflake uses its own framework, Snowpark, which has many similarities to PySpark. - - - -
- -**Additional setup:** You will need to [acknowledge and accept Snowflake Third Party Terms](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-packages.html#getting-started) to use Anaconda packages. - -**Installing packages:** Snowpark supports several popular packages via Anaconda. The complete list is at https://repo.anaconda.com/pkgs/snowflake/. Packages are installed at the time your model is being run. Different models can have different package dependencies. If you are using third-party packages, Snowflake recommends using a dedicated virtual warehouse for best performance rather than one with many concurrent users. - -**About "sprocs":** dbt submits Python models to run as "stored procedures," which some people call "sprocs" for short. By default, dbt will create a named sproc containing your model's compiled Python code, and then "call" it to execute. Snowpark has a Private Preview feature for "temporary" or "anonymous" stored procedures ([docs](https://docs.snowflake.com/en/LIMITEDACCESS/call-with.html)), which are faster and leave a cleaner query history. If this feature is enabled for your account, you can switch it on for your models by configuring `use_anonymous_sproc: True`. We plan to switch this on for all dbt + Snowpark Python models in a future release. - - - -```yml -# I asked Snowflake Support to enable this Private Preview feature, -# and now my dbt-py models run even faster! -models: - use_anonymous_sproc: True -``` - - - -**Docs:** ["Developer Guide: Snowpark Python"](https://docs.snowflake.com/en/developer-guide/snowpark/python/index.html) - -
- -
- -**Submission methods:** Databricks supports a few different mechanisms to submit PySpark code, each with relative advantages. Some are better for supporting iterative development, while others are better for supporting lower-cost production deployments. The options are: -- `all_purpose_cluster` (default): dbt will run your Python model using the cluster ID configured as `cluster` in your connection profile or for this specific model. These clusters are more expensive but also much more responsive. We recommend using an interactive all-purpose cluster for quicker iteration in development. - - `create_notebook: True`: dbt will upload your model's compiled PySpark code to a notebook in the namespace `/Shared/dbt_python_model/{schema}`, where `{schema}` is the configured schema for the model, and execute that notebook to run using the all-purpose cluster. The appeal of this approach is that you can easily open the notebook in the Databricks UI for debugging or fine-tuning right after running your model. Remember to copy any changes into your dbt `.py` model code before re-running. - - `create_notebook: False` (default): dbt will use the [Command API](https://docs.databricks.com/dev-tools/api/1.2/index.html#run-a-command), which is slightly faster. -- `job_cluster`: dbt will upload your model's compiled PySpark code to a notebook in the namespace `/Shared/dbt_python_model/{schema}`, where `{schema}` is the configured schema for the model, and execute that notebook to run using a short-lived jobs cluster. For each Python model, Databricks will need to spin up the cluster, execute the model's PySpark transformation, and then spin down the cluster. As such, job clusters take longer before and after model execution, but they're also less expensive, so we recommend these for longer-running Python models in production. To use the `job_cluster` submission method, your model must be configured with `job_cluster_config`, which defines key-value properties for `new_cluster`, as defined in the [JobRunsSubmit API](https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunsSubmit). - -You can configure each model's `submission_method` in all the standard ways you supply configuration: - -```python -def model(dbt, session): - dbt.config( - submission_method="all_purpose_cluster", - create_notebook=True, - cluster_id="abcd-1234-wxyz" - ) - ... -``` -```yml -version: 2 -models: - - name: my_python_model - config: - submission_method: job_cluster - job_cluster_config: - spark_version: ... - node_type_id: ... -``` -```yml -# dbt_project.yml -models: - project_name: - subfolder: - # set defaults for all .py models defined in this subfolder - +submission_method: all_purpose_cluster - +create_notebook: False - +cluster_id: abcd-1234-wxyz -``` - -If not configured, `dbt-spark` will use the built-in defaults: the all-purpose cluster (based on `cluster` in your connection profile) without creating a notebook. The `dbt-databricks` adapter will default to the cluster configured in `http_path`. We encourage explicitly configuring the clusters for Python models in Databricks projects. - -**Installing packages:** When using all-purpose clusters, we recommend installing packages which you will be using to run your Python models. 
- -**Docs:** -- [PySpark DataFrame syntax](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.html) -- [Databricks: Introduction to DataFrames - Python](https://docs.databricks.com/spark/latest/dataframes-datasets/introduction-to-dataframes-python.html) - -
- -
- -The `dbt-bigquery` adapter uses a service called Dataproc to submit your Python models as PySpark jobs. That Python/PySpark code will read from your tables and views in BigQuery, perform all computation in Dataproc, and write the final result back to BigQuery. - -**Submission methods.** Dataproc supports two submission methods: `serverless` and `cluster`. Dataproc Serverless does not require a ready cluster, which saves on hassle and cost—but it is slower to start up, and much more limited in terms of available configuration. For example, Dataproc Serverless supports only a small set of Python packages, though it does include `pandas`, `numpy`, and `scikit-learn`. (See the full list [here](https://cloud.google.com/dataproc-serverless/docs/guides/custom-containers#example_custom_container_image_build), under "The following packages are installed in the default image"). Whereas, by creating a Dataproc Cluster in advance, you can fine-tune the cluster's configuration, install any PyPI packages you want, and benefit from faster, more responsive runtimes. - -Use the `cluster` submission method with dedicated Dataproc clusters you or your organization manage. Use the `serverless` submission method to avoid managing a Spark cluster. The latter may be quicker for getting started, but both are valid for production. - -**Additional setup:** -- Create or use an existing [Cloud Storage bucket](https://cloud.google.com/storage/docs/creating-buckets) -- Enable Dataproc APIs for your project + region -- If using the `cluster` submission method: Create or use an existing [Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) with the [Spark BigQuery connector initialization action](https://github.com/GoogleCloudDataproc/initialization-actions/tree/master/connectors#bigquery-connectors). (Google recommends copying the action into your own Cloud Storage bucket, rather than using the example version shown in the screenshot below.) - - - -The following configurations are needed to run Python models on Dataproc. You can add these to your [BigQuery profile](/reference/warehouse-setups/bigquery-setup#running-python-models-on-dataproc), or configure them on specific Python models: -- `gcs_bucket`: Storage bucket to which dbt will upload your model's compiled PySpark code. -- `dataproc_region`: GCP region in which you have enabled Dataproc (for example `us-central1`) -- `dataproc_cluster_name`: Name of Dataproc cluster to use for running Python model (executing PySpark job). Only required if `submission_method: cluster`. - -```python -def model(dbt, session): - dbt.config( - submission_method="cluster", - dataproc_cluster_name="my-favorite-cluster" - ) - ... -``` -```yml -version: 2 -models: - - name: my_python_model - config: - submission_method: serverless -``` - -Any user or service account that runs dbt Python models will need the following permissions, in addition to permissions needed for BigQuery ([docs](https://cloud.google.com/dataproc/docs/concepts/iam/iam)): -``` -dataproc.clusters.use -dataproc.jobs.create -dataproc.jobs.get -dataproc.operations.get -storage.buckets.get -storage.objects.create -storage.objects.delete -``` - -**Installing packages:** If you are using a Dataproc Cluster (as opposed to Dataproc Serverless), you can add third-party packages while creating the cluster. 
- -Google recommends installing Python packages on Dataproc clusters via initialization actions: -- [How initialization actions are used](https://github.com/GoogleCloudDataproc/initialization-actions/blob/master/README.md#how-initialization-actions-are-used) -- [Actions for installing via `pip` or `conda`](https://github.com/GoogleCloudDataproc/initialization-actions/tree/master/python) - -You can also install packages at cluster creation time by [defining cluster properties](https://cloud.google.com/dataproc/docs/tutorials/python-configuration#image_version_20): `dataproc:pip.packages` or `dataproc:conda.packages`. - - - -**Docs:** -- [Dataproc overview](https://cloud.google.com/dataproc/docs/concepts/overview) -- [PySpark DataFrame syntax](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.DataFrame.html) - -
diff --git a/website/docs/docs/cloud/manage-access/audit-log.md b/website/docs/docs/cloud/manage-access/audit-log.md
index 818ec553e7b..98bf660b259 100644
--- a/website/docs/docs/cloud/manage-access/audit-log.md
+++ b/website/docs/docs/cloud/manage-access/audit-log.md
@@ -16,13 +16,9 @@ The dbt Cloud audit log stores all the events that occurred in your organization

## Accessing the audit log

-To access audit log, click the gear icon in the top right, then click **Audit Log**.
+To access the audit log, click the gear icon in the top right, then click **Audit Log**.

## Understanding the audit log

@@ -161,19 +157,17 @@ The audit log supports various events for different objects in dbt Cloud. You wi

You can search the audit log to find a specific event or actor, which is limited to the ones listed in [Events in audit log](#events-in-audit-log). The audit log lists historical events from the last 90 days. You can search for an actor or event using the search bar, and then narrow your results using the time window.
## Exporting logs

You can use the audit log to export all historical audit results for security, compliance, and analysis purposes:

-- For events within 90 days — dbt Cloud will automatically display the 90 days selectable date range. Select **Export Selection** to download a CSV file of all the events that occurred in your organization within 90 days.
+- For events within 90 days — dbt Cloud will automatically display the 90-day selectable date range. Select **Export Selection** to download a CSV file of all the events that occurred in your organization within 90 days.
- For events beyond 90 days — Select **Export All**. The Account Admin will receive an email link to download a CSV file of all the events that occurred in your organization.

diff --git a/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md b/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md
index 6b68d440ba3..04dfbe093c3 100644
--- a/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md
+++ b/website/docs/docs/cloud/manage-access/cloud-seats-and-users.md
@@ -8,8 +8,8 @@ In dbt Cloud, _licenses_ are used to allocate users to your account. There are three different types of licenses in dbt Cloud:

- **Developer** — Granted access to the Deployment and [Development](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) functionality in dbt Cloud.
-- **Read-Only** — Intended to view the [artifacts](/docs/deploy/artifacts) created in a dbt Cloud account.
-- **IT** — Can manage users, groups, and licenses, among other permissions. Available on Enterprise and Team plans only.
+- **Read-Only** — Intended to view the [artifacts](/docs/deploy/artifacts) created in a dbt Cloud account. Read-Only users can receive job notifications but not configure them.
+- **IT** — Can manage users, groups, and licenses, among other permissions. IT users can receive job notifications but not configure them. Available on Enterprise and Team plans only.

The user's assigned license determines the specific capabilities they can access in dbt Cloud.

@@ -21,8 +21,8 @@ The user's assigned license determines the specific capabilities they can access
| API Access | ✅ | ❌ | ❌ |
| Use [Source Freshness](/docs/deploy/source-freshness) | ✅ | ✅ | ❌ |
| Use [Docs](/docs/collaborate/build-and-view-your-docs) | ✅ | ✅ | ❌ |
-| Receive [Job notifications](/docs/deploy/job-notifications) | ✅ | ✅ | ✅ |
-*Available on Enterprise and Team plans only and doesn't count toward seat usage. Please note, IT seats are limited to 1 seat per Team or Enterprise account.
+| Receive [Job notifications](/docs/deploy/job-notifications) | ✅ | ✅ | ✅ |
+*Available on Enterprise and Team plans only and doesn't count toward seat usage. Please note that IT seats are limited to 1 seat per Team or Enterprise account.

## Licenses

diff --git a/website/docs/docs/collaborate/govern/model-contracts.md b/website/docs/docs/collaborate/govern/model-contracts.md
index 912774877fc..442a20df1b6 100644
--- a/website/docs/docs/collaborate/govern/model-contracts.md
+++ b/website/docs/docs/collaborate/govern/model-contracts.md
@@ -192,11 +192,12 @@ In some cases, you can replace a test with its equivalent constraint. This has t

**Why aren't tests part of the contract?** In a parallel for software APIs, the structure of the API response is the contract. Quality and reliability ("uptime") are also very important attributes of an API's quality, but they are not part of the contract per se.
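To make the analogy concrete, here is a minimal sketch of a contracted model's yaml (the model and column names are invented for illustration). The declared column names, data types, and constraints are the "API response structure" that downstream consumers can rely on, and the `not_null` constraint stands in for the equivalent test:

```yaml
models:
  - name: dim_customers          # a hypothetical contracted model
    config:
      contract:
        enforced: true           # dbt verifies names, data types, and constraints at build time
    columns:
      - name: customer_id
        data_type: int
        constraints:
          - type: not_null       # replaces the equivalent not_null test
      - name: customer_name
        data_type: string
```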
When the contract changes in a backwards-incompatible way, it is a breaking change that requires a bump in major version.

-### Can I define a "partial" contract?
+### Do I need to define every column for a contract?

Currently, dbt contracts apply to **all** columns defined in a model, and they require declaring explicit expectations about **all** of those columns. The explicit declaration of a contract is not an accident—it's very much the intent of this feature.

-We are investigating the feasibility of supporting "inferred" or "partial" contracts in the future. This would enable you to define constraints and strict data typing for a subset of columns, while still detecting breaking changes on other columns by comparing against the same model in production. If you're interested, please upvote or comment on [dbt-core#7432](https://github.com/dbt-labs/dbt-core/issues/7432).
+At the same time, for models with many columns, we understand that this can mean a _lot_ of yaml. We are investigating the feasibility of supporting "inferred" contracts. This would enable you to define constraints and strict data typing for a subset of columns, while still detecting breaking changes on other columns by comparing against the same model in production. This isn't the same as a "partial" contract, because all columns in the model are still checked at runtime, and matched up with what's defined _explicitly_ in your yaml contract or _implicitly_ with the comparison state. If you're interested in "inferred" contracts, please upvote or comment on [dbt-core#7432](https://github.com/dbt-labs/dbt-core/issues/7432).
+

### How are breaking changes handled?

@@ -207,4 +208,5 @@
Breaking changes include:
- Changing the `data_type` of an existing column
- Removing or modifying one of the `constraints` on an existing column (dbt v1.6 or higher)

-More details are available in the [contract reference](/reference/resource-configs/contract#detecting-breaking-changes).
\ No newline at end of file
+More details are available in the [contract reference](/reference/resource-configs/contract#detecting-breaking-changes).
+

diff --git a/website/docs/docs/core/connect-data-platform/bigquery-setup.md b/website/docs/docs/core/connect-data-platform/bigquery-setup.md
index ad056ab46b1..6b5bac53600 100644
--- a/website/docs/docs/core/connect-data-platform/bigquery-setup.md
+++ b/website/docs/docs/core/connect-data-platform/bigquery-setup.md
@@ -323,56 +323,6 @@ my-profile:
-
-
-BigQuery supports query timeouts. By default, the timeout is set to 300 seconds. If a dbt model takes longer than this timeout to complete, then BigQuery may cancel the query and issue the following error:
-
-```
- Operation did not complete within the designated timeout.
-```
-
-To change this timeout, use the `timeout_seconds` configuration:
-
-
-
-```yaml
-my-profile:
-  target: dev
-  outputs:
-    dev:
-      type: bigquery
-      method: oauth
-      project: abc-123
-      dataset: my_dataset
-      timeout_seconds: 600 # 10 minutes
-```
-
-
-
-The `retries` profile configuration designates the number of times dbt should retry queries that result in unhandled server errors. This configuration is only specified for BigQuery targets. Example:
-
-
-
-```yaml
-# This example target will retry BigQuery queries 5
-# times with a delay. If the query does not succeed
-# after the fifth attempt, then dbt will raise an error
-
-my-profile:
-  target: dev
-  outputs:
-    dev:
-      type: bigquery
-      method: oauth
-      project: abc-123
-      dataset: my_dataset
-      retries: 5
-```
-
-
-
-
-
 ### Dataset locations

 The location of BigQuery datasets can be configured using the `location` configuration in a BigQuery profile.
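The next hunks edit spark-setup.md, whose "Server side configuration" section covers passing Spark Application Properties. As a companion illustration, here is a minimal sketch of a Spark profile that sets such a property. It assumes the `server_side_parameters` profile key available in recent dbt-spark releases; the property name and value are illustrative only:

```yaml
# Hypothetical profiles.yml sketch; server_side_parameters is assumed to be
# supported by the installed dbt-spark version.
your_profile_name:
  target: dev
  outputs:
    dev:
      type: spark
      method: thrift
      host: 127.0.0.1
      port: 10001
      schema: analytics
      connect_retries: 3
      server_side_parameters:
        # Spark Application Property passed to the session (illustrative value)
        "spark.driver.memory": "4g"
```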
diff --git a/website/docs/docs/core/connect-data-platform/spark-setup.md b/website/docs/docs/core/connect-data-platform/spark-setup.md
index 7c7ac15204b..b22416fd3a5 100644
--- a/website/docs/docs/core/connect-data-platform/spark-setup.md
+++ b/website/docs/docs/core/connect-data-platform/spark-setup.md
@@ -211,8 +211,6 @@ your_profile_name:
-
-
 ## Optional configurations

 ### Retries
@@ -231,13 +229,14 @@ connect_retries: 3
-
+
 ### Server side configuration

 Spark can be customized using [Application Properties](https://spark.apache.org/docs/latest/configuration.html). Using these properties, the execution can be customized, for example, to allocate more memory to the driver process. The Spark SQL runtime can also be set through these properties, which allows the user to [set a Spark catalog](https://spark.apache.org/docs/latest/configuration.html#spark-sql).
+
 ## Caveats

 ### Usage with EMR
diff --git a/website/docs/docs/deploy/job-notifications.md b/website/docs/docs/deploy/job-notifications.md
index 72725a1e460..8d242abac78 100644
--- a/website/docs/docs/deploy/job-notifications.md
+++ b/website/docs/docs/deploy/job-notifications.md
@@ -9,10 +9,10 @@ Setting up notifications in dbt Cloud will allow you to receive alerts via Email

 ### Email

-These are the following options for setting up email notifications:
+The following options are available for setting up email notifications. Refer to [Users and licenses](/docs/cloud/manage-access/seats-and-users) for info on license types eligible for email notifications.

-- As a **user** — You can set up email notifications for yourself under your Profile.
-- As an **admin** — You can set up notifications on behalf of your team members. Refer to [Users and licenses](/docs/cloud/manage-access/seats-and-users) for info on license types eligible for email notifications.
+- As a **user** — You can set up email notifications for yourself under your Profile.
+- As an **admin** — You can set up notifications on behalf of your team members.

 To set up job notifications, follow these steps:
diff --git a/website/docs/faqs/Core/install-python-compatibility.md b/website/docs/faqs/Core/install-python-compatibility.md
index d24466f4990..4d6066d931b 100644
--- a/website/docs/faqs/Core/install-python-compatibility.md
+++ b/website/docs/faqs/Core/install-python-compatibility.md
@@ -23,12 +23,6 @@ The latest version of `dbt-core` is compatible with Python versions 3.7, 3.8, 3.
-
-
-As of v1.0, `dbt-core` is compatible with Python versions 3.7, 3.8, and 3.9.
-
-

 Adapter plugins and their dependencies are not always compatible with the latest version of Python. For example, dbt-snowflake v0.19 is not compatible with Python 3.9, but dbt-snowflake versions 0.20+ are.

 New dbt minor versions will add support for new Python3 minor versions as soon as all dependencies can support it. In turn, dbt minor versions will drop support for old Python3 minor versions right before they reach [end of life](https://endoflife.date/python).
diff --git a/website/docs/guides/best-practices/how-we-build-our-metrics/semantic-layer-3-build-semantic-models.md b/website/docs/guides/best-practices/how-we-build-our-metrics/semantic-layer-3-build-semantic-models.md
index 2c2122572b8..a2dc55e37ae 100644
--- a/website/docs/guides/best-practices/how-we-build-our-metrics/semantic-layer-3-build-semantic-models.md
+++ b/website/docs/guides/best-practices/how-we-build-our-metrics/semantic-layer-3-build-semantic-models.md
@@ -148,7 +148,9 @@ from source
 ```YAML
 dimensions:
-  - name: date_trunc('day', ordered_at)
+  - name: ordered_at
+    expr: date_trunc('day', ordered_at)
+    # use date_trunc(ordered_at, DAY) if using BigQuery
     type: time
     type_params:
       time_granularity: day
@@ -166,7 +168,9 @@ We'll discuss an alternate situation, dimensional tables that have static numeri
 ```YAML
 ...
 dimensions:
-  - name: date_trunc('day', ordered_at)
+  - name: ordered_at
+    expr: date_trunc('day', ordered_at)
+    # use date_trunc(ordered_at, DAY) if using BigQuery
     type: time
     type_params:
       time_granularity: day
@@ -254,6 +258,8 @@ semantic_models:
     dimensions:
       - name: ordered_at
+        expr: date_trunc('day', ordered_at)
+        # use date_trunc(ordered_at, DAY) if using BigQuery
         type: time
         type_params:
           time_granularity: day
diff --git a/website/docs/guides/legacy/best-practices.md b/website/docs/guides/legacy/best-practices.md
index 018d48ba181..10e02271518 100644
--- a/website/docs/guides/legacy/best-practices.md
+++ b/website/docs/guides/legacy/best-practices.md
@@ -159,12 +159,6 @@ dbt test --select result:fail --exclude --defer --state path/to/p

 > Note: If you're using the `--state target/` flag, `result:error` and `result:fail` flags can only be selected concurrently (in the same command) if using the `dbt build` command. `dbt test` will overwrite the `run_results.json` from `dbt run` in a previous command invocation.

-
-
-Only supported by v1.1 or newer.
-
-
-
 Only supported by v1.1 or newer.
diff --git a/website/docs/reference/dbt-jinja-functions/ref.md b/website/docs/reference/dbt-jinja-functions/ref.md
index b9b14bed42a..6df06a2f415 100644
--- a/website/docs/reference/dbt-jinja-functions/ref.md
+++ b/website/docs/reference/dbt-jinja-functions/ref.md
@@ -69,7 +69,7 @@ select * from {{ ref('model_name') }}

 ### Two-argument variant

-There is also a two-argument variant of the `ref` function. With this variant, you can pass both a namespace (project or package) and model name to `ref` to avoid ambiguity.
+You can also use a two-argument variant of the `ref` function. With this variant, you can pass both a namespace (project or package) and model name to `ref` to avoid ambiguity. When using two arguments with projects (not packages), you also need to set [cross project dependencies](/docs/collaborate/govern/project-dependencies).
 ```sql
 select * from {{ ref('project_or_package', 'model_name') }}
 ```
diff --git a/website/docs/reference/node-selection/methods.md b/website/docs/reference/node-selection/methods.md
index ff86d60c06a..ca66b00044f 100644
--- a/website/docs/reference/node-selection/methods.md
+++ b/website/docs/reference/node-selection/methods.md
@@ -252,11 +252,6 @@ $ dbt seed --select result:error --state path/to/artifacts # run all seeds that
 ```

 ### The "source_status" method
-
-
-Supported in v1.1 or newer.
-
-
diff --git a/website/docs/reference/node-selection/syntax.md b/website/docs/reference/node-selection/syntax.md
index 1a43a32e2bc..a60d23cd16f 100644
--- a/website/docs/reference/node-selection/syntax.md
+++ b/website/docs/reference/node-selection/syntax.md
@@ -174,12 +174,6 @@ $ dbt run --select result:+ state:modified+ --defer --state ./
-
-Only supported by v1.1 or newer.
-
-
-
 Only supported by v1.1 or newer.
@@ -199,11 +193,6 @@ dbt build --select source_status:fresher+

 For more example commands, refer to [Pro-tips for workflows](/guides/legacy/best-practices.md#pro-tips-for-workflows).

 ### The "source_status" status
-
-
-Only supported by v1.1 or newer.
-
-
diff --git a/website/docs/reference/resource-configs/databricks-configs.md b/website/docs/reference/resource-configs/databricks-configs.md
index dc7f0cd53e3..31dcc827741 100644
--- a/website/docs/reference/resource-configs/databricks-configs.md
+++ b/website/docs/reference/resource-configs/databricks-configs.md
@@ -121,7 +121,7 @@ select
 date_day,
 count(*) as users
-from events
+from new_events
 group by 1
 ```
diff --git a/website/docs/reference/resource-configs/grants.md b/website/docs/reference/resource-configs/grants.md
index 68d1e6eb14e..3a65672fa5e 100644
--- a/website/docs/reference/resource-configs/grants.md
+++ b/website/docs/reference/resource-configs/grants.md
@@ -243,6 +243,7 @@ models:
 - Databricks automatically enables `grants` on SQL endpoints. For interactive clusters, admins should enable grant functionality using these two setup steps in the Databricks documentation:
   - [Enable table access control for your workspace](https://docs.databricks.com/administration-guide/access-control/table-acl.html)
   - [Enable table access control for a cluster](https://docs.databricks.com/security/access-control/table-acls/table-acl.html)
+- To grant `READ_METADATA` or `USAGE`, use [post-hooks](https://docs.getdbt.com/reference/resource-configs/pre-hook-post-hook).
diff --git a/website/docs/reference/resource-configs/persist_docs.md b/website/docs/reference/resource-configs/persist_docs.md
index 6facf3945cb..7134972d2ca 100644
--- a/website/docs/reference/resource-configs/persist_docs.md
+++ b/website/docs/reference/resource-configs/persist_docs.md
@@ -151,7 +151,7 @@ Some known issues and limitations:
-
+
 - Column names that must be quoted, such as column names containing special characters, will cause runtime errors if column-level `persist_docs` is enabled. This is fixed in v1.2.
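Two short sketches for the notes above. First, a minimal example of enabling `persist_docs` at the project level; the project name is a placeholder, and column-level persistence is subject to the quoting limitation just noted:

```yaml
# dbt_project.yml sketch; my_project is a hypothetical project name
models:
  my_project:
    +persist_docs:
      relation: true
      columns: true # subject to the column-quoting limitation before v1.2
```

Second, for the grants note above, a hedged sketch of issuing `READ_METADATA` through a post-hook on Databricks; the grant statement and principal name are illustrative, not a pattern confirmed by these docs:

```yaml
# dbt_project.yml sketch; the principal `data-consumers` is hypothetical
models:
  my_project:
    +post-hook:
      - "grant READ_METADATA on table {{ this }} to `data-consumers`"
```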
diff --git a/website/docs/reference/resource-properties/config.md b/website/docs/reference/resource-properties/config.md
index 32143c1da07..1d3a2de6592 100644
--- a/website/docs/reference/resource-properties/config.md
+++ b/website/docs/reference/resource-properties/config.md
@@ -108,12 +108,6 @@ version: 2
-
-
-We have added support for the `config` property on sources in dbt Core v1.1
-
-
diff --git a/website/docs/reference/source-configs.md b/website/docs/reference/source-configs.md
index 1ee2fe1daba..43b9bfbff6b 100644
--- a/website/docs/reference/source-configs.md
+++ b/website/docs/reference/source-configs.md
@@ -71,14 +71,6 @@ Sources can be configured via a `config:` block within their `.yml` definitions,
-
-
-Sources can be configured from the `dbt_project.yml` file under the `sources:` key. This configuration is most useful for configuring sources imported from [a package](package-management). You can disable sources imported from a package to prevent them from rendering in the documentation, or to prevent [source freshness checks](/docs/build/sources#snapshotting-source-data-freshness) from running on source tables imported from packages.
-
-Unlike other resource types, sources do not yet support a `config` property. It is not possible to (re)define source configs hierarchically across multiple YAML files.
-
-
-
 ### Examples
 #### Disable all sources imported from a package
 To apply a configuration to all sources included from a [package](/docs/build/packages),
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js
index 0cc6299ed39..d4115a700cc 100644
--- a/website/docusaurus.config.js
+++ b/website/docusaurus.config.js
@@ -47,6 +47,9 @@ var siteSettings = {
   onBrokenLinks: "throw",
   onBrokenMarkdownLinks: "throw",
   trailingSlash: false,
+  customFields: {
+    isVercel: process.env.REACT_APP_VERCEL
+  },
   themeConfig: {
     docs:{
       sidebar: {
diff --git a/website/functions/image-cache-wrapper.js b/website/functions/image-cache-wrapper.js
new file mode 100644
index 00000000000..84f85c12a17
--- /dev/null
+++ b/website/functions/image-cache-wrapper.js
@@ -0,0 +1,18 @@
+// This function is used to break the cache on images
+// preventing stale or broken images from being served
+
+import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
+
+const CACHE_VERSION = '2'
+
+export default function imageCacheWrapper(src) {
+  const { siteConfig: {customFields} } = useDocusaurusContext();
+
+  const cacheParam = customFields?.isVercel === '1'
+    ? `?v=${CACHE_VERSION}`
+    : ``
+
+  return (
+    src + cacheParam
+  )
+}
diff --git a/website/snippets/core-versions-table.md b/website/snippets/core-versions-table.md
index fb2e2a5d60e..5832f9f14c3 100644
--- a/website/snippets/core-versions-table.md
+++ b/website/snippets/core-versions-table.md
@@ -17,7 +17,6 @@ _Future release dates are tentative and subject to change._
 | dbt Core | Planned Release | Critical & dbt Cloud Support Until |
 |----------|-----------------|-------------------------------------|
-| **v1.6** | _July 2023_ | _July 2024_ |
 | **v1.7** | _Oct 2023_ | _Oct 2024_ |
 | **v1.8** | _Jan 2024_ | _Jan 2025_ |
 | **v1.9** | _Apr 2024_ | _Apr 2025_ |
diff --git a/website/src/components/author/index.js b/website/src/components/author/index.js
index a8b7ad7c0ef..6b49295936d 100644
--- a/website/src/components/author/index.js
+++ b/website/src/components/author/index.js
@@ -4,6 +4,7 @@ import Link from '@docusaurus/Link';
 import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
 import BlogLayout from '@theme/BlogLayout';
 import getAllPosts from '../../utils/get-all-posts';
+import imageCacheWrapper from '../../../functions/image-cache-wrapper';

 function Author(props) {
   const { authorData } = props
@@ -38,7 +39,7 @@ function Author(props) {
 itemType="http://schema.org/Person">
- {name} + {name}

{name}

diff --git a/website/src/components/communitySpotlightCard/index.js b/website/src/components/communitySpotlightCard/index.js index 06b77aa2be2..08707a93dd4 100644 --- a/website/src/components/communitySpotlightCard/index.js +++ b/website/src/components/communitySpotlightCard/index.js @@ -1,6 +1,7 @@ import React from 'react' import Link from '@docusaurus/Link'; import styles from './styles.module.css'; +import imageCacheWrapper from '../../../functions/image-cache-wrapper'; const SpotlightWrapper = ({ isSpotlightMember, frontMatter, children }) => { return isSpotlightMember ? ( @@ -55,13 +56,13 @@ function CommunitySpotlightCard({ frontMatter, isSpotlightMember = false }) {
{id && isSpotlightMember ? ( {title} ) : ( {title} diff --git a/website/src/components/discourse/index.js b/website/src/components/discourse/index.js index 18e4d3e7254..759903a175f 100644 --- a/website/src/components/discourse/index.js +++ b/website/src/components/discourse/index.js @@ -1,6 +1,7 @@ import React, { useState, useEffect } from 'react' import axios from 'axios' import feedStyles from './styles.module.css'; +import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; // Bare component with no default props set export const DiscourseFeed = ({ @@ -24,6 +25,8 @@ export const DiscourseFeed = ({ styles = {} }) => { + const { siteConfig: {customFields} } = useDocusaurusContext(); + const [topics, setTopics] = useState([]) const [loading, setLoading] = useState(true) const [isError, setIsError] = useState(false) @@ -38,10 +41,10 @@ export const DiscourseFeed = ({ setLoading(true) setIsError(false) - // Build Netlify Function endpoint - const endpoint = window?.location?.hostname?.includes('localhost') - ? 'http://localhost:8888/.netlify/functions/get-discourse-topics' - : '/.netlify/functions/get-discourse-topics' + // Build function endpoint + const endpoint = customFields?.isVercel === '1' + ? `/api/get-discourse-topics` + : `/.netlify/functions/get-discourse-topics` // If 'after' prop not passed in, set relative after date let afterDate = after diff --git a/website/src/components/discourseBlogComments/index.js b/website/src/components/discourseBlogComments/index.js index 63279285f2a..091f1047cb7 100644 --- a/website/src/components/discourseBlogComments/index.js +++ b/website/src/components/discourseBlogComments/index.js @@ -2,9 +2,12 @@ import React, { useState, useEffect } from 'react' import styles from './styles.module.css' import axios from 'axios' import sanitizeHtml from 'sanitize-html'; +import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; export const DiscourseBlogComments = ({title,slug}) => { + const { siteConfig: {customFields} } = useDocusaurusContext(); + const DISCOURSE_TOPIC_ENDPOINT = `https://discourse.getdbt.com/t/` const commentsToLoad = 6 @@ -28,10 +31,10 @@ export const DiscourseBlogComments = ({title,slug}) => { const fetchData = async () => { try { - const endpoint = window?.location?.hostname?.includes('localhost') - ? `http://localhost:8888/.netlify/functions/get-discourse-comments?title=${title}&slug=${slug}` - : `/.netlify/functions/get-discourse-comments?title=${title}&slug=${slug}` - + const endpoint = customFields?.isVercel === '1' + ? `/api/get-discourse-comments?title=${title}&slug=${slug}` + : `/.netlify/functions/get-discourse-comments?title=${title}&slug=${slug}` + const { data } = await axios.get(endpoint) // Set error state if data not available diff --git a/website/src/components/lightbox/index.js b/website/src/components/lightbox/index.js index b4c2da3c905..1c748bbb04f 100644 --- a/website/src/components/lightbox/index.js +++ b/website/src/components/lightbox/index.js @@ -1,5 +1,6 @@ import React from 'react'; import styles from './styles.module.css'; +import imageCacheWrapper from '../../../functions/image-cache-wrapper'; function Lightbox({ src, @@ -35,7 +36,7 @@ function Lightbox({ data-toggle="lightbox" alt={alt ? alt : title ? title : ''} title={title ? 
title : ''} - src={src} + src={imageCacheWrapper(src)} /> diff --git a/website/src/theme/BlogPostItem/Header/Author/index.js b/website/src/theme/BlogPostItem/Header/Author/index.js index a37d9e9985a..f82428df789 100644 --- a/website/src/theme/BlogPostItem/Header/Author/index.js +++ b/website/src/theme/BlogPostItem/Header/Author/index.js @@ -1,6 +1,7 @@ import React from 'react'; import clsx from 'clsx'; import Link from '@docusaurus/Link'; +import imageCacheWrapper from '../../../../../functions/image-cache-wrapper'; function MaybeLink(props) { if (props.href || props.slug) { return ; @@ -21,7 +22,7 @@ export default function BlogPostItemHeaderAuthor({author, className}) {
{imageURL && ( - {name} + {name} )} diff --git a/website/vercel.json b/website/vercel.json new file mode 100644 index 00000000000..c5fb0638fba --- /dev/null +++ b/website/vercel.json @@ -0,0 +1,3996 @@ +{ + "cleanUrls": true, + "trailingSlash": false, + "redirects": [ + { + "source": "/docs/deploy/job-triggers", + "destination": "/docs/deploy/deploy-jobs", + "permanent": true + }, + { + "source": "/docs/deploy/job-settings", + "destination": "/docs/deploy/deploy-jobs", + "permanent": true + }, + { + "source": "/docs/deploy/dbt-cloud-job", + "destination": "/docs/deploy/deploy-jobs", + "permanent": true + }, + { + "source": "/faqs/environments/beta-release", + "destination": "/docs/dbt-versions/product-lifecycles", + "permanent": true + }, + { + "source": "/docs/deploy/slim-ci-jobs", + "destination": "/docs/deploy/ci-jobs", + "permanent": true + }, + { + "source": "/guides/dbt-ecosystem/sl-partner-integration-guide", + "destination": "/docs/use-dbt-semantic-layer/avail-sl-integrations", + "permanent": true + }, + { + "source": "/docs/use-dbt-semantic-layer/dbt-semantic-layer", + "destination": "/docs/use-dbt-semantic-layer/dbt-sl", + "permanent": true + }, + { + "source": "/docs/use-dbt-semantic-layer/set-up-semantic-layer", + "destination": "/docs/use-dbt-semantic-layer/setup-sl", + "permanent": true + }, + { + "source": "/docs/use-dbt-semantic-layer/setup-dbt-semantic-layer", + "destination": "/docs/use-dbt-semantic-layer/setup-sl", + "permanent": true + }, + { + "source": "/docs/use-dbt-semantic-layer/quickstart-semantic-layer", + "destination": "/docs/use-dbt-semantic-layer/quickstart-sl", + "permanent": true + }, + { + "source": "/docs/collaborate/environments/environments-in-dbt", + "destination": "/docs/environments-in-dbt", + "permanent": true + }, + { + "source": "/docs/collaborate/environments/dbt-cloud-environments", + "destination": "/docs/deploy/dbt-cloud-environments", + "permanent": true + }, + { + "source": "/docs/collaborate/environments/dbt-core-environments", + "destination": "/docs/core/dbt-core-environments", + "permanent": true + }, + { + "source": "/docs/cloud/manage-access/licenses-and-groups", + "destination": "/docs/cloud/manage-access/about-user-access", + "permanent": true + }, + { + "source": "/docs/deploy/cloud-ci-job", + "destination": "/docs/deploy/continuous-integration", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/bigquery", + "destination": "/quickstarts/bigquery", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/databricks", + "destination": "/quickstarts/databricks", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/redshift", + "destination": "/quickstarts/redshift", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/snowflake", + "destination": "/quickstarts/snowflake", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/starburst-galaxy", + "destination": "/quickstarts/starburst-galaxy", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-core/codespace", + "destination": "/quickstarts/codespace", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-core/manual-install", + "destination": "/quickstarts/manual-install", + "permanent": true + }, + { + "source": "/docs/deploy/project-state", + "destination": "/reference/node-selection/syntax", + "permanent": true + }, + { + "source": "/reference/global-configs", + "destination": "/reference/global-configs/about-global-configs", + "permanent": true + }, + { + "source": 
"/docs/quickstarts/overview", + "destination": "/quickstarts", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#verified-adapters", + "destination": "/docs/supported-data-platforms", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#community-adapters", + "destination": "/docs/community-adapters", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#adapter-installation", + "destination": "/docs/connect-adapters", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#adapter-taxonomy", + "destination": "/docs/supported-data-platforms", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#verified-by-dbt-labs", + "destination": "/docs/supported-data-platforms", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#maintainers", + "destination": "/docs/connect-adapters#maintainers", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#contributing-to-dbt-core-adapters", + "destination": "/docs/contribute-core-adapters", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#contributing-to-a-pre-existing-adapter", + "destination": "/docs/contribute-core-adapters#contribute-to-a-pre-existing-adapter", + "permanent": true + }, + { + "source": "/docs/supported-data-platforms#creating-a-new-adapter", + "destination": "/docs/contribute-core-adapters#create-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/core/connection-profiles", + "destination": "/docs/core/connect-data-platform/connection-profiles", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/bigquery-setup", + "destination": "/docs/core/connect-data-platform/bigquery-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/postgres-setup", + "destination": "/docs/core/connect-data-platform/postgres-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/redshift-setup", + "destination": "/docs/core/connect-data-platform/redshift-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/snowflake-setup", + "destination": "/docs/core/connect-data-platform/snowflake-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/mssql-setup", + "destination": "/docs/core/connect-data-platform/mssql-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/trino-setup", + "destination": "/docs/core/connect-data-platform/trino-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/singlestore-setup", + "destination": "/docs/core/connect-data-platform/singlestore-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/spark-setup", + "destination": "/docs/core/connect-data-platform/spark-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/databricks-setup", + "destination": "/docs/core/connect-data-platform/databricks-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/hive-setup", + "destination": "/docs/core/connect-data-platform/hive-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/exasol-setup", + "destination": "/docs/core/connect-data-platform/exasol-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/oracle-setup", + "destination": "/docs/core/connect-data-platform/oracle-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/azuresynapse-setup", + "destination": 
"/docs/core/connect-data-platform/azuresynapse-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/dremio-setup", + "destination": "/docs/core/connect-data-platform/dremio-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/clickhouse-setup", + "destination": "/docs/core/connect-data-platform/clickhouse-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/materialize-setup", + "destination": "/docs/core/connect-data-platform/materialize-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/rockset-setup", + "destination": "/docs/core/connect-data-platform/rockset-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/firebolt-setup", + "destination": "/docs/core/connect-data-platform/firebolt-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/teradata-setup", + "destination": "/docs/core/connect-data-platform/teradata-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/athena-setup", + "destination": "/docs/core/connect-data-platform/athena-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/vertica-setup", + "destination": "/docs/core/connect-data-platform/vertica-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/tidb-setup", + "destination": "/docs/core/connect-data-platform/tidb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/glue-setup", + "destination": "/docs/core/connect-data-platform/glue-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/mindsdb-setup", + "destination": "/docs/core/connect-data-platform/mindsdb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/greenplum-setup", + "destination": "/docs/core/connect-data-platform/greenplum-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/impala-setup", + "destination": "/docs/core/connect-data-platform/impala-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/layer-setup", + "destination": "/docs/core/connect-data-platform/layer-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/iomete-setup", + "destination": "/docs/core/connect-data-platform/iomete-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/duckdb-setup", + "destination": "/docs/core/connect-data-platform/duckdb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/sqlite-setup", + "destination": "/docs/core/connect-data-platform/sqlite-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/mysql-setup", + "destination": "/docs/core/connect-data-platform/mysql-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/ibmdb2-setup", + "destination": "/docs/core/connect-data-platform/ibmdb2-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/alloydb-setup", + "destination": "/docs/core/connect-data-platform/alloydb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/doris-setup", + "destination": "/docs/core/connect-data-platform/doris-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/infer-setup", + "destination": "/docs/core/connect-data-platform/infer-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/databend-setup", + "destination": 
"/docs/core/connect-data-platform/databend-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/fal-setup", + "destination": "/docs/core/connect-data-platform/fal-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-setups/decodable-setup", + "destination": "/docs/core/connect-data-platform/decodable-setup", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-source", + "destination": "/docs/dbt-cloud-apis/discovery-schema-source", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-sources", + "destination": "/docs/dbt-cloud-apis/discovery-schema-sources", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-test", + "destination": "/docs/dbt-cloud-apis/discovery-schema-test", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-tests", + "destination": "/docs/dbt-cloud-apis/discovery-schema-tests", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-seed", + "destination": "/docs/dbt-cloud-apis/discovery-schema-seed", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-seeds", + "destination": "/docs/dbt-cloud-apis/discovery-schema-seeds", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-snapshots", + "destination": "/docs/dbt-cloud-apis/discovery-schema-snapshots", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-model", + "destination": "/docs/dbt-cloud-apis/discovery-schema-model", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-models", + "destination": "/docs/dbt-cloud-apis/discovery-schema-models", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-modelByEnv", + "destination": "/docs/dbt-cloud-apis/discovery-schema-modelByEnv", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-metrics", + "destination": "/docs/dbt-cloud-apis/discovery-schema-metrics", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-metric", + "destination": "/docs/dbt-cloud-apis/discovery-schema-metric", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-exposures", + "destination": "/docs/dbt-cloud-apis/discovery-schema-exposures", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-schema-exposure", + "destination": "/docs/dbt-cloud-apis/discovery-schema-exposure", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-use-case-guides", + "destination": "/docs/dbt-cloud-apis/discovery-use-cases-and-examples", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-api", + "destination": "/docs/dbt-cloud-apis/discovery-api", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-apis/metadata-querying", + "destination": "/docs/dbt-cloud-apis/discovery-querying", + "permanent": true + }, + { + "source": "/docs/core/connection-profiles#understanding-threads", + "destination": "/docs/running-a-dbt-project/using-threads", + "permanent": true + }, + { + "source": "/docs/cloud/privatelink/about-privatelink", + "destination": "/docs/cloud/secure/about-privatelink", + "permanent": true + }, + { + "source": "/docs/cloud/privatelink/snowflake-privatelink", + "destination": "/docs/cloud/secure/about-privatelink", + "permanent": true + }, + { + "source": "/docs/cloud/privatelink/redshift-privatelink", + "destination": "/docs/cloud/secure/about-privatelink", + 
"permanent": true + }, + { + "source": "/docs/cloud/privatelink/databricks-privatelink", + "destination": "/docs/cloud/secure/about-privatelink", + "permanent": true + }, + { + "source": "/docs/cloud/privatelink/ip-restrictions", + "destination": "/docs/cloud/secure/about-privatelink", + "permanent": true + }, + { + "source": "/docs/deploy/dbt-cloud-job#create-and-schedule-jobs", + "destination": "/docs/deploy/dbt-cloud-job#create-and-schedule-jobs", + "permanent": true + }, + { + "source": "/docs/cloud/dbt-cloud-tips", + "destination": "/docs/cloud/dbt-cloud-ide/dbt-cloud-tips", + "permanent": true + }, + { + "source": "/docs/cloud/develop-in-the-cloud", + "destination": "/docs/cloud/dbt-cloud-ide/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab", + "destination": "/docs/deploy/dbt-cloud-job#model-timing", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-core/quickstart", + "destination": "/quickstarts/manual-install", + "permanent": true + }, + { + "source": "/docs/dbt-versions/release-notes/January-2022/model-timing-more", + "destination": "/docs/deploy/dbt-cloud-job#model-timing", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#dbt-cloud", + "destination": "/docs/deploy/dbt-cloud-job", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#airflow", + "destination": "/docs/deploy/deployment-tools", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#prefect", + "destination": "/docs/deploy/deployment-tools", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#run-dbt-in-production", + "destination": "/docs/deploy/deployments", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#on-prefect-2", + "destination": "/docs/deploy/deployment-tools", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#on-prefect-1", + "destination": "/docs/deploy/deployment-tools", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#dagster", + "destination": "/docs/deploy/deployment-tools", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#automation-servers", + "destination": "/docs/deploy/deployment-tools", + "permanent": true + }, + { + "source": "/docs/deploy/deployments#cron", + "destination": "/docs/deploy/deployment-tools", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/enterprise-permissions#permission-sets", + "destination": "/docs/cloud/manage-access/enterprise-permissions#permission-sets", + "permanent": true + }, + { + "source": "/docs/get-started/privatelink/about-privatelink", + "destination": "/docs/cloud/privatelink/about-privatelink", + "permanent": true + }, + { + "source": "/docs/get-started/privatelink/snowflake-privatelink", + "destination": "/docs/cloud/privatelink/snowflake-privatelink", + "permanent": true + }, + { + "source": "/docs/get-started/privatelink/redshift-privatelink", + "destination": "/docs/cloud/privatelink/redshift-privatelink", + "permanent": true + }, + { + "source": "/docs/get-started/privatelink/databricks-privatelink", + "destination": "/docs/cloud/privatelink/databricks-privatelink", + "permanent": true + }, + { + "source": "/docs/get-started/dbt-cloud-features", + "destination": "/docs/cloud/about-cloud/dbt-cloud-features", + "permanent": true + }, + { + "source": "/docs/deploy/regions-ip-addresses", + "destination": "/docs/cloud/about-cloud/regions-ip-addresses", + "permanent": true + }, + { + "source": "/docs/deploy/architecture", 
+ "destination": "/docs/cloud/about-cloud/architecture", + "permanent": true + }, + { + "source": "/docs/deploy/single-tenant", + "destination": "/docs/cloud/about-cloud/tenancy", + "permanent": true + }, + { + "source": "/docs/deploy/multi-tenant", + "destination": "/docs/cloud/about-cloud/tenancy", + "permanent": true + }, + { + "source": "/docs/cloud/manage-access/about-access", + "destination": "/docs/cloud/manage-access/about-user-access", + "permanent": true + }, + { + "source": "/docs/collaborate/git/connect-github", + "destination": "/docs/cloud/git/connect-github", + "permanent": true + }, + { + "source": "/docs/collaborate/git/connect-gitlab", + "destination": "/docs/cloud/git/connect-gitlab", + "permanent": true + }, + { + "source": "/docs/collaborate/git/connect-azure-devops", + "destination": "/docs/cloud/git/connect-azure-devops", + "permanent": true + }, + { + "source": "/docs/collaborate/git/setup-azure", + "destination": "/docs/cloud/git/setup-azure", + "permanent": true + }, + { + "source": "/docs/collaborate/git/authenticate-azure", + "destination": "/docs/cloud/git/authenticate-azure", + "permanent": true + }, + { + "source": "/docs/collaborate/git/import-a-project-by-git-url", + "destination": "/docs/cloud/git/import-a-project-by-git-url", + "permanent": true + }, + { + "source": "/docs/collaborate/publish/about-publishing-models", + "destination": "/docs/collaborate/govern/about-model-governance", + "permanent": true + }, + { + "source": "/docs/collaborate/publish/model-contracts", + "destination": "/docs/collaborate/govern/model-contracts", + "permanent": true + }, + { + "source": "/docs/collaborate/publish/model-access", + "destination": "/docs/collaborate/govern/model-access", + "permanent": true + }, + { + "source": "/docs/collaborate/publish/model-versions", + "destination": "/docs/collaborate/govern/model-versions", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/about-access", + "destination": "/docs/cloud/manage-access/about-user-access", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/seats-and-users", + "destination": "/docs/cloud/manage-access/seats-and-users", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/self-service-permissions", + "destination": "/docs/cloud/manage-access/self-service-permissions", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/enterprise-permissions", + "destination": "/docs/cloud/manage-access/enterprise-permissions", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/sso-overview", + "destination": "/docs/cloud/manage-access/sso-overview", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/set-up-sso-saml-2.0", + "destination": "/docs/cloud/manage-access/set-up-sso-saml-2.0", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/set-up-sso-okta", + "destination": "/docs/cloud/manage-access/set-up-sso-okta", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/set-up-sso-google-workspace", + "destination": "/docs/cloud/manage-access/set-up-sso-google-workspace", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/set-up-sso-azure-active-directory", + "destination": "/docs/cloud/manage-access/set-up-sso-azure-active-directory", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/set-up-snowflake-oauth", + "destination": "/docs/cloud/manage-access/set-up-snowflake-oauth", + "permanent": true + }, + { + 
"source": "/docs/collaborate/manage-access/set-up-bigquery-oauth", + "destination": "/docs/cloud/manage-access/set-up-bigquery-oauth", + "permanent": true + }, + { + "source": "/docs/collaborate/manage-access/audit-log", + "destination": "/docs/cloud/manage-access/audit-log", + "permanent": true + }, + { + "source": "/docs/get-started/develop-in-the-cloud", + "destination": "/docs/cloud/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/get-started/dbt-cloud-tips", + "destination": "/docs/cloud/dbt-cloud-tips", + "permanent": true + }, + { + "source": "/docs/get-started/installation", + "destination": "/docs/core/installation", + "permanent": true + }, + { + "source": "/docs/get-started/about-the-cli", + "destination": "/docs/core/about-the-cli", + "permanent": true + }, + { + "source": "/docs/get-started/homebrew-install", + "destination": "/docs/core/homebrew-install", + "permanent": true + }, + { + "source": "/docs/get-started/pip-install", + "destination": "/docs/core/pip-install", + "permanent": true + }, + { + "source": "/docs/get-started/docker-install", + "destination": "/docs/core/docker-install", + "permanent": true + }, + { + "source": "/docs/get-started/source-install", + "destination": "/docs/core/source-install", + "permanent": true + }, + { + "source": "/docs/get-started/connection-profiles", + "destination": "/docs/core/connection-profiles", + "permanent": true + }, + { + "source": "/docs/get-started/run-your-dbt-projects", + "destination": "/docs/running-a-dbt-project/run-your-dbt-projects", + "permanent": true + }, + { + "source": "/docs/get-started/learning-more/refactoring-legacy-sql", + "destination": "/guides/migration/tools/refactoring-legacy-sql", + "permanent": true + }, + { + "source": "/docs/get-started/learning-more/using-jinja", + "destination": "/guides/advanced/using-jinja", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/set-up-dbt-cloud", + "destination": "/quickstarts", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/getting-set-up/setting-up-snowflake", + "destination": "/docs/quickstarts/dbt-cloud/snowflake", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/getting-set-up/setting-up-redshift", + "destination": "/docs/quickstarts/dbt-cloud/redshift", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/getting-set-up/setting-up-databricks", + "destination": "/quickstarts/databricks", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/getting-set-up/setting-up-bigquery", + "destination": "/docs/quickstarts/dbt-cloud/bigquery", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/getting-set-up/setting-up-databricks", + "destination": "/quickstarts/databricks", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/getting-set-up/setting-up-redshift", + "destination": "/docs/quickstarts/dbt-cloud/redshift", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/getting-set-up/setting-up-snowflake", + "destination": "/docs/quickstarts/dbt-cloud/snowflake", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/building-your-first-project/schedule-a-job", + "destination": "/quickstarts/bigquery", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project", + "destination": "/docs/quickstarts/dbt-cloud/bigquery#add-tests-to-your-models", + "permanent": 
true + }, + { + "source": "/docs/get-started/getting-started/building-your-first-project/build-your-first-models", + "destination": "/quickstarts/bigquery?step=8", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/overview", + "destination": "/quickstarts", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started-dbt-core", + "destination": "/docs/quickstarts/dbt-core/quickstart", + "permanent": true + }, + { + "source": "/docs/get-started/develop-in-the-cloud#set-up-environments", + "destination": "/docs/get-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/get-started/develop-in-the-cloud#developer-credentials", + "destination": "/docs/get-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/getting-started/develop-in-the-cloud#setting-up-developer-credentials", + "destination": "/docs/get-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-redshift-and-postgres", + "destination": "/docs/get-started/connect-your-database#connecting-to-postgres-redshift-and-alloydb", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database#connecting-to-snowflake", + "destination": "/docs/get-started/connect-your-database#connecting-to-snowflake", + "permanent": true + }, + { + "source": "/docs/get-started/connect-your-database#connecting-to-snowflake", + "destination": "/docs/cloud/connect-data-platform/connect-snowflake", + "permanent": true + }, + { + "source": "/docs/get-started/connect-your-database#connecting-to-postgres-redshift-and-alloydb", + "destination": "/cloud/connect-data-platform/connect-redshift-postgresql-alloydb", + "permanent": true + }, + { + "source": "/docs/cloud/connect-data-platform/connect-your-database", + "destination": "/docs/cloud/connect-data-platform/about-connections", + "permanent": true + }, + { + "source": "/faqs/connecting-to-two-dbs-not-allowed", + "destination": "/faqs/warehouse/connecting-to-two-dbs-not-allowed", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-ide/ide-beta", + "destination": "/docs/get-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/bigquery", + "destination": "/quickstarts/bigquery", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/redshift", + "destination": "/quickstarts/redshift", + "permanent": true + }, + { + "source": "/docs/quickstarts/dbt-cloud/snowflake", + "destination": "/quickstarts/snowflake", + "permanent": true + }, + { + "source": "/quickstarts/starburst-galaxy", + "destination": "/quickstarts/starburst-galaxy", + "permanent": true + }, + { + "source": "/quickstarts/codespace", + "destination": "/quickstarts/codespace", + "permanent": true + }, + { + "source": "/quickstarts/manual-install", + "destination": "/quickstarts/manual-install", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-model-timing-tab", + "destination": "/docs/get-started/dbt-cloud-features#model-timing-dashboard", + "permanent": true + }, + { + "source": "/docs/dbt-cloud", + "destination": "/docs/get-started/getting-started/set-up-dbt-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version", + "destination": "/docs/dbt-versions/upgrade-core-in-cloud", + "permanent": true + }, + { + "source": 
"/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide", + "destination": "/docs/get-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-overview", + "destination": "/docs/get-started/getting-started/set-up-dbt-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/artifacts", + "destination": "/docs/deploy/artifacts", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/python-models", + "destination": "/docs/build/python-models", + "permanent": true + }, + { + "source": "/docs/deploy/regions", + "destination": "/docs/deploy/regions-ip-addresses", + "permanent": true + }, + { + "source": "/advanced/adapter-development/1-what-are-adapters", + "destination": "/guides/dbt-ecosystem/adapter-development/1-what-are-adapters", + "permanent": true + }, + { + "source": "/advanced/adapter-development/2-prerequisites-for-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/2-prerequisites-for-a-new-adapter", + "permanent": true + }, + { + "source": "/advanced/adapter-development/3-building-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter", + "permanent": true + }, + { + "source": "/advanced/adapter-development/4-testing-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/4-testing-a-new-adapter", + "permanent": true + }, + { + "source": "/advanced/adapter-development/5-documenting-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter", + "permanent": true + }, + { + "source": "/advanced/adapter-development/6-promoting-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/6-promoting-a-new-adapter", + "permanent": true + }, + { + "source": "/advanced/adapter-development/7-verifying-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/7-verifying-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/advanced/adapter-development/1-what-are-adapters", + "destination": "/guides/dbt-ecosystem/adapter-development/1-what-are-adapters", + "permanent": true + }, + { + "source": "/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/2-prerequisites-for-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/advanced/adapter-development/3-building-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/advanced/adapter-development/4-testing-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/4-testing-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/advanced/adapter-development/5-documenting-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/advanced/adapter-development/6-promoting-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/6-promoting-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/advanced/adapter-development/7-verifying-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/7-verifying-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/legacy/debugging-errors", + "destination": "/guides/best-practices/debugging-errors", + "permanent": true + }, + { + "source": 
"/guides/legacy/writing-custom-generic-tests", + "destination": "/guides/best-practices/writing-custom-generic-tests", + "permanent": true + }, + { + "source": "/guides/legacy/creating-new-materializations", + "destination": "/guides/advanced/creating-new-materializations", + "permanent": true + }, + { + "source": "/guides/getting-started", + "destination": "/docs/get-started/getting-started/overview", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/building-your-first-project", + "destination": "/docs/get-started/getting-started/building-your-first-project/build-your-first-models", + "permanent": true + }, + { + "source": "/docs/get-started/getting-started/create-a-project", + "destination": "/docs/get-started/getting-started/set-up-dbt-cloud", + "permanent": true + }, + { + "source": "/guides/getting-started/building-your-first-project", + "destination": "/docs/get-started/getting-started/building-your-first-project/build-your-first-models", + "permanent": true + }, + { + "source": "/guides/getting-started/building-your-first-project/build-your-first-models", + "destination": "/docs/get-started/getting-started/building-your-first-project/build-your-first-models", + "permanent": true + }, + { + "source": "/guides/getting-started/building-your-first-project/schedule-a-job", + "destination": "/docs/get-started/getting-started/building-your-first-project/schedule-a-job", + "permanent": true + }, + { + "source": "/guides/getting-started/building-your-first-project/test-and-document-your-project", + "destination": "/docs/get-started/getting-started/building-your-first-project/test-and-document-your-project", + "permanent": true + }, + { + "source": "/guides/getting-started/create-a-project", + "destination": "/docs/get-started/getting-started/building-your-first-project/build-your-first-models301", + "permanent": true + }, + { + "source": "/guides/getting-started/getting-set-up", + "destination": "/docs/get-started/getting-started/set-up-dbt-cloud", + "permanent": true + }, + { + "source": "/guides/getting-started/getting-set-up/setting-up-bigquery", + "destination": "/docs/get-started/getting-started/getting-set-up/setting-up-bigquery", + "permanent": true + }, + { + "source": "/guides/getting-started/getting-set-up/setting-up-databricks", + "destination": "/docs/get-started/getting-started/getting-set-up/setting-up-databricks", + "permanent": true + }, + { + "source": "/guides/getting-started/getting-set-up/setting-up-redshift", + "destination": "/docs/get-started/getting-started/getting-set-up/setting-up-redshift", + "permanent": true + }, + { + "source": "/guides/getting-started/getting-set-up/setting-up-snowflake", + "destination": "/docs/get-started/getting-started/getting-set-up/setting-up-snowflake", + "permanent": true + }, + { + "source": "/guides/getting-started/getting-started", + "destination": "/docs/get-started/getting-started/set-up-dbt-cloud", + "permanent": true + }, + { + "source": "/guides/getting-started/learning-more", + "destination": "/docs/get-started/getting-started-dbt-core", + "permanent": true + }, + { + "source": "/guides/getting-started/learning-more/getting-started-dbt-core", + "destination": "/docs/get-started/getting-started-dbt-core", + "permanent": true + }, + { + "source": "/guides/getting-started/learning-more/refactoring-legacy-sql", + "destination": "/docs/get-started/learning-more/refactoring-legacy-sql", + "permanent": true + }, + { + "source": "/guides/getting-started/learning-more/using-jinja", + "destination": 
"/docs/get-started/learning-more/using-jinja", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-quickstart", + "destination": "/docs/get-started/getting-started/set-up-dbt-cloud", + "permanent": true + }, + { + "source": "/docs/cloud-quickstart", + "destination": "/docs/dbt-cloud/cloud-quickstart", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud", + "destination": "/docs/get-started/getting-started/set-up-dbt-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database", + "destination": "/docs/cloud/connect-data-platform/about-connections", + "permanent": true + }, + { + "source": "/docs/get-started/connect-your-database", + "destination": "/docs/cloud/connect-data-platform/about-connections", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/profile", + "destination": "/docs/get-started/connection-profiles", + "permanent": true + }, + { + "source": "/guides/best-practices/materializations/guides/best-practices/materializations/1-overview", + "destination": "/guides/best-practices/materializations/1-guide-overview", + "permanent": true + }, + { + "source": "/docs/deploy/understanding-state", + "destination": "/docs/deploy/about-state", + "permanent": true + }, + { + "source": "/guides/legacy/understanding-state", + "destination": "/docs/deploy/about-state", + "permanent": true + }, + { + "source": "/guides/migration/versions/Older%20versions/understanding-state", + "destination": "/docs/deploy/about-state", + "permanent": true + }, + { + "source": "/docs/collaborate/git/resolve-merge-conflicts", + "destination": "/docs/collaborate/git/merge-conflicts", + "permanent": true + }, + { + "source": "/docs/collaborate/environments", + "destination": "/docs/collaborate/environments/environments-in-dbt", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/running-dbt-in-production", + "destination": "/docs/deploy/deployments", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications", + "destination": "/docs/deploy/job-notifications", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud", + "destination": "/docs/develop/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/january-2020-pricing-updates", + "destination": "https://www.getdbt.com/pricing/", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise", + "destination": "https://www.getdbt.com/pricing/", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/archival", + "destination": "/docs/build/snapshots", + "permanent": true + }, + { + "source": "/docs/about/license", + "destination": "/community/resources/contributor-license-agreements", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository", + "destination": "/docs/collaborate/git/managed-repository", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/release-notes", + "destination": "/docs/dbt-versions/dbt-cloud-release-notes", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise/audit-log", + "destination": "/docs/collaborate/manage-access/audit-log", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-setting-up-bigquery-oauth", + "destination": "/docs/collaborate/manage-access/set-up-bigquery-oauth", + "permanent": true + }, + { + "source": 
"/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth", + "destination": "/docs/collaborate/manage-access/set-up-snowflake-oauth", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta", + "destination": "/docs/collaborate/manage-access/set-up-sso-okta", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory", + "destination": "/docs/collaborate/manage-access/set-up-sso-azure-active-directory", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite", + "destination": "/docs/collaborate/manage-access/set-up-sso-google-workspace", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-saml-2.0", + "destination": "/docs/collaborate/manage-access/set-up-sso-saml-2.0", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise/sso-overview", + "destination": "/docs/collaborate/manage-access/sso-overview", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/access-control/enterprise-permissions", + "destination": "/docs/collaborate/manage-access/enterprise-permissions", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/access-control/self-service-permissions", + "destination": "/docs/collaborate/manage-access/self-service-permissions", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/access-control/cloud-seats-and-users", + "destination": "/docs/collaborate/manage-access/seats-and-users", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/access-control/access-control-overview", + "destination": "/docs/collaborate/manage-access/about-access", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation", + "destination": "/docs/collaborate/build-and-view-your-docs", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/documentation", + "destination": "/docs/collaborate/documentation", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/managing-environments", + "destination": "/docs/collaborate/environments/environments-in-dbt", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url", + "destination": "/docs/collaborate/git/import-a-project-by-git-url", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/authenticate-azure", + "destination": "/docs/collaborate/git/authenticate-azure", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/setup-azure", + "destination": "/docs/collaborate/git/setup-azure", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-azure-devops", + "destination": "/docs/collaborate/git/connect-azure-devops", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-gitlab", + "destination": "/docs/collaborate/git/connect-gitlab", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-installing-the-github-application", + "destination": "/docs/collaborate/git/connect-github", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/setting-up", + "destination": "/", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-ide/handling-merge-conflicts", + "destination": 
"/docs/collaborate/git/resolve-merge-conflicts", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-ide/viewing-docs-in-the-ide", + "destination": "/docs/collaborate/cloud-build-and-view-your-docs", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories", + "destination": "/docs/collaborate/git/pr-template", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration", + "destination": "/docs/deploy/cloud-ci-job", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-dashboard-status-tiles", + "destination": "/docs/deploy/dashboard-status-tiles", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness", + "destination": "/docs/deploy/source-freshness", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-notifications", + "destination": "/docs/deploy/job-notifications", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule", + "destination": "/docs/deploy/job-triggers", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/deployments/airgapped-deployment", + "destination": "/docs/deploy/airgapped-deployment", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/deployments/single-tenant-deployment", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/deployments/multi-tenant-deployment", + "destination": "/docs/deploy/multi-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/deployments/deployment-architecture", + "destination": "/docs/deploy/architecture", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/deployments/deployment-overview", + "destination": "/docs/deploy/deployments", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name", + "destination": "/docs/build/custom-target-names", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/using-custom-aliases", + "destination": "/docs/build/custom-aliases", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/using-custom-databases", + "destination": "/docs/build/custom-databases", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/using-custom-schemas", + "destination": "/docs/build/custom-schemas", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposures", + "destination": "/docs/dbt-cloud-apis/metadata-schema-exposures", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-exposure", + "destination": "/docs/dbt-cloud-apis/metadata-schema-exposure", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-tests", + "destination": "/docs/dbt-cloud-apis/metadata-schema-tests", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-test", + "destination": "/docs/dbt-cloud-apis/metadata-schema-test", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-snapshots", + "destination": "/docs/dbt-cloud-apis/metadata-schema-snapshots", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seeds", + "destination": 
"/docs/dbt-cloud-apis/metadata-schema-seeds", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-seed", + "destination": "/docs/dbt-cloud-apis/metadata-schema-seed", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-sources", + "destination": "/docs/dbt-cloud-apis/metadata-schema-sources", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-source", + "destination": "/docs/dbt-cloud-apis/metadata-schema-source", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metrics", + "destination": "/docs/dbt-cloud-apis/metadata-schema-metrics", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-metric", + "destination": "/docs/dbt-cloud-apis/metadata-schema-metric", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-modelByEnv", + "destination": "/docs/dbt-cloud-apis/metadata-schema-modelByEnv", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-models", + "destination": "/docs/dbt-cloud-apis/metadata-schema-models", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/schema/metadata-schema-model", + "destination": "/docs/dbt-cloud-apis/metadata-schema-model", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-querying", + "destination": "/docs/dbt-cloud-apis/metadata-querying", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/metadata/metadata-overview", + "destination": "/docs/dbt-cloud-apis/metadata-api", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/admin-cloud-api", + "destination": "/docs/dbt-cloud-apis/admin-cloud-api", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/service-tokens", + "destination": "/docs/dbt-cloud-apis/service-tokens", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/user-tokens", + "destination": "/docs/dbt-cloud-apis/user-tokens", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-api/cloud-apis", + "destination": "/docs/dbt-cloud-apis/overview", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/hooks-operations", + "destination": "/docs/build/hooks-operations", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/analyses", + "destination": "/docs/build/analyses", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/package-management", + "destination": "/docs/build/packages", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-environment-variables", + "destination": "/docs/build/environment-variables", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/using-variables", + "destination": "/docs/build/project-variables", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/jinja-macros", + "destination": "/docs/build/jinja-macros", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/configuring-incremental-models", + "destination": "/docs/build/incremental-models", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/materializations", + "destination": "/docs/build/materializations", + "permanent": true + }, + { + "source": 
"/docs/building-a-dbt-project/tests", + "destination": "/docs/build/tests", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/metrics", + "destination": "/docs/build/metrics", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/exposures", + "destination": "/docs/build/exposures", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/snapshots", + "destination": "/docs/build/snapshots", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/seeds", + "destination": "/docs/build/seeds", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models", + "destination": "/docs/build/sql-models", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/using-sources", + "destination": "/docs/build/sources", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/projects", + "destination": "/docs/build/projects", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/python-models", + "destination": "/docs/build/python-models", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/macros", + "destination": "/docs/guides/building-packages", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/setting-up", + "destination": "/docs/guides/building-packages", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-jinja-functions", + "destination": "/docs/guides/building-packages", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-upgrading-dbt-versions", + "destination": "/docs/dbt-versions/upgrade-core-in-cloud", + "permanent": true + }, + { + "source": "/docs/core-versions", + "destination": "/docs/dbt-versions/core", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-dbt-cloud-support", + "destination": "/docs/dbt-support", + "permanent": true + }, + { + "source": "/docs/about/viewpoint", + "destination": "/community/resources/viewpoint", + "permanent": true + }, + { + "source": "/docs/viewpoint", + "destination": "/community/resources/viewpoint", + "permanent": true + }, + { + "source": "/dbt-cli/configure-your-profile", + "destination": "/docs/get-started/connection-profiles", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-cli", + "destination": "/docs/get-started/about-the-cli", + "permanent": true + }, + { + "source": "/dbt-cli/install/from-source", + "destination": "/docs/get-started/source-install", + "permanent": true + }, + { + "source": "/dbt-cli/install/docker", + "destination": "/docs/get-started/docker-install", + "permanent": true + }, + { + "source": "/dbt-cli/install/pip", + "destination": "/docs/get-started/pip-install", + "permanent": true + }, + { + "source": "/dbt-cli/install/homebrew", + "destination": "/docs/get-started/homebrew-install", + "permanent": true + }, + { + "source": "/dbt-cli/install/overview", + "destination": "/docs/get-started/installation", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-ide/the-dbt-ide", + "destination": "/docs/get-started/dbt-cloud-features", + "permanent": true + }, + { + "source": "/((?!useful).*components)", + "destination": "https://github.com/dbt-labs/docs.getdbt.com/blob/current/contributing/adding-page-components.md", + "permanent": true + }, + { + "source": "/guides/legacy/managing-environments", + "destination": "/docs/building-a-dbt-project/managing-environments", + "permanent": true + }, + { + "source": 
"/docs/running-a-dbt-project/dbt-api", + "destination": "/docs/introduction", + "permanent": true + }, + { + "source": "/img/docs/dbt-cloud/dbt-cloud-enterprise/icon.png", + "destination": "https://www.getdbt.com/ui/img/dbt-icon.png", + "permanent": true + }, + { + "source": "/dbt-cli/installation-guides/centos", + "destination": "/docs/get-started/installation", + "permanent": true + }, + { + "source": "/dbt-cli/installation-guides/centos", + "destination": "/docs/get-started/installation", + "permanent": true + }, + { + "source": "/dbt-cli/installation-guides/install-from-source", + "destination": "/dbt-cli/install/from-source", + "permanent": true + }, + { + "source": "/dbt-cli/installation-guides/macos", + "destination": "/docs/get-started/installation", + "permanent": true + }, + { + "source": "/dbt-cli/installation-guides/ubuntu-debian", + "destination": "/docs/get-started/installation", + "permanent": true + }, + { + "source": "/dbt-cli/installation-guides/windows", + "destination": "/docs/get-started/installation", + "permanent": true + }, + { + "source": "/dbt-cli/installation", + "destination": "/docs/get-started/installation", + "permanent": true + }, + { + "source": "/dbt-jinja-functions", + "destination": "/reference/dbt-jinja-functions", + "permanent": true + }, + { + "source": "/docs", + "destination": "/docs/introduction", + "permanent": true + }, + { + "source": "/docs/adapter", + "destination": "/docs/writing-code-in-dbt/jinja-context/adapter", + "permanent": true + }, + { + "source": "/docs/analyses", + "destination": "/docs/building-a-dbt-project/analyses", + "permanent": true + }, + { + "source": "/docs/api-variable", + "destination": "/docs/writing-code-in-dbt/api-variable", + "permanent": true + }, + { + "source": "/docs/archival", + "destination": "/docs/building-a-dbt-project/archival", + "permanent": true + }, + { + "source": "/docs/artifacts", + "destination": "/docs/dbt-cloud/using-dbt-cloud/artifacts", + "permanent": true + }, + { + "source": "/docs/bigquery-configs", + "destination": "/reference/resource-configs/bigquery-configs", + "permanent": true + }, + { + "source": "/reference/resource-properties/docs", + "destination": "/reference/resource-configs/docs", + "permanent": true + }, + { + "source": "/reference/resource-properties/latest-version", + "destination": "/reference/resource-properties/latest_version", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/bigquery-configs", + "destination": "/reference/resource-configs/bigquery-configs", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/configuring-models", + "destination": "/reference/model-configs", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/enable-and-disable-models", + "destination": "/reference/resource-configs/enabled", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/redshift-configs", + "destination": "/reference/resource-configs/redshift-configs", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/snowflake-configs", + "destination": "/reference/resource-configs/snowflake-configs", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/spark-configs", + "destination": "/reference/resource-configs/spark-configs", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/tags", + "destination": "/reference/resource-configs/tags", + "permanent": 
true + }, + { + "source": "/docs/building-a-dbt-project/building-models/using-sql-headers", + "destination": "/reference/resource-configs/sql_header", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-projects", + "destination": "/docs/building-a-dbt-project/projects", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-projects/configuring-query-comments", + "destination": "/reference/project-configs/query-comment", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-projects/configuring-quoting", + "destination": "/reference/project-configs/quoting", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-projects/creating-a-project", + "destination": "/docs/building-a-dbt-project/projects#creating-a-dbt-project", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions", + "destination": "/reference/project-configs/require-dbt-version", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-projects/use-an-existing-project", + "destination": "/docs/building-a-dbt-project/projects#using-an-existing-project", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/hooks", + "destination": "/docs/building-a-dbt-project/hooks-operations", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/testing-and-documentation", + "destination": "/docs/building-a-dbt-project/tests", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/testing-and-documentation/documentation", + "destination": "/docs/building-a-dbt-project/documentation", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/testing-and-documentation/documentation-website", + "destination": "/docs/building-a-dbt-project/testing-and-documentation/documentation", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/testing-and-documentation/schemayml-files", + "destination": "/reference/declaring-properties", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/testing-and-documentation/testing", + "destination": "/docs/building-a-dbt-project/tests", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/using-operations", + "destination": "/docs/building-a-dbt-project/hooks-operations", + "permanent": true + }, + { + "source": "/docs/building-models", + "destination": "/docs/building-a-dbt-project/building-models", + "permanent": true + }, + { + "source": "/docs/building-packages", + "destination": "/guides/legacy/building-packages", + "permanent": true + }, + { + "source": "/docs/centos", + "destination": "/dbt-cli/installation", + "permanent": true + }, + { + "source": "/docs/clean", + "destination": "/reference/commands/clean", + "permanent": true + }, + { + "source": "/docs/cloud-choosing-a-dbt-version", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version", + "permanent": true + }, + { + "source": "/docs/cloud-configuring-dbt-cloud", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud", + "permanent": true + }, + { + "source": "/docs/cloud-enabling-continuous-integration-with-github", + "destination": "/docs/deploy/cloud-ci-job", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github", + "destination": "/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration", + "permanent": true + }, + { + 
"source": "/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration-with-github", + "destination": "/docs/dbt-cloud/using-dbt-cloud/cloud-enabling-continuous-integration", + "permanent": true + }, + { + "source": "/docs/cloud-generating-documentation", + "destination": "/docs/dbt-cloud/using-dbt-cloud/cloud-generating-documentation", + "permanent": true + }, + { + "source": "/docs/cloud-import-a-project-by-git-url", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-import-a-project-by-git-url", + "permanent": true + }, + { + "source": "/docs/cloud-installing-the-github-application", + "destination": "/docs/cloud/git/connect-github", + "permanent": true + }, + { + "source": "/docs/cloud-managing-permissions", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-managing-permissions", + "permanent": true + }, + { + "source": "/docs/cloud-overview", + "destination": "/docs/dbt-cloud/cloud-overview", + "permanent": true + }, + { + "source": "/docs/cloud-seats-and-users", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-seats-and-users", + "permanent": true + }, + { + "source": "/docs/cloud-setting-a-custom-target-name", + "destination": "/docs/dbt-cloud/using-dbt-cloud/cloud-setting-a-custom-target-name", + "permanent": true + }, + { + "source": "/docs/cloud-snapshotting-source-freshness", + "destination": "/docs/dbt-cloud/using-dbt-cloud/cloud-snapshotting-source-freshness", + "permanent": true + }, + { + "source": "/docs/cloud-supported-dbt-versions", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version", + "permanent": true + }, + { + "source": "/docs/cloud-using-a-custom-cron-schedule", + "destination": "/docs/dbt-cloud/using-dbt-cloud/cloud-using-a-custom-cron-schedule", + "permanent": true + }, + { + "source": "/docs/cloud-using-a-managed-repository", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-using-a-managed-repository", + "permanent": true + }, + { + "source": "/docs/cmd-docs", + "destination": "/reference/commands/cmd-docs", + "permanent": true + }, + { + "source": "/docs/command-line-interface", + "destination": "/reference/dbt-commands", + "permanent": true + }, + { + "source": "/docs/compile", + "destination": "/reference/commands/compile", + "permanent": true + }, + { + "source": "/docs/config", + "destination": "/docs/writing-code-in-dbt/jinja-context/config", + "permanent": true + }, + { + "source": "/docs/configure-your-profile", + "destination": "/dbt-cli/configure-your-profile", + "permanent": true + }, + { + "source": "/docs/configuring-incremental-models", + "destination": "/docs/building-a-dbt-project/building-models/configuring-incremental-models", + "permanent": true + }, + { + "source": "/docs/configuring-models", + "destination": "/reference/model-configs", + "permanent": true + }, + { + "source": "/docs/configuring-query-comments", + "destination": "/docs/building-a-dbt-project/dbt-projects/configuring-query-comments", + "permanent": true + }, + { + "source": "/docs/configuring-quoting", + "destination": "/docs/building-a-dbt-project/dbt-projects/configuring-quoting", + "permanent": true + }, + { + "source": "/docs/configuring-resources-from-the-project-file", + "destination": "/docs/building-a-dbt-project/dbt-projects/configuring-resources-from-the-project-file", + "permanent": true + }, + { + "source": "/docs/connecting-your-database", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/connecting-your-database", + "permanent": 
true + }, + { + "source": "/docs/contributor-license-agreements", + "destination": "/docs/contributing/contributor-license-agreements", + "permanent": true + }, + { + "source": "/docs/creating-a-project", + "destination": "/docs/building-a-dbt-project/dbt-projects/creating-a-project", + "permanent": true + }, + { + "source": "/docs/creating-new-materializations", + "destination": "/guides/legacy/creating-new-materializations", + "permanent": true + }, + { + "source": "/docs/creating-date-partitioned-tables", + "destination": "/docs/guides/database-specific-guides/creating-date-partitioned-tables", + "permanent": true + }, + { + "source": "/docs/custom-schema-tests", + "destination": "/guides/legacy/writing-custom-generic-tests", + "permanent": true + }, + { + "source": "/docs/database-specific-guides", + "destination": "/", + "permanent": true + }, + { + "source": "/docs/dbt-api", + "destination": "/docs/running-a-dbt-project/dbt-api", + "permanent": true + }, + { + "source": "/docs/dbt-cloud-enterprise", + "destination": "/docs/dbt-cloud/dbt-cloud-enterprise", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-repositories", + "destination": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-configuring-repositories", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-configuring-dbt-cloud/cloud-choosing-a-dbt-version", + "destination": "/docs/dbt-versions/upgrade-core-in-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions", + "destination": "/docs/dbt-cloud/access-control/enterprise-permissions", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/architecture", + "destination": "/dbt-cloud/on-premises/dependencies", + "permanent": true + }, + { + "source": "/docs/dbt-projects", + "destination": "/docs/building-a-dbt-project/dbt-projects", + "permanent": true + }, + { + "source": "/docs/dbt_projectyml-file", + "destination": "/docs/building-a-dbt-project/dbt-projects/dbt_projectyml-file", + "permanent": true + }, + { + "source": "/docs/debug", + "destination": "/reference/commands/debug", + "permanent": true + }, + { + "source": "/docs/debug-method", + "destination": "/docs/writing-code-in-dbt/jinja-context/debug-method", + "permanent": true + }, + { + "source": "/docs/deps", + "destination": "/reference/commands/deps", + "permanent": true + }, + { + "source": "/docs/doc", + "destination": "/docs/writing-code-in-dbt/jinja-context/doc", + "permanent": true + }, + { + "source": "/docs/documentation", + "destination": "/docs/building-a-dbt-project/documentation", + "permanent": true + }, + { + "source": "/docs/documentation-website", + "destination": "/docs/building-a-dbt-project/documentation", + "permanent": true + }, + { + "source": "/docs/dont-nest-your-curlies", + "destination": "/docs/building-a-dbt-project/dont-nest-your-curlies", + "permanent": true + }, + { + "source": "/docs/enable-and-disable-models", + "destination": "/reference/resource-configs/enabled", + "permanent": true + }, + { + "source": "/docs/enterprise-permissions", + "destination": "/docs/dbt-cloud/dbt-cloud-enterprise/enterprise-permissions", + "permanent": true + }, + { + "source": "/docs/env_var", + "destination": "/docs/writing-code-in-dbt/jinja-context/env_var", + "permanent": true + }, + { + "source": "/docs/exceptions", + "destination": "/docs/writing-code-in-dbt/jinja-context/exceptions", + "permanent": true + }, + { + "source": "/docs/execute", + "destination": 
"/docs/writing-code-in-dbt/jinja-context/execute", + "permanent": true + }, + { + "source": "/docs/exit-codes", + "destination": "/reference/exit-codes", + "permanent": true + }, + { + "source": "/docs/flags", + "destination": "/docs/writing-code-in-dbt/jinja-context/flags", + "permanent": true + }, + { + "source": "/docs/fromjson", + "destination": "/docs/writing-code-in-dbt/jinja-context/fromjson", + "permanent": true + }, + { + "source": "/docs/getting-started-with-jinja", + "destination": "/docs/building-a-dbt-project/jinja-macros", + "permanent": true + }, + { + "source": "/docs/global-cli-flags", + "destination": "/reference/global-cli-flags", + "permanent": true + }, + { + "source": "/docs/graph", + "destination": "/docs/writing-code-in-dbt/jinja-context/graph", + "permanent": true + }, + { + "source": "/docs/guides/building-packages", + "destination": "/guides/legacy/building-packages", + "permanent": true + }, + { + "source": "/docs/guides/creating-new-materializations", + "destination": "/guides/legacy/creating-new-materializations", + "permanent": true + }, + { + "source": "/docs/guides/debugging-errors", + "destination": "/guides/legacy/debugging-errors", + "permanent": true + }, + { + "source": "/docs/guides/debugging-schema-names", + "destination": "/guides/legacy/debugging-schema-names", + "permanent": true + }, + { + "source": "/docs/guides/getting-help", + "destination": "/guides/legacy/getting-help", + "permanent": true + }, + { + "source": "/docs/guides/managing-environments", + "destination": "/guides/legacy/managing-environments", + "permanent": true + }, + { + "source": "/docs/guides/navigating-the-docs", + "destination": "/guides/legacy/navigating-the-docs", + "permanent": true + }, + { + "source": "/docs/guides/understanding-state", + "destination": "/guides/legacy/understanding-state", + "permanent": true + }, + { + "source": "/docs/guides/videos", + "destination": "/guides/legacy/videos", + "permanent": true + }, + { + "source": "/docs/guides/writing-custom-generic-tests", + "destination": "/guides/legacy/writing-custom-generic-tests", + "permanent": true + }, + { + "source": "/docs/guides/writing-custom-schema-tests", + "destination": "/guides/legacy/writing-custom-generic-tests", + "permanent": true + }, + { + "source": "/docs/guides/best-practices#choose-your-materializations-wisely", + "destination": "/guides/legacy/best-practices#choose-your-materializations-wisely", + "permanent": true + }, + { + "source": "/docs/guides/best-practices#version-control-your-dbt-project", + "destination": "/guides/legacy/best-practices#version-control-your-dbt-project", + "permanent": true + }, + { + "source": "/docs/best-practices", + "destination": "/guides/legacy/best-practices", + "permanent": true + }, + { + "source": "/docs/guides/best-practices", + "destination": "/guides/best-practices", + "permanent": true + }, + { + "source": "/docs/hooks", + "destination": "/docs/building-a-dbt-project/hooks-operations", + "permanent": true + }, + { + "source": "/docs/init", + "destination": "/reference/commands/init", + "permanent": true + }, + { + "source": "/docs/install-from-source", + "destination": "/dbt-cli/installation", + "permanent": true + }, + { + "source": "/docs/installation", + "destination": "/docs/core/installation", + "permanent": true + }, + { + "source": "/docs/invocation_id", + "destination": "/docs/writing-code-in-dbt/jinja-context/invocation_id", + "permanent": true + }, + { + "source": "/docs/jinja-context", + "destination": 
"/docs/writing-code-in-dbt/jinja-context", + "permanent": true + }, + { + "source": "/docs/license", + "destination": "/docs/about/license", + "permanent": true + }, + { + "source": "/docs/list", + "destination": "/reference/commands/list", + "permanent": true + }, + { + "source": "/docs/log", + "destination": "/docs/writing-code-in-dbt/jinja-context/log", + "permanent": true + }, + { + "source": "/docs/macos", + "destination": "/dbt-cli/installation", + "permanent": true + }, + { + "source": "/docs/macros", + "destination": "/guides/legacy/building-packages", + "permanent": true + }, + { + "source": "/docs/maintaining-multiple-environments-with-dbt", + "destination": "/", + "permanent": true + }, + { + "source": "/docs/managing-environments", + "destination": "/guides/legacy/managing-environments", + "permanent": true + }, + { + "source": "/docs/materializations", + "destination": "/docs/building-a-dbt-project/building-models/materializations", + "permanent": true + }, + { + "source": "/docs/model-selection-syntax", + "destination": "/reference/node-selection/syntax", + "permanent": true + }, + { + "source": "/docs/modules", + "destination": "/docs/writing-code-in-dbt/jinja-context/modules", + "permanent": true + }, + { + "source": "/docs/on-run-end-context", + "destination": "/docs/writing-code-in-dbt/jinja-context/on-run-end-context", + "permanent": true + }, + { + "source": "/docs/overview", + "destination": "/docs/introduction", + "permanent": true + }, + { + "source": "/docs/performance-optimization", + "destination": "/", + "permanent": true + }, + { + "source": "/docs/package-management", + "destination": "/docs/building-a-dbt-project/package-management", + "permanent": true + }, + { + "source": "/docs/profile-bigquery", + "destination": "/reference/warehouse-profiles/bigquery-profile", + "permanent": true + }, + { + "source": "/docs/profile-mssql", + "destination": "/reference/warehouse-profiles/mssql-profile", + "permanent": true + }, + { + "source": "/docs/profile-postgres", + "destination": "/reference/warehouse-profiles/postgres-profile", + "permanent": true + }, + { + "source": "/docs/profile-presto", + "destination": "/reference/warehouse-profiles/presto-profile", + "permanent": true + }, + { + "source": "/docs/profile-redshift", + "destination": "/reference/warehouse-profiles/redshift-profile", + "permanent": true + }, + { + "source": "/docs/profile-snowflake", + "destination": "/reference/warehouse-profiles/snowflake-profile", + "permanent": true + }, + { + "source": "/docs/profile-spark", + "destination": "/reference/warehouse-profiles/spark-profile", + "permanent": true + }, + { + "source": "/docs/redshift-configs", + "destination": "/reference/resource-configs/redshift-configs", + "permanent": true + }, + { + "source": "/docs/spark-configs", + "destination": "/reference/resource-configs/spark-configs", + "permanent": true + }, + { + "source": "/docs/redshift-v2", + "destination": "/reference/warehouse-profiles/redshift-profile", + "permanent": true + }, + { + "source": "/docs/ref", + "destination": "/docs/writing-code-in-dbt/jinja-context/ref", + "permanent": true + }, + { + "source": "/docs/requiring-specific-dbt-versions", + "destination": "/docs/building-a-dbt-project/dbt-projects/requiring-specific-dbt-versions", + "permanent": true + }, + { + "source": "/docs/requiring-dbt-versions", + "destination": "/", + "permanent": true + }, + { + "source": "/docs/return", + "destination": "/docs/writing-code-in-dbt/jinja-context/return", + "permanent": true + }, + { + 
"source": "/docs/rpc", + "destination": "/reference/commands/rpc", + "permanent": true + }, + { + "source": "/docs/run", + "destination": "/reference/commands/run", + "permanent": true + }, + { + "source": "/docs/run-operation", + "destination": "/reference/commands/run-operation", + "permanent": true + }, + { + "source": "/docs/run_query", + "destination": "/docs/writing-code-in-dbt/jinja-context/run_query", + "permanent": true + }, + { + "source": "/docs/run_started_at", + "destination": "/docs/writing-code-in-dbt/jinja-context/run_started_at", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface", + "destination": "/reference/dbt-commands", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/clean", + "destination": "/reference/commands/clean", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/cmd-docs", + "destination": "/reference/commands/cmd-docs", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/compile", + "destination": "/reference/commands/compile", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/debug", + "destination": "/reference/commands/debug", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/deps", + "destination": "/reference/commands/deps", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/exit-codes", + "destination": "/reference/exit-codes", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/global-cli-flags", + "destination": "/reference/global-cli-flags", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/init", + "destination": "/reference/commands/init", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/list", + "destination": "/reference/commands/list", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/model-selection-syntax", + "destination": "/reference/model-selection-syntax", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/rpc", + "destination": "/reference/commands/rpc", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/run", + "destination": "/reference/commands/run", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/run-operation", + "destination": "/reference/commands/run-operation", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/seed", + "destination": "/reference/commands/seed", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/snapshot", + "destination": "/reference/commands/snapshot", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/source", + "destination": "/reference/commands/source", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/test", + "destination": "/reference/commands/test", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/command-line-interface/version", + "destination": "/reference/global-cli-flags#version", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface", + "destination": 
"/docs/running-a-dbt-project/using-the-cli", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface/centos", + "destination": "/dbt-cli/installation-guides/centos", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface/configure-your-profile", + "destination": "/dbt-cli/configure-your-profile", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface/install-from-source", + "destination": "/dbt-cli/installation-guides/install-from-source", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface/installation", + "destination": "/dbt-cli/installation", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface/macos", + "destination": "/dbt-cli/installation-guides/macos", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface/ubuntu-debian", + "destination": "/dbt-cli/installation-guides/ubuntu-debian", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-command-line-interface/windows", + "destination": "/dbt-cli/installation-guides/windows", + "permanent": true + }, + { + "source": "/docs/running-dbt-in-production", + "destination": "/docs/running-a-dbt-project/running-dbt-in-production", + "permanent": true + }, + { + "source": "/docs/schema", + "destination": "/docs/writing-code-in-dbt/jinja-context/schema", + "permanent": true + }, + { + "source": "/docs/schemas", + "destination": "/docs/writing-code-in-dbt/jinja-context/schemas", + "permanent": true + }, + { + "source": "/docs/schemayml-files", + "destination": "/reference/declaring-properties", + "permanent": true + }, + { + "source": "/docs/seed", + "destination": "/reference/commands/seed", + "permanent": true + }, + { + "source": "/docs/seeds", + "destination": "/docs/building-a-dbt-project/seeds", + "permanent": true + }, + { + "source": "/docs/setting-up-enterprise-sso-with-azure-active-directory", + "destination": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-sso-with-azure-active-directory", + "permanent": true + }, + { + "source": "/docs/setting-up-snowflake-sso", + "destination": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-enterprise-snowflake-oauth", + "permanent": true + }, + { + "source": "/docs/setting-up-sso-with-google-gsuite", + "destination": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-google-gsuite", + "permanent": true + }, + { + "source": "/docs/setting-up-sso-with-okta", + "destination": "/docs/dbt-cloud/dbt-cloud-enterprise/setting-up-sso-with-okta", + "permanent": true + }, + { + "source": "/docs/snapshot", + "destination": "/reference/commands/snapshot", + "permanent": true + }, + { + "source": "/docs/snapshots", + "destination": "/docs/building-a-dbt-project/snapshots", + "permanent": true + }, + { + "source": "/docs/snowflake-configs", + "destination": "/reference/resource-configs/snowflake-configs", + "permanent": true + }, + { + "source": "/docs/source", + "destination": "/reference/commands/source", + "permanent": true + }, + { + "source": "/docs/statement-blocks", + "destination": "/docs/writing-code-in-dbt/jinja-context/statement-blocks", + "permanent": true + }, + { + "source": "/docs/supported-databases/profile-bigquery", + "destination": "/reference/bigquery-profile", + "permanent": true + }, + { + "source": "/docs/supported-databases/profile-mssql", + 
"destination": "/reference/mssql-profile", + "permanent": true + }, + { + "source": "/docs/supported-databases/profile-postgres", + "destination": "/reference/postgres-profile", + "permanent": true + }, + { + "source": "/docs/supported-databases/profile-presto", + "destination": "/reference/presto-profile", + "permanent": true + }, + { + "source": "/docs/supported-databases/profile-redshift", + "destination": "/reference/redshift-profile", + "permanent": true + }, + { + "source": "/docs/supported-databases/profile-snowflake", + "destination": "/reference/snowflake-profile", + "permanent": true + }, + { + "source": "/docs/supported-databases/profile-spark", + "destination": "/reference/spark-profile", + "permanent": true + }, + { + "source": "/docs/tags", + "destination": "/reference/resource-configs/tags", + "permanent": true + }, + { + "source": "/docs/target", + "destination": "/docs/writing-code-in-dbt/jinja-context/target", + "permanent": true + }, + { + "source": "/docs/test", + "destination": "/reference/commands/test", + "permanent": true + }, + { + "source": "/docs/testing", + "destination": "/docs/building-a-dbt-project/tests", + "permanent": true + }, + { + "source": "/docs/testing-and-documentation", + "destination": "/docs/building-a-dbt-project/tests", + "permanent": true + }, + { + "source": "/docs/the-dbt-ide", + "destination": "/docs/cloud/about-cloud/dbt-cloud-features", + "permanent": true + }, + { + "source": "/docs/this", + "destination": "/docs/writing-code-in-dbt/jinja-context/this", + "permanent": true + }, + { + "source": "/docs/tojson", + "destination": "/docs/writing-code-in-dbt/jinja-context/tojson", + "permanent": true + }, + { + "source": "/docs/ubuntu-debian", + "destination": "/dbt-cli/installation", + "permanent": true + }, + { + "source": "/docs/use-an-existing-project", + "destination": "/docs/building-a-dbt-project/dbt-projects/use-an-existing-project", + "permanent": true + }, + { + "source": "/docs/using-custom-aliases", + "destination": "/docs/building-a-dbt-project/building-models/using-custom-aliases", + "permanent": true + }, + { + "source": "/docs/using-custom-database", + "destination": "/docs/building-a-dbt-project/building-models/using-custom-databases", + "permanent": true + }, + { + "source": "/docs/using-custom-schemas", + "destination": "/docs/building-a-dbt-project/building-models/using-custom-schemas", + "permanent": true + }, + { + "source": "/docs/using-dbt-cloud", + "destination": "/docs/dbt-cloud/using-dbt-cloud", + "permanent": true + }, + { + "source": "/docs/using-jinja", + "destination": "/guides/getting-started/learning-more/using-jinja", + "permanent": true + }, + { + "source": "/docs/using-operations", + "destination": "/docs/building-a-dbt-project/hooks-operations", + "permanent": true + }, + { + "source": "/docs/using-sources", + "destination": "/docs/building-a-dbt-project/using-sources", + "permanent": true + }, + { + "source": "/docs/using-sql-headers", + "destination": "/reference/resource-configs/sql_header", + "permanent": true + }, + { + "source": "/docs/using-the-command-line-interface", + "destination": "/docs/running-a-dbt-project/using-the-cli", + "permanent": true + }, + { + "source": "/docs/using-the-dbt-ide", + "destination": "/docs/running-a-dbt-project/using-the-dbt-ide", + "permanent": true + }, + { + "source": "/docs/using-variables", + "destination": "/docs/building-a-dbt-project/building-models/using-variables", + "permanent": true + }, + { + "source": "/docs/var", + "destination": 
"/docs/writing-code-in-dbt/jinja-context/var", + "permanent": true + }, + { + "source": "/docs/version", + "destination": "/reference/global-cli-flags#version", + "permanent": true + }, + { + "source": "/docs/videos", + "destination": "/guides/legacy/videos", + "permanent": true + }, + { + "source": "/docs/warehouse-specific-configurations", + "destination": "/", + "permanent": true + }, + { + "source": "/docs/windows", + "destination": "/dbt-cli/installation", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/api-variable", + "destination": "/", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/class-reference", + "destination": "/reference/dbt-classes", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/extending-dbts-programming-environment/creating-new-materializations", + "destination": "/guides/legacy/creating-new-materializations", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/extending-dbts-programming-environment/custom-schema-tests", + "destination": "/guides/legacy/writing-custom-schema-tests", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/getting-started-with-jinja", + "destination": "/docs/building-a-dbt-project/jinja-macros", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/adapter", + "destination": "/reference/dbt-jinja-functions/adapter", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/as_text", + "destination": "/reference/dbt-jinja-functions/as_text", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/builtins", + "destination": "/reference/dbt-jinja-functions/builtins", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/config", + "destination": "/reference/dbt-jinja-functions/config", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/dbt-project-yml-context", + "destination": "/reference/dbt-jinja-functions/dbt-project-yml-context", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/dbt_version", + "destination": "/reference/dbt-jinja-functions/dbt_version", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/debug-method", + "destination": "/reference/dbt-jinja-functions/debug-method", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/doc", + "destination": "/reference/dbt-jinja-functions/doc", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/env_var", + "destination": "/reference/dbt-jinja-functions/env_var", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/exceptions", + "destination": "/reference/dbt-jinja-functions/exceptions", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/execute", + "destination": "/reference/dbt-jinja-functions/execute", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/flags", + "destination": "/reference/dbt-jinja-functions/flags", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/fromjson", + "destination": "/reference/dbt-jinja-functions/fromjson", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/fromyaml", + "destination": "/reference/dbt-jinja-functions/fromyaml", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/graph", + "destination": 
"/reference/dbt-jinja-functions/graph", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/invocation_id", + "destination": "/reference/dbt-jinja-functions/invocation_id", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/log", + "destination": "/reference/dbt-jinja-functions/log", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/modules", + "destination": "/reference/dbt-jinja-functions/modules", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/on-run-end-context", + "destination": "/reference/dbt-jinja-functions/on-run-end-context", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/profiles-yml-context", + "destination": "/reference/dbt-jinja-functions/profiles-yml-context", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/project_name", + "destination": "/reference/dbt-jinja-functions/project_name", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/ref", + "destination": "/reference/dbt-jinja-functions/ref", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/return", + "destination": "/reference/dbt-jinja-functions/return", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/run_query", + "destination": "/reference/dbt-jinja-functions/run_query", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/run_started_at", + "destination": "/reference/dbt-jinja-functions/run_started_at", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/schema", + "destination": "/reference/dbt-jinja-functions/schema", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/schemas", + "destination": "/reference/dbt-jinja-functions/schemas", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/source", + "destination": "/reference/dbt-jinja-functions/source", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/statement-blocks", + "destination": "/reference/dbt-jinja-functions/statement-blocks", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/target", + "destination": "/reference/dbt-jinja-functions/target", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/this", + "destination": "/reference/dbt-jinja-functions/this", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/tojson", + "destination": "/reference/dbt-jinja-functions/tojson", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/toyaml", + "destination": "/reference/dbt-jinja-functions/toyaml", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/jinja-context/var", + "destination": "/reference/dbt-jinja-functions/var", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/macros", + "destination": "/docs/building-a-dbt-project/jinja-macros", + "permanent": true + }, + { + "source": "/docs/writing-code-in-dbt/using-jinja", + "destination": "/guides/getting-started/learning-more/using-jinja", + "permanent": true + }, + { + "source": "/faqs/getting-help", + "destination": "/guides/legacy/getting-help", + "permanent": true + }, + { + "source": "/migration-guide/upgrading-to-0-17-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": 
"/migration-guide/upgrading-to-0-18-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/reference", + "destination": "/", + "permanent": true + }, + { + "source": "/reference/accounts", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/api", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/bigquery-profile", + "destination": "/reference/warehouse-profile/bigquery-profile", + "permanent": true + }, + { + "source": "/reference/connections", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/data-test-configs", + "destination": "/reference/test-configs", + "permanent": true + }, + { + "source": "/reference/declaring-properties", + "destination": "/reference/configs-and-properties", + "permanent": true + }, + { + "source": "/reference/dbt-artifacts", + "destination": "/reference/artifacts/dbt-artifacts", + "permanent": true + }, + { + "source": "/reference/environments", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/events", + "destination": "/reference/events-logging", + "permanent": true + }, + { + "source": "/reference/jobs", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/model-selection-syntax", + "destination": "/reference/node-selection/syntax", + "permanent": true + }, + { + "source": "/reference/project-configs/on-run-end", + "destination": "/reference/project-configs/on-run-start-on-run-end", + "permanent": true + }, + { + "source": "/reference/project-configs/on-run-start", + "destination": "/reference/project-configs/on-run-start-on-run-end", + "permanent": true + }, + { + "source": "/reference/repositories", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/resource-configs/post-hook", + "destination": "/reference/resource-configs/pre-hook-post-hook", + "permanent": true + }, + { + "source": "/reference/resource-configs/pre-hook", + "destination": "/reference/resource-configs/pre-hook-post-hook", + "permanent": true + }, + { + "source": "/reference/resource-properties/tags", + "destination": "/reference/resource-configs/tags", + "permanent": true + }, + { + "source": "/reference/resource-properties/meta", + "destination": "/reference/resource-configs/meta", + "permanent": true + }, + { + "source": "/reference/runs", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/using-the-dbt-cloud-api", + "destination": "/dbt-cloud/api", + "permanent": true + }, + { + "source": "/reference/model-selection-syntax/#test-selection-examples", + "destination": "/reference/node-selection/test-selection-examples", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/building-models/using-custom-database", + "destination": "/docs/building-a-dbt-project/building-models/using-custom-databases", + "permanent": true + }, + { + "source": "/dbt-cloud/api", + "destination": "/dbt-cloud/api-v2", + "permanent": true + }, + { + "source": "/dbt-cloud/api-v2-old", + "destination": "/dbt-cloud/api-v2-legacy", + "permanent": true + }, + { + "source": "/dbt-cloud/api-v4", + "destination": "/docs/dbt-cloud-apis/admin-cloud-api", + "permanent": true + }, + { + "source": "/reference/project-configs/source-paths", + "destination": "/reference/project-configs/model-paths", + "permanent": true + }, + { + "source": "/reference/project-configs/data-paths", + "destination": 
"/reference/project-configs/seed-paths", + "permanent": true + }, + { + "source": "/reference/project-configs/modules-paths", + "destination": "/reference/project-configs/packages-install-path", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-slack-notifications", + "destination": "/docs/dbt-cloud/using-dbt-cloud/cloud-notifications", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/presto-profile", + "destination": "/reference/profiles.yml", + "permanent": true + }, + { + "source": "/setting-up", + "destination": "/guides/getting-started/getting-set-up/setting-up-bigquery", + "permanent": true + }, + { + "source": "/tutorial/setting-up", + "destination": "/quickstarts", + "permanent": true + }, + { + "source": "/tutorial/test-and-document-your-project", + "destination": "/guides/getting-started/building-your-first-project/test-and-document-your-project", + "permanent": true + }, + { + "source": "/tutorial/build-your-first-models", + "destination": "/guides/getting-started/building-your-first-project/build-your-first-models", + "permanent": true + }, + { + "source": "/tutorial/deploy-your-project", + "destination": "/guides/getting-started/building-your-first-project/schedule-a-job", + "permanent": true + }, + { + "source": "/tutorial/using-jinja", + "destination": "/guides/getting-started/learning-more/using-jinja", + "permanent": true + }, + { + "source": "/tutorial/2b-create-a-project-dbt-cli", + "destination": "/guides/getting-started/learning-more/getting-started-dbt-core", + "permanent": true + }, + { + "source": "/tutorial/create-a-project-dbt-cli", + "destination": "/guides/getting-started/learning-more/getting-started-dbt-core", + "permanent": true + }, + { + "source": "/tutorial/2a-create-a-project-dbt-cloud", + "destination": "/guides/getting-started", + "permanent": true + }, + { + "source": "/tutorial/create-a-project-dbt-cloud", + "destination": "/guides/getting-started", + "permanent": true + }, + { + "source": "/tutorial/getting-started", + "destination": "/guides/getting-started", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-changelog", + "destination": "/docs/dbt-cloud/release-notes", + "permanent": true + }, + { + "source": "/faqs/all", + "destination": "/docs/faqs", + "permanent": true + }, + { + "source": "/faqs/:slug", + "destination": "/docs/faqs/:slug*", + "permanent": true + }, + { + "source": "/faqs/dbt-jinja-functions", + "destination": "/reference/dbt-jinja-functions", + "permanent": true + }, + { + "source": "/tutorial/learning-more/:slug", + "destination": "/guides/getting-started/learning-more/:slug*", + "permanent": true + }, + { + "source": "/tutorial/getting-set-up/:slug", + "destination": "/guides/getting-started/getting-set-up/:slug*", + "permanent": true + }, + { + "source": "/tutorial/building-your-first-project/:slug", + "destination": "/guides/getting-started/building-your-first-project/:slug*", + "permanent": true + }, + { + "source": "/tutorial/refactoring-legacy-sql", + "destination": "/guides/migration/tools/refactoring-legacy-sql", + "permanent": true + }, + { + "source": "/blog/change-data-capture-metrics", + "destination": "/blog/change-data-capture", + "permanent": true + }, + { + "source": "/blog/intelligent-slim-ci", + "destination": "/docs/deploy/continuous-integration", + "permanent": true + }, + { + "source": "/blog/model-timing-tab", + "destination": "/blog/how-we-shaved-90-minutes-off-model", + "permanent": true + }, + { + "source": 
"/reference/warehouse-setups/resource-configs/materialize-configs/indexes", + "destination": "/reference/resource-configs/materialize-configs#indexes", + "permanent": true + }, + { + "source": "/docs/build/building-models", + "destination": "/docs/build/models", + "permanent": true + }, + { + "source": "/docs/build/bigquery-profile", + "destination": "/reference/resource-configs/bigquery-configs", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/bigquery-setup", + "destination": "/reference/warehouse-setups/bigquery-setup", + "permanent": true + }, + { + "source": "/date-trunc-sql", + "destination": "/blog/date-trunc-sql", + "permanent": true + }, + { + "source": "/docs/using-hooks", + "destination": "/", + "permanent": true + }, + { + "source": "/blog/how-we-structure-our-dbt-projects", + "destination": "/guides/best-practices/how-we-structure/1-guide-overview", + "permanent": true + }, + { + "source": "/data-testing-why-you-need-it-and-how-to-get-started", + "destination": "https://www.getdbt.com/blog/data-quality-testing/", + "permanent": true + }, + { + "source": "/docs/profile", + "destination": "/docs/supported-data-platforms", + "permanent": true + }, + { + "source": "/docs/available-adapters", + "destination": "/docs/supported-data-platforms", + "permanent": true + }, + { + "source": "/docs/supported-databases", + "destination": "/docs/supported-data-platforms", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-14-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-15-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-16-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-17-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-18-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-19-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-from-0-10-to-0-11", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-014", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/upgrading-to-014", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/upgrading-to-0-14-1", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/upgrading-to-0-16-0", + "destination": "/guides/migration/versions", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-20-0", + "destination": "/guides/migration/versions/upgrading-to-v0.20", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-0-21-0", + "destination": "/guides/migration/versions/upgrading-to-v0.21", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-1-0-0", + "destination": "/guides/migration/versions/upgrading-to-v1.0", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/upgrading-to-v1.0", + "destination": "/guides/migration/versions/upgrading-to-v1.0", + "permanent": true + }, + { + 
"source": "/docs/guides/getting-help", + "destination": "/guides/legacy/getting-help", + "permanent": true + }, + { + "source": "/docs/guides/migration-guide/:slug", + "destination": "/guides/migration/versions/:slug*", + "permanent": true + }, + { + "source": "/docs/guides/:slug", + "destination": "/guides/legacy/:slug*", + "permanent": true + }, + { + "source": "/guides/best-practices/environment-setup/1-env-guide-overview", + "destination": "/guides/orchestration/set-up-ci/overview", + "permanent": true + }, + { + "source": "/guides/best-practices/environment-setup/2-one-deployment-environment", + "destination": "/guides/orchestration/set-up-ci/in-15-minutes", + "permanent": true + }, + { + "source": "/guides/best-practices/environment-setup/3-many-deployment-environments", + "destination": "/guides/orchestration/set-up-ci/multiple-environments", + "permanent": true + }, + { + "source": "/docs/contributing/what-are-adapters", + "destination": "/guides/advanced/adapter-development/1-what-are-adapters", + "permanent": true + }, + { + "source": "/docs/contributing/adapter-development/1-what-are-adapters", + "destination": "/guides/advanced/adapter-development/1-what-are-adapters", + "permanent": true + }, + { + "source": "/docs/contributing/prerequisites-for-a-new-adapter", + "destination": "/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/adapter-development/2-prerequisites-for-a-new-adapter", + "destination": "/guides/advanced/adapter-development/2-prerequisites-for-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/building-a-new-adapter", + "destination": "/guides/advanced/adapter-development/3-building-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/adapter-development/3-building-a-new-adapter", + "destination": "/guides/advanced/adapter-development/3-building-a-new-adapter", + "permanent": true + }, + { + "source": "/v0.13/docs/building-a-new-adapter", + "destination": "/guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/building-a-new-adapter", + "destination": "/guides/advanced/adapter-development/3-building-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/testing-a-new-adapter", + "destination": "/guides/advanced/adapter-development/4-testing-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/adapter-development/4-testing-a-new-adapter", + "destination": "/guides/advanced/adapter-development/4-testing-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/documenting-a-new-adapter", + "destination": "/guides/advanced/adapter-development/5-documenting-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/adapter-development/5-documenting-a-new-adapter", + "destination": "/guides/advanced/adapter-development/5-documenting-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/promoting-a-new-adapter", + "destination": "/guides/advanced/adapter-development/6-promoting-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/adapter-development/6-promoting-a-new-adapter", + "destination": "/guides/advanced/adapter-development/6-promoting-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/contributing/verifying-a-new-adapter", + "destination": "/guides/advanced/adapter-development/7-verifying-a-new-adapter", + "permanent": true + }, 
+ { + "source": "/docs/contributing/adapter-development/7-verifying-a-new-adapter", + "destination": "/guides/advanced/adapter-development/7-verifying-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/using-dbt-cloud/cloud-metrics-layer", + "destination": "/docs/use-dbt-semantic-layer/dbt-semantic-layer", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/impala-profile", + "destination": "/reference/warehouse-setups/impala-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/exasol-profile", + "destination": "/reference/warehouse-setups/exasol-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/layer-profile", + "destination": "/reference/warehouse-setups/layer-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/postgres-profile", + "destination": "/reference/warehouse-setups/postgres-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/greenplum-profile", + "destination": "/reference/warehouse-setups/greenplum-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/alloydb-profile", + "destination": "/reference/warehouse-setups/alloydb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/azuresynapse-profile", + "destination": "/reference/warehouse-setups/azuresynapse-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/snowflake-profile", + "destination": "/reference/warehouse-setups/snowflake-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/rockset-profile", + "destination": "/reference/warehouse-setups/rockset-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/trino-profile", + "destination": "/reference/warehouse-setups/trino-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/glue-profile", + "destination": "/reference/warehouse-setups/glue-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/duckdb-profile", + "destination": "/reference/warehouse-setups/duckdb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/vertica-profile", + "destination": "/reference/warehouse-setups/vertica-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/clickhouse-profile", + "destination": "/reference/warehouse-setups/clickhouse-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/athena-profile", + "destination": "/reference/warehouse-setups/athena-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/iomete-profile", + "destination": "/reference/warehouse-setups/iomete-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/mssql-profile", + "destination": "/reference/warehouse-setups/mssql-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/tidb-profile", + "destination": "/reference/warehouse-setups/tidb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/materialize-profile", + "destination": "/reference/warehouse-setups/materialize-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/redshift-profile", + "destination": "/reference/warehouse-setups/redshift-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/databricks-profile", + "destination": "/reference/warehouse-setups/databricks-setup", + "permanent": true + 
}, + { + "source": "/reference/warehouse-profiles/bigquery-profile", + "destination": "/reference/warehouse-setups/bigquery-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/dremio-profile", + "destination": "/reference/warehouse-setups/dremio-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/oracle-profile", + "destination": "/reference/warehouse-setups/oracle-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/teradata-profile", + "destination": "/reference/warehouse-setups/teradata-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/singlestore-profile", + "destination": "/reference/warehouse-setups/singlestore-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/sqlite-profile", + "destination": "/reference/warehouse-setups/sqlite-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/spark-profile", + "destination": "/reference/warehouse-setups/spark-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/mindsdb-profile", + "destination": "/reference/warehouse-setups/mindsdb-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/ibmdb2-profile", + "destination": "/reference/warehouse-setups/ibmdb2-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/firebolt-profile", + "destination": "/reference/warehouse-setups/firebolt-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/mysql-profile", + "destination": "/reference/warehouse-setups/mysql-setup", + "permanent": true + }, + { + "source": "/reference/warehouse-profiles/hive-profile", + "destination": "/reference/warehouse-setups/hive-setup", + "permanent": true + }, + { + "source": "/reference/using-sources", + "destination": "/docs/build/sources", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-ide/the-dbt-ide", + "destination": "/docs/getting-started/dbt-cloud-features", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-ide/handling-merge-conflicts", + "destination": "/docs/collaborate/git/resolve-merge-conflicts", + "permanent": true + }, + { + "source": "/dbt-cloud/cloud-ide/viewing-docs-in-the-ide", + "destination": "/docs/getting-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/cloud-ide/ide-beta", + "destination": "/docs/getting-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/docs/running-a-dbt-project/using-the-dbt-ide", + "destination": "/docs/getting-started/develop-in-the-cloud", + "permanent": true + }, + { + "source": "/dbt-cloud/cloud-ide/the-ide-git-button", + "destination": "/docs/collaborate/git/version-control-basics", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/setting-up", + "destination": "/guides/legacy/building-packages", + "permanent": true + }, + { + "source": "/docs/building-a-dbt-project/dbt-jinja-functions", + "destination": "/reference/dbt-jinja-functions", + "permanent": true + }, + { + "source": "/docs/contributing/long-lived-discussions-guidelines", + "destination": "/community/resources/forum-guidelines", + "permanent": true + }, + { + "source": "/docs/guides/legacy/navigating-the-docs.md", + "destination": "/community/contribute", + "permanent": true + }, + { + "source": "/community/writing-on-discourse", + "destination": "/community/contributing/contributing-online-community", + "permanent": true + }, + { + 
"source": "/community/contributing", + "destination": "/community/contribute", + "permanent": true + }, + { + "source": "/docs/contributing/contributor-license-agreements", + "destination": "/community/resources/contributor-license-agreements", + "permanent": true + }, + { + "source": "/community/maintaining-a-channel", + "destination": "/community/resources/maintaining-a-channel", + "permanent": true + }, + { + "source": "/docs/contributing/oss-expectations", + "destination": "/community/resources/oss-expectations", + "permanent": true + }, + { + "source": "/docs/slack-rules-of-the-road", + "destination": "/community/resources/community-rules-of-the-road", + "permanent": true + }, + { + "source": "/docs/contributing/slack-rules-of-the-road", + "destination": "/community/resources/community-rules-of-the-road", + "permanent": true + }, + { + "source": "/community/resources/slack-rules-of-the-road", + "destination": "/community/resources/community-rules-of-the-road", + "permanent": true + }, + { + "source": "/blog/getting-started-with-the-dbt-semantic-layer", + "destination": "/blog/understanding-the-components-of-the-dbt-semantic-layer", + "permanent": true + }, + { + "source": "/docs/getting-started/develop-in-the-cloud#creating-a-development-environment", + "destination": "/docs/get-started/develop-in-the-cloud#set-up-and-access-the-cloud-ide", + "permanent": true + }, + { + "source": "/docs/cloud-developer-ide", + "destination": "/docs/build/custom-target-names#dbt-cloud-ide", + "permanent": true + }, + { + "source": "/website/docs/docs/contributing/building-a-new-adapter.md", + "destination": "/guides/dbt-ecosystem/adapter-development/3-building-a-new-adapter", + "permanent": true + }, + { + "source": "/guides/legacy/getting-help", + "destination": "/community/resources/getting-help", + "permanent": true + }, + { + "source": "/blog/tags/release-notes", + "destination": "/docs/dbt-versions/dbt-cloud-release-notes", + "permanent": true + }, + { + "source": "/faqs/dbt-jinja-functions", + "destination": "/reference/dbt-jinja-functions", + "permanent": true + }, + { + "source": "/website/docs/docs/contributing/documenting-a-new-adapter.md", + "destination": "/guides/dbt-ecosystem/adapter-development/5-documenting-a-new-adapter", + "permanent": true + }, + { + "source": "/docs/docs/contributing/documenting-a-new-adapter", + "destination": "/docs/contributing/documenting-a-new-adapter", + "permanent": true + }, + { + "source": "/v0.8/reference", + "destination": "/", + "permanent": true + }, + { + "source": "/v0.10/reference", + "destination": "/", + "permanent": true + }, + { + "source": "/v0.12/reference", + "destination": "/", + "permanent": true + }, + { + "source": "/v0.13/reference", + "destination": "/", + "permanent": true + }, + { + "source": "/v0.13/docs/requiring-dbt-versions", + "destination": "/", + "permanent": true + }, + { + "source": "/v0.14/docs/cloud-developer-ide", + "destination": "/", + "permanent": true + }, + { + "source": "/v0.15/docs/cloud-import-a-project-by-git-url", + "destination": "/docs/cloud/git/import-a-project-by-git-url", + "permanent": true + }, + { + "source": "/v0.15/docs/configure-your-profile", + "destination": "/docs/core/connection-profiles", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/dependencies", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/faqs", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": 
"/docs/dbt-cloud/on-premises/index", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/installation", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/prerequisites", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/setup", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/system-requirements", + "destination": "/docs/deploy/single-tenant", + "permanent": true + }, + { + "source": "/docs/dbt-cloud/on-premises/upgrading-kots", + "destination": "/docs/deploy/single-tenant", + "permanent": true + } + ] +}