diff --git a/website/docs/docs/core/connect-data-platform/spark-setup.md b/website/docs/docs/core/connect-data-platform/spark-setup.md
index 93595cea3f6..9d9e0c9d5fb 100644
--- a/website/docs/docs/core/connect-data-platform/spark-setup.md
+++ b/website/docs/docs/core/connect-data-platform/spark-setup.md
@@ -20,10 +20,6 @@ meta:
-:::note
-See [Databricks setup](#databricks-setup) for the Databricks version of this page.
-:::
-
import SetUpPages from '/snippets/_setup-pages-intro.md';
@@ -204,6 +200,7 @@ connect_retries: 3
+
### Server side configuration
Spark can be customized using [Application Properties](https://spark.apache.org/docs/latest/configuration.html). Using these properties, the execution can be customized, for example, to allocate more memory to the driver process. The Spark SQL runtime can also be set through these properties, which allows the user, for example, to [set a Spark catalog](https://spark.apache.org/docs/latest/configuration.html#spark-sql).
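+
+For example, here's a minimal sketch of server-side properties in a `spark-defaults.conf` file (the memory value, catalog name, and implementation class are illustrative assumptions; adjust them for your deployment):
+
+```
+# Allocate more memory to the driver process
+spark.driver.memory    4g
+
+# Register an additional Spark SQL catalog. The catalog name (my_catalog)
+# and implementation class are assumptions; substitute your own.
+spark.sql.catalog.my_catalog    org.apache.iceberg.spark.SparkCatalog
+```
+
+Properties in `spark-defaults.conf` are read when the Spark application starts, so changes take effect after the server process is restarted.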
diff --git a/website/sidebars.js b/website/sidebars.js
index a82b2e06ec2..23a58360bbc 100644
--- a/website/sidebars.js
+++ b/website/sidebars.js
@@ -135,7 +135,6 @@ const sidebarSettings = {
"docs/cloud/secure/redshift-privatelink",
"docs/cloud/secure/postgres-privatelink",
"docs/cloud/secure/vcs-privatelink",
- "docs/cloud/secure/ip-restrictions",
],
}, // PrivateLink
"docs/cloud/billing",
diff --git a/website/snippets/dbt-databricks-for-databricks.md b/website/snippets/dbt-databricks-for-databricks.md
index f1c5ec84af1..1e18da33d42 100644
--- a/website/snippets/dbt-databricks-for-databricks.md
+++ b/website/snippets/dbt-databricks-for-databricks.md
@@ -1,4 +1,5 @@
:::info If you're using Databricks, use `dbt-databricks`
-If you're using Databricks, the `dbt-databricks` adapter is recommended over `dbt-spark`.
-If you're still using dbt-spark with Databricks consider [migrating from the dbt-spark adapter to the dbt-databricks adapter](/guides/migrate-from-spark-to-databricks).
+If you're using Databricks, the `dbt-databricks` adapter is recommended over `dbt-spark`. If you're still using `dbt-spark` with Databricks, consider [migrating from the dbt-spark adapter to the dbt-databricks adapter](/guides/migrate-from-spark-to-databricks).
+
+For the Databricks version of this page, refer to [Databricks setup](/docs/core/connect-data-platform/databricks-setup).
:::
diff --git a/website/snippets/warehouse-setups-cloud-callout.md b/website/snippets/warehouse-setups-cloud-callout.md
index 3bc1147a637..56edd3a96ea 100644
--- a/website/snippets/warehouse-setups-cloud-callout.md
+++ b/website/snippets/warehouse-setups-cloud-callout.md
@@ -1,3 +1,3 @@
-:::info `profiles.yml` file is for CLI users only
-If you're using dbt Cloud, you don't need to create a `profiles.yml` file. This file is only for CLI users. To connect your data platform to dbt Cloud, refer to [About data platforms](/docs/cloud/connect-data-platform/about-connections).
+:::info The `profiles.yml` file is for dbt Core users only
+If you're using dbt Cloud, you don't need to create a `profiles.yml` file. This file is only for dbt Core users. To connect your data platform to dbt Cloud, refer to [About data platforms](/docs/cloud/connect-data-platform/about-connections).
:::