# workspace.tf
resource "azurerm_databricks_workspace" "this" {
name = "${local.prefix}-workspace"
resource_group_name = azurerm_resource_group.this.name
location = azurerm_resource_group.this.location
sku = "premium"
tags = local.tags
customer_managed_key_enabled = true
custom_parameters {
virtual_network_id = azurerm_virtual_network.this.id
private_subnet_name = azurerm_subnet.private.name
public_subnet_name = azurerm_subnet.public.name
public_subnet_network_security_group_association_id = azurerm_subnet_network_security_group_association.public.id
private_subnet_network_security_group_association_id = azurerm_subnet_network_security_group_association.private.id
storage_account_name = local.dbfsname
}
# We need this, otherwise destroy doesn't cleanup things correctly
depends_on = [
azurerm_subnet_network_security_group_association.public,
azurerm_subnet_network_security_group_association.private
]
}
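
# The subnets referenced in custom_parameters above are defined elsewhere in
# this example. As a rough sketch (the name and address math here are
# assumptions, not part of this file): Databricks VNet injection requires
# both subnets to be delegated to Microsoft.Databricks/workspaces, e.g.:
#
# resource "azurerm_subnet" "public" {
#   name                 = "${local.prefix}-public"
#   resource_group_name  = azurerm_resource_group.this.name
#   virtual_network_name = azurerm_virtual_network.this.name
#   address_prefixes     = [cidrsubnet(local.cidr, 3, 0)]
#
#   delegation {
#     name = "databricks"
#     service_delegation {
#       name = "Microsoft.Databricks/workspaces"
#       actions = [
#         "Microsoft.Network/virtualNetworks/subnets/join/action",
#         "Microsoft.Network/virtualNetworks/subnets/prepareNetworkPolicies/action",
#         "Microsoft.Network/virtualNetworks/subnets/unprepareNetworkPolicies/action",
#       ]
#     }
#   }
# }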
resource "databricks_cluster" "coldstart" {
cluster_name = "cluster - external metastore"
spark_version = data.databricks_spark_version.latest_lts.id
node_type_id = var.node_type
data_security_mode = "SINGLE_USER"
single_user_name = local.my_username
autotermination_minutes = 30
autoscale {
min_workers = 1
max_workers = 1
}
spark_conf = {
"spark.hadoop.javax.jdo.option.ConnectionDriverName" : "com.microsoft.sqlserver.jdbc.SQLServerDriver",
"spark.hadoop.javax.jdo.option.ConnectionURL" : "{{secrets/hive/HIVE-URL}}",
"spark.hadoop.metastore.catalog.default" : "hive",
"spark.databricks.delta.preview.enabled" : true,
"spark.hadoop.javax.jdo.option.ConnectionUserName" : "{{secrets/hive/HIVE-USER}}",
"datanucleus.fixedDatastore" : true,
"spark.hadoop.javax.jdo.option.ConnectionPassword" : "{{secrets/hive/HIVE-PASSWORD}}",
"datanucleus.autoCreateSchema" : false,
"spark.sql.hive.metastore.jars" : "/dbfs/tmp/hive/3-1-0/lib/*",
"spark.sql.hive.metastore.version" : "3.1.0",
}
spark_env_vars = {
"HIVE_PASSWORD" = "{{secrets/hive/HIVE-PASSWORD}}",
"HIVE_USER" = "{{secrets/hive/HIVE-USER}}",
"HIVE_URL" = "{{secrets/hive/HIVE-URL}}",
}
depends_on = [
databricks_secret_scope.kv,
azurerm_key_vault_secret.hiveuser,
azurerm_key_vault_secret.hivepwd,
azurerm_key_vault_secret.hiveurl
]
}
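
# The secret scope and Spark version data source referenced above live
# elsewhere in this example. As a sketch (the Key Vault resource name
# azurerm_key_vault.this is an assumption): the "hive" scope is backed by
# Azure Key Vault, which is how the {{secrets/hive/HIVE-*}} references
# resolve, and latest_lts selects the newest LTS Databricks runtime:
#
# resource "databricks_secret_scope" "kv" {
#   name = "hive"
#
#   keyvault_metadata {
#     resource_id = azurerm_key_vault.this.id
#     dns_name    = azurerm_key_vault.this.vault_uri
#   }
# }
#
# data "databricks_spark_version" "latest_lts" {
#   long_term_support = true
# }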