# Files
# qmi-cloud-tf-modules/databases/dbricks/main.tf
# 2022-10-28 12:53:46 +02:00
#
# 138 lines
# 2.8 KiB
# HCL
# Pin the Terraform core version and the Databricks provider this module
# is written against.
terraform {
required_version = ">= 0.14"
required_providers {
databricks = {
source = "databricks/databricks"
version = ">= 1.6.0"
}
}
}
/*resource "databricks_workspace_conf" "this" {
custom_config = {
"enableIpAccessLists" : true
}
}
resource "databricks_ip_access_list" "allowed-list" {
label = "allow_in"
list_type = "ALLOW"
ip_addresses = [
"52.249.189.38/32",
"13.67.39.86/32",
"20.67.110.207/32",
"14.98.59.168/29",
"182.74.33.8/29",
"188.65.156.32/28",
"212.73.252.96/29",
"194.90.96.176/29",
"213.57.84.160/29",
"4.4.97.104/29",
"206.196.17.32/27",
#QCS
"18.205.71.36/32",
"18.232.32.199/32",
"34.237.68.254/32",
"34.247.21.179/32",
"52.31.212.214/32",
"54.154.95.18/32",
"13.210.43.241/32",
"13.236.104.42/32",
"13.236.206.172/32",
"18.138.163.172/32",
"18.142.157.182/32",
"54.179.13.251/32"
]
}*/
# Look up the workspace's built-in "admins" group so the group-member
# resources below can add users to it.
data "databricks_group" "admins" {
display_name = "admins"
}
# Provision the shared "aor@qlik.com" account, skipped when the invoking
# user is that very account (creating it twice would conflict).
resource "databricks_user" "aor" {
  count = lower(var.user_email) != "aor@qlik.com" ? 1 : 0

  user_name             = "aor@qlik.com"
  display_name          = "Manuel Romero"
  allow_cluster_create  = true
  databricks_sql_access = true
}
# Workspace account for the invoking user, with cluster-creation and
# Databricks SQL entitlements.
resource "databricks_user" "me" {
  user_name             = var.user_email
  display_name          = var.user_id
  allow_cluster_create  = true
  databricks_sql_access = true
}
# Add the invoking user to the workspace's built-in "admins" group.
resource "databricks_group_member" "i-am-admin" {
group_id = data.databricks_group.admins.id
member_id = databricks_user.me.id
}
# Add the "aor" account to "admins" as well; guarded by the same
# condition as databricks_user.aor so the index reference [0] is valid
# whenever this resource is created.
resource "databricks_group_member" "i-am-admin-aor" {
count = lower(var.user_email) != "aor@qlik.com"? 1 : 0
group_id = data.databricks_group.admins.id
member_id = databricks_user.aor[0].id
}
# Small single-cluster SQL warehouse, created only on premium-SKU
# workspaces (SQL warehouses are not available on the standard SKU).
resource "databricks_sql_endpoint" "sqlep" {
  count = (var.sku == "premium") ? 1 : 0

  name             = "qmi-sqlwh-${databricks_user.me.id}"
  cluster_size     = "Small"
  max_num_clusters = 1
}
# Personal access token for automation; both user resources must exist
# before the token is minted.
resource "databricks_token" "pat" {
  depends_on = [
    databricks_user.me,
    databricks_user.aor,
  ]

  comment          = "qmi"
  lifetime_seconds = 8640000 # 100 days
}
# Autoscaling all-purpose cluster, created only when a cluster name is
# supplied by the caller.
resource "databricks_cluster" "dbrickscluster" {
count = (var.cluster_name != null)? 1 : 0
cluster_name = "cluster-${var.cluster_name}"
spark_version = var.spark_version
node_type_id = var.node_type_id
# NOTE(review): the storage account access key is injected into
# spark_conf in plaintext and will appear in state and in the cluster
# config UI; consider a Databricks secret scope instead — confirm.
spark_conf = {
"spark.hadoop.hive.server2.enable.doAs": false,
"spark.databricks.delta.preview.enabled": true,
"spark.databricks.repl.allowedLanguages": "sql,python,r",
"spark.databricks.cluster.profile": "serverless",
"fs.azure.account.key.${var.storage_account_name}.dfs.core.windows.net": var.storage_account_accesskey
}
spark_env_vars = {
"PYSPARK_PYTHON": "/databricks/python3/bin/python3"
}
# Scale between 1 and 4 workers; shut down after 2 idle hours.
autoscale {
min_workers = 1
max_workers = 4
}
autotermination_minutes = 120
}