Files
2025-07-11 11:14:50 +02:00

146 lines
3.6 KiB
HCL

# Terraform settings: pin a minimum core version and the Databricks provider.
terraform {
  required_version = ">= 0.14"

  required_providers {
    databricks = {
      source  = "databricks/databricks"
      version = ">= 1.13.0"
    }
  }
}
locals {
  # Upper-cased cluster name; reused when naming the SQL warehouse below.
  cluster_nameUpper = upper(var.cluster_name)
}
/*resource "databricks_workspace_conf" "this" {
custom_config = {
"enableIpAccessLists" : true
}
}
*/
# Look up the workspace "admins" group so users can be added to it below.
data "databricks_group" "admins" {
  display_name = "admins"
}
# Ensure the aor@qlik.com account exists — skipped when the deploying user
# already is that account (creating it twice would conflict).
resource "databricks_user" "aor" {
  count = lower(var.user_email) != "aor@qlik.com" ? 1 : 0

  user_name             = "aor@qlik.com"
  display_name          = "Manuel Romero"
  allow_cluster_create  = true
  databricks_sql_access = true
}
# Workspace account for the user running this deployment.
resource "databricks_user" "me" {
  user_name             = var.user_email
  display_name          = var.user_id
  allow_cluster_create  = true
  databricks_sql_access = true
}
# Add the deploying user to the workspace admins group.
resource "databricks_group_member" "i-am-admin" {
  group_id  = data.databricks_group.admins.id
  member_id = databricks_user.me.id
}
# Add aor@qlik.com to the admins group as well — only when that user was
# created above (same guard condition as databricks_user.aor).
resource "databricks_group_member" "i-am-admin-aor" {
  count = lower(var.user_email) != "aor@qlik.com" ? 1 : 0

  group_id  = data.databricks_group.admins.id
  member_id = databricks_user.aor[0].id
}
# Secret scope holding the app-registration credential (premium SKU only).
resource "databricks_secret_scope" "scopeapp" {
  count = (var.sku == "premium") ? 1 : 0

  name = "${var.cluster_name}scope"
}
# App-registration client secret, stored under "accesskey" in the scope
# created above (premium SKU only).
resource "databricks_secret" "publishing_api" {
  count = (var.sku == "premium") ? 1 : 0

  scope        = databricks_secret_scope.scopeapp[0].name
  key          = "accesskey"
  string_value = var.app_reg_secret
}
# Single-cluster SQL warehouse, sized by var.cluster_size (premium SKU only).
resource "databricks_sql_endpoint" "sqlep" {
  count = (var.sku == "premium") ? 1 : 0

  name             = "${local.cluster_nameUpper} SQL warehouse"
  cluster_size     = var.cluster_size
  max_num_clusters = 1
}
# Workspace-wide SQL warehouse config (premium SKU only): enforces data
# access control and configures ABFS OAuth access to the storage account via
# the app registration. The client secret is resolved at query time from the
# "${var.cluster_name}scope" secret scope created above — the template path
# must match that scope's name and the "accesskey" key exactly.
resource "databricks_sql_global_config" "this" {
count = (var.sku == "premium")? 1 : 0
security_policy = "DATA_ACCESS_CONTROL"
data_access_config = {
"spark.hadoop.fs.azure.account.auth.type.${var.storage_account_name}.dfs.core.windows.net" : "OAuth",
"spark.hadoop.fs.azure.account.oauth.provider.type.${var.storage_account_name}.dfs.core.windows.net": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
"spark.hadoop.fs.azure.account.oauth2.client.id.${var.storage_account_name}.dfs.core.windows.net" : var.app_reg_id,
"spark.hadoop.fs.azure.account.oauth2.client.secret.${var.storage_account_name}.dfs.core.windows.net" : "{{secrets/${var.cluster_name}scope/accesskey}}",
# NOTE(review): Azure AD tenant ID is hard-coded in the endpoint URL below —
# consider lifting it into a variable so the module works in other tenants.
"spark.hadoop.fs.azure.account.oauth2.client.endpoint.${var.storage_account_name}.dfs.core.windows.net": "https://login.microsoftonline.com/c21eeb5f-f5a6-44e8-a997-124f2f7a497c/oauth2/token"
}
enable_serverless_compute = false
sql_config_params = {}
}
# Personal access token, minted only after both user accounts exist.
resource "databricks_token" "pat" {
  depends_on = [
    databricks_user.me,
    databricks_user.aor,
  ]

  comment          = "qmi"
  lifetime_seconds = 8640000 # 100 days
}
# All-purpose cluster, created only when a cluster name is supplied.
# Runs with the "serverless" cluster profile, languages restricted to
# sql/python/r, Delta auto-optimize enabled, autoscaling 1-4 workers.
resource "databricks_cluster" "dbrickscluster" {
count = (var.cluster_name != null)? 1 : 0
cluster_name = "cluster-${var.cluster_name}"
spark_version = var.spark_version
node_type_id = var.node_type_id
spark_conf = {
"spark.hadoop.hive.server2.enable.doAs": false,
"spark.databricks.delta.preview.enabled": true,
"spark.databricks.repl.allowedLanguages": "sql,python,r",
"spark.databricks.cluster.profile": "serverless",
# NOTE(review): the storage account key is placed in cluster config as
# plaintext (visible in the workspace UI and Terraform state) — consider a
# secret reference ("{{secrets/...}}") like the SQL global config uses.
"fs.azure.account.key.${var.storage_account_name}.dfs.core.windows.net": var.storage_account_accesskey,
"spark.databricks.delta.properties.defaults.autoOptimize.autoCompact": true,
"spark.databricks.delta.properties.defaults.autoOptimize.optimizeWrite": true
}
spark_env_vars = {
"PYSPARK_PYTHON": "/databricks/python3/bin/python3"
}
autoscale {
min_workers = 1
max_workers = 4
}
# Terminate the cluster after 45 idle minutes to save cost.
autotermination_minutes = 45
}