terraform {
  required_version = ">= 0.14"

  required_providers {
    databricks = {
      source  = "databricks/databricks"
      version = ">= 0.3.0"
    }
  }
}
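
# The input variables referenced throughout this file are sketched below as
# they would typically appear in a variables.tf; the types and defaults are
# assumptions inferred from usage, not part of the original module.
variable "enabled" {
  type = bool
}

variable "create_cluster" {
  type = bool
}

variable "user_id" {
  # Apparently expected to hold two space-separated words; see locals below.
  type = string
}

variable "user_email" {
  type = string
}

variable "cluster_name" {
  type    = string
  default = null
}

variable "location" {
  type = string
}

variable "resource_group_name" {
  type = string
}

variable "app_registration_principal_id" {
  type = string
}

variable "storage_account_name" {
  type = string
}

variable "storage_account_accesskey" {
  type      = string
  sensitive = true
}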
locals {
  # Build a short name suffix from the user id: strip non-alphabetic
  # characters from each word, then take the first three letters of the
  # first word plus the first and last letters of the second word.
  myRegex    = "/[^[:alpha:]]/"
  splitLower = split(" ", lower(var.user_id))
  np0        = replace(element(local.splitLower, 0), local.myRegex, "")
  np1        = replace(element(local.splitLower, 1), local.myRegex, "")
  c_n1       = substr(local.np0, 0, 3)
  c_n2       = substr(local.np1, 0, 1)
  c_n3       = substr(strrev(local.np1), 0, 1)

  # Prefer an explicitly supplied cluster name over the derived one.
  cluster_name = var.cluster_name != null ? var.cluster_name : "${local.c_n1}${local.c_n2}${local.c_n3}"
}
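
# Worked example (illustrative values, not from the original): with
# var.user_id = "John Doe", splitLower is ["john", "doe"], np0 = "john",
# np1 = "doe", and the derived cluster_name is "joh" + "d" + "e" = "johde".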
resource "azurerm_databricks_workspace" "dbricksws" {
count = var.enabled == true? 1 : 0
location = var.location
name = "${local.cluster_name}-qlikdbricks-ws"
resource_group_name = var.resource_group_name
sku = "standard"
tags = {
Deployment = "QMI PoC"
"Cost Center" = "3100"
QMI_user = var.user_id
}
}
resource "azurerm_role_assignment" "contributor-dbricks" {
count = var.enabled == true? 1 : 0
scope = azurerm_databricks_workspace.dbricksws[0].id
role_definition_name = "Contributor"
principal_id = var.app_registration_principal_id
}
provider "databricks" {
alias = "mws"
azure_workspace_resource_id = var.enabled == true? azurerm_databricks_workspace.dbricksws[0].id : null
azure_client_id = var.enabled == true? "9b4761fd-4823-4f9d-ab3a-a95af38e7c29" : null
azure_client_secret = var.enabled == true? "FWH7Q~ByC.U5zNh0BaIDdK_poyxoy4SxW8hi1" : null
azure_tenant_id = var.enabled == true? "c21eeb5f-f5a6-44e8-a997-124f2f7a497c" : null
}
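
# A safer variant, sketched on the assumption that the credentials are passed
# in as sensitive input variables; the var.databricks_* names are hypothetical:
#
#   provider "databricks" {
#     alias                       = "mws"
#     azure_workspace_resource_id = var.enabled ? azurerm_databricks_workspace.dbricksws[0].id : null
#     azure_client_id             = var.databricks_client_id
#     azure_client_secret        = var.databricks_client_secret
#     azure_tenant_id             = var.databricks_tenant_id
#   }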
/*resource "databricks_workspace_conf" "this" {
count = var.enabled == true? 1 : 0
provider = databricks.mws
custom_config = {
"enableIpAccessLists" : true
}
}
resource "databricks_ip_access_list" "allowed-list" {
count = var.enabled == true? 1 : 0
provider = databricks.mws
label = "allow_in"
list_type = "ALLOW"
ip_addresses = [
"52.249.189.38/32",
"13.67.39.86/32",
"20.67.110.207/32",
"14.98.59.168/29",
"182.74.33.8/29",
"188.65.156.32/28",
"212.73.252.96/29",
"194.90.96.176/29",
"213.57.84.160/29",
"4.4.97.104/29",
"206.196.17.32/27",
#QCS
"18.205.71.36/32",
"18.232.32.199/32",
"34.237.68.254/32",
"34.247.21.179/32",
"52.31.212.214/32",
"54.154.95.18/32",
"13.210.43.241/32",
"13.236.104.42/32",
"13.236.206.172/32",
"18.138.163.172/32",
"18.142.157.182/32",
"54.179.13.251/32"
]
}*/
resource "databricks_user" "me" {
provider = databricks.mws
count = var.enabled == true? 1 : 0
depends_on = [
azurerm_role_assignment.contributor-dbricks
]
user_name = var.user_email
display_name = var.user_id
}
resource "databricks_token" "pat" {
provider = databricks.mws
count = var.enabled == true? 1 : 0
depends_on = [
azurerm_role_assignment.contributor-dbricks,
databricks_user.me
]
comment = "qmi"
lifetime_seconds = 8640000
}
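
# A minimal sketch (not in the original) that exposes the generated PAT to
# callers; the databricks_token resource exports the secret as token_value,
# so the output must be marked sensitive.
output "databricks_pat" {
  value     = var.enabled ? databricks_token.pat[0].token_value : null
  sensitive = true
}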
resource "databricks_cluster" "dbrickscluster" {
provider = databricks.mws
count = (var.enabled == true && var.create_cluster == true)? 1 : 0
depends_on = [
azurerm_role_assignment.contributor-dbricks
]
cluster_name = "cluster-${local.cluster_name}"
spark_version = "7.3.x-scala2.12"
node_type_id = "Standard_DS3_v2"
spark_conf = {
"spark.hadoop.hive.server2.enable.doAs": false,
"spark.databricks.delta.preview.enabled": true,
"spark.databricks.repl.allowedLanguages": "sql,python,r",
"spark.databricks.cluster.profile": "serverless",
"fs.azure.account.key.${var.storage_account_name}.dfs.core.windows.net": var.storage_account_accesskey
}
spark_env_vars = {
"PYSPARK_PYTHON": "/databricks/python3/bin/python3"
}
autoscale {
min_workers = 1
max_workers = 4
}
autotermination_minutes = 120
}
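
# Likewise sketches, not part of the original: convenience outputs for the
# workspace URL (an exported attribute of azurerm_databricks_workspace) and
# the cluster id.
output "workspace_url" {
  value = var.enabled ? azurerm_databricks_workspace.dbricksws[0].workspace_url : null
}

output "cluster_id" {
  value = (var.enabled && var.create_cluster) ? databricks_cluster.dbrickscluster[0].id : null
}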