qmi-cloud-tf-modules/databases/dbricks/main.tf

terraform {
  required_version = ">= 0.13"

  required_providers {
    databricks = {
      source  = "databrickslabs/databricks"
      version = ">= 0.3.0"
    }
  }
}
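
# The input variables referenced throughout this module are assumed to be
# declared in a sibling variables.tf; the block below is only a sketch of
# those declarations (types and the default are assumptions), kept commented
# out so it does not clash with the real declarations.
/*
variable "enabled" {
  type    = bool
  default = false
}

variable "location" {
  type = string
}

variable "cluster_name" {
  type = string
}

variable "resource_group_name" {
  type = string
}

variable "user_id" {
  type = string
}

variable "user_email" {
  type = string
}

variable "app_registration_principal_id" {
  type = string
}

variable "storage_account_name" {
  type = string
}

variable "storage_account_accesskey" {
  type = string
}
*/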
resource "azurerm_databricks_workspace" "dbricksws" {
count = var.enabled == true? 1 : 0
location = var.location
name = "${var.cluster_name}-qlikdbricks-ws"
resource_group_name = var.resource_group_name
sku = "standard"
tags = {
Deployment = "QMI PoC"
"Cost Center" = "3100"
QMI_user = var.user_id
}
}
resource "azurerm_role_assignment" "contributor-dbricks" {
count = var.enabled == true? 1 : 0
scope = azurerm_databricks_workspace.dbricksws[0].id
role_definition_name = "Contributor"
principal_id = var.app_registration_principal_id
}
provider "databricks" {
alias = "mws"
azure_workspace_resource_id = var.enabled == true? azurerm_databricks_workspace.dbricksws[0].id : null
azure_client_id = var.enabled == true? "9b4761fd-4823-4f9d-ab3a-a95af38e7c29" : null
azure_client_secret = var.enabled == true? "FWH7Q~ByC.U5zNh0BaIDdK_poyxoy4SxW8hi1" : null
azure_tenant_id = var.enabled == true? "c21eeb5f-f5a6-44e8-a997-124f2f7a497c" : null
}
resource "databricks_user" "me" {
provider = databricks.mws
count = var.enabled == true? 1 : 0
depends_on = [
azurerm_role_assignment.contributor-dbricks
]
user_name = var.user_email
display_name = var.user_id
}
resource "databricks_token" "pat" {
provider = databricks.mws
count = var.enabled == true? 1 : 0
depends_on = [
azurerm_role_assignment.contributor-dbricks,
databricks_user.me
]
comment = "qmi"
lifetime_seconds = 8640000
}
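
# A sketch of outputs that a root configuration could read; the output names
# are assumptions, while "workspace_url" and "token_value" are attributes
# exported by the resources above.
output "databricks_workspace_url" {
  value = var.enabled == true ? azurerm_databricks_workspace.dbricksws[0].workspace_url : null
}

output "databricks_pat" {
  value     = var.enabled == true ? databricks_token.pat[0].token_value : null
  sensitive = true
}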
/*resource "databricks_cluster" "dbrickscluster" {
provider = databricks.mws
count = var.enabled == true? 1 : 0
depends_on = [
azurerm_role_assignment.contributor-dbricks
]
cluster_name = "cluster-${var.cluster_name}"
spark_version = "7.3.x-scala2.12"
node_type_id = "Standard_DS3_v2"
spark_conf = {
"spark.hadoop.hive.server2.enable.doAs": false,
"spark.databricks.delta.preview.enabled": true,
"spark.databricks.repl.allowedLanguages": "sql,python,r",
"spark.databricks.cluster.profile": "serverless",
"fs.azure.account.key.${var.storage_account_name}.dfs.core.windows.net": var.storage_account_accesskey
}
spark_env_vars = {
"PYSPARK_PYTHON": "/databricks/python3/bin/python3"
}
autoscale {
min_workers = 1
max_workers = 4
}
autotermination_minutes = 120
}*/
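
# A sketch of how a root configuration might call this module; the source
# path and every value shown are illustrative assumptions, not taken from the
# repository, and the call belongs in a root module rather than in this file.
/*
module "dbricks" {
  source = "../qmi-cloud-tf-modules/databases/dbricks"

  enabled                       = true
  location                      = "westeurope"
  cluster_name                  = "qmi-demo"
  resource_group_name           = "qmi-demo-rg"
  user_id                       = "qmi-demo-user"
  user_email                    = "demo.user@example.com"
  app_registration_principal_id = "00000000-0000-0000-0000-000000000000"
  storage_account_name          = "qmidemostorage"
  storage_account_accesskey     = "<storage-account-access-key>"
}
*/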