
Commit 83642d3

test

1 parent 9250b55 commit 83642d3

File tree

cluster.tf
job.tf
variables.tf

3 files changed: +34 -20 lines changed

cluster.tf

Lines changed: 3 additions & 1 deletion

@@ -42,7 +42,9 @@ resource "databricks_cluster" "cluster" {
   }

   autotermination_minutes = var.cluster_autotermination_minutes
-  custom_tags = merge(local.shared_tags)
+  #custom_tags = merge(local.shared_tags)
+  custom_tags = var.custom_tags != null ? merge(var.custom_tags, local.shared_tags) : merge(local.shared_tags)
+
   spark_conf = var.spark_conf
 }
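
Note: merge() keeps the value from the last map in which a key appears, so with the new expression any tag key defined in both maps resolves to the value from local.shared_tags, and callers that leave var.custom_tags at its null default keep the previous merge(local.shared_tags) behaviour. A minimal sketch of that precedence, using made-up tag names and values that are not part of this commit:

# Illustration only – hypothetical tag values, not taken from this repository.
locals {
  example_shared_tags = { environment = "dev", owner = "platform" }
  example_custom_tags = { owner = "analytics", cost_center = "42" }

  # merge() gives precedence to later arguments, so the shared "owner" wins:
  # => { cost_center = "42", environment = "dev", owner = "platform" }
  example_merged = merge(local.example_custom_tags, local.example_shared_tags)
}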

job.tf

Lines changed: 25 additions & 19 deletions

@@ -3,12 +3,15 @@
 # 1. NEW CLUSTER WITH NEW NOTEBOOKS
 # ------------------------------------------------
 resource "databricks_job" "new_cluster_new_job_new_notebooks" {
-  for_each = (var.deploy_jobs == true && var.existing_cluster == false && var.local_notebooks != null) ? { for p in var.local_notebooks : "${p.job_name}-${p.local_path}" => p } : {}
-  #for_each = (var.deploy_jobs == true && var.cluster_id == null && var.local_notebooks != null) ? { for p in var.local_notebooks : "${p.job_name}-${p.local_path}" => p } : {}
+  for_each = (var.deploy_jobs == true && var.cluster_id == null && var.local_notebooks != null) ? { for p in var.local_notebooks : "${p.job_name}-${p.local_path}" => p } : {}

   name = "${each.value.job_name} (Terraform managed)"

-  existing_cluster_id = local.cluster_info
+  new_cluster {
+    num_workers   = var.num_workers
+    spark_version = data.databricks_spark_version.latest.id
+    node_type_id  = join("", data.databricks_node_type.cluster_node_type.*.id)
+  }

   notebook_task {
     notebook_path = lookup(each.value, "path", "${data.databricks_current_user.me.home}/${each.value.job_name}")

@@ -40,20 +43,18 @@ resource "databricks_job" "new_cluster_new_job_new_notebooks" {
     }
   }
 }
+
 # ------------------------------------------------
-# 2. NEW CLUSTER WITH EXITING NOTEBOOKS
+# 2. EXISTING CLUSTER WITH NEW NOTEBOOKS
 # ------------------------------------------------
-resource "databricks_job" "new_cluster_new_job_existing_notebooks" {
-  for_each = (var.deploy_jobs == true && var.existing_cluster == false && var.remote_notebooks != null) ? { for p in var.remote_notebooks : "${p.job_name}-${p.path}" => p } : {}
-  #for_each = (var.deploy_jobs == true && var.cluster_id == null && var.remote_notebooks != null) ? { for p in var.remote_notebooks : "${p.job_name}-${p.path}" => p } : {}
-
-
-  name = "${each.value.job_name} (Terraform managed)"
+resource "databricks_job" "existing_cluster_new_job_new_notebooks" {
+  for_each = (var.deploy_jobs == true && var.cluster_id != null && var.local_notebooks != null) ? { for p in var.local_notebooks : "${p.job_name}-${p.local_path}" => p } : {}

+  name = "${each.value.job_name} (Terraform managed)"
   existing_cluster_id = local.cluster_info

   notebook_task {
-    notebook_path = lookup(each.value, "path")
+    notebook_path = lookup(each.value, "path", "${data.databricks_current_user.me.home}/${each.value.job_name}")
     base_parameters = var.task_parameters
   }

@@ -82,18 +83,22 @@ resource "databricks_job" "new_cluster_new_job_existing_notebooks" {
     }
   }
 }
-
 # ------------------------------------------------
-# 3. EXISTING CLUSTER WITH NEW NOTEBOOKS
+# 3. NEW CLUSTER WITH EXITING NOTEBOOKS
 # ------------------------------------------------
-resource "databricks_job" "existing_cluster_new_job_new_notebooks" {
-  for_each = (var.deploy_jobs == true && var.cluster_id != null && var.local_notebooks != null) ? { for p in var.local_notebooks : "${p.job_name}-${p.local_path}" => p } : {}
+resource "databricks_job" "new_cluster_new_job_existing_notebooks" {
+  for_each = (var.deploy_jobs == true && var.cluster_id == null && var.remote_notebooks != null) ? { for p in var.remote_notebooks : "${p.job_name}-${p.path}" => p } : {}

-  name = "${each.value.job_name} (Terraform managed)"
-  existing_cluster_id = local.cluster_info
+  name = "${each.value.job_name} (Terraform managed)"
+
+  new_cluster {
+    num_workers   = var.num_workers
+    spark_version = data.databricks_spark_version.latest.id
+    node_type_id  = join("", data.databricks_node_type.cluster_node_type.*.id)
+  }

   notebook_task {
-    notebook_path = lookup(each.value, "path", "${data.databricks_current_user.me.home}/${each.value.job_name}")
+    notebook_path = lookup(each.value, "path")
     base_parameters = var.task_parameters
   }

@@ -122,6 +127,7 @@ resource "databricks_job" "existing_cluster_new_job_new_notebooks" {
     }
   }
 }
+
 # ------------------------------------------------
 # 4. EXISTING CLUSTER WITH EXITING NOTEBOOKS
 # ------------------------------------------------

@@ -160,4 +166,4 @@ resource "databricks_job" "existing_cluster_new_job_existing_notebooks" {
       pause_status = lookup(schedule.value, "pause_status", null)
     }
   }
-}
+}
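
The four job resources now select themselves purely through their for_each conditions: var.cluster_id decides between a new_cluster block and existing_cluster_id, while var.local_notebooks and var.remote_notebooks decide which notebook list is iterated. A rough sketch of tfvars-style inputs that would activate jobs 1 and 3; every job name and notebook path below is hypothetical:

# Hypothetical inputs – names and paths are illustrative only.
deploy_jobs = true
cluster_id  = null # null selects the "new cluster" job resources

# Each entry becomes one job, keyed "<job_name>-<local_path>".
local_notebooks = [
  {
    job_name   = "daily-etl"
    local_path = "notebooks/daily_etl.py"
  },
]

# Each entry becomes one job, keyed "<job_name>-<path>", and points at a
# notebook that already exists in the workspace.
remote_notebooks = [
  {
    job_name = "weekly-report"
    path     = "/Shared/reports/weekly"
  },
]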

variables.tf

Lines changed: 6 additions & 0 deletions

@@ -402,3 +402,9 @@ variable "cluster_name" {
   description = "Cluster name"
   default     = null
 }
+
+variable "custom_tags" {
+  type        = any
+  description = "Extra custom tags"
+  default     = null
+}
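
A root module could then pass extra tags when calling this module; leaving the variable unset keeps the previous behaviour because it defaults to null. A minimal sketch, where the module name, source path, and tag values are assumptions rather than taken from this repository:

# Hypothetical module call – name, source, and tags are illustrative only.
module "databricks_workspace_objects" {
  source = "../modules/databricks" # assumed module layout

  custom_tags = {
    project     = "churn-model"
    cost_center = "42"
  }
}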
