This repository has been archived by the owner on Feb 26, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmain.tf
82 lines (69 loc) · 1.89 KB
/
main.tf
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
# DATA
# Identity of the Databricks principal Terraform authenticates as.
# Referenced below for the notebook workspace path (`.home`) and the
# job failure-notification address (`.user_name`).
data "databricks_current_user" "me" {}
# RESOURCES
## Notebook
# Imports the local DLT notebook source into the current user's workspace
# home. The path is suffixed with the environment type so deployments for
# different environments do not collide.
resource "databricks_notebook" "dlt_pipeline_notebook" {
source = "${path.module}/src/dlt_pipeline_notebook.py" # local file shipped with this module
path = "${data.databricks_current_user.me.home}/examples/terraform-dlt-${var.cluster_environment_type}"
format = "SOURCE" # import as raw source code (not DBC/HTML/JUPYTER)
}
## DLT pipeline
# Delta Live Tables pipeline driven by the notebook above. Two cluster
# specs are declared: the "default" cluster that runs pipeline updates,
# and a "maintenance" cluster. Both carry the same instance profile and
# cost-attribution tags.
resource "databricks_pipeline" "this" {
name = "Terraform DLT Example - ${var.cluster_environment_type}"
storage = var.dlt_pipeline_storage_path # root location for pipeline state/tables
target = var.dlt_databricks_database # database the pipeline publishes tables into
# Key/value configuration exposed to the pipeline notebook
# (e.g. via spark.conf); here the source S3 bucket name.
configuration = {
s3_bucket_name = var.s3_bucket_name
}
# Cluster used for pipeline updates.
cluster {
label = "default"
num_workers = 2
node_type_id = var.cluster_instance_type
driver_node_type_id = var.cluster_instance_type
aws_attributes {
instance_profile_arn = var.cluster_instance_profile_arn # grants S3 access
}
custom_tags = {
CostCenter = var.cluster_cost_center
EnvironmentType = var.cluster_environment_type
Service = var.cluster_service
}
}
# Smaller cluster for maintenance tasks; node type left to the
# provider/platform default.
cluster {
label = "maintenance"
num_workers = 1
aws_attributes {
instance_profile_arn = var.cluster_instance_profile_arn
}
custom_tags = {
CostCenter = var.cluster_cost_center
EnvironmentType = var.cluster_environment_type
Service = var.cluster_service
}
}
# The pipeline's source code: the workspace notebook created above.
library {
notebook {
path = databricks_notebook.dlt_pipeline_notebook.id
}
}
filters {} # no include/exclude filters — run all tables
edition = "ADVANCED"
development = true # development mode: cluster reuse, relaxed retries
continuous = false # triggered (not continuous) — started by the job below
}
# Scheduled job that triggers the (non-continuous) DLT pipeline above.
# The name is suffixed with the environment type for consistency with the
# pipeline and notebook resources; without the suffix, applying this module
# for two environments would create indistinguishable, identically-named jobs.
resource "databricks_job" "this" {
  name = "Terraform DLT Example - job - ${var.cluster_environment_type}"
  task {
    task_key = "a"
    # Pipeline tasks run on the pipeline's own clusters; no job cluster needed.
    pipeline_task {
      pipeline_id = databricks_pipeline.this.id
    }
  }
  # Run at 03:00 Amsterdam time on Mondays, Wednesdays and Fridays.
  schedule {
    quartz_cron_expression = "0 0 3 ? * Mon,Wed,Fri"
    timezone_id            = "Europe/Amsterdam"
  }
  # Notify the applying user by email when a run fails.
  email_notifications {
    on_failure = [data.databricks_current_user.me.user_name]
  }
}