variables.tf
variable "kafka" {
type = object({
environment = object({
id = string
})
cluster = object({
id = string
api_version = string
kind = string
rest_endpoint = string
})
credentials = object({
api_key_id = string
api_key_secret = string
})
})
sensitive = true
description = "Information and credentials about/from the Kafka cluster"
}
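# A minimal sketch of how this variable might be set in a terraform.tfvars file.
# All ids, endpoints, and key values below are hypothetical placeholders, not
# values required by this module.
#
# kafka = {
#   environment = {
#     id = "env-abc123"
#   }
#   cluster = {
#     id            = "lkc-xyz789"
#     api_version   = "cmk/v2"
#     kind          = "Cluster"
#     rest_endpoint = "https://pkc-example.europe-west1.gcp.confluent.cloud:443"
#   }
#   credentials = {
#     api_key_id     = "EXAMPLE_KEY_ID"
#     api_key_secret = "EXAMPLE_KEY_SECRET"
#   }
# }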
variable "gcp" {
type = object({
project = string
region = string
})
description = <<EOT
project: The GCP project of your data product
region: The GCP region where your data product should be located
EOT
}
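# A possible terraform.tfvars entry; the project and region names are
# illustrative placeholders.
#
# gcp = {
#   project = "my-data-product-project"
#   region  = "europe-west1"
# }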
variable "domain" {
type = string
description = "The domain of the data product"
}
variable "name" {
type = string
description = "The name of the data product"
}
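# Purely illustrative values for the two identifiers above:
#
# domain = "checkout"
# name   = "orders"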
variable "input" {
type = list(object({
topic = string
format = string
}))
description = <<EOT
topic: Name of the Kafka topic which should be processed
format: Currently only 'JSON' is supported
EOT
}
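# A sketch of an input list with a single JSON topic; the topic name is a
# hypothetical placeholder.
#
# input = [
#   {
#     topic  = "orders.v1"
#     format = "JSON"
#   }
# ]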
variable "output" {
type = object({
data_access = list(string)
discovery_access = list(string)
tables = list(object({
id = string
schema = string
delete_on_destroy = bool
}))
})
description = <<EOT
dataset_id: The id of the dataset in which your data product will exist
dataset_description: A description of the dataset
data_access: List of users with access to the data product
discovery_access: List of users with access to the discovery endpoint
region: The google cloud region in which your data product should be created
tables.id: The table_id of your data product, which will be used to create a BigQuery table. Must be equal to the corresponding kafka topic name.
tables.schema: The path to the products bigquery schema
tables.delete_on_destroy: 'true' if the BigQuery table should be deleted if the terraform resource gets destroyed. Use with care!
EOT
}
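# A possible shape of the output variable, assuming user principals and a local
# schema file; all identifiers and paths are illustrative only.
#
# output = {
#   data_access      = ["user:jane.doe@example.com"]
#   discovery_access = ["user:john.doe@example.com"]
#   tables = [
#     {
#       id                = "orders.v1"
#       schema            = "schemas/orders_v1.json"
#       delete_on_destroy = false
#     }
#   ]
# }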
variable "output_tables_time_partitioning" {
type = map(object({
type = string
field = string
}))
default = {}
description = <<EOT
You can configure time based partitioning by passing an object which has the tables id as its key.
type: Possible values are: DAY, HOUR, MONTH, YEAR
field: The field which should be used for partitioning. Falls back to consumption time, if null is passed.
EOT
}
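# Illustrative example: partition the hypothetical "orders.v1" table by day on
# an "order_timestamp" field (both names are assumptions, not defaults).
#
# output_tables_time_partitioning = {
#   "orders.v1" = {
#     type  = "DAY"
#     field = "order_timestamp"
#   }
# }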