# Duplicate this file and put your customization here
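## A minimal usage sketch, assuming your copy is named my-values.yaml and the
## chart is installed from the local directory ./airflow (adjust the release
## name and paths to your setup):
##   helm upgrade --install airflow ./airflow -f my-values.yaml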
airflowImage: tekn0ir/airflow-docker
airflowImageTag: "1.10.4"
imagePullPolicy: IfNotPresent
fernetKey: af7CN0q6ag5U3g08IsPsw3K45U7Xa0axgVFhoh-3zB8=
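## The key above is only an example; generate your own, e.g. with the
## cryptography package (a sketch, requires cryptography to be installed):
##   python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"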
## To specify a separately provisioned backend database for Airflow use this:
# sqlAlchemyConn: postgresql+psycopg2://username:password@db-hostname:5432/schema
## Specify ingress settings
## ref: https://kubernetes.io/docs/concepts/services-networking/ingress/
ingress:
  enabled: true
  annotations: {}
  labels: {}
  hosts:
    - airflow.192.168.99.100.xip.io
  tls: []
  #  - secretName: chart-example-tls
  #    hosts:
  #      - airflow.192.168.99.100.xip.io
## Specify service annotations
## This example adds scraping of the Prometheus metrics endpoint that is available in the default docker image
service:
  annotations:
    prometheus.io/scrape: "true"
    prometheus.io/path: "/admin/metrics"
    prometheus.io/port: "8080"

webserver:
  replicas: 2
  annotations: {}

scheduler:
  annotations: {}
## Enable/disable RBAC; the airflow containers need a service account
## with permissions sufficient to spawn workers.
## If disabled, set
## serviceAccountName: <name of service account>
## instead (a commented sketch follows the rbac block below)
rbac:
  enabled: true
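## A commented sketch of the alternative above, with an assumed, pre-created
## service account named "airflow"; the exact placement of serviceAccountName
## may vary with the chart version:
# rbac:
#   enabled: false
# serviceAccountName: airflow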
## Prepares your environment; runs in its own pod once the database is up
## ref: https://airflow.apache.org/cli.html
provisioner:
  enabled: true
  cmds: |-
    airflow initdb;
    sleep 10;
    airflow connections --add --conn_id my_rs_connection \
        --conn_type jdbc \
        --conn_host jdbc:redshift://my-redshift.eu-west-1.redshift.amazonaws.com \
        --conn_login my_rs_user \
        --conn_password my_secret_password \
        --conn_schema my_database \
        --conn_port 5439 \
        --conn_extra '{"extra__jdbc__drv_path": "/usr/local/airflow/drivers/RedshiftJDBC42-no-awssdk-1.2.15.1025.jar", "extra__jdbc__drv_clsname": "com.amazon.redshift.jdbc42.Driver"}';
    airflow create_user \
        --role Admin \
        --username airflow \
        --password airflow \
        --firstname Air \
        --lastname Flow \
        --email [email protected];
# Airflow configuration
# https://airflow.apache.org/howto/set-config.html
# Any airflow setting can be set using the scheme AIRFLOW__{SECTION}__{KEY}
# You can add arbitrary config here and it will be applied to the webserver, scheduler and workers
# The only configs set automatically by the chart are:
# AIRFLOW__KUBERNETES__AIRFLOW_CONFIGMAP
# AIRFLOW__KUBERNETES__NAMESPACE
# AIRFLOW__KUBERNETES__WORKER_SERVICE_ACCOUNT_NAME
# AIRFLOW__CORE__SQL_ALCHEMY_CONN
# Here is an example of what else you might want to configure
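# For example, the airflow.cfg entry
#   [core]
#   parallelism = 32
# would be expressed under config below as
#   AIRFLOW__CORE__PARALLELISM: "32"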
config:
  # Other
  AWS_DEFAULT_REGION: eu-west-1
  # Core
  AIRFLOW__CORE__AIRFLOW_HOME: /usr/local/airflow
  AIRFLOW__CORE__DAGS_FOLDER: /usr/local/airflow/dags
  AIRFLOW__CORE__BASE_LOG_FOLDER: /usr/local/airflow/logs
  AIRFLOW__CORE__LOGGING_LEVEL: INFO
  # Webserver
  AIRFLOW__WEBSERVER__BASE_URL: http://airflow.192.168.99.100.xip.io
  AIRFLOW__WEBSERVER__AUTHENTICATE: "True"
  AIRFLOW__WEBSERVER__EXPOSE_CONFIG: "True"
  AIRFLOW__WEBSERVER__RBAC: "True"
  # Kubernetes
  AIRFLOW__KUBERNETES__WORKER_CONTAINER_REPOSITORY: tekn0ir/airflow-docker
  AIRFLOW__KUBERNETES__WORKER_CONTAINER_TAG: 1.10.4
  AIRFLOW__KUBERNETES__WORKER_CONTAINER_IMAGE_PULL_POLICY: IfNotPresent
  AIRFLOW__KUBERNETES__DELETE_WORKER_PODS: "True"
  AIRFLOW__KUBERNETES__DAGS_IN_IMAGE: "True"
  # Remember that the following will be set automatically by the chart:
  # AIRFLOW__KUBERNETES__AIRFLOW_CONFIGMAP
  # AIRFLOW__KUBERNETES__NAMESPACE
  # AIRFLOW__KUBERNETES__WORKER_SERVICE_ACCOUNT_NAME
  # AIRFLOW__CORE__SQL_ALCHEMY_CONN
########################################################################################################################
## Configuration values for the postgresql dependency.
## ref: https://github.com/kubernetes/charts/blob/master/stable/postgresql/README.md
##
postgresql:
  ## Use the PostgreSQL chart dependency.
  ## Set to false if bringing your own PostgreSQL.
  ##
  enabled: true

  ## If bringing your own PostgreSQL, the full uri to use
  ## e.g. postgres://airflow:[email protected]:5432/airflow?sslmode=disable
  ##
  # uri:

  ## PostgreSQL User to create.
  ##
  postgresqlUsername: airflow

  ## PostgreSQL Password for the new user.
  ## If not set, a random 10-character password will be used.
  ##
  postgresqlPassword: airflow

  ## PostgreSQL Database to create.
  ##
  postgresqlDatabase: airflow

  ## Persistent Volume Storage configuration.
  ## ref: https://kubernetes.io/docs/user-guide/persistent-volumes
  ##
  persistence:
    ## Enable PostgreSQL persistence using Persistent Volume Claims.
    ##
    enabled: false
  resourcePolicy: nil
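  ## A hedged sketch of enabling persistence; the exact keys (size, storageClass)
  ## depend on the version of the postgresql dependency chart in use:
  # persistence:
  #   enabled: true
  #   size: 8Gi
  #   storageClass: standard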