x-airflow-common:
  &airflow-common
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.9.2}
  # build: .
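  # Every AIRFLOW__<SECTION>__<KEY> variable below overrides the matching
  # option in airflow.cfg, so the whole stack is configured through the
  # environment rather than a config file.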
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ''
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
    AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
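    # _PIP_ADDITIONAL_REQUIREMENTS installs extra packages at container start.
    # It is convenient for quick experiments, but for real deployments build a
    # custom image instead (see the commented "build: ." line above).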
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
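  # These host directories are mounted into every Airflow container;
  # AIRFLOW_PROJ_DIR defaults to the directory containing this compose file.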
  volumes:
    - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
    - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
    - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
    - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins
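  # The containers run as your host user id (set AIRFLOW_UID in .env on Linux)
  # with GID 0, which the Airflow image expects; this keeps files created in
  # the mounted folders owned by you on the host.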
  user: "${AIRFLOW_UID:-50000}:0"
  depends_on:
    &airflow-common-depends-on
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy
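
# Compose ignores top-level keys prefixed with "x-", so the block above is
# never started as a service of its own; each Airflow service below pulls it
# in with the YAML merge key `<<: *airflow-common`.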
services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 10s
      retries: 5
      start_period: 5s
    restart: always
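
  # Both backing stores define healthchecks so that the service_healthy
  # conditions in depends_on actually gate the start of the Airflow services.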
  redis:
    # Redis is limited to 7.2-bookworm due to licensing change
    # https://redis.io/blog/redis-adopts-dual-source-available-licensing/
    image: redis:7.2-bookworm
    expose:
      - 6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 30s
      retries: 50
      start_period: 30s
    restart: always
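
  # The web UI is served on http://localhost:8080; the default credentials are
  # created by airflow-init below (airflow/airflow unless overridden in .env).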
  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - "8080:8080"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
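
  # The scheduler serves its /health endpoint on port 8974 because
  # AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK is set to 'true' above.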
  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
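
  # The worker healthcheck pings the Celery worker through both the current
  # (provider package) import path and the legacy one, so the same compose
  # file keeps working across Airflow versions.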
  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      # yamllint disable rule:line-length
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    environment:
      <<: *airflow-common-env
      # Required to handle warm shutdown of the celery workers properly
      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
      DUMB_INIT_SETSID: "0"
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
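
  # The triggerer runs the asyncio event loop that handles deferrable
  # operators while their worker slots are released.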
  airflow-triggerer:
    <<: *airflow-common
    command: triggerer
    healthcheck:
      test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
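
  # One-shot init container: it warns about a missing AIRFLOW_UID and scarce
  # resources, fixes ownership of the mounted directories (hence user "0:0"),
  # runs the database migrations, and creates the admin account. The other
  # services wait for it via service_completed_successfully.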
  airflow-init:
    <<: *airflow-common
    entrypoint: /bin/bash
    # yamllint disable rule:line-length
    command:
      - -c
      - |
        if [[ -z "${AIRFLOW_UID}" ]]; then
          echo
          echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
          echo "If you are on Linux, you SHOULD follow the instructions below to set the"
          echo "AIRFLOW_UID environment variable, otherwise files will be owned by root."
          echo "For other operating systems you can get rid of the warning with a manually created .env file:"
          echo "  See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user"
          echo
        fi
        one_meg=1048576
        mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
        cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
        disk_available=$$(df / | tail -1 | awk '{print $$4}')
        warning_resources="false"
        if (( mem_available < 4000 )) ; then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
          echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
          echo
          warning_resources="true"
        fi
        if (( cpus_available < 2 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough CPUs available for Docker.\e[0m"
          echo "At least 2 CPUs recommended. You have $${cpus_available}"
          echo
          warning_resources="true"
        fi
        if (( disk_available < one_meg * 10 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough disk space available for Docker.\e[0m"
          echo "At least 10 GB recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
          echo
          warning_resources="true"
        fi
        if [[ $${warning_resources} == "true" ]]; then
          echo
          echo -e "\033[1;33mWARNING!!!: You do not have enough resources to run Airflow (see above)!\e[0m"
          echo "Please follow the instructions to increase the amount of resources available:"
          echo "  https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin"
          echo
        fi
        mkdir -p /sources/logs /sources/dags /sources/plugins
        chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
        exec /entrypoint airflow version
    # yamllint enable rule:line-length
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_MIGRATE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
      _PIP_ADDITIONAL_REQUIREMENTS: ''
    user: "0:0"
    volumes:
      - ${AIRFLOW_PROJ_DIR:-.}:/sources
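
  # Started only with the "debug" profile; useful for one-off CLI commands
  # against the same environment, e.g. (with a recent Docker Compose, which
  # activates a profile when its service is named explicitly):
  #   docker compose run --rm airflow-cli airflow info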
  airflow-cli:
    <<: *airflow-common
    profiles:
      - debug
    environment:
      <<: *airflow-common-env
      CONNECTION_CHECK_MAX_COUNT: "0"
    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
    command:
      - bash
      - -c
      - airflow
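
# The named volume keeps the metadata database across `docker compose down`;
# add --volumes to that command to wipe it.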
volumes:
  postgres-db-volume: