# docker-compose.yml (forked from coderxio/sagerx)
version: "3.8"
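
# Shared Airflow configuration. The &airflow-common and &airflow-common-env
# anchors below are merged into airflow-init, airflow-webserver, and
# airflow-scheduler, so build, environment, and volume settings stay in one place.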
x-airflow-common: &airflow-common
  build:
    context: ./airflow
  image: sagerx_airflow:v0.0.1 # versioning allows a rebuild of the docker image where necessary
  environment: &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: LocalExecutor
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
    # AIRFLOW__WEBSERVER__WEB_SERVER_MASTER_TIMEOUT: "300" # uncomment if the gunicorn timeout is reached
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
    AIRFLOW_CONN_POSTGRES_DEFAULT: postgresql://sagerx:sagerx@postgres:5432/sagerx
    AIRFLOW_VAR_UMLS_API: ${UMLS_API}
    AIRFLOW_VAR_SLACK_API: ${SLACK_API:-}
    AIRFLOW_CONN_SLACK: http://:@https%3A%2F%2Fhooks.slack.com%2Fservices%2F${SLACK_API:-}
    _PIP_ADDITIONAL_REQUIREMENTS: ""
    DBT_PROFILES_DIR: /dbt
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-0}"
  volumes:
    - ./airflow/dags:/opt/airflow/dags
    - ./airflow/logs:/opt/airflow/logs
    - ./airflow/plugins:/opt/airflow/plugins
    - ./airflow/data:/opt/airflow/data
    - ./airflow/config/airflow.cfg:/opt/airflow/airflow.cfg
    - ./dbt:/dbt

services:
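  # Postgres serves the sagerx warehouse and the Airflow metadata database.
  # Note (assumption): the separate `airflow` database referenced in
  # AIRFLOW__DATABASE__SQL_ALCHEMY_CONN above is presumably created by an
  # init script in ./postgres, which is mounted into docker-entrypoint-initdb.d.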
  postgres:
    image: postgres:14-alpine
    container_name: postgres
    environment:
      POSTGRES_USER: sagerx
      PGUSER: sagerx
      POSTGRES_PASSWORD: sagerx
      POSTGRES_DB: sagerx
    ports:
      - 5432:5432
    volumes:
      - ./postgres:/docker-entrypoint-initdb.d
      - ./airflow/data:/opt/airflow/data
      - ./airflow/extracts:/opt/airflow/extracts
    build:
      context: .
    shm_size: "4gb"
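
  # pgAdmin UI, reachable at http://localhost:8002 once the stack is up;
  # server definitions are preloaded from ./pgadmin/servers.json.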
  pgadmin:
    image: dpage/pgadmin4:6.15
    container_name: pgadmin
    environment:
      PGADMIN_DEFAULT_EMAIL: [email protected]
      PGADMIN_DEFAULT_PASSWORD: pgadmin
      PGADMIN_CONFIG_SERVER_MODE: "False"
      PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED: "False"
    ports:
      - 8002:80
    volumes:
      - ./pgadmin/servers.json:/pgadmin4/servers.json
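
  # Long-running dbt container: `tail -f /dev/null` keeps it alive so dbt
  # commands can be run inside it (e.g. `docker compose exec dbt dbt run` —
  # a typical usage pattern, not something mandated by this file).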
  dbt:
    build:
      context: ./dbt
    image: dbt
    container_name: dbt
    user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-0}"
    ports:
      - 8081:8081
    volumes:
      - ./dbt:/dbt
    command: tail -f /dev/null
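
  # One-shot initialization job: creates the bind-mounted Airflow directories
  # with the right ownership, then the entrypoint runs DB migrations
  # (_AIRFLOW_DB_UPGRADE) and creates the default web user
  # (_AIRFLOW_WWW_USER_*, airflow/airflow unless overridden).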
  airflow-init:
    <<: *airflow-common
    container_name: airflow-init
    entrypoint: /bin/bash
    command:
      - -c
      - |
        mkdir -p /sources/logs /sources/dags /sources/plugins /sources/data /sources/extracts
        chown -R "${AIRFLOW_UID}:${AIRFLOW_GID-0}" /sources/{logs,dags,plugins,data,extracts}
        chmod -R o+rw /sources/extracts
        exec /entrypoint airflow version
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: "true"
      _AIRFLOW_WWW_USER_CREATE: "true"
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
    user: "0:0"
    volumes:
      - ./airflow:/sources
    depends_on:
      - postgres
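
  # Airflow UI, reachable at http://localhost:8001 (container port 8080).
  # The AWS_* variables appear to be passed through for DAGs that write
  # extracts to S3 (an inference from the variable names; the DAGs themselves
  # live in ./airflow/dags).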
  airflow-webserver:
    <<: *airflow-common
    container_name: airflow-webserver
    command: webserver
    environment:
      <<: *airflow-common-env
      AWS_ACCESS_KEY_ID: ${ACCESS_KEY}
      AWS_SECRET_ACCESS_KEY: ${SECRET_ACCESS_KEY}
      AWS_DEST_BUCKET: ${DEST_BUCKET}
    ports:
      - 8001:8080
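
  # Scheduler that runs DAG tasks in-process under the LocalExecutor
  # configured above; it receives the same AWS credentials as the webserver.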
  airflow-scheduler:
    <<: *airflow-common
    container_name: airflow-scheduler
    command: scheduler
    environment:
      <<: *airflow-common-env
      AWS_ACCESS_KEY_ID: ${ACCESS_KEY}
      AWS_SECRET_ACCESS_KEY: ${SECRET_ACCESS_KEY}
      AWS_DEST_BUCKET: ${DEST_BUCKET}
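
# Typical bring-up sequence (a suggested workflow, not encoded in this file):
#   docker compose up airflow-init   # run migrations and create the default user
#   docker compose up -d             # start postgres, pgadmin, dbt, and airflow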