# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
#
# WARNING: This configuration is for local development. Do not use it in a production deployment.
#
# Basic settings can be customized with environment variables or an .env file
# placed next to this docker-compose.yaml. The following variables are supported
# (see the example .env file after this list):
| 25 | +# |
| 26 | +# AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow. |
| 27 | +# Default: apache/airflow:master-python3.8 |
| 28 | +# AIRFLOW_UID - User ID in Airflow containers |
| 29 | +# Default: 50000 |
| 30 | +# AIRFLOW_GID - Group ID in Airflow containers |
| 31 | +# Default: 50000 |
| 32 | +# _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account. |
| 33 | +# Default: airflow |
| 34 | +# _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account. |
| 35 | +# Default: airflow |
| 36 | +# |
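# For example, a minimal .env file next to this docker-compose.yaml might look
# like the following (values are illustrative; on Linux, setting AIRFLOW_UID to
# your own `id -u` avoids root-owned files in ./dags, ./logs and ./plugins):
#
#   AIRFLOW_IMAGE_NAME=apache/airflow:2.1.0
#   AIRFLOW_UID=1000
#   AIRFLOW_GID=0
#   _AIRFLOW_WWW_USER_USERNAME=airflow
#   _AIRFLOW_WWW_USER_PASSWORD=airflow
#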
# Feel free to modify this file to suit your needs.
---
version: "3"
x-airflow-common: &airflow-common
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.1.0}
  environment: &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ""
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
    AIRFLOW__API__AUTH_BACKEND: "airflow.api.auth.backend.basic_auth"
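    # basic_auth exposes Airflow's stable REST API with the admin credentials
    # created by airflow-init below. An illustrative call (port 8085 is the
    # host-side mapping of the webserver defined later in this file):
    #
    #   curl -u airflow:airflow http://localhost:8085/api/v1/dags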
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    - ./plugins:/opt/airflow/plugins
    - ./requirements.txt:/opt/airflow/requirements.txt
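    # NOTE: requirements.txt must exist next to this file; each service command
    # below pip-installs it at container startup. It may be empty, or list extra
    # packages, e.g. (illustrative): apache-airflow-providers-http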
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
  depends_on:
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  redis:
    image: redis:latest
    ports:
      - 6379:6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    container_name: "airflow_webserver"
    command: 'bash -c "pip3 install -r requirements.txt && airflow webserver"'
    ports:
      - 8085:8080
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-scheduler:
    <<: *airflow-common
    command: 'bash -c "pip3 install -r requirements.txt && airflow scheduler"'
    healthcheck:
      test:
        [
          "CMD-SHELL",
          'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"',
        ]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-worker:
    <<: *airflow-common
    command: 'bash -c "pip3 install -r requirements.txt && airflow celery worker"'
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-init:
    <<: *airflow-common
    command: version
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: "true"
      _AIRFLOW_WWW_USER_CREATE: "true"
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
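    # _AIRFLOW_DB_UPGRADE and _AIRFLOW_WWW_USER_CREATE are handled by the
    # apache/airflow image entrypoint: it migrates the metadata database and
    # creates the admin account before the one-shot "version" command exits,
    # so this service initializes everything on first start.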

  flower:
    <<: *airflow-common
    command: 'bash -c "pip3 install -r requirements.txt && airflow celery flower"'
    ports:
      - 5555:5555
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

volumes:
  postgres-db-volume:
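
# Typical usage (illustrative; "docker compose" also works on newer Docker):
#
#   docker-compose up airflow-init   # one-shot: migrate the DB, create the admin user
#   docker-compose up -d             # start webserver, scheduler, worker, flower
#
# The web UI is then available at http://localhost:8085 and Flower at
# http://localhost:5555, with the credentials configured above.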