How to set connections and variables in Airflow using a docker-compose file

I'm creating a dev environment to use Airflow for testing. I'm using the docker-compose.yaml file available on the Airflow website. I would like to know if it is possible to set my connections and variables in this file. I know that I can establish a connection with AIRFLOW_CONN_... using URI parameters. Is it possible to use AIRFLOW_CONN_... and EXPORT VARIABLE inside the docker-compose.yaml file?

My docker-compose.yaml file:

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
#
# WARNING: This configuration is for local development. Do not use it in a production deployment.
#
# This configuration supports basic configuration using environment variables or an .env file
# The following variables are supported:
#
# AIRFLOW_IMAGE_NAME           - Docker image name used to run Airflow.
#                                Default: apache/airflow:2.1.4
# AIRFLOW_UID                  - User ID in Airflow containers
#                                Default: 50000
# AIRFLOW_GID                  - Group ID in Airflow containers
#                                Default: 0
#
# Those configurations are useful mostly in case of standalone testing/running Airflow in test/try-out mode
#
# _AIRFLOW_WWW_USER_USERNAME   - Username for the administrator account (if requested).
#                                Default: airflow
# _AIRFLOW_WWW_USER_PASSWORD   - Password for the administrator account (if requested).
#                                Default: airflow
# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
#                                Default: ''
#
# Feel free to modify this file to suit your needs.
---
version: "3"
x-airflow-common: &airflow-common
  # In order to add custom dependencies or upgrade provider packages you can use your extended image.
  # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
  # and uncomment the "build" line below, Then run `docker-compose build` to build the images.
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.1.4}
  # build: .
  environment: &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ""
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
    AIRFLOW__API__AUTH_BACKEND: "airflow.api.auth.backend.basic_auth"
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:- pymssql pymongo unidecode apache-airflow-providers-mongo apache-airflow-providers-microsoft-mssql apache-airflow-providers-apache-spark}
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    - ./plugins:/opt/airflow/plugins
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-0}"
  depends_on: &airflow-common-depends-on
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  redis:
    image: redis:latest
    expose:
      - 6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - 8080:8080
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test:
        [
          "CMD-SHELL",
          'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"',
        ]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 10s
      retries: 5
    environment:
      <<: *airflow-common-env
      # Required to handle warm shutdown of the celery workers properly
      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
      DUMB_INIT_SETSID: "0"
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-init:
    <<: *airflow-common
    entrypoint: /bin/bash
    # yamllint disable rule:line-length
    command:
      - -c
      - |
        function ver() {
          printf "%04d%04d%04d%04d" $${1//./ }
        }
        airflow_version=$$(gosu airflow airflow version)
        airflow_version_comparable=$$(ver $${airflow_version})
        min_airflow_version=2.1.0
        min_airflow_version_comparable=$$(ver $${min_airflow_version})
        if (( airflow_version_comparable < min_airflow_version_comparable )); then
          echo
          echo -e "\033[1;31mERROR!!!: Too old Airflow version $${airflow_version}!\e[0m"
          echo "The minimum Airflow version supported: $${min_airflow_version}. Only use this or higher!"
          echo
          exit 1
        fi
        if [[ -z "${AIRFLOW_UID}" ]]; then
          echo
          echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
          echo "If you are on Linux, you SHOULD follow the instructions below to set "
          echo "AIRFLOW_UID and AIRFLOW_GID environment variables, otherwise files will be owned by root."
          echo "For other operating systems you can get rid of the warning with manually created .env file:"
          echo "    See: https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#setting-the-right-airflow-user"
          echo
        fi
        one_meg=1048576
        mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
        cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
        disk_available=$$(df / | tail -1 | awk '{print $$4}')
        warning_resources="false"
        if (( mem_available < 4000 )) ; then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
          echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
          echo
          warning_resources="true"
        fi
        if (( cpus_available < 2 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m"
          echo "At least 2 CPUs recommended. You have $${cpus_available}"
          echo
          warning_resources="true"
        fi
        if (( disk_available < one_meg * 10 )); then
          echo
          echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m"
          echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
          echo
          warning_resources="true"
        fi
        if [[ $${warning_resources} == "true" ]]; then
          echo
          echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m"
          echo "Please follow the instructions to increase amount of resources available:"
          echo "   https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#before-you-begin"
          echo
        fi
        mkdir -p /sources/logs /sources/dags /sources/plugins
        chown -R "${AIRFLOW_UID}:${AIRFLOW_GID}" /sources/{logs,dags,plugins}
        exec /entrypoint airflow version
    # yamllint enable rule:line-length
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: "true"
      _AIRFLOW_WWW_USER_CREATE: "true"
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
    user: "0:${AIRFLOW_GID:-0}"
    volumes:
      - .:/sources

  airflow-cli:
    <<: *airflow-common
    profiles:
      - debug
    environment:
      <<: *airflow-common-env
      CONNECTION_CHECK_MAX_COUNT: "0"
    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
    command:
      - bash
      - -c
      - airflow

  flower:
    <<: *airflow-common
    command: celery flower
    ports:
      - 5555:5555
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

volumes:
  postgres-db-volume:

Should AIRFLOW_CONN_... be added to the environment settings, something like this?

environment: &airflow-common-env
    AIRFLOW__CONN__TEST: "uri/here"
    export AIRFLOW_VAR_FOO=JSON
Dressel answered 27/9, 2021 at 19:46 Comment(4)
Please include code! There is not enough information here for someone to help you. What have you tried? Include the errors you experienced! We cannot help without a minimal reproducible example: stackoverflow.com/help/minimal-reproducible-example – Charcoal
I think this was not necessary in this case, since the docker-compose.yaml file is available on the Airflow website. But no problem, I'll edit the original post with the code and what I'm trying to do. If you could remove the downvote I'd be grateful. – Dressel
Please refer to "How do I ask a good question?" for why it is necessary (and likely why someone downvoted): stackoverflow.com/help/how-to-ask – Charcoal
No problem, I've updated the question. My bad! – Dressel

Adding AIRFLOW_CONN_YOURCONNECTION or AIRFLOW_VAR_YOURVARIABLE to the environment section is enough. Note that the environment variables for connections and variables use single underscores after the prefix, not double. You don't have to export environment variables in your docker-compose file; they are set when the containers start.

For more information about environment variables and Docker Compose, see the documentation: https://docs.docker.com/compose/environment-variables/#pass-environment-variables-to-containers
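
For example, here is a minimal sketch of the shared environment block with one connection and one variable added; the connection name MY_POSTGRES, its URI, and the variable FOO are placeholders, not anything defined by the stock file:

environment: &airflow-common-env
  AIRFLOW__CORE__EXECUTOR: CeleryExecutor
  # ... keep the rest of the stock settings unchanged ...
  # hypothetical connection, available in DAGs as conn_id "my_postgres"
  AIRFLOW_CONN_MY_POSTGRES: "postgres://testuser:testpwd@some-host:5432/testdb"
  # hypothetical variable, available in DAGs as Variable "foo"
  AIRFLOW_VAR_FOO: "bar"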

Intellection answered 27/9, 2021 at 20:27 Comment(1)
The documentation states: "Connections created this way will not show up in the Airflow UI or using airflow connections list. You can use airflow connections get {CONN_ID} if you already know the CONN_ID." – Phenol

The environment variables are read from the docker-compose.yaml and applied when the containers start. If you want to set them without modifying the docker-compose.yaml itself, then:

  1. Instead of environment, use env_file:
env_file:
  - ./development.env
  - ./other-environment.env
  2. Include your variables in development.env in KEY=VALUE form (these are the same settings the stock compose file sets via environment; note the double underscores in the Airflow config names). Connections and variables can go in the same file, as shown in the sketch after this list:
AIRFLOW__CORE__EXECUTOR=CeleryExecutor
AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres/airflow
AIRFLOW__CELERY__RESULT_BACKEND=db+postgresql://airflow:airflow@postgres/airflow
AIRFLOW__CELERY__BROKER_URL=redis://:@redis:6379/0
AIRFLOW__CORE__FERNET_KEY=
AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION=true
AIRFLOW__CORE__LOAD_EXAMPLES=false
AIRFLOW__API__AUTH_BACKEND=airflow.api.auth.backend.basic_auth
_PIP_ADDITIONAL_REQUIREMENTS=pymssql pymongo unidecode apache-airflow-providers-mongo apache-airflow-providers-microsoft-mssql apache-airflow-providers-apache-spark
  3. Add your *.env file to your .gitignore so it stays local (not committed to git). If you do need the file in git, skip the .gitignore entry.
  4. You can test by running the following (it prints the configuration with all variables resolved):
docker-compose config
  5. Troubleshoot by starting your composition, checking the containers with docker ps -a, and entering a running container with docker exec -it <container_id> sh to see whether anything was not initialized as expected.
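
As mentioned in step 2, connection and variable entries can be appended to the same development.env file. A minimal sketch, where the connection name, credentials, and variable values are placeholders rather than anything from the stock compose file:

# hypothetical connection, exposed to Airflow as conn_id "my_postgres"
AIRFLOW_CONN_MY_POSTGRES=postgres://testuser:testpwd@some-host:5432/testdb
# hypothetical variables; plain strings and JSON values both work
AIRFLOW_VAR_FOO=bar
AIRFLOW_VAR_SETTINGS={"key_one": "value_one"}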
Matter answered 27/9, 2021 at 20:36 Comment(0)

Variables and connections can also be set using JSON, YAML, and .env files with the Local Filesystem Secrets Backend. You just need to set the environment variable AIRFLOW__SECRETS__BACKEND to airflow.secrets.local_filesystem.LocalFilesystemBackend and AIRFLOW__SECRETS__BACKEND_KWARGS to the paths at which the files will be present inside the container. Sample files are below:

variables.yml:

env_name: local
props: |-
  {
    "key_one": "value_one",
    "key_two": "value_two"
  }

connections.yml:

postgres_test:
  conn_type: postgres
  host: localhost
  schema: postgres
  login: testuser
  password: testpwd
  port: 5432

docker-compose.yml (uses SequentialExecutor; it should work the same with CeleryExecutor):

services:
  airflow:
    image: apache/airflow:slim-2.7.2-python3.11
    environment:
      AIRFLOW__CORE__EXECUTOR: SequentialExecutor
      AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
      AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
      AIRFLOW__CORE__DAGS_FOLDER: /data/dags
      _AIRFLOW_DB_MIGRATE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      _AIRFLOW_WWW_USER_USERNAME: airflow
      _AIRFLOW_WWW_USER_PASSWORD: airflow
      AIRFLOW__WEBSERVER__EXPOSE_CONFIG: 'true'
      AIRFLOW__SECRETS__BACKEND: airflow.secrets.local_filesystem.LocalFilesystemBackend
      AIRFLOW__SECRETS__BACKEND_KWARGS: '{"variables_file_path": "/data/variables.yml", "connections_file_path": "/data/connections.yml"}'
    ports:
      - '8080:8080'
      - '8793:8793'
      - '8794:8794'
    volumes:
      - ./airflow:/data
    command: 'standalone'
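
As a variation, the same backend also accepts .env-format files (mentioned above). A minimal sketch reusing the same placeholder values, with AIRFLOW__SECRETS__BACKEND_KWARGS pointed at those files instead:

connections.env:

postgres_test=postgres://testuser:testpwd@localhost:5432/postgres

variables.env:

env_name=local

AIRFLOW__SECRETS__BACKEND_KWARGS: '{"variables_file_path": "/data/variables.env", "connections_file_path": "/data/connections.env"}'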
Asteriated answered 29/1 at 23:12 Comment(0)
