yt-dlp-dags/airflow/init-airflow.sh

#!/bin/bash
#
# This script should be run on the Airflow host (master or worker)
# to initialize the environment. It creates the .env file and sets
# up permissions.
#
set -e
# --- Configuration ---
# The directory where docker-compose.yaml is located (resolved from this script's own location)
AIRFLOW_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
cd "$AIRFLOW_DIR"
echo "--- Initializing Airflow Environment in $AIRFLOW_DIR ---"
# --- Step 1: Create or update .env file for Docker permissions ---
if [ -f ".env" ]; then
echo ".env file already exists. Ensuring correct permissions are set..."
# Ensure AIRFLOW_UID is set to the current user's ID.
if ! grep -q "^AIRFLOW_UID=" .env; then
echo "AIRFLOW_UID not found in .env. Appending..."
echo "AIRFLOW_UID=$(id -u)" >> .env
fi
# Ensure HOSTNAME is set for worker identity.
if ! grep -q "^HOSTNAME=" .env; then
echo "HOSTNAME not found in .env. Appending..."
echo "HOSTNAME=$(hostname)" >> .env
fi
# Force AIRFLOW_GID to be 0, as required by the Airflow image.
# This removes any existing AIRFLOW_GID line and adds the correct one.
if grep -q "^AIRFLOW_GID=" .env; then
echo "Found existing AIRFLOW_GID. Forcing it to 0..."
# The sed command works on both Linux and macOS, creating a .env.bak file.
sed -i.bak '/^AIRFLOW_GID=/d' .env
fi
echo "AIRFLOW_GID=0" >> .env
echo "Permissions updated in .env file."
else
echo "Creating .env file..."
# Note: On Linux hosts, this is crucial for permissions.
echo "AIRFLOW_UID=$(id -u)" > .env
echo "AIRFLOW_GID=0" >> .env
# Add HOSTNAME for worker-specific queueing and container identity
echo "HOSTNAME=$(hostname)" >> .env
# Add default passwords. These should be changed for production.
echo "POSTGRES_PASSWORD=pgdb_pwd_A7bC2xY9zE1wV5uP" >> .env
echo "REDIS_PASSWORD=redis_pwd_K3fG8hJ1mN5pQ2sT" >> .env
echo "AIRFLOW_ADMIN_PASSWORD=admin_pwd_X9yZ3aB1cE5dF7gH" >> .env
echo ".env file created. For a DL worker, you must also add MASTER_HOST_IP. Please review and update passwords."
fi
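# Example (hypothetical address shown): on a DL worker, append the master's IP to .env,
# presumably so the worker can reach services running on the master host, e.g.:
#   echo "MASTER_HOST_IP=192.0.2.10" >> .env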
echo "Current .env contents:"
cat .env
echo "----------------------------------------"
# --- Step 2: Create directories and set permissions ---
# These directories are mounted into the containers and need to exist on the host.
echo "Ensuring mounted directories exist..."
# Define directories in an array for reuse
DIRS_TO_CREATE=(dags logs plugins config inputfiles downloadfiles addfiles)
mkdir -p "${DIRS_TO_CREATE[@]}"
echo "Directories checked/created."
# Load .env to get AIRFLOW_UID. `set -o allexport` exports every variable assigned while it is active.
if [ -f .env ]; then
    set -o allexport
    source .env
    set +o allexport
else
    echo "ERROR: .env file not found. Cannot determine AIRFLOW_UID for setting permissions."
    exit 1
fi
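# (With allexport active, a line such as AIRFLOW_UID=1000 in .env is read in as an
#  exported environment variable, equivalent to running: export AIRFLOW_UID=1000)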
# Set permissions on the directories. This is crucial for the Airflow user inside the container.
# The airflow-init container on the master does this, but for workers, we must do it here.
echo "Setting ownership for mounted directories to AIRFLOW_UID=${AIRFLOW_UID}..."
if command -v sudo &> /dev/null; then
    sudo chown -R "${AIRFLOW_UID}:0" "${DIRS_TO_CREATE[@]}"
    echo "Permissions set successfully."
else
    echo "WARNING: 'sudo' command not found. Attempting 'chown' as current user."
    # Use a brace group (not a subshell) so 'exit 1' terminates the whole script on failure.
    chown -R "${AIRFLOW_UID}:0" "${DIRS_TO_CREATE[@]}" || {
        echo "ERROR: Failed to set permissions. Please run the following command manually with appropriate privileges:"
        echo "chown -R \"${AIRFLOW_UID}:0\" ${DIRS_TO_CREATE[*]}"
        exit 1
    }
    echo "Permissions set successfully."
fi
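# Optional check: confirm ownership of the mounted directories with numeric IDs, e.g.:
#   ls -ldn dags logs plugins config inputfiles downloadfiles addfiles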
echo "----------------------------------------"
# --- Step 3: Instructions for creating admin user ---
echo "--- Next Steps ---"
echo "1. Ensure your docker-compose.yaml (and -master.yaml, -dl.yaml) files are present."
echo "2. Start Airflow services: docker compose up -d"
echo "3. The admin user will be created automatically with the password from your .env file."
echo " Default username: admin"
echo " Default password can be found in .env as AIRFLOW_ADMIN_PASSWORD"
echo
echo "Initialization complete."