---
- name: Check if Airflow worker deployment directory exists
  stat:
    path: "{{ airflow_worker_dir }}"
  register: worker_dir_stat

- name: Ensure Airflow worker deployment directory exists
  file:
    path: "{{ airflow_worker_dir }}"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0755'
  become: yes
  when: not worker_dir_stat.stat.exists

- name: Ensure Airflow worker configs directory exists
  file:
    path: "{{ airflow_worker_dir }}/configs"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0755'
  become: yes

- name: Ensure Airflow worker config directory exists
  file:
    path: "{{ airflow_worker_dir }}/config"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0755'
  become: yes

- name: Ensure Airflow operational directories exist with correct permissions
  file:
    path: "{{ airflow_worker_dir }}/{{ item }}"
    state: directory
    owner: "{{ airflow_uid }}"
    group: "{{ deploy_group }}"
    mode: '0775'
  become: yes
  loop:
    - "dags"
    - "logs"
    - "plugins"
    - "downloadfiles"
    - "addfiles"
    - "inputfiles"

- name: "Log: Syncing Airflow core files"
  debug:
    msg: "Syncing DAGs, configs, and Python source code to the worker node."

- name: Sync Airflow worker files
  synchronize:
    src: "../{{ item }}"
    dest: "{{ airflow_worker_dir }}/"
    archive: yes
    recursive: yes
    delete: yes
    rsync_path: "sudo rsync"
    rsync_opts: "{{ rsync_default_opts }}"
  loop:
    - "airflow/Dockerfile"
    - "airflow/.dockerignore"
    - "airflow/dags"
    - "airflow/inputfiles"
    - "setup.py"
    - "yt_ops_services"
    - "thrift_model"
    - "VERSION"
    - "airflow/update-yt-dlp.sh"
    - "proxy_manager_client.py"
    - "utils"

- name: Copy custom Python config files to worker
  copy:
    src: "../airflow/config/{{ item }}"
    dest: "{{ airflow_worker_dir }}/config/{{ item }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0644'
  become: yes
  loop:
    - "custom_task_hooks.py"
    - "airflow_local_settings.py"

- name: Ensure any existing airflow.cfg directory is removed
  file:
    path: "{{ airflow_worker_dir }}/config/airflow.cfg"
    state: absent
  become: yes
  ignore_errors: yes

- name: Copy airflow.cfg to worker
  copy:
    src: "../airflow/airflow.cfg"
    dest: "{{ airflow_worker_dir }}/config/airflow.cfg"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0644'
  become: yes

- name: Check if source directories exist
  stat:
    path: "../{{ item }}"
  register: source_dirs
  loop:
    - "airflow/plugins"
    - "airflow/addfiles"
    - "airflow/bgutil-ytdlp-pot-provider"

- name: Sync optional directories if they exist
  synchronize:
    src: "../{{ item.item }}/"
    dest: "{{ airflow_worker_dir }}/{{ item.item | basename }}/"
    archive: yes
    recursive: yes
    delete: yes
    rsync_path: "sudo rsync"
    rsync_opts: "{{ rsync_default_opts }}"
  loop: "{{ source_dirs.results }}"
  when: item.stat.exists

- name: Sync pangramia thrift files
  synchronize:
    src: "../thrift_model/gen_py/pangramia/"
    dest: "{{ airflow_worker_dir }}/pangramia/"
    archive: yes
    recursive: yes
    delete: yes
    rsync_path: "sudo rsync"
    rsync_opts: "{{ rsync_default_opts }}"

- name: Ensure config directory is group-writable for Airflow initialization
  file:
    path: "{{ airflow_worker_dir }}/config"
    state: directory
    mode: '0775'
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Ensure airflow.cfg is group-writable for Airflow initialization
  file:
    path: "{{ airflow_worker_dir }}/config/airflow.cfg"
    state: file
    mode: '0664'
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Template docker-compose file for worker
  template:
    src: "{{ playbook_dir }}/../airflow/configs/docker-compose-dl.yaml.j2"
    dest: "{{ airflow_worker_dir }}/configs/docker-compose-dl.yaml"
    mode: "{{ file_permissions }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Create .env file for Airflow worker service
  template:
    src: "../../templates/.env.j2"
    dest: "{{ airflow_worker_dir }}/.env"
    mode: "{{ file_permissions }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes
  vars:
    service_role: "worker"
    server_identity: "ytdlp-ops-service-worker-{{ inventory_hostname }}"

- name: Template Minio connection file for worker
  template:
    src: "../airflow/config/minio_default_conn.json.j2"
    dest: "{{ airflow_worker_dir }}/config/minio_default_conn.json"
    mode: "{{ file_permissions }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Create symlink for docker-compose.yaml
  file:
    src: "{{ airflow_worker_dir }}/configs/docker-compose-dl.yaml"
    dest: "{{ airflow_worker_dir }}/docker-compose.yaml"
    state: link
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    follow: no

- name: Ensure correct permissions for build context
  file:
    path: "{{ airflow_worker_dir }}"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    recurse: yes
  become: yes

- name: Set proper ownership and permissions on worker logs directory contents
  shell: |
    chown -R {{ airflow_uid }}:{{ deploy_group }} {{ airflow_worker_dir }}/logs
    find {{ airflow_worker_dir }}/logs -type d -exec chmod g+rws {} +
    find {{ airflow_worker_dir }}/logs -type f -exec chmod g+rw {} +
  become: yes

- name: Verify Dockerfile exists in build directory
  stat:
    path: "{{ airflow_worker_dir }}/Dockerfile"
  register: dockerfile_stat

- name: Fail if Dockerfile is missing
  fail:
    msg: "Dockerfile not found in {{ airflow_worker_dir }}. Cannot build image."
  when: not dockerfile_stat.stat.exists

- name: "Log: Building Airflow Docker image"
  debug:
    msg: "Building the main Airflow Docker image ({{ airflow_image_name }}) locally on the worker node. This may take a few minutes."

- name: Build Airflow worker image
  community.docker.docker_image:
    name: "{{ airflow_image_name }}"
    build:
      path: "{{ airflow_worker_dir }}"
      dockerfile: "Dockerfile"
    source: build
    force_source: true
  when: not fast_deploy | default(false)

- name: "Log: Starting Airflow services"
  debug:
    msg: "Starting Airflow worker services (celery worker) on the node using docker-compose."

- name: Start Airflow worker service
  community.docker.docker_compose_v2:
    project_src: "{{ airflow_worker_dir }}"
    files:
      - "configs/docker-compose-dl.yaml"
    state: present
    remove_orphans: true
    pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}"
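
# Illustrative sketch only: the tasks above assume the variables below are defined
# elsewhere (e.g. in group_vars or role defaults). The values shown are hypothetical
# examples for reference, not the project's actual settings.
#
# airflow_worker_dir: /opt/airflow-worker       # deployment root on the worker node
# ssh_user: deploy                              # remote user that owns synced files
# deploy_group: deploy                          # group shared with the containers
# airflow_uid: 50000                            # UID the Airflow containers run as
# airflow_image_name: airflow-worker:latest     # tag for the locally built image
# file_permissions: '0644'                      # default mode for templated files
# rsync_default_opts: ['--omit-dir-times']      # extra options passed to rsync
# fast_deploy: false                            # when true, skip image rebuild/pull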