Compare commits

2 Commits

Author  SHA1        Message                                                           Date
aperez  81f9739ea7  Update to ytops_client for docker direct                          2025-12-26 10:05:00 +03:00
aperez  0103d13c3f  Current changes on ytops docker direct invocation in batch mode   2025-12-25 14:32:53 +03:00
291 changed files with 16,085 additions and 35,157 deletions

@@ -1 +0,0 @@
ytdlp-ops

@@ -1,80 +0,0 @@
# Airflow/YT-DLP Cluster Deployment
This repository contains Ansible playbooks and configuration files for deploying a distributed Airflow cluster with YT-DLP services.
## Prerequisites
1. Install Ansible on your control machine
2. Ensure SSH access to all target nodes
3. Set up your vault password in the `.vault_pass` file
## Initial Setup
Generate the inventory and configuration files from your cluster definition:
```bash
./tools/generate-inventory.py cluster.test.yml
```
**Note:** All Ansible commands should be run from the project root directory.
## Full Deployment
### Deploy entire cluster with proxies (recommended for new setups):
```bash
ansible-playbook ansible/playbook-full-with-proxies.yml
```
### Deploy cluster without proxies:
```bash
ansible-playbook ansible/playbook-full.yml
```
## Targeted Deployments
### Deploy only to master node:
```bash
ansible-playbook ansible/playbook-master.yml --limit="af-test"
```
### Deploy only to worker nodes:
```bash
ansible-playbook ansible/playbook-worker.yml
```
## Deploy Specific Steps
To start at a specific task (useful for debugging or partial deployments):
```bash
ansible-playbook ansible/playbook-master.yml --limit="af-test" --start-at-task="Prepare Caddy asset extraction directory"
```
## Debug Deployments
Run with dry-run and verbose output for debugging:
```bash
ansible-playbook ansible/playbook-full.yml --check --diff -vv
```
## DAGs Only Deployment
To update only DAG files and configurations:
```bash
ansible-playbook ansible/playbook-dags.yml
```
## Vault Management
All sensitive data is encrypted with Ansible Vault. The vault password should be stored in the `.vault_pass` file in the project root.
To edit vault files:
```bash
ansible-vault edit group_vars/all/vault.yml
```
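With the password stored in `.vault_pass`, playbook runs can point at it explicitly (a minimal example, assuming the file sits in the project root and is not already configured via `ansible.cfg`):
```bash
# Equivalent to setting vault_password_file = .vault_pass in ansible.cfg
ansible-playbook ansible/playbook-full.yml --vault-password-file .vault_pass
```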

@@ -1 +0,0 @@
3.11.3-exp

@@ -1,7 +0,0 @@
redis-data
minio-data
postgres-data
logs
downloadfiles
addfiles
inputfiles

@@ -1,158 +0,0 @@
FROM apache/airflow:2.10.3
ENV AIRFLOW_VERSION=2.10.3
WORKDIR /app
# Install system dependencies
USER root
RUN apt-get update && \
apt-get install -y --no-install-recommends \
vim \
mc \
jq \
build-essential \
python3-dev \
wget \
tar \
xz-utils \
iputils-ping \
curl \
traceroute \
tcpdump \
unzip \
git && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /usr/share/man /usr/share/doc /usr/share/doc-base
# Ensure the airflow user and group exist with the correct UID/GID and permissions.
# This is done early to allow `COPY --chown` to work correctly.
RUN if ! getent group airflow > /dev/null 2>&1; then \
groupadd -g 50000 airflow; \
fi && \
if ! id -u airflow > /dev/null 2>&1; then \
useradd -u 50000 -g 50000 -m -s /bin/bash airflow; \
else \
usermod -g 50000 airflow; \
fi && \
chown -R airflow:airflow /app && \
chmod -R g+w /app
# Download and install mc (MinIO client)
RUN wget https://dl.min.io/client/mc/release/linux-amd64/mc -O /usr/local/bin/mc && \
chmod +x /usr/local/bin/mc
# Install FFmpeg
RUN FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-linux64-gpl.tar.xz" && \
wget -qO /tmp/ffmpeg.tar.xz "$FFMPEG_URL" && \
mkdir -p /opt/ffmpeg && \
tar -xf /tmp/ffmpeg.tar.xz -C /opt/ffmpeg --strip-components=1 && \
ln -sf /opt/ffmpeg/bin/ffmpeg /usr/local/bin/ffmpeg && \
ln -sf /opt/ffmpeg/bin/ffprobe /usr/local/bin/ffprobe && \
rm -rf /tmp/ffmpeg.tar.xz
# Install s5cmd
RUN S5CMD_URL="https://github.com/peak/s5cmd/releases/download/v2.3.0/s5cmd_2.3.0_linux_amd64.deb" && \
wget -qO /tmp/s5cmd.deb "$S5CMD_URL" && \
dpkg -i /tmp/s5cmd.deb && \
rm /tmp/s5cmd.deb
# Install yt-dlp from master
# Temporarily rename pip to bypass the root check in the base image's pip wrapper,
# ensuring a system-wide installation.
RUN mv /usr/local/bin/pip /usr/local/bin/pip.orig && \
python3 -m pip install --no-cache-dir -U pip hatchling wheel && \
python3 -m pip install --no-cache-dir --force-reinstall "yt-dlp[default] @ https://github.com/yt-dlp/yt-dlp/archive/master.tar.gz" && \
chmod a+x "$(which yt-dlp)" && \
mv /usr/local/bin/pip.orig /usr/local/bin/pip
# Install Deno
RUN curl -fsSL https://github.com/denoland/deno/releases/latest/download/deno-x86_64-unknown-linux-gnu.zip -o deno.zip && \
unzip deno.zip && mv deno /usr/local/bin/ && rm deno.zip
# Install aria2c
RUN curl -fsSL https://raw.githubusercontent.com/P3TERX/aria2-builder/master/aria2-install.sh | bash
# Install gost (direct download of binary)
RUN wget -q https://github.com/ginuerzh/gost/releases/download/v2.12.0/gost_2.12.0_linux_amd64.tar.gz && \
tar -xzf gost_2.12.0_linux_amd64.tar.gz -C /usr/local/bin/ && \
rm gost_2.12.0_linux_amd64.tar.gz
# Verify installations
RUN ffmpeg -version && deno --version && yt-dlp --version && aria2c --version && gost -V && s5cmd version
# Create version information files
RUN ( \
echo "--- yt-dlp ---" && \
yt-dlp --version && \
echo "" && \
echo "--- deno ---" && \
deno --version && \
echo "" && \
echo "--- ffmpeg ---" && \
ffmpeg -version | head -n 1 \
) > VERSION-airflow-latest.txt && \
cp VERSION-airflow-latest.txt VERSION-airflow-$(date +%Y%m%d-%H%M%S).txt
# Install base Airflow dependencies as root (system-wide)
# [FIX] Explicitly install a version of botocore compatible with Python 3.12
# to fix a RecursionError when handling S3 remote logs.
# Temporarily rename pip to bypass the root check in the base image's pip wrapper.
RUN mv /usr/local/bin/pip /usr/local/bin/pip.orig && \
python3 -m pip install --no-cache-dir \
"apache-airflow==${AIRFLOW_VERSION}" \
apache-airflow-providers-docker \
apache-airflow-providers-http \
apache-airflow-providers-amazon \
"apache-airflow-providers-celery>=3.3.0" \
apache-airflow-providers-redis \
"botocore>=1.34.118" \
psycopg2-binary \
"gunicorn==20.1.0" \
"python-ffmpeg==2.0.12" \
"ffprobe3" \
"python-dotenv" \
"PyYAML" \
"aria2p" \
"s5cmdpy" && \
mv /usr/local/bin/pip.orig /usr/local/bin/pip
# --- Install the custom yt_ops_services package ---
# Copy all the necessary source code for the package.
# The deploy script ensures these files are in the build context.
COPY --chown=airflow:airflow setup.py ./
COPY --chown=airflow:airflow VERSION ./
COPY --chown=airflow:airflow yt_ops_services ./yt_ops_services/
COPY --chown=airflow:airflow thrift_model ./thrift_model/
COPY --chown=airflow:airflow pangramia ./pangramia/
# Copy the ytops-client tool and its executable
COPY --chown=airflow:airflow ytops_client ./ytops_client/
COPY --chown=airflow:airflow bin/ytops-client /app/bin/ytops-client
RUN chmod +x /app/bin/ytops-client
ENV PATH="/app/bin:${PATH}"
# Install the package in editable mode. This runs setup.py and installs all dependencies
# listed in `install_requires`, making the `yt_ops_services` module available everywhere.
# Bypass the pip root check again.
RUN mv /usr/local/bin/pip /usr/local/bin/pip.orig && \
python3 -m pip install --no-cache-dir -e . && \
mv /usr/local/bin/pip.orig /usr/local/bin/pip
# Copy token generator scripts and utils with correct permissions
# COPY --chown=airflow:airflow generate_tokens_direct.mjs ./
# COPY --chown=airflow:airflow utils ./utils/
# COPY --chown=airflow:airflow token_generator ./token_generator/
# Ensure the home directory and all its contents are owned by the airflow user before switching to it.
# This fixes permission issues that can occur if previous RUN commands created files in /home/airflow as root.
# We also make it world-writable to accommodate running the container with a different user ID, which can
# happen in some environments (e.g., OpenShift or with docker-compose user overrides).
RUN mkdir -p /home/airflow/.aws && chown -R airflow:airflow /home/airflow/.aws
RUN chown -R airflow:airflow /home/airflow && chmod -R 777 /home/airflow
# Switch to airflow user for all subsequent operations
USER airflow
# Expose bgutil plugin to worker path
ENV PYTHONPATH=/opt/bgutil-ytdlp-pot-provider/plugin:$PYTHONPATH

@@ -1,14 +0,0 @@
# Build the final Caddy image
FROM caddy:2-alpine
# Copy the pre-compressed static assets from the build context.
# These assets are extracted from the main Airflow image by the Ansible playbook.
COPY caddy_build_assets/appbuilder /usr/share/caddy/static/appbuilder
COPY caddy_build_assets/dist /usr/share/caddy/static/dist
# Copy the Caddyfile configuration. The build context is the project root,
# so the path is relative to that.
COPY configs/Caddyfile /etc/caddy/Caddyfile
# Expose the port Caddy listens on
EXPOSE 8080

@@ -1,125 +0,0 @@
FROM apache/airflow:2.10.3
ENV AIRFLOW_VERSION=2.10.3
WORKDIR /app
# Install system dependencies
USER root
RUN apt-get update && \
apt-get install -y --no-install-recommends \
vim \
mc \
jq \
build-essential \
python3-dev \
wget \
tar \
xz-utils \
iputils-ping \
curl \
traceroute \
tcpdump \
unzip \
git && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /usr/share/man /usr/share/doc /usr/share/doc-base
# Download and install mc (MinIO client)
RUN wget https://dl.min.io/client/mc/release/linux-amd64/mc -O /usr/local/bin/mc && \
chmod +x /usr/local/bin/mc
# Install FFmpeg
RUN FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-linux64-gpl.tar.xz" && \
wget -qO /tmp/ffmpeg.tar.xz "$FFMPEG_URL" && \
mkdir -p /opt/ffmpeg && \
tar -xf /tmp/ffmpeg.tar.xz -C /opt/ffmpeg --strip-components=1 && \
ln -sf /opt/ffmpeg/bin/ffmpeg /usr/local/bin/ffmpeg && \
ln -sf /opt/ffmpeg/bin/ffprobe /usr/local/bin/ffprobe && \
rm -rf /tmp/ffmpeg.tar.xz
# Install yt-dlp from master
RUN python3 -m pip install -U pip hatchling wheel && \
python3 -m pip install --force-reinstall "yt-dlp[default] @ https://github.com/yt-dlp/yt-dlp/archive/master.tar.gz"
# Install Deno
RUN curl -fsSL https://github.com/denoland/deno/releases/latest/download/deno-x86_64-unknown-linux-gnu.zip -o deno.zip && \
unzip deno.zip && mv deno /usr/local/bin/ && rm deno.zip
# Install aria2c
RUN curl -fsSL https://raw.githubusercontent.com/P3TERX/aria2-builder/master/aria2-install.sh | bash
# Install gost (direct download of binary)
RUN wget -q https://github.com/ginuerzh/gost/releases/download/v2.12.0/gost_2.12.0_linux_amd64.tar.gz && \
tar -xzf gost_2.12.0_linux_amd64.tar.gz -C /usr/local/bin/ && \
rm gost_2.12.0_linux_amd64.tar.gz
# Verify installations
RUN ffmpeg -version && deno --version && yt-dlp --version && aria2c --version && gost -V
# Check if airflow group exists, create it if it doesn't, then ensure proper setup
RUN if ! getent group airflow > /dev/null 2>&1; then \
groupadd -g 1001 airflow; \
fi && \
# Check if airflow user exists and is in the airflow group
if id -u airflow > /dev/null 2>&1; then \
usermod -a -G airflow airflow; \
else \
useradd -u 1003 -g 1001 -m -s /bin/bash airflow; \
fi && \
chown -R airflow:airflow /app && \
chmod g+w /app
# Install base Airflow dependencies
# [FIX] Explicitly install a version of botocore compatible with Python 3.12
# to fix a RecursionError when handling S3 remote logs.
RUN pip install --no-cache-dir \
"apache-airflow==${AIRFLOW_VERSION}" \
apache-airflow-providers-docker \
apache-airflow-providers-http \
apache-airflow-providers-amazon \
"botocore>=1.34.118" \
psycopg2-binary \
"gunicorn==20.1.0" \
"python-ffmpeg==2.0.12" \
"ffprobe3" \
"python-dotenv"
# Switch to airflow user for package installation
USER airflow
# --- Install the custom yt_ops_services package ---
# Copy all the necessary source code for the package.
# The deploy script ensures these files are in the build context.
COPY --chown=airflow:airflow setup.py ./
COPY --chown=airflow:airflow VERSION ./
COPY --chown=airflow:airflow yt_ops_services ./yt_ops_services/
COPY --chown=airflow:airflow thrift_model ./thrift_model/
COPY --chown=airflow:airflow pangramia ./pangramia/
# Install the package in editable mode. This runs setup.py and installs all dependencies
# listed in `install_requires`, making the `yt_ops_services` module available everywhere.
RUN pip install --no-cache-dir -e .
# Copy token generator scripts and utils with correct permissions
# COPY --chown=airflow:airflow generate_tokens_direct.mjs ./
# COPY --chown=airflow:airflow utils ./utils/
# COPY --chown=airflow:airflow token_generator ./token_generator/
# Create version information files
RUN ( \
echo "--- yt-dlp ---" && \
yt-dlp --version && \
echo "" && \
echo "--- deno ---" && \
deno --version && \
echo "" && \
echo "--- ffmpeg ---" && \
ffmpeg -version | head -n 1 \
) > VERSION-airflow-latest.txt && \
cp VERSION-airflow-latest.txt VERSION-airflow-$(date +%Y%m%d-%H%M%S).txt
# Expose bgutil plugin to worker path
ENV PYTHONPATH=/opt/bgutil-ytdlp-pot-provider/plugin:$PYTHONPATH

@@ -1,108 +0,0 @@
# Airflow Cluster for YT-DLP Operations
This directory contains the configuration and deployment files for an Apache Airflow cluster designed to manage distributed YouTube video downloading tasks using the `ytdlp-ops` service.
## Overview
The cluster consists of:
- **Master Node:** Runs the Airflow webserver, scheduler, and Flower (Celery monitoring). It also hosts shared services like Redis (broker/backend) and MinIO (artifact storage).
- **Worker Nodes:** Run Celery workers that execute download tasks. Each worker node also runs the `ytdlp-ops-service` (Thrift API server), Envoy proxy (load balancer for Thrift traffic), and Camoufox (remote browser instances for token generation).
## Key Components
### Airflow DAGs
- `ytdlp_ops_dispatcher.py`: The "Sensor" part of a Sensor/Worker pattern. It monitors a Redis queue for URLs to process and triggers a `ytdlp_ops_worker_per_url` DAG run for each URL.
- `ytdlp_ops_worker_per_url.py`: The "Worker" DAG. It processes a single URL passed via DAG run configuration. It implements worker affinity (all tasks for a URL run on the same machine) and handles account management (retrying with different accounts, banning failed accounts based on sliding window checks).
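A minimal sketch of the dispatcher side of the pattern above, not the actual DAG code; the Redis connection details, the `ytdlp_urls` queue key, and the conf layout are assumptions for illustration:
```python
# Hedged sketch of the Sensor/Worker dispatch described above.
import redis
from pendulum import datetime
from airflow.decorators import dag, task
from airflow.operators.trigger_dagrun import TriggerDagRunOperator

@dag(schedule="@continuous", start_date=datetime(2025, 1, 1),
     catchup=False, max_active_runs=1)
def ytdlp_ops_dispatcher_sketch():
    @task
    def pop_urls(batch_size: int = 10) -> list[dict]:
        r = redis.Redis(host="master", port=6379)  # assumed connection
        raw = (r.lpop("ytdlp_urls") for _ in range(batch_size))
        return [{"url": u.decode()} for u in raw if u]

    # One worker DAG run per URL, with the URL carried in the run conf.
    TriggerDagRunOperator.partial(
        task_id="trigger_worker",
        trigger_dag_id="ytdlp_ops_worker_per_url",
    ).expand(conf=pop_urls())

ytdlp_ops_dispatcher_sketch()
```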
### Configuration Files
- `airflow.cfg`: Main Airflow configuration file.
- `config/airflow_local_settings.py`: Contains the `task_instance_mutation_hook` which implements worker affinity by dynamically assigning tasks to queues based on the worker node's hostname. A sketch of the idea follows this list.
- `config/custom_task_hooks.py`: Contains the `task_instance_mutation_hook` (duplicated here, but `airflow_local_settings.py` is the active one).
- `config/redis_default_conn.json.j2`: Jinja2 template for the Airflow Redis connection configuration.
- `config/minio_default_conn.json.j2`: Jinja2 template for the Airflow MinIO connection configuration.
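A sketch of the worker-affinity idea behind the mutation hook. It assumes the dispatcher records the chosen node under a hypothetical `worker_hostname` conf key and that Celery queues follow a `queue-<hostname>` naming convention; the real logic lives in `config/airflow_local_settings.py`:
```python
def task_instance_mutation_hook(task_instance):
    # Route every task of a run to the queue of the node that owns the URL.
    # `worker_hostname` and the queue naming scheme are illustrative only.
    dag_run = getattr(task_instance, "dag_run", None)
    conf = (dag_run.conf or {}) if dag_run else {}
    worker = conf.get("worker_hostname")
    if worker:
        task_instance.queue = f"queue-{worker}"
```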
### Docker & Compose
- `Dockerfile`: Defines the Airflow image, including necessary dependencies like `yt-dlp`, `ffmpeg`, and Python packages.
- `Dockerfile.caddy`: Defines a Caddy image used as a reverse proxy for serving Airflow static assets.
- `configs/docker-compose-master.yaml.j2`: Jinja2 template for the Docker Compose configuration on the Airflow master node.
- `configs/docker-compose-dl.yaml.j2`: Jinja2 template for the Docker Compose configuration on the Airflow worker nodes.
- `configs/docker-compose-ytdlp-ops.yaml.j2`: Jinja2 template for the Docker Compose configuration for the `ytdlp-ops` services (Thrift API, Envoy, Camoufox) on both master (management role) and worker nodes.
- `configs/docker-compose.camoufox.yaml.j2`: Jinja2 template (auto-generated by `generate_envoy_config.py`) for the Camoufox browser service definitions.
- `configs/docker-compose.config-generate.yaml`: Docker Compose file used to run the `generate_envoy_config.py` script in a container to create the final service configuration files.
- `generate_envoy_config.py`: Script that generates `envoy.yaml`, `docker-compose.camoufox.yaml`, and `camoufox_endpoints.json` based on environment variables.
- `configs/envoy.yaml.j2`: Jinja2 template (used by `generate_envoy_config.py`) for the Envoy proxy configuration.
### Camoufox (Remote Browsers)
- `camoufox/`: Directory containing the Camoufox browser setup.
- `Dockerfile`: Defines the Camoufox image.
- `requirements.txt`: Python dependencies for the Camoufox server.
- `camoufox_server.py`: The core server logic for managing remote browser instances.
- `start_camoufox.sh`: Wrapper script to start the Camoufox server with Xvfb and VNC.
- `*.xpi`: Browser extensions used by Camoufox.
## Deployment Process
Deployment is managed by Ansible playbooks located in the `ansible/` directory.
1. **Inventory Generation:** The `tools/generate-inventory.py` script reads `cluster.yml` and generates `ansible/inventory.ini`, `ansible/host_vars/`, and `ansible/group_vars/all/generated_vars.yml`.
2. **Full Deployment:** `ansible-playbook playbook-full.yml` is the main command.
- Installs prerequisites (Docker, pipx, Glances).
- Ensures the `airflow_proxynet` Docker network exists.
- Imports and runs `playbook-master.yml` for the master node.
- Imports and runs `playbook-worker.yml` for worker nodes.
3. **Master Deployment (`playbook-master.yml`):**
- Sets system configurations (timezone, NTP, swap, sysctl).
- Calls `airflow-master` role:
- Syncs files to `/srv/airflow_master/`.
- Templates `configs/docker-compose-master.yaml`.
- Builds the Airflow image.
- Extracts static assets and builds the Caddy image.
- Starts services using `docker compose`.
- Calls `ytdlp-master` role:
- Syncs `generate_envoy_config.py` and templates.
- Creates `.env` file.
- Runs `generate_envoy_config.py` to create service configs.
- Creates a dummy `docker-compose.camoufox.yaml`.
- Starts `ytdlp-ops` management services using `docker compose`.
4. **Worker Deployment (`playbook-worker.yml`):**
- Sets system configurations (timezone, NTP, swap, sysctl, system limits).
- Calls `ytdlp-worker` role:
- Syncs files (including `camoufox/` directory) to `/srv/airflow_dl_worker/`.
- Creates `.env` file.
- Runs `generate_envoy_config.py` to create service configs (including `docker-compose.camoufox.yaml`).
- Builds the Camoufox image.
- Starts `ytdlp-ops` worker services using `docker compose`.
- Calls `airflow-worker` role:
- Syncs files to `/srv/airflow_dl_worker/`.
- Templates `configs/docker-compose-dl.yaml`.
- Builds the Airflow image.
- Starts services using `docker compose`.
- Verifies Camoufox services are running.
## Service Interaction Flow (Worker Node)
1. **Airflow Worker:** Pulls tasks from the Redis queue.
2. **`ytdlp_ops_worker_per_url` DAG:** Executes tasks on the local worker node.
3. **Thrift Client (in DAG task):** Connects to `localhost:9080` (Envoy's public port); a sketch follows this list.
4. **Envoy Proxy:** Listens on `:9080` and load-balances Thrift requests across the internal ports (`9090`, `9091`, `9092`, depending on `YTDLP_WORKERS`) of the local `ytdlp-ops-service`.
5. **`ytdlp-ops-service`:** Receives the Thrift request.
6. **Token Generation:** If needed, `ytdlp-ops-service` connects to a local Camoufox instance via WebSocket (using `camoufox_endpoints.json` for the address) to generate tokens.
7. **Camoufox:** Runs a headless Firefox browser, potentially using a SOCKS5 proxy, to interact with YouTube and generate the required tokens.
8. **Download:** The DAG task uses the token (via `info.json`) and potentially the SOCKS5 proxy to run `yt-dlp` for the actual download.
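For step 3, a hedged sketch of opening that Thrift connection through Envoy; `YtdlpOpsService`, its import path, and the method name all stand in for the stub generated from `thrift_model`:
```python
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
# from pangramia.ytdlp_ops import YtdlpOpsService  # placeholder stub import

transport = TTransport.TBufferedTransport(TSocket.TSocket("localhost", 9080))
protocol = TBinaryProtocol.TBinaryProtocol(transport)
transport.open()
# client = YtdlpOpsService.Client(protocol)
# info_json = client.process_url(url)  # placeholder method name
transport.close()
```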
## Environment Variables
Key environment variables used in `.env` files (generated by Ansible templates) control service behavior:
- `HOSTNAME`: The Ansible inventory hostname.
- `SERVICE_ROLE`: `management` (master) or `worker`.
- `SERVER_IDENTITY`: Unique identifier for the `ytdlp-ops-service` instance.
- `YTDLP_WORKERS`: Number of internal Thrift worker endpoints and Camoufox browser instances.
- `CAMOUFOX_PROXIES`: Comma-separated list of SOCKS5 proxy URLs for Camoufox.
- `MASTER_HOST_IP`: IP address of the Airflow master node (for connecting back to Redis).
- Various passwords and ports.
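An illustrative `.env` fragment using these variables; every value below is invented for the example:
```bash
# Generated by Ansible in production; values here are placeholders.
HOSTNAME=dl-worker-01
SERVICE_ROLE=worker
SERVER_IDENTITY=dl-worker-01-ytdlp-ops
YTDLP_WORKERS=3   # three Thrift ports (9090-9092) and three Camoufox instances
CAMOUFOX_PROXIES=socks5://user:pass@10.0.0.5:1080,socks5://user:pass@10.0.0.6:1080
MASTER_HOST_IP=10.0.0.2
```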
This setup allows for a scalable and robust system for managing YouTube downloads with account rotation and proxy usage.

File diff suppressed because it is too large.

@@ -1,126 +0,0 @@
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Aria2-Pro-Docker
#
# Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
# Using Debian Bullseye as a more stable base than EOL Alpine
FROM debian:bullseye-slim
# Install s6-overlay and build aria2 in a single layer to reduce image size
# renovate: datasource=github-releases depName=just-containers/s6-overlay
ARG S6_OVERLAY_VERSION=v3.1.6.2
RUN BUILD_DEPS=" \
build-essential \
autoconf \
automake \
autotools-dev \
libtool \
pkg-config \
git \
gettext \
autopoint \
gettext-base \
libssl-dev \
libssh2-1-dev \
libc-ares-dev \
libexpat1-dev \
vim \
libexpat1 \
zlib1g-dev \
libsqlite3-dev \
" && \
apt-get update && \
apt-get install -y --no-install-recommends \
jq \
findutils \
ca-certificates \
curl \
xz-utils \
dos2unix \
$BUILD_DEPS && \
curl -sSL https://github.com/just-containers/s6-overlay/releases/download/${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz -o /tmp/s6-overlay-noarch.tar.xz && \
curl -sSL https://github.com/just-containers/s6-overlay/releases/download/${S6_OVERLAY_VERSION}/s6-overlay-x86_64.tar.xz -o /tmp/s6-overlay-x86_64.tar.xz && \
tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz && \
tar -C / -Jxpf /tmp/s6-overlay-x86_64.tar.xz && \
git clone https://github.com/aria2/aria2.git /tmp/aria2 && \
cd /tmp/aria2 && \
git checkout 8985d66e71f980e7d2765753800078f47761f1ba && \
sed -i "s/\"1\", 1, 16, 'x'));/\"1\", 1, 128, 'x'));/" src/OptionHandlerFactory.cc && \
autoreconf -i && \
./configure \
--disable-dependency-tracking \
--enable-static \
--disable-shared \
--with-ca-bundle=/etc/ssl/certs/ca-certificates.crt \
--without-libxml2 \
--with-libexpat \
--without-libgcrypt \
--with-openssl \
--with-libcares \
--with-libsqlite3 \
--with-libssh2 \
--with-zlib && \
make -j$(nproc) && \
make install && \
cd / && \
# NOTE: build dependencies are intentionally not purged, the runtime needs some of them (skipped: apt-get purge -y --auto-remove $BUILD_DEPS)
apt-get clean && \
rm -rf /var/lib/apt/lists/* /tmp/*
COPY rootfs /
RUN find /etc/cont-init.d /etc/services.d -type f -exec dos2unix {} + && \
find /etc/cont-init.d /etc/services.d -type f -exec chmod +x {} +
ENV S6_BEHAVIOUR_IF_STAGE2_FAILS=1 \
RCLONE_CONFIG=/config/rclone.conf \
UPDATE_TRACKERS=true \
CUSTOM_TRACKER_URL= \
LISTEN_PORT=6888 \
RPC_PORT=6800 \
RPC_SECRET= \
PUID= PGID= \
DISK_CACHE= \
IPV6_MODE= \
UMASK_SET= \
SPECIAL_MODE=
EXPOSE \
6800 \
6888 \
6888/udp
VOLUME \
/config \
/downloads
#ENTRYPOINT ["/init"]
CMD ["aria2c", \
"--enable-rpc=true", \
"--rpc-listen-all=true", \
"--rpc-listen-port=6800", \
"--listen-port=6888", \
"--disable-ipv6=true", \
"--max-concurrent-downloads=128", \
"--max-connection-per-server=32", \
"--split=6", \
"--min-split-size=2M", \
"--file-allocation=falloc", \
"--continue=false", \
"--check-integrity=false", \
"--log-level=info", \
"--console-log-level=info", \
"--save-session-interval=5", \
"--dir=/downloads", \
"--disk-cache=64M", \
"--input-file=/config/aria2.session", \
"--save-session=/config/aria2.session"]

@@ -1,17 +0,0 @@
----------------------------------------------------------------
█████╗ ██████╗ ██╗ █████╗ ██████╗ ██████╗ ██████╗ ██████╗
██╔══██╗██╔══██╗██║██╔══██╗╚════██╗ ██╔══██╗██╔══██╗██╔═══██╗
███████║██████╔╝██║███████║ █████╔╝ ██████╔╝██████╔╝██║ ██║
██╔══██║██╔══██╗██║██╔══██║██╔═══╝ ██╔═══╝ ██╔══██╗██║ ██║
██║ ██║██║ ██║██║██║ ██║███████╗ ██║ ██║ ██║╚██████╔╝
╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝
https://github.com/P3TERX/Aria2-Pro-Docker
Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
Version: COMMIT_HASH | Build Time: DATE_TIME
----------------------------------------------------------------

@@ -1,39 +0,0 @@
#!/usr/bin/with-contenv bash
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Aria2-Pro-Docker
#
# Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
. /etc/init-base
mkdir -p ${ARIA2_CONF_DIR} ${SCRIPT_DIR} ${DOWNLOAD_DIR}
PROFILES="
aria2.conf
"
DOWNLOAD_PROFILE
[[ ! -f "${ARIA2_CONF_DIR}/aria2.session" ]] && {
rm -rf "${ARIA2_CONF_DIR}/aria2.session"
touch "${ARIA2_CONF_DIR}/aria2.session"
}
if ! [[ "${UPDATE_TRACKERS}" = "false" || "${UPDATE_TRACKERS}" = "disable" ]]; then
rm -f /etc/services.d/crond/down
PROFILES="tracker.sh"
DOWNLOAD_PROFILE
bash ${SCRIPT_DIR}/tracker.sh ${ARIA2_CONF}
else
touch /etc/services.d/crond/down
fi
exit 0

@@ -1,35 +0,0 @@
#!/usr/bin/with-contenv bash
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Aria2-Pro-Docker
#
# Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
. /etc/init-base
INSTALL_RCLONE() {
if [[ ! -f /usr/local/bin/rclone ]]; then
echo
echo -e "${INFO} Installing RCLONE ..."
[[ -L /usr/bin/unzip ]] && rm -f /usr/bin/unzip
curl -fsSL https://rclone.org/install.sh | bash
fi
}
if [[ "${SPECIAL_MODE}" = "rclone" ]]; then
INSTALL_RCLONE
PROFILES="upload.sh rclone.env"
DOWNLOAD_PROFILE
elif [[ "${SPECIAL_MODE}" = "move" ]]; then
PROFILES="move.sh"
DOWNLOAD_PROFILE
fi
exit 0

@@ -1,61 +0,0 @@
#!/usr/bin/with-contenv bash
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Aria2-Pro-Docker
#
# Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
. /etc/init-base
[[ -e ${ARIA2_CONF_DIR}/delete.sh ]] && {
rm -f ${ARIA2_CONF_DIR}/*.sh
sed -i "s@^\(on-download-stop=\).*@\1${SCRIPT_DIR}/delete.sh@" ${ARIA2_CONF}
sed -i "s@^\(on-download-complete=\).*@\1${SCRIPT_DIR}/clean.sh@" ${ARIA2_CONF}
}
sed -i "s@^\(dir=\).*@\1/downloads@" ${ARIA2_CONF}
sed -i "s@^\(input-file=\).*@\1${ARIA2_CONF_DIR}/aria2.session@" ${ARIA2_CONF}
sed -i "s@^\(save-session=\).*@\1${ARIA2_CONF_DIR}/aria2.session@" ${ARIA2_CONF}
sed -i "s@^\(dht-file-path=\).*@\1${ARIA2_CONF_DIR}/dht.dat@" ${ARIA2_CONF}
sed -i "s@^\(dht-file-path6=\).*@\1${ARIA2_CONF_DIR}/dht6.dat@" ${ARIA2_CONF}
[[ -e ${ARIA2_CONF_DIR}/HelloWorld ]] && exit 0
[[ ${RPC_PORT} ]] &&
sed -i "s@^\(rpc-listen-port=\).*@\1${RPC_PORT}@" ${ARIA2_CONF}
[[ ${LISTEN_PORT} ]] && {
sed -i "s@^\(listen-port=\).*@\1${LISTEN_PORT}@" ${ARIA2_CONF}
sed -i "s@^\(dht-listen-port=\).*@\1${LISTEN_PORT}@" ${ARIA2_CONF}
}
[[ ${RPC_SECRET} ]] &&
sed -i "s@^\(rpc-secret=\).*@\1${RPC_SECRET}@" ${ARIA2_CONF}
[[ ${DISK_CACHE} ]] &&
sed -i "s@^\(disk-cache=\).*@\1${DISK_CACHE}@" ${ARIA2_CONF}
[[ "${IPV6_MODE}" = "true" || "${IPV6_MODE}" = "enable" ]] && {
sed -i "s@^\(disable-ipv6=\).*@\1false@" ${ARIA2_CONF}
sed -i "s@^\(enable-dht6=\).*@\1true@" ${ARIA2_CONF}
}
[[ "${IPV6_MODE}" = "false" || "${IPV6_MODE}" = "disable" ]] && {
sed -i "s@^\(disable-ipv6=\).*@\1true@" ${ARIA2_CONF}
sed -i "s@^\(enable-dht6=\).*@\1false@" ${ARIA2_CONF}
}
[[ "${SPECIAL_MODE}" = "rclone" ]] &&
sed -i "s@^\(on-download-complete=\).*@\1${SCRIPT_DIR}/upload.sh@" ${ARIA2_CONF}
[[ "${SPECIAL_MODE}" = "move" ]] &&
sed -i "s@^\(on-download-complete=\).*@\1${SCRIPT_DIR}/move.sh@" ${ARIA2_CONF}
exit 0

@@ -1,27 +0,0 @@
#!/usr/bin/with-contenv bash
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Aria2-Pro-Docker
#
# Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
. /etc/init-base
if [ -w ${DOWNLOAD_DIR} ]; then echo "Download DIR writeable, not changing owner."; else chown -R p3terx:p3terx ${DOWNLOAD_DIR}; fi
chown -R p3terx:p3terx ${ARIA2_CONF_DIR}
if [[ -z ${PUID} && -z ${PGID} ]] || [[ ${PUID} = 65534 && ${PGID} = 65534 ]]; then
echo -e "${WARN} Ignore permission settings."
chmod -v 777 ${DOWNLOAD_DIR}
chmod -vR 777 ${ARIA2_CONF_DIR}
else
if [ -w ${DOWNLOAD_DIR} ]; then echo "Download DIR writeable, not modifying permission."; else chmod -v u=rwx ${DOWNLOAD_DIR}; fi
chmod -v 600 ${ARIA2_CONF_DIR}/*
chmod -v 755 ${SCRIPT_DIR}
chmod -v 700 ${SCRIPT_DIR}/*
fi

@@ -1,2 +0,0 @@
#!/bin/sh
cat /Aria2-Pro

@@ -1 +0,0 @@
# BT tracker updates disabled.

@@ -1,118 +0,0 @@
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Docker-Aria2-Pro
#
# Copyright (c) 2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
Green_font_prefix="\033[32m"
Red_font_prefix="\033[31m"
Yellow_font_prefix="\033[33m"
Green_background_prefix="\033[42;37m"
Red_background_prefix="\033[41;37m"
Font_color_suffix="\033[0m"
INFO="[${Green_font_prefix}INFO${Font_color_suffix}]"
ERROR="[${Red_font_prefix}ERROR${Font_color_suffix}]"
WARN="[${Yellow_font_prefix}WARN${Font_color_suffix}]"
DOWNLOAD_DIR="/downloads"
ARIA2_CONF_DIR="/config"
ARIA2_CONF="${ARIA2_CONF_DIR}/aria2.conf"
SCRIPT_CONF="${ARIA2_CONF_DIR}/script.conf"
SCRIPT_DIR="${ARIA2_CONF_DIR}/script"
CURL_OPTIONS="-fsSL --connect-timeout 3 --max-time 3"
PROFILE_URL1="https://p3terx.github.io/aria2.conf"
PROFILE_URL2="https://aria2c.now.sh"
PROFILE_URL3="https://cdn.jsdelivr.net/gh/P3TERX/aria2.conf"
FILE_ALLOCATION_SET() {
TMP_FILE="/downloads/P3TERX.COM"
if fallocate -l 5G ${TMP_FILE}; then
FILE_ALLOCATION=falloc
else
FILE_ALLOCATION=none
fi
rm -f ${TMP_FILE}
sed -i "s@^\(file-allocation=\).*@\1${FILE_ALLOCATION}@" "${ARIA2_CONF}"
}
CONVERSION_ARIA2_CONF() {
sed -i "s@^\(rpc-listen-port=\).*@\1${RPC_PORT:-6800}@" "${ARIA2_CONF}"
sed -i "s@^\(listen-port=\).*@\1${LISTEN_PORT:-6888}@" "${ARIA2_CONF}"
sed -i "s@^\(dht-listen-port=\).*@\1${LISTEN_PORT:-6888}@" "${ARIA2_CONF}"
sed -i "s@^\(dir=\).*@\1/downloads@" "${ARIA2_CONF}"
sed -i "s@/root/.aria2@${ARIA2_CONF_DIR}@" "${ARIA2_CONF}"
sed -i "s@^#\(retry-on-.*=\).*@\1true@" "${ARIA2_CONF}"
sed -i "s@^\(max-connection-per-server=\).*@\1128@" "${ARIA2_CONF}"
sed -i "/^on-download-stop=/d" "${ARIA2_CONF}"
sed -i "/^on-download-complete=/d" "${ARIA2_CONF}"
# Custom settings from user
sed -i "s@^\(continue=\).*@\1false@" "${ARIA2_CONF}"
sed -i "s@^\(always-resume=\).*@\1false@" "${ARIA2_CONF}"
sed -i "s@^\(max-concurrent-downloads=\).*@\1500@" "${ARIA2_CONF}"
sed -i "s@^\(enable-dht=\).*@\1false@" "${ARIA2_CONF}"
sed -i "s@^\(enable-dht6=\).*@\1false@" "${ARIA2_CONF}"
sed -i "s@^\(bt-enable-lpd=\).*@\1true@" "${ARIA2_CONF}"
sed -i "s@^\(enable-peer-exchange=\).*@\1false@" "${ARIA2_CONF}"
sed -i "s@^\(max-overall-upload-limit=\).*@\12M@" "${ARIA2_CONF}"
sed -i "s@^\(seed-time=\).*@\11@" "${ARIA2_CONF}"
sed -i "s@^\(user-agent=\).*@\1Mozilla/5.0 (ChromiumStylePlatform) Cobalt/Version@" "${ARIA2_CONF}"
sed -i "s@^\(peer-id-prefix=\).*@\1-DE13F0-@" "${ARIA2_CONF}"
sed -i "s@^\(summary-interval=\).*@\11@" "${ARIA2_CONF}"
sed -i "s@^\(show-console-readout=\).*@\1false@" "${ARIA2_CONF}"
sed -i "s@^\(console-log-level=\).*@\1notice@" "${ARIA2_CONF}"
# Add settings not present in default config
echo "" >>"${ARIA2_CONF}"
echo "# Custom settings added" >>"${ARIA2_CONF}"
echo "disable-metalink=true" >>"${ARIA2_CONF}"
echo "follow-torrent=false" >>"${ARIA2_CONF}"
echo "retry-on-400=false" >>"${ARIA2_CONF}"
echo "retry-on-403=false" >>"${ARIA2_CONF}"
echo "retry-on-406=false" >>"${ARIA2_CONF}"
echo "retry-on-unknown=true" >>"${ARIA2_CONF}"
echo "rpc-listen-all=true" >>"${ARIA2_CONF}"
[[ $TZ != "Asia/Shanghai" ]] && sed -i '11,$s/#.*//;/^$/d' "${ARIA2_CONF}"
FILE_ALLOCATION_SET
}
CONVERSION_SCRIPT_CONF() {
sed -i "s@\(upload-log=\).*@\1${ARIA2_CONF_DIR}/upload.log@" "${SCRIPT_CONF}"
sed -i "s@\(move-log=\).*@\1${ARIA2_CONF_DIR}/move.log@" "${SCRIPT_CONF}"
sed -i "s@^\(dest-dir=\).*@\1${DOWNLOAD_DIR}/completed@" "${SCRIPT_CONF}"
}
CONVERSION_CORE() {
sed -i "s@\(ARIA2_CONF_DIR=\"\).*@\1${ARIA2_CONF_DIR}\"@" "${SCRIPT_DIR}/core"
}
DOWNLOAD_PROFILE() {
for PROFILE in ${PROFILES}; do
[[ ${PROFILE} = *.sh || ${PROFILE} = core ]] && cd "${SCRIPT_DIR}" || cd "${ARIA2_CONF_DIR}"
while [[ ! -f ${PROFILE} ]]; do
rm -rf ${PROFILE}
echo
echo -e "${INFO} Downloading '${PROFILE}' ..."
curl -O ${CURL_OPTIONS} ${PROFILE_URL1}/${PROFILE} ||
curl -O ${CURL_OPTIONS} ${PROFILE_URL2}/${PROFILE} ||
curl -O ${CURL_OPTIONS} ${PROFILE_URL3}/${PROFILE}
[[ -s ${PROFILE} ]] && {
[[ "${PROFILE}" = "aria2.conf" ]] && CONVERSION_ARIA2_CONF
[[ "${PROFILE}" = "script.conf" ]] && CONVERSION_SCRIPT_CONF
[[ "${PROFILE}" = "core" ]] && CONVERSION_CORE
echo
echo -e "${INFO} '${PROFILE}' download completed !"
} || {
echo
echo -e "${ERROR} '${PROFILE}' download error, retry ..."
sleep 3
}
done
done
}

@@ -1,15 +0,0 @@
#!/usr/bin/execlineb -S0
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Aria2-Pro-Docker
#
# Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
s6-svscanctl -t /var/run/s6/services

@@ -1,18 +0,0 @@
#!/usr/bin/with-contenv bash
# _ _ ____ ____
# / \ _ __(_) __ _|___ \ | _ \ _ __ ___
# / _ \ | '__| |/ _` | __) | | |_) | '__/ _ \
# / ___ \| | | | (_| |/ __/ | __/| | | (_) |
# /_/ \_\_| |_|\__,_|_____| |_| |_| \___/
#
# https://github.com/P3TERX/Aria2-Pro-Docker
#
# Copyright (c) 2020-2021 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
umask ${UMASK_SET:-022}
exec s6-setuidgid p3terx aria2c \
--conf-path=/config/aria2.conf

@@ -1 +0,0 @@
Subproject commit c79e8dc48151c8dd7c0349b85ada2ccfcdfeb75b

@@ -1,114 +0,0 @@
# Use ubuntu:22.04 as the base image
FROM ubuntu:22.04
# Set working directory
WORKDIR /app
# Set timezone and non-interactive frontend for apt
ARG DEBIAN_FRONTEND=noninteractive
ARG TZ=Europe/Minsk
ENV TZ=${TZ} LANG=C.UTF-8 LC_ALL=C.UTF-8
# Install necessary system packages for Playwright, GeoIP, Xvfb, and VNC
RUN apt-get update && apt-get install -y --no-install-recommends \
# From user example
vim lsof unzip wget ca-certificates \
# From existing Dockerfile, kept for completeness
libgeoip1 \
dbus-x11 \
xvfb \
xserver-common \
xauth \
x11-xkb-utils \
xfonts-base \
procps \
libgl1-mesa-dri \
x11vnc \
fluxbox \
libnss3 libnspr4 libdbus-1-3 libatk1.0-0 libatk-bridge2.0-0 libcups2 libdrm2 libxkbcommon0 libxcomposite1 libxdamage1 libxfixes3 libxrandr2 libgbm1 libpango-1.0-0 libcairo2 libasound2 \
libgtk-3-0 libx11-xcb1 fonts-liberation tzdata \
xauth util-linux x11-xserver-utils \
curl \
&& \
# Configure timezone
ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \
dpkg-reconfigure -f noninteractive tzdata && \
rm -rf /var/lib/apt/lists/*
# Add build-time argument for VNC password
ARG VNC_PASSWORD="vncpassword"
# Set up VNC password from build argument
RUN mkdir -p /root/.vnc && \
x11vnc -storepasswd "${VNC_PASSWORD}" /root/.vnc/passwd
# Install Miniconda
RUN wget --no-check-certificate https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh && \
bash /tmp/miniconda.sh -b -p /opt/conda && \
rm /tmp/miniconda.sh
ENV PATH="/opt/conda/bin:$PATH"
# Create conda environment and configure it
RUN conda init bash && \
conda config --set always_yes yes && \
conda tos accept --override-channels --channel defaults && \
conda create -n camo python=3.11 -y
# Install Python dependencies in conda environment
COPY requirements.txt .
RUN conda run -n camo pip install --no-cache-dir -r requirements.txt
# Install Playwright browsers (for the pinned playwright==1.49)
RUN conda run -n camo playwright install --with-deps
# Pre-download and cache Camoufox to speed up startup
RUN conda run -n camo camoufox fetch
# Copy the server script into the image
COPY camoufox_server.py .
# Create directory for extensions and copy them
RUN mkdir /app/extensions
COPY google_sign_in_popup_blocker-1.0.2.xpi /app/extensions/
COPY spoof_timezone-0.3.4.xpi /app/extensions/
COPY youtube_ad_auto_skipper-0.6.0.xpi /app/extensions/
# Expose the default port Camoufox might use (adjust if needed)
# This is informational; the actual port mapping is in docker-compose.
EXPOSE 12345
# Expose VNC port
EXPOSE 5900
# Copy the wrapper script and make it executable
COPY start_camoufox.sh /app/
RUN chmod +x /app/start_camoufox.sh && \
sed -i 's/\r$//' /app/start_camoufox.sh
# Configure Xvfb resolution via build arguments
ARG RESOLUTION="1920x1080x24"
ENV XVFB_RES="${RESOLUTION}" \
DISPLAY=":99" \
XAUTHORITY="/tmp/.Xauth"
# Create Xauth setup (mcookie installed in previous apt-get)
RUN touch /tmp/.Xauth && \
chmod 644 /tmp/.Xauth && \
echo "#!/bin/bash" > /init_x11.sh && \
echo "xauth add \$DISPLAY . \$(mcookie)" >> /init_x11.sh && \
echo "xhost +local:" >> /init_x11.sh && \
chmod +x /init_x11.sh
# Proper ENTRYPOINT using shell form
#ENTRYPOINT ["/bin/bash", "-c", "source /init_x11.sh && exec xvfb-run --auto-servernum --server-args \"-screen 0 ${XVFB_RES} ${XVFB_ARGS}\" /app/start_camoufox.sh"]
ENTRYPOINT ["/bin/bash", "-c", "\
rm -f /tmp/.X99-lock && \
Xvfb :99 -screen 0 ${XVFB_RES} -ac & \
export DISPLAY=:99 && \
sleep 1 && \
touch /tmp/.Xauth && \
xauth add :99 . $(mcookie) && \
xhost +local: && \
source /init_x11.sh && \
exec /app/start_camoufox.sh \"$@\"", "camoufox-entrypoint"]

@@ -1,452 +0,0 @@
#!/usr/bin/env python3
import re
import argparse
import atexit
import shutil
import logging
import sys
import os
import psutil
import time
import threading
import signal
import asyncio
import websockets
from collections import deque, defaultdict
from datetime import datetime, timedelta
from camoufox.server import launch_server
# Global variables for resource tracking
active_connections = defaultdict(int) # Track connections per endpoint
max_connections = defaultdict(int)
resource_stats = {}
server_instances = {} # Track multiple server instances
shutdown_requested = False
endpoint_locks = defaultdict(threading.Lock) # Locks for each endpoint
memory_restart_threshold = 1800 # MB - warn when exceeded
restart_in_progress = False
# Enhanced monitoring metrics
connection_pool_metrics = {
'total_acquired': 0,
'total_released': 0,
'total_reused': 0,
'pool_size': 0,
'active_contexts': 0
}
def parse_proxy_url(url):
"""Parse proxy URL in format proto://user:pass@host:port"""
pattern = r'([^:]+)://(?:([^:]+):([^@]+)@)?([^:]+):(\d+)'
match = re.match(pattern, url)
if not match:
raise ValueError('Invalid proxy URL format. Expected proto://[user:pass@]host:port')
proto, username, password, host, port = match.groups()
# Ensure username and password are strings, not None
proxy_config = {
'server': f'{proto}://{host}:{port}',
'username': username or '',
'password': password or ''
}
# Remove empty credentials
if not proxy_config['username']:
del proxy_config['username']
if not proxy_config['password']:
del proxy_config['password']
return proxy_config
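# Illustrative example: parse_proxy_url('socks5://user:pass@10.0.0.1:1080')
# -> {'server': 'socks5://10.0.0.1:1080', 'username': 'user', 'password': 'pass'};
# the username/password keys are dropped when the URL carries no credentials.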
def monitor_resources(server_ports, proxy_url):
"""Monitor system resources and log warnings when thresholds are exceeded"""
global active_connections, max_connections, resource_stats, shutdown_requested, restart_in_progress
global connection_pool_metrics
logging.info(f"Resource monitor started for proxy '{proxy_url}' on ports {server_ports}")
log_counter = 0
while not shutdown_requested:
log_counter += 1
try:
# Get system resource usage
cpu_percent = psutil.cpu_percent(interval=1)
memory = psutil.virtual_memory()
memory_percent = memory.percent
# Get current process info
current_process = psutil.Process()
process_memory = current_process.memory_info()
process_cpu = current_process.cpu_percent()
# Update active connections using psutil
all_connections = psutil.net_connections(kind='inet')
new_active_connections = defaultdict(int)
for conn in all_connections:
if conn.status == psutil.CONN_ESTABLISHED and conn.laddr.port in server_ports:
new_active_connections[conn.laddr.port] += 1
active_connections.clear()
active_connections.update(new_active_connections)
for port, count in active_connections.items():
max_connections[port] = max(max_connections.get(port, 0), count)
connection_pool_metrics['active_contexts'] = sum(active_connections.values())
# Update resource stats
resource_stats = {
'cpu_percent': cpu_percent,
'memory_percent': memory_percent,
'process_memory_mb': process_memory.rss / 1024 / 1024,
'process_cpu_percent': process_cpu,
'total_active_connections': sum(active_connections.values()),
'active_connections_per_endpoint': dict(active_connections),
'max_connections': dict(max_connections),
'connection_pool_metrics': dict(connection_pool_metrics)
}
# Log resource usage periodically
if cpu_percent > 80 or memory_percent > 80:
logging.info(f"RESOURCE STATS - CPU: {cpu_percent}%, Memory: {memory_percent}%, "
f"Process Memory: {resource_stats['process_memory_mb']:.1f}MB, "
f"Total Active Connections: {resource_stats['total_active_connections']}")
# Log connection pool metrics
pool_metrics = resource_stats['connection_pool_metrics']
logging.info(f"POOL METRICS - Acquired: {pool_metrics['total_acquired']}, "
f"Released: {pool_metrics['total_released']}, "
f"Reused: {pool_metrics['total_reused']}, "
f"Pool Size: {pool_metrics['pool_size']}, "
f"Active Contexts: {pool_metrics['active_contexts']}")
# Warning thresholds
if cpu_percent > 85:
logging.warning(f"HIGH CPU USAGE: {cpu_percent}%")
if memory_percent > 85:
logging.warning(f"HIGH MEMORY USAGE: {memory_percent}%")
if resource_stats['total_active_connections'] > 100:
logging.warning(f"HIGH TOTAL CONNECTION COUNT: {resource_stats['total_active_connections']} active connections")
if process_memory.rss > 2 * 1024 * 1024 * 1024: # 2GB
logging.warning(f"HIGH PROCESS MEMORY: {process_memory.rss / 1024 / 1024:.1f}MB")
# Safety net: Warn instead of restart if memory exceeds threshold
if resource_stats['process_memory_mb'] > memory_restart_threshold:
logging.warning(f"MEMORY THRESHOLD EXCEEDED: {resource_stats['process_memory_mb']}MB > {memory_restart_threshold}MB")
logging.warning("Manual intervention required - memory usage critical but restart disabled")
logging.warning("Consider adding new camoufox instances or reducing concurrent workers")
# Add metric for monitoring instead of restart
logging.info(f"MEMORY_ALERT: {resource_stats['process_memory_mb']}MB used on {sum(active_connections.values())} active connections")
# Add a heartbeat log every minute (30s * 2)
if log_counter % 2 == 0:
logging.info(
f"HEARTBEAT - Proxy: {proxy_url} | Ports: {server_ports} | "
f"Memory: {resource_stats.get('process_memory_mb', 0):.1f}MB | "
f"CPU: {resource_stats.get('cpu_percent', 0)}% | "
f"Active Connections: {resource_stats.get('total_active_connections', 0)}"
)
except Exception as e:
logging.error(f"Error in resource monitoring: {e}")
time.sleep(30) # Check every 30 seconds
def graceful_shutdown(signum, frame):
"""Handle graceful shutdown"""
global shutdown_requested, server_instances, restart_in_progress
logging.info("Graceful shutdown requested")
shutdown_requested = True
# Log final resource stats
if resource_stats:
logging.info(f"Final resource stats: {resource_stats}")
# Log final connection pool metrics
logging.info(f"Final connection pool metrics: {connection_pool_metrics}")
# The server instances are running in daemon threads and will be terminated
# when the main process exits. No explicit shutdown call is needed.
logging.info("Shutting down all Camoufox server instances...")
# If restart was requested, exit with special code
if restart_in_progress:
logging.info("Restarting Camoufox server...")
os.execv(sys.executable, [sys.executable] + sys.argv)
sys.exit(0)
def create_server_instance(port, base_config):
"""
Creates and runs a new Camoufox server instance on the specified port.
NOTE: The `launch_server` function is a blocking call that runs an event loop
and does not return. Therefore, any code after it in this function is unreachable.
"""
config = base_config.copy()
config['port'] = port
try:
# This function blocks and runs the server indefinitely.
launch_server(**config)
except Exception as e:
# If an error occurs, log it. The daemon thread will then terminate.
logging.error(f'Error launching server on port {port}: {str(e)}', exc_info=True)
def check_listening_ports(expected_ports, log_results=True):
"""Checks which of the expected ports are actively listening."""
successful_ports = []
failed_ports = []
try:
# Check all system-wide connections, not just for the current process,
# as the server may run in a child process.
listening_ports = {
conn.laddr.port for conn in psutil.net_connections(kind='inet')
if conn.status == psutil.CONN_LISTEN
}
for port in expected_ports:
if port in listening_ports:
successful_ports.append(port)
else:
failed_ports.append(port)
if log_results:
logging.info("--- Verifying Listening Ports ---")
if successful_ports:
logging.info(f"Successfully listening on ports: {sorted(successful_ports)}")
if failed_ports:
logging.error(f"FAILED to listen on ports: {sorted(failed_ports)}")
logging.info("---------------------------------")
except Exception as e:
if log_results:
logging.error(f"Could not verify listening ports: {e}")
return successful_ports, failed_ports
def main():
parser = argparse.ArgumentParser(description='Launch Camoufox server with optional proxy support')
parser.add_argument('--proxy-url', help='Optional proxy URL in format proto://user:pass@host:port (supports http, https, socks5)')
parser.add_argument('--ws-host', default='0.0.0.0', help='WebSocket server host address (e.g., localhost, 0.0.0.0)')
parser.add_argument('--port', type=int, default=12345, help='Base WebSocket server port')
parser.add_argument('--num-instances', type=int, default=4, help='Number of server instances to create')
parser.add_argument('--port-range', type=str, help='Port range in format start-end (e.g., 12345-12349)')
parser.add_argument('--base-proxy-port', type=int, default=1080, help='Base proxy port for mapping to camoufox instances')
parser.add_argument('--ws-path', default='camoufox', help='Base WebSocket server path')
parser.add_argument('--headless', action='store_true', help='Run browser in headless mode')
parser.add_argument('--geoip', nargs='?', const=True, default=False,
help='Enable geo IP protection. Can specify IP address or use True for automatic detection')
parser.add_argument('--locale', help='Locale(s) to use (e.g. "en-US" or "en-US,fr-FR")')
parser.add_argument('--block-images', action='store_true', help='Block image requests to save bandwidth')
parser.add_argument('--block-webrtc', action='store_true', help='Block WebRTC entirely')
parser.add_argument('--humanize', nargs='?', const=True, type=float,
help='Humanize cursor movements. Can specify max duration in seconds')
parser.add_argument('--extensions', type=str,
help='Comma-separated list of extension paths to enable (XPI files or extracted directories). Use quotes if paths contain spaces.')
parser.add_argument('--persistent-context', action='store_true', help='Enable persistent browser context.')
parser.add_argument('--user-data-dir', type=str, help='Directory to store persistent browser data.')
parser.add_argument('--preferences', type=str, help='Comma-separated list of Firefox preferences (e.g. "key1=value1,key2=value2")')
# Add resource monitoring arguments
parser.add_argument('--monitor-resources', action='store_true', help='Enable resource monitoring')
parser.add_argument('--max-connections-per-instance', type=int, default=50, help='Maximum concurrent connections per instance')
parser.add_argument('--connection-timeout', type=int, default=300, help='Connection timeout in seconds')
parser.add_argument('--memory-restart-threshold', type=int, default=1800, help='Memory threshold (MB) to trigger warning')
args = parser.parse_args()
# Set memory restart threshold
global memory_restart_threshold
memory_restart_threshold = args.memory_restart_threshold
# Set up signal handlers for graceful shutdown
signal.signal(signal.SIGTERM, graceful_shutdown)
signal.signal(signal.SIGINT, graceful_shutdown)
proxy_config = None
if args.proxy_url:
try:
proxy_config = parse_proxy_url(args.proxy_url)
print(f"Using proxy configuration: {args.proxy_url}")
except ValueError as e:
print(f'Error parsing proxy URL: {e}')
return
else:
print("No proxy URL provided. Running without proxy.")
# --- Basic Logging Configuration ---
log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
log_handler = logging.StreamHandler(sys.stdout)
log_handler.setFormatter(log_formatter)
root_logger = logging.getLogger()
for handler in root_logger.handlers[:]:
root_logger.removeHandler(handler)
root_logger.addHandler(log_handler)
root_logger.setLevel(logging.DEBUG)
logging.debug("DEBUG logging enabled. Starting Camoufox server setup...")
# --- End Logging Configuration ---
try:
# --- Check DISPLAY environment variable ---
display_var = os.environ.get('DISPLAY')
logging.info(f"Value of DISPLAY environment variable: {display_var}")
# --- End Check ---
# Build base config dictionary
base_config = {
'headless': False, # Force non-headless mode for VNC
'geoip': True, # Always enabled here (the --geoip CLI flag is not consulted)
'host': args.ws_host,
'ws_path': args.ws_path,
'env': {'DISPLAY': os.environ.get('DISPLAY')}
}
# Add proxy to config only if it was successfully parsed
if proxy_config:
base_config['proxy'] = proxy_config
# Add optional parameters
if args.locale:
base_config['locale'] = args.locale
if args.block_images:
base_config['block_images'] = True
if args.block_webrtc:
base_config['block_webrtc'] = True
if args.humanize:
base_config['humanize'] = args.humanize if isinstance(args.humanize, float) else True
# Add persistent context options
if args.persistent_context:
base_config['persistent_context'] = True
if args.user_data_dir:
base_config['user_data_dir'] = args.user_data_dir
# Add Firefox preferences
if args.preferences:
base_config['preferences'] = {}
prefs_list = args.preferences.split(',')
for pref in prefs_list:
if '=' in pref:
key, value = pref.split('=', 1)
if value.lower() in ('true', 'false'):
base_config['preferences'][key.strip()] = value.lower() == 'true'
elif value.isdigit():
base_config['preferences'][key.strip()] = int(value)
else:
base_config['preferences'][key.strip()] = value.strip()
print(f"Applied Firefox preferences: {base_config['preferences']}")
# Exclude default addons including uBlock Origin
base_config['exclude_addons'] = ['ublock_origin', 'default_addons']
print('Excluded default addons including uBlock Origin')
# Add custom extensions if specified
if args.extensions:
from pathlib import Path
valid_extensions = []
extensions_list = [ext.strip() for ext in args.extensions.split(',')]
temp_dirs_to_cleanup = []
def cleanup_temp_dirs():
for temp_dir in temp_dirs_to_cleanup:
try:
shutil.rmtree(temp_dir)
print(f"Cleaned up temporary extension directory: {temp_dir}")
except Exception as e:
print(f"Warning: Failed to clean up temp dir {temp_dir}: {e}")
atexit.register(cleanup_temp_dirs)
for ext_path in extensions_list:
ext_path = Path(ext_path).absolute()
if not ext_path.exists():
print(f"Warning: Extension path does not exist: {ext_path}")
continue
if ext_path.is_file() and ext_path.suffix == '.xpi':
import tempfile
import zipfile
try:
temp_dir = tempfile.mkdtemp(prefix=f"camoufox_ext_{ext_path.stem}_")
temp_dirs_to_cleanup.append(temp_dir)
with zipfile.ZipFile(ext_path, 'r') as zip_ref:
zip_ref.extractall(temp_dir)
valid_extensions.append(temp_dir)
print(f"Successfully loaded extension: {ext_path.name} (extracted to {temp_dir})")
except Exception as e:
print(f"Error loading extension {ext_path}: {str(e)}")
if temp_dir in temp_dirs_to_cleanup:
temp_dirs_to_cleanup.remove(temp_dir)
continue
elif ext_path.is_dir():
if (ext_path / 'manifest.json').exists():
valid_extensions.append(str(ext_path))
print(f"Successfully loaded extension: {ext_path.name}")
else:
print(f"Warning: Directory is not a valid Firefox extension: {ext_path}")
else:
print(f"Warning: Invalid extension path: {ext_path}")
if valid_extensions:
base_config['addons'] = valid_extensions
print(f"Loaded {len(valid_extensions)} extensions")
else:
print("Warning: No valid extensions were loaded")
# Create multiple server instances
ports_to_create = []
if args.port_range:
start_port, end_port = map(int, args.port_range.split('-'))
ports_to_create = list(range(start_port, end_port + 1))
else:
# Create instances starting from base port
ports_to_create = [args.port + i for i in range(args.num_instances)]
# Start resource monitoring thread if enabled, passing it the ports to watch.
if args.monitor_resources:
# Pass the proxy URL to the monitor for more descriptive logging
monitor_thread = threading.Thread(target=monitor_resources, args=(ports_to_create, args.proxy_url), daemon=True)
monitor_thread.start()
print(f"Attempting to launch {len(ports_to_create)} Camoufox server instances on ports: {ports_to_create}")
for port in ports_to_create:
# launch_server is blocking, so we run each instance in its own thread.
thread = threading.Thread(target=create_server_instance, args=(port, base_config), daemon=True)
thread.start()
# Add a small delay between launching instances to avoid race conditions
# in the underlying Playwright/Camoufox library.
time.sleep(1)
# The script's main purpose is now to launch the daemon threads and then wait.
# The actual readiness is determined by the start_camoufox.sh script.
print("Server threads launched. Main process will now wait for shutdown signal.")
# Log startup resource usage
process = psutil.Process()
memory_info = process.memory_info()
logging.info(f"Server started. Initial memory usage: {memory_info.rss / 1024 / 1024:.1f}MB")
# Keep the main thread alive to host the daemon threads and handle shutdown signals
try:
while not shutdown_requested:
time.sleep(1)
except KeyboardInterrupt:
logging.info("Received KeyboardInterrupt, shutting down...")
except Exception as e:
print(f'Error launching server: {str(e)}')
logging.error(f'Error launching server: {str(e)}', exc_info=True)
if 'Browser.setBrowserProxy' in str(e):
print('Note: The browser may not support SOCKS5 proxy authentication')
return
if __name__ == '__main__':
main()

@@ -1,4 +0,0 @@
camoufox[geoip]
playwright==1.49
psutil
websockets

@@ -1,102 +0,0 @@
#!/bin/bash
set -e
# Global PIDs for cleanup
VNC_PID=""
FLUXBOX_PID=""
# Cleanup function to terminate background processes on script exit
cleanup() {
echo "Cleaning up background processes..."
# Kill processes in reverse order of startup. The '|| true' prevents errors if a process is already dead.
if [ -n "$FLUXBOX_PID" ]; then kill -TERM $FLUXBOX_PID 2>/dev/null || true; fi
if [ -n "$VNC_PID" ]; then kill -TERM $VNC_PID 2>/dev/null || true; fi
echo "Cleanup complete."
}
trap cleanup EXIT
# Xvfb is now started by xvfb-run in the Dockerfile ENTRYPOINT.
# The DISPLAY variable will be set automatically by xvfb-run.
# It's safer to source conda.sh directly
source /opt/conda/etc/profile.d/conda.sh
conda activate camo
# Ensure the persistent data directory exists before we try to use it for the lock file.
mkdir -p /app/persistent-data
# --- One-time Initialization ---
# On first launch, multiple instances starting at once can cause a race condition
# during the download/extraction of the Camoufox distribution. To prevent this,
# we run a single dummy instance first, wait for it to become healthy (which
# indicates setup is complete), and then kill it. A lock file ensures this
# only happens on the very first start of the container.
INIT_LOCK_FILE="/app/persistent-data/camoufox.initialized"
if [ ! -f "$INIT_LOCK_FILE" ]; then
echo "First start detected. Performing one-time Camoufox initialization..."
# Start a single dummy instance in the background, logging to a file.
# It will perform the necessary downloads and setup.
INIT_LOG="/tmp/camoufox_init.log"
rm -f "$INIT_LOG" # Ensure log file is clean before starting
python3 -u camoufox_server.py --port 9999 --num-instances 1 > "$INIT_LOG" 2>&1 &
INIT_PID=$!
# Wait for the server to log that it's started, which is a reliable signal
# that all one-time downloads and setup tasks are complete.
echo "Waiting for initialization to complete (max 120s)..."
end_time=$((SECONDS + 120))
INIT_SUCCESS=false
while [ $SECONDS -lt $end_time ]; do
# The camoufox library logs "Websocket endpoint:" when it's ready.
# This is a more reliable signal than a custom log message from our script.
if grep -q "Websocket endpoint: ws://0.0.0.0:9999" "$INIT_LOG"; then
INIT_SUCCESS=true
break
fi
# Also check if the initialization process died unexpectedly
if ! ps -p $INIT_PID > /dev/null; then
echo "Initialization process died unexpectedly."
break
fi
sleep 2
done
if [ "$INIT_SUCCESS" = true ]; then
echo "Initialization successful."
else
echo "Initialization timed out or failed. The main server might fail to start."
echo "--- Initialization Log ---"
cat "$INIT_LOG"
echo "--------------------------"
fi
# Cleanly terminate the dummy server.
echo "Shutting down initialization server..."
kill -TERM $INIT_PID
wait $INIT_PID 2>/dev/null || true # Wait for it to exit, ignore error code
# Create the lock file to prevent this from running again.
touch "$INIT_LOCK_FILE"
echo "Initialization complete. Proceeding with normal startup."
else
echo "Initialization already complete. Skipping."
fi
# --- End Initialization ---
# Start supporting services (VNC, window manager)
echo "Starting VNC server on port 5900..."
# The -noxdamage flag is added to improve compatibility with VNC clients like the one on macOS.
# The '-localhost no' option (likely a typo) has been removed; allowing non-localhost connections is already the default.
x11vnc -forever -usepw -display $DISPLAY -rfbport 5900 -o /var/log/x11vnc.log -shared -noxdamage &
VNC_PID=$!
echo "Starting Fluxbox window manager..."
fluxbox > /var/log/fluxbox.log 2>&1 &
FLUXBOX_PID=$!
# Start main application
echo "Starting Camoufox server with arguments: $@"
exec python3 -u camoufox_server.py "$@"

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@ -1,26 +0,0 @@
import logging
import os
import sys
from copy import deepcopy
from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG
logger = logging.getLogger(__name__)
# Add the config directory to the path to allow for local imports like `custom_task_hooks`.
# This is necessary because this file is executed by the Airflow scheduler in a context
# where the config directory is not automatically on the Python path.
config_dir = os.path.dirname(os.path.abspath(__file__))
if config_dir not in sys.path:
sys.path.insert(0, config_dir)
logger.info(f"Added '{config_dir}' to sys.path for local imports.")
LOGGING_CONFIG = deepcopy(DEFAULT_LOGGING_CONFIG)
# The task_instance_mutation_hook is now self-registering to be robust
# against different loading configurations. See custom_task_hooks.py for details.
try:
import custom_task_hooks
logger.info(f"Successfully imported custom_task_hooks module (Version: {getattr(custom_task_hooks, '__version__', 'unknown')}).")
except ImportError as e:
logger.warning(f"Could not import custom_task_hooks: {e}. Worker pinning will not function.", exc_info=True)

View File

@ -1,3 +0,0 @@
{
"endpoints": {}
}

View File

@ -1,58 +0,0 @@
# Version: 2025-09-22-08
__version__ = "2025-09-22-08"
# This file contains custom hooks for the Airflow environment.
import logging
from airflow import settings
from airflow.configuration import conf
logger = logging.getLogger(__name__)
def task_instance_mutation_hook(ti):
"""
This hook modifies the task instance queue at runtime for worker pinning.
It relies exclusively on parsing the queue from the run_id, which is guaranteed
to be set by the dispatcher DAG. This avoids database race conditions.
"""
logger.debug(f"MUTATION HOOK: Running for dag '{ti.dag_id}', task '{ti.task_id}'.")
# This hook targets all worker DAGs, which follow a naming convention.
if 'worker_per_url' in ti.dag_id:
# If the run_id isn't populated yet, just return. The hook may be called again.
if not ti.run_id:
logger.debug(f"MUTATION HOOK: run_id not yet available for task '{ti.task_id}'. Skipping this invocation.")
return
logger.debug(f"MUTATION HOOK: Matched DAG '{ti.dag_id}'. Attempting to pin task '{ti.task_id}' for run_id '{ti.run_id}'.")
worker_queue = None
# The dispatcher embeds the queue in the run_id like: ..._q_queue-dl-worker-hostname
if ti.run_id and '_q_' in ti.run_id:
try:
parsed_queue = ti.run_id.split('_q_')[-1]
# Check for valid v1 (dl) or v2 (auth/dl) queue prefixes.
if parsed_queue.startswith(('queue-dl-', 'queue-auth-')):
worker_queue = parsed_queue
except Exception as e:
logger.error(f"MUTATION HOOK: CRITICAL: Error parsing queue from run_id '{ti.run_id}': {e}.", exc_info=True)
if worker_queue:
logger.debug(f"MUTATION HOOK: Pinning task '{ti.task_id}' (run_id: {ti.run_id}) to queue '{worker_queue}' from run_id.")
ti.queue = worker_queue
else:
# If the queue is not found, it's a critical failure in the dispatching logic.
# We fall back to the default queue but log it as a high-severity warning.
fallback_queue = 'queue-auth' if 'auth' in ti.dag_id else 'queue-dl'
logger.warning(f"MUTATION HOOK: Could not find worker queue in run_id '{ti.run_id}'. Falling back to '{fallback_queue}'. Pinning will fail.")
ti.queue = fallback_queue
# --- Hook Registration ---
# This registration logic is placed here to work around environments where this file
# might be loaded directly as the local settings file via AIRFLOW__CORE__LOCAL_SETTINGS_PATH.
try:
if not conf.get('core', 'executor').lower().startswith('debug'):
settings.task_instance_mutation_hook = task_instance_mutation_hook
logger.info(f"Successfully self-registered task_instance_mutation_hook (Version: {__version__}) for worker pinning.")
else:
logger.info("Skipping self-registration of task_instance_mutation_hook due to DebugExecutor.")
except Exception as e:
logger.warning(f"Could not self-register custom_task_hooks: {e}. Worker pinning may not function.", exc_info=True)

View File

@ -1,9 +0,0 @@
{
"docker_hub":
{
"conn_type": "docker",
"host": "https://index.docker.io/v1/",
"login": "pangramia",
"password": "dckr_pat_PEDco1yeURKYFY9cSXTCokQNb4A"
}
}

View File

@ -1,52 +0,0 @@
# Jinja2 template for Envoy configuration
admin:
address:
socket_address:
address: 0.0.0.0
port_value: 9901
static_resources:
listeners:
# Listener for ytdlp-ops Thrift traffic
- name: ytdlp_ops_listener
address:
socket_address:
address: 0.0.0.0
port_value: 9080
filter_chains:
- filters:
- name: envoy.filters.network.thrift_proxy
typed_config:
"@type": type.googleapis.com/envoy.extensions.filters.network.thrift_proxy.v3.ThriftProxy
stat_prefix: thrift_ingress
transport: FRAMED
protocol: BINARY
route_config:
name: local_route
routes:
- match:
method_name: ""
route:
cluster: ytdlp_ops_cluster
clusters:
# Cluster for the ytdlp-ops workers
- name: ytdlp_ops_cluster
connect_timeout: 5s
type: STRICT_DNS
lb_policy: ROUND_ROBIN
health_checks:
- timeout: 1s
interval: 5s
unhealthy_threshold: 3
healthy_threshold: 2
tcp_health_check: {}
load_assignment:
cluster_name: ytdlp_ops_cluster
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: ytdlp-ops-server
port_value: 9090

View File

@ -1,16 +0,0 @@
{
"minio_default": {
"conn_type": "aws",
"host": "{{ hostvars[groups['airflow_master'][0]].ansible_host }}",
"login": "admin",
"password": "0153093693-0009",
"port": 80,
"extra": {
"endpoint_url": "http://{{ hostvars[groups['airflow_master'][0]].ansible_host }}:80",
"region_name": "us-east-1",
"aws_access_key_id": "admin",
"aws_secret_access_key": "0153093693-0009",
"verify": false
}
}
}

View File

@ -1,10 +0,0 @@
{
"redis_default":
{
"conn_type": "redis",
"host": "{{ hostvars[groups['airflow_master'][0]].ansible_host }}",
"port": 52909,
"password": "{{ vault_redis_password }}",
"extra": "{\"db\": 0}"
}
}

View File

@ -1,35 +0,0 @@
# This file is managed by Ansible. Do not edit manually.
# --- Common Settings ---
HOSTNAME="af-green"
COMPOSE_PROJECT_NAME="ytdlp-ops-management"
TZ="Europe/Moscow"
service_role="management"
# --- Docker Image Settings ---
YTDLP_OPS_IMAGE="pangramia/ytdlp-ops-server:latest"
AIRFLOW_IMAGE_NAME="pangramia/ytdlp-ops-airflow:latest"
# --- Network Settings ---
ENVOY_PORT=9080
ENVOY_ADMIN_PORT=9901
YTDLP_BASE_PORT=9090
YTDLP_WORKERS=3
MANAGEMENT_SERVICE_PORT=9091
REDIS_PORT=52909
POSTGRES_PORT=5432
# --- Security Settings ---
REDIS_PASSWORD="rOhTAIlTFFylXsjhqwxnYxDChFc"
POSTGRES_PASSWORD="pgdb_pwd_A7bC2xY9zE1wV5uP"
AIRFLOW_ADMIN_PASSWORD="2r234sdfrt3q454arq45q355"
FLOWER_PASSWORD="dO4eXm7UkF81OdMvT8E2tIKFtPYPCzyzwlcZ4RyOmCsmG4qzrNFqM5sNTOT9"
# --- User and Group IDs ---
AIRFLOW_UID=1003
AIRFLOW_GID=1001
# --- Master-specific settings ---
MASTER_HOST_IP=89.253.221.173
# Camoufox is not used on master, but the config generator expects the variable.
CAMOUFOX_PROXIES=

View File

@ -1,4 +0,0 @@
# This file should be generated from ansible/templates/.env.ytdlp.j2
# Do not edit manually - your changes will be overwritten.
#
# To generate this file, run the Ansible playbook that processes the templates.

View File

@ -1,33 +0,0 @@
:8080 {
# Serve pre-compressed static assets and enable on-the-fly compression for other assets.
encode gzip
# Define routes for static assets.
# Caddy will automatically look for pre-gzipped files (.gz) if available.
route /static/appbuilder* {
uri strip_prefix /static/appbuilder
root * /usr/share/caddy/static/appbuilder
file_server {
precompressed gzip
}
}
route /static/dist* {
uri strip_prefix /static/dist
root * /usr/share/caddy/static/dist
file_server {
precompressed gzip
}
}
# Reverse proxy all other requests to the Airflow webserver.
route {
reverse_proxy airflow-webserver:8080 {
# Set headers to ensure correct proxy behavior
header_up Host {http.request.host}
header_up X-Real-IP {http.request.remote.ip}
header_up X-Forwarded-For {http.request.remote.ip}
header_up X-Forwarded-Proto {http.request.scheme}
}
}
}

View File

@ -1,297 +0,0 @@
# Airflow remote DL worker configuration.
# This file should be used on a remote machine to run a download worker.
# It requires a master Airflow instance running with services exposed.
#
# Before running, create a .env file in this directory with:
# MASTER_HOST_IP=... a.b.c.d ... # IP address of the machine running docker-compose-master.yaml
# POSTGRES_PASSWORD=... # The password for the PostgreSQL database from the master compose file
# REDIS_PASSWORD=... # The password for Redis from the master compose file
# AIRFLOW_UID=... # User ID for file permissions, should match master
---
x-airflow-common:
&airflow-common
# This should point to the same image used by the master.
# If you built a custom image for master, you need to push it to a registry
# and reference it here.
image: ${AIRFLOW_IMAGE_NAME:-pangramia/ytdlp-ops-airflow:latest}
# Add extra hosts here to allow workers to resolve other hosts by name.
# This section is auto-generated by Ansible from the inventory.
extra_hosts:
{% for host in groups['all'] %}
- "{{ hostvars[host]['inventory_hostname'] }}:{{ hostvars[host]['ansible_host'] | default(hostvars[host]['inventory_hostname']) }}"
{% endfor %}
env_file:
# The .env file is located in the project root (e.g., /srv/airflow_dl_worker),
# so we provide an absolute path to it.
- "{{ airflow_worker_dir }}/.env"
environment:
&airflow-common-env
AIRFLOW__CORE__PARALLELISM: 128
AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 64
AIRFLOW__SCHEDULER__PARSING_PROCESSES: 8
AIRFLOW__WEBSERVER__WORKERS: 5
AIRFLOW__WEBSERVER__WORKER_CLASS: "gevent"
AIRFLOW__LOGGING__SECRET_MASK_EXCEPTION_ARGS: False
      # Recycle Gunicorn workers frequently and keep heartbeat files in /dev/shm to avoid a slow webserver under memory pressure.
GUNICORN_CMD_ARGS: --max-requests 20 --max-requests-jitter 3 --worker-tmp-dir /dev/shm
# Airflow Core
AIRFLOW__CORE__EXECUTOR: CeleryExecutor
AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
AIRFLOW__CORE__FERNET_KEY: '' # Should be same as master, but worker does not need it.
# Backend connections - These should point to the master node
# Set MASTER_HOST_IP, POSTGRES_PASSWORD, and REDIS_PASSWORD in your .env file
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow
AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow
AIRFLOW__CELERY__BROKER_URL: redis://:${REDIS_PASSWORD}@${MASTER_HOST_IP}:{{ redis_port }}/0
# Remote Logging - connection is configured directly via environment variables
#_PIP_ADDITIONAL_REQUIREMENTS: ${{ '{' }}_PIP_ADDITIONAL_REQUIREMENTS:- apache-airflow-providers-docker apache-airflow-providers-http thrift>=0.16.0,<=0.20.0 backoff>=2.2.1 python-dotenv==1.0.1 psutil>=5.9.0 apache-airflow-providers-amazon{{ '}' }}
AIRFLOW__LOGGING__REMOTE_LOGGING: "True"
AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER: "s3://videos/airflow-logs"
AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID: s3_delivery_connection
AIRFLOW__LOGGING__ENCRYPT_S3_LOGS: "False"
#AIRFLOW__LOGGING__LOG_ID_TEMPLATE: "{dag_id}-{task_id}-{run_id}-{try_number}"
AIRFLOW__WEBSERVER__SECRET_KEY: 'qmALu5JCAW0518WGAqkVZQ=='
AIRFLOW__CORE__INTERNAL_API_SECRET_KEY: 'qmALu5JCAW0518WGAqkVZQ=='
AIRFLOW__CORE__LOCAL_SETTINGS_PATH: "/opt/airflow/config/custom_task_hooks.py"
volumes:
# Mount dags to get any utility scripts, but the worker will pull the DAG from the DB
- ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
# Mount logs locally in case remote logging fails
- ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
# Mount config for local settings and other configurations
- ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
- ${AIRFLOW_PROJ_DIR:-.}/config/airflow.cfg:/opt/airflow/airflow.cfg
# Mount download directories
- ${AIRFLOW_PROJ_DIR:-.}/downloadfiles:/opt/airflow/downloadfiles
- ${AIRFLOW_PROJ_DIR:-.}/addfiles:/opt/airflow/addfiles
- ${AIRFLOW_PROJ_DIR:-.}/inputfiles:/opt/airflow/inputfiles
# Mount the generated pangramia package to ensure workers have the latest version
- ${AIRFLOW_PROJ_DIR:-.}/pangramia:/app/pangramia
# Use AIRFLOW_UID from .env file to fix permission issues. GID is set to 0 for compatibility with the Airflow image.
user: "${{ '{' }}AIRFLOW_UID:-50000{{ '}' }}:0"
services:
airflow-worker-dl:
<<: *airflow-common
container_name: airflow-worker-dl-1
hostname: ${HOSTNAME:-dl001}
# The DL worker listens on the generic dl queue AND its own dedicated queue.
command: airflow celery worker -q queue-dl,queue-dl-${HOSTNAME:-dl001}
deploy:
resources:
limits:
memory: ${AIRFLOW_WORKER_DOWNLOAD_MEM_LIMIT:-8G}
reservations:
memory: ${AIRFLOW_WORKER_DOWNLOAD_MEM_RESERV:-2G}
healthcheck:
test:
- "CMD-SHELL"
- 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "worker-dl@$$(hostname)"'
interval: 30s
timeout: 30s
retries: 5
start_period: 30s
environment:
<<: *airflow-common-env
HOSTNAME: ${HOSTNAME:-dl001}
DUMB_INIT_SETSID: "0"
AIRFLOW__CELERY__WORKER_QUEUES: "queue-dl,queue-dl-${HOSTNAME:-dl001}"
AIRFLOW__CELERY__WORKER_TAGS: "dl"
AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1"
AIRFLOW__CELERY__WORKER_AUTOSCALE: "16,8"
AIRFLOW__CELERY__POOL: "prefork"
AIRFLOW__CELERY__TASK_ACKS_LATE: "False"
AIRFLOW__CELERY__OPERATION_TIMEOUT: "2.0"
AIRFLOW__CELERY__WORKER_NAME: "worker-dl@%h"
AIRFLOW__CELERY__WORKER_MAX_TASKS_PER_CHILD: "100"
AIRFLOW__CELERY__WORKER_MAX_MEMORY_PER_CHILD: "524288" # 512MB
ports:
- "8793:8793"
networks:
- default
- proxynet
restart: always
airflow-worker-s3:
<<: *airflow-common
container_name: airflow-worker-s3-1
hostname: ${HOSTNAME:-s3-001}
# The S3 worker listens on the generic s3 queue AND its own dedicated queue.
command: airflow celery worker -q queue-s3,queue-s3-${HOSTNAME:-s3-001}
deploy:
resources:
limits:
memory: ${AIRFLOW_WORKER_S3_MEM_LIMIT:-1G}
reservations:
memory: ${AIRFLOW_WORKER_S3_MEM_RESERV:-256M}
healthcheck:
test:
- "CMD-SHELL"
- 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "worker-s3@$$(hostname)"'
interval: 30s
timeout: 30s
retries: 5
start_period: 30s
environment:
<<: *airflow-common-env
S3_DELIVERY_AWS_ACCESS_KEY_ID: "{{ vault_s3_delivery_access_key_id }}"
S3_DELIVERY_AWS_SECRET_ACCESS_KEY: "{{ vault_s3_delivery_secret_access_key }}"
S3_DELIVERY_AWS_REGION: "{{ vault_s3_delivery_aws_region }}"
S3_DELIVERY_ENDPOINT: "{{ vault_s3_delivery_endpoint }}"
S3_DELIVERY_BUCKET: "{{ vault_s3_delivery_bucket }}"
HOSTNAME: ${HOSTNAME:-s3-001}
DUMB_INIT_SETSID: "0"
AIRFLOW__CELERY__WORKER_QUEUES: "queue-s3,queue-s3-${HOSTNAME:-s3-001}"
AIRFLOW__CELERY__WORKER_TAGS: "s3"
AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1"
# S3 tasks are lightweight.
AIRFLOW__CELERY__WORKER_AUTOSCALE: "2,1"
AIRFLOW__CELERY__POOL: "prefork"
AIRFLOW__CELERY__TASK_ACKS_LATE: "False"
AIRFLOW__CELERY__OPERATION_TIMEOUT: "2.0"
AIRFLOW__CELERY__WORKER_NAME: "worker-s3@%h"
AIRFLOW__CELERY__WORKER_MAX_TASKS_PER_CHILD: "100"
AIRFLOW__CELERY__WORKER_MAX_MEMORY_PER_CHILD: "262144" # 256MB
networks:
- default
- proxynet
restart: always
airflow-worker-auth:
<<: *airflow-common
container_name: airflow-worker-auth-1
hostname: ${HOSTNAME:-auth001}
# The Auth worker listens on the generic auth queue AND its own dedicated queue.
command: airflow celery worker -q queue-auth,queue-auth-${HOSTNAME:-auth001}
deploy:
resources:
limits:
memory: ${AIRFLOW_WORKER_AUTH_MEM_LIMIT:-4G}
reservations:
memory: ${AIRFLOW_WORKER_AUTH_MEM_RESERV:-1G}
healthcheck:
test:
- "CMD-SHELL"
- 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "worker-auth@$$(hostname)"'
interval: 30s
timeout: 30s
retries: 5
start_period: 30s
environment:
<<: *airflow-common-env
HOSTNAME: ${HOSTNAME:-auth001}
DUMB_INIT_SETSID: "0"
AIRFLOW__CELERY__WORKER_QUEUES: "queue-auth,queue-auth-${HOSTNAME:-auth001}"
AIRFLOW__CELERY__WORKER_TAGS: "auth"
AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1"
# Auth tasks are less resource intensive but we want fewer of them to avoid service overload.
AIRFLOW__CELERY__WORKER_AUTOSCALE: "2,1"
AIRFLOW__CELERY__POOL: "prefork"
AIRFLOW__CELERY__TASK_ACKS_LATE: "False"
AIRFLOW__CELERY__OPERATION_TIMEOUT: "2.0"
AIRFLOW__CELERY__WORKER_NAME: "worker-auth@%h"
AIRFLOW__CELERY__WORKER_MAX_TASKS_PER_CHILD: "100"
AIRFLOW__CELERY__WORKER_MAX_MEMORY_PER_CHILD: "262144" # 256MB
networks:
- default
- proxynet
restart: always
docker-socket-proxy:
profiles:
- disabled
image: tecnativa/docker-socket-proxy:0.1.1
environment:
CONTAINERS: 1
IMAGES: 1
AUTH: 1
POST: 1
privileged: true
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
restart: always
airflow-worker-mgmt:
<<: *airflow-common
container_name: airflow-worker-mgmt-1
hostname: ${HOSTNAME:-mgmt001}
# The Mgmt worker listens on the generic mgmt queue AND its own dedicated queue.
command: airflow celery worker -q queue-mgmt,queue-mgmt-${HOSTNAME:-mgmt001}
deploy:
resources:
limits:
memory: ${AIRFLOW_WORKER_MGMT_MEM_LIMIT:-2G}
reservations:
memory: ${AIRFLOW_WORKER_MGMT_MEM_RESERV:-512M}
healthcheck:
test:
- "CMD-SHELL"
- 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "worker-mgmt@$$(hostname)"'
interval: 30s
timeout: 30s
retries: 5
start_period: 30s
environment:
<<: *airflow-common-env
HOSTNAME: ${HOSTNAME:-mgmt001}
DUMB_INIT_SETSID: "0"
AIRFLOW__CELERY__WORKER_QUEUES: "queue-mgmt,queue-mgmt-${HOSTNAME:-mgmt001}"
AIRFLOW__CELERY__WORKER_TAGS: "mgmt"
AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1"
# Mgmt tasks are lightweight.
AIRFLOW__CELERY__WORKER_AUTOSCALE: "4,2"
AIRFLOW__CELERY__POOL: "prefork"
AIRFLOW__CELERY__TASK_ACKS_LATE: "False"
AIRFLOW__CELERY__OPERATION_TIMEOUT: "2.0"
AIRFLOW__CELERY__WORKER_NAME: "worker-mgmt@%h"
AIRFLOW__CELERY__WORKER_MAX_TASKS_PER_CHILD: "100"
AIRFLOW__CELERY__WORKER_MAX_MEMORY_PER_CHILD: "262144" # 256MB
networks:
- default
- proxynet
restart: always
aria2-pro:
container_name: aria2-pro
build:
context: "{{ airflow_worker_dir }}/aria2-pro-docker"
environment:
- PUID=${AIRFLOW_UID:-50000}
- PGID=0
- UMASK_SET=022
- RPC_SECRET={{ vault_aria2_rpc_secret }}
- RPC_PORT=6800
- LISTEN_PORT=6888
- DISK_CACHE=64M
- IPV6_MODE=false
- UPDATE_TRACKERS=false
- CUSTOM_TRACKER_URL=
- TZ=Asia/Shanghai
volumes:
- ${AIRFLOW_PROJ_DIR:-.}/aria2-config:/config
- ${AIRFLOW_PROJ_DIR:-.}/downloadfiles/videos/in-progress:/downloads
ports:
- "127.0.0.1:6800:6800"
- "6888:6888"
- "6888:6888/udp"
networks:
- proxynet
restart: unless-stopped
logging:
driver: json-file
options:
max-size: 1m
networks:
proxynet:
name: airflow_proxynet
external: true

View File

@ -1,151 +0,0 @@
# Airflow remote DL worker configuration.
# This file should be used on a remote machine to run a download worker.
# It requires a master Airflow instance running with services exposed.
#
# Before running, create a .env file in this directory with:
# MASTER_HOST_IP=... a.b.c.d ... # IP address of the machine running docker-compose-master.yaml
# POSTGRES_PASSWORD=... # The password for the PostgreSQL database from the master compose file
# REDIS_PASSWORD=... # The password for Redis from the master compose file
# AIRFLOW_UID=... # User ID for file permissions, should match master
---
x-airflow-common:
&airflow-common
# This should point to the same image used by the master.
# If you built a custom image for master, you need to push it to a registry
# and reference it here.
image: ${AIRFLOW_IMAGE_NAME:-pangramia/ytdlp-ops-airflow:latest}
# Add extra hosts here to allow workers to resolve other hosts by name.
# This section is auto-generated by Ansible from the inventory.
extra_hosts:
{% for host in groups['all'] %}
- "{{ hostvars[host]['inventory_hostname'] }}:{{ hostvars[host]['ansible_host'] | default(hostvars[host]['inventory_hostname']) }}"
{% endfor %}
env_file:
# The .env file is located in the project root (e.g., /srv/airflow_dl_worker),
# so we provide an absolute path to it.
- "{{ airflow_worker_dir }}/.env"
environment:
&airflow-common-env
AIRFLOW__CORE__PARALLELISM: 128
AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 64
AIRFLOW__SCHEDULER__PARSING_PROCESSES: 8
AIRFLOW__WEBSERVER__WORKERS: 5
AIRFLOW__WEBSERVER__WORKER_CLASS: "gevent"
AIRFLOW__LOGGING__SECRET_MASK_EXCEPTION_ARGS: False
      # Recycle Gunicorn workers frequently and keep heartbeat files in /dev/shm to avoid a slow webserver under memory pressure.
GUNICORN_CMD_ARGS: --max-requests 20 --max-requests-jitter 3 --worker-tmp-dir /dev/shm
# Airflow Core
AIRFLOW__CORE__EXECUTOR: CeleryExecutor
AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
AIRFLOW__CORE__FERNET_KEY: '' # Should be same as master, but worker does not need it.
# Backend connections - These should point to the master node
# Set MASTER_HOST_IP, POSTGRES_PASSWORD, and REDIS_PASSWORD in your .env file
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow
AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@${{ '{' }}MASTER_HOST_IP{{ '}' }}:{{ postgres_port }}/airflow
AIRFLOW__CELERY__BROKER_URL: redis://:${REDIS_PASSWORD}@${MASTER_HOST_IP}:{{ redis_port }}/0
# Remote Logging - connection is configured directly via environment variables
#_PIP_ADDITIONAL_REQUIREMENTS: ${{ '{' }}_PIP_ADDITIONAL_REQUIREMENTS:- apache-airflow-providers-docker apache-airflow-providers-http thrift>=0.16.0,<=0.20.0 backoff>=2.2.1 python-dotenv==1.0.1 psutil>=5.9.0 apache-airflow-providers-amazon{{ '}' }}
AIRFLOW__LOGGING__REMOTE_LOGGING: "True"
AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER: "s3://airflow-logs"
AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID: minio_default
AIRFLOW__LOGGING__ENCRYPT_S3_LOGS: "False"
#AIRFLOW__LOGGING__LOG_ID_TEMPLATE: "{dag_id}-{task_id}-{run_id}-{try_number}"
AIRFLOW__WEBSERVER__SECRET_KEY: 'qmALu5JCAW0518WGAqkVZQ=='
AIRFLOW__CORE__INTERNAL_API_SECRET_KEY: 'qmALu5JCAW0518WGAqkVZQ=='
AIRFLOW__CORE__LOCAL_SETTINGS_PATH: "/opt/airflow/config/custom_task_hooks.py"
volumes:
# Mount dags to get any utility scripts, but the worker will pull the DAG from the DB
- ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
# Mount logs locally in case remote logging fails
- ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
# Mount config for local settings and other configurations
- ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
- ${AIRFLOW_PROJ_DIR:-.}/config/airflow.cfg:/opt/airflow/airflow.cfg
# Mount download directories
- ${AIRFLOW_PROJ_DIR:-.}/downloadfiles:/opt/airflow/downloadfiles
- ${AIRFLOW_PROJ_DIR:-.}/addfiles:/opt/airflow/addfiles
- ${AIRFLOW_PROJ_DIR:-.}/inputfiles:/opt/airflow/inputfiles
# Mount the generated pangramia package to ensure workers have the latest version
- ${AIRFLOW_PROJ_DIR:-.}/pangramia:/app/pangramia
# Use AIRFLOW_UID from .env file to fix permission issues. GID is set to 0 for compatibility with the Airflow image.
user: "${{ '{' }}AIRFLOW_UID:-50000{{ '}' }}:0"
services:
airflow-worker:
<<: *airflow-common
container_name: airflow-dl-worker-1
hostname: ${HOSTNAME:-dl001}
# The worker now listens on the generic queue AND its own dedicated queue.
# The hostname is dynamically inserted into the queue name.
command: airflow celery worker -q queue-dl,queue-dl-${HOSTNAME:-dl001}
deploy:
resources:
limits:
# Increased from 4G to 8G to support higher memory per child process.
memory: ${AIRFLOW_WORKER_DOWNLOAD_MEM_LIMIT:-8G}
reservations:
memory: ${AIRFLOW_WORKER_DOWNLOAD_MEM_RESERV:-2G}
healthcheck:
test:
- "CMD-SHELL"
- 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "worker-dl@$$(hostname)"'
interval: 30s
timeout: 30s
retries: 5
start_period: 30s
environment:
<<: *airflow-common-env
HOSTNAME: ${HOSTNAME:-dl001} # Explicitly set inside container
DUMB_INIT_SETSID: "0"
AIRFLOW__CELERY__WORKER_QUEUES: "queue-dl,queue-dl-${HOSTNAME:-dl001}"
AIRFLOW__CELERY__WORKER_TAGS: "dl"
AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1"
# Use autoscaling to adjust number of workers based on load.
# Format is max_concurrency,min_concurrency.
AIRFLOW__CELERY__WORKER_AUTOSCALE: "16,8"
# Use prefork pool for better compatibility with blocking libraries.
AIRFLOW__CELERY__POOL: "prefork"
AIRFLOW__CELERY__TASK_ACKS_LATE: "False"
AIRFLOW__CELERY__OPERATION_TIMEOUT: "2.0"
AIRFLOW__CELERY__WORKER_NAME: "worker-dl@%h"
AIRFLOW__CELERY__WORKER_MAX_TASKS_PER_CHILD: "100"
# Increased from 256MB to 512MB for memory-intensive yt-dlp tasks.
# This value is in KB. 512 * 1024 = 524288.
AIRFLOW__CELERY__WORKER_MAX_MEMORY_PER_CHILD: "524288" # 512MB
# The hostname is now managed by Docker Compose to ensure uniqueness when scaling.
# It will be generated based on project, service, and replica number (e.g., airflow-airflow-dl-worker-1).
# hostname: "dl-worker-${HOSTNAME_SUFFIX:-$$(hostname)}"
ports:
- "8793:8793"
networks:
- default
- proxynet
restart: always
docker-socket-proxy:
profiles:
- disabled
image: tecnativa/docker-socket-proxy:0.1.1
environment:
CONTAINERS: 1
IMAGES: 1
AUTH: 1
POST: 1
privileged: true
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
restart: always
networks:
proxynet:
name: airflow_proxynet
external: true

View File

@ -1,574 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
#
# WARNING: This configuration is for local development. Do not use it in a production deployment.
#
# This configuration supports basic configuration using environment variables or an .env file
# The following variables are supported:
#
# AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow.
# Default: apache/airflow:2.10.5
# AIRFLOW_UID - User ID in Airflow containers
# Default: 50000
# AIRFLOW_PROJ_DIR - Base path to which all the files will be volumed.
# Default: .
# Those configurations are useful mostly in case of standalone testing/running Airflow in test/try-out mode
#
# _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account (if requested).
# Default: airflow
# _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested).
# Default: airflow
# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
# Use this option ONLY for quick checks. Installing requirements at container
# startup is done EVERY TIME the service is started.
# A better way is to build a custom image or extend the official image
# as described in https://airflow.apache.org/docs/docker-stack/build.html.
# Default: ''
#
# Feel free to modify this file to suit your needs.
---
name: airflow-master
x-airflow-common:
&airflow-common
# In order to add custom dependencies or upgrade provider packages you can use your extended image.
# This will build the image from the Dockerfile in this directory and tag it.
image: ${{ '{' }}AIRFLOW_IMAGE_NAME:-pangramia/ytdlp-ops-airflow:latest{{ '}' }}
build: .
# Add extra hosts here to allow the master services (webserver, scheduler) to resolve
# the hostnames of your remote DL workers. This is crucial for fetching logs.
# Format: - "hostname:ip_address"
# This section is auto-generated by Ansible from the inventory.
extra_hosts:
{% for host in groups['all'] %}
- "{{ hostvars[host]['inventory_hostname'] }}:{{ hostvars[host]['ansible_host'] | default(hostvars[host]['inventory_hostname']) }}"
{% endfor %}
env_file:
# The .env file is located in the project root, one level above the 'configs' directory.
- ".env"
networks:
- proxynet
environment:
&airflow-common-env
AIRFLOW__CORE__PARALLELISM: 128
AIRFLOW__CORE__MAX_ACTIVE_TASKS_PER_DAG: 64
AIRFLOW__SCHEDULER__PARSING_PROCESSES: 4
AIRFLOW__WEBSERVER__WORKER_CLASS: sync
AIRFLOW__WEBSERVER__WORKERS: 8
AIRFLOW__LOGGING__SECRET_MASK_EXCEPTION_ARGS: 'false'
      # Keep Gunicorn heartbeat files in /dev/shm to avoid a slow webserver under memory pressure.
GUNICORN_CMD_ARGS: --worker-tmp-dir /dev/shm
AIRFLOW__CORE__EXECUTOR: CeleryExecutor
# For master services, connect to Postgres and Redis using internal Docker service names.
# Passwords are sourced from the .env file.
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@postgres:5432/airflow
AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql+psycopg2://airflow:${{ '{' }}POSTGRES_PASSWORD{{ '}' }}@postgres:5432/airflow
AIRFLOW__CELERY__BROKER_URL: redis://:${{ '{' }}REDIS_PASSWORD{{ '}' }}@redis:6379/0
AIRFLOW__CORE__FERNET_KEY: ''
AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
AIRFLOW__WEBSERVER__SECRET_KEY: 'qmALu5JCAW0518WGAqkVZQ=='
AIRFLOW__WEBSERVER__WORKER_TIMEOUT: '120'
      AIRFLOW__CORE__INTERNAL_API_SECRET_KEY: 'qmALu5JCAW0518WGAqkVZQ=='
# yamllint disable rule:line-length
# Use simple http server on scheduler for health checks
# See https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
# yamllint enable rule:line-length
AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
AIRFLOW__DATABASE__LOAD_DEFAULT_CONNECTIONS: 'false'
AIRFLOW__LOGGING__REMOTE_LOGGING: 'true'
AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER: "s3://airflow-logs"
AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID: minio_default
      AIRFLOW__LOGGING__ENCRYPT_S3_LOGS: 'false'
AIRFLOW__CORE__LOCAL_SETTINGS_PATH: "/opt/airflow/config/custom_task_hooks.py"
volumes:
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/dags:/opt/airflow/dags
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/logs:/opt/airflow/logs
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/config:/opt/airflow/config
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/config/airflow.cfg:/opt/airflow/airflow.cfg
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/plugins:/opt/airflow/plugins
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/downloadfiles:/opt/airflow/downloadfiles
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/addfiles:/opt/airflow/addfiles
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/inputfiles:/opt/airflow/inputfiles
# Mount the generated pangramia package to ensure master services have the latest version
- ${{ '{' }}AIRFLOW_PROJ_DIR:-.{{ '}' }}/pangramia:/app/pangramia
user: "${{ '{' }}AIRFLOW_UID:-50000{{ '}' }}:0"
depends_on:
&airflow-common-depends-on
redis:
condition: service_healthy
postgres:
condition: service_healthy
minio-init:
condition: service_completed_successfully
services:
postgres:
image: postgres:13
env_file:
- .env
networks:
- proxynet
environment:
POSTGRES_USER: airflow
POSTGRES_PASSWORD: ${{ '{' }}POSTGRES_PASSWORD:-pgdb_pwd_A7bC2xY9zE1wV5uP{{ '}' }}
POSTGRES_DB: airflow
command:
- "postgres"
- "-c"
- "shared_buffers=512MB"
- "-c"
- "effective_cache_size=1536MB"
volumes:
- ./postgres-data:/var/lib/postgresql/data
ports:
- "${{ '{' }}POSTGRES_PORT:-5432{{ '}' }}:5432"
healthcheck:
test: ["CMD", "pg_isready", "-U", "airflow"]
interval: 10s
retries: 5
start_period: 5s
restart: always
user: "999:999"
redis:
    # Redis is pinned to 7.2-bookworm due to the licensing change
# https://redis.io/blog/redis-adopts-dual-source-available-licensing/
image: redis:7.2-bookworm
env_file:
- .env
networks:
- proxynet
command:
- "redis-server"
- "--requirepass"
- "${{ '{' }}REDIS_PASSWORD:-rOhTAIlTFFylXsjhqwxnYxDChFc{{ '}' }}"
- "--bind"
- "*"
- "--protected-mode"
- "no"
- "--save"
- "60"
- "1"
- "--loglevel"
- "warning"
- "--appendonly"
- "yes"
volumes:
- redis-data:/data
expose:
- 6379
ports:
- "${{ '{' }}REDIS_PORT:-6379{{ '}' }}:6379"
healthcheck:
test: ["CMD", "redis-cli", "-a", "${{ '{' }}REDIS_PASSWORD:-rOhTAIlTFFylXsjhqwxnYxDChFc{{ '}' }}", "ping"]
interval: 10s
timeout: 30s
retries: 50
start_period: 30s
restart: always
sysctls:
- net.core.somaxconn=1024
ulimits:
memlock: -1
redis-proxy-account-clear:
image: redis:7.2-bookworm
container_name: redis-proxy-account-clear
env_file:
- .env
networks:
- proxynet
command: >
sh -c "
echo 'Clearing proxy and account statuses from Redis...';
redis-cli -h redis -a $${{ '{' }}REDIS_PASSWORD:-rOhTAIlTFFylXsjhqwxnYxDChFc{{ '}' }} --scan --pattern 'proxy_status:*' | xargs -r redis-cli -h redis -a $${{ '{' }}REDIS_PASSWORD:-rOhTAIlTFFylXsjhqwxnYxDChFc{{ '}' }} DEL;
redis-cli -h redis -a $${{ '{' }}REDIS_PASSWORD:-rOhTAIlTFFylXsjhqwxnYxDChFc{{ '}' }} --scan --pattern 'account_status:*' | xargs -r redis-cli -h redis -a $${{ '{' }}REDIS_PASSWORD:-rOhTAIlTFFylXsjhqwxnYxDChFc{{ '}' }} DEL;
echo 'Redis cleanup complete.'
"
depends_on:
redis:
condition: service_healthy
minio:
image: minio/minio:latest
container_name: minio
networks:
- proxynet
volumes:
- ./minio-data:/data
ports:
- "9001:9000"
- "9002:9001"
environment:
MINIO_ROOT_USER: ${{ '{' }}MINIO_ROOT_USER:-admin{{ '}' }}
MINIO_ROOT_PASSWORD: ${{ '{' }}MINIO_ROOT_PASSWORD:-0153093693-0009{{ '}' }}
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
restart: always
nginx-minio-lb:
image: nginx:alpine
container_name: nginx-minio-lb
networks:
- proxynet
ports:
- "80:80"
- "81:81"
volumes:
- ./configs/nginx.conf:/etc/nginx/nginx.conf:ro
depends_on:
minio:
condition: service_healthy
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:80/minio/health/live"]
interval: 30s
timeout: 10s
retries: 5
restart: always
minio-init:
image: minio/mc
container_name: minio-init
networks:
- proxynet
depends_on:
nginx-minio-lb:
condition: service_healthy
entrypoint: >
/bin/sh -c "
set -e;
/usr/bin/mc alias set minio http://nginx-minio-lb:80 $$MINIO_ROOT_USER $$MINIO_ROOT_PASSWORD;
# Retry loop for bucket creation
MAX_ATTEMPTS=10
SUCCESS=false
# Use a for loop for robustness, as it's generally more portable than `until`.
for i in $$(seq 1 $$MAX_ATTEMPTS); do
# Check if the bucket exists. If so, we're done.
if /usr/bin/mc ls minio/airflow-logs > /dev/null 2>&1; then
echo 'MinIO bucket already exists.'
SUCCESS=true
break
fi
# If not, try to create it. If successful, we're done.
# We redirect output because `mc mb` can error if another process creates it in the meantime.
if /usr/bin/mc mb minio/airflow-logs > /dev/null 2>&1; then
echo 'MinIO bucket created.'
SUCCESS=true
break
fi
# If we reach here, both checks failed. Wait and retry.
echo "Attempt $$i/$$MAX_ATTEMPTS: Waiting for MinIO bucket..."
sleep 2
done
# After the loop, check if we succeeded.
if [ "$$SUCCESS" = "false" ]; then
echo "Failed to create MinIO bucket after $$MAX_ATTEMPTS attempts."
exit 1
fi
/usr/bin/mc anonymous set download minio/airflow-logs;
echo 'MinIO initialized: bucket airflow-logs created and policy set to download.';
"
env_file:
- .env
environment:
MINIO_ROOT_USER: ${{ '{' }}MINIO_ROOT_USER:-admin{{ '}' }}
MINIO_ROOT_PASSWORD: ${{ '{' }}MINIO_ROOT_PASSWORD:-0153093693-0009{{ '}' }}
restart: on-failure
caddy:
build:
context: .
dockerfile: Dockerfile.caddy
image: pangramia/ytdlp-ops-caddy:latest
container_name: caddy
networks:
- proxynet
ports:
- "8080:8080"
depends_on:
airflow-webserver:
condition: service_started
restart: always
airflow-webserver:
<<: *airflow-common
command: webserver
expose:
- "8080"
environment:
<<: *airflow-common-env
healthcheck:
test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
interval: 30s
timeout: 30s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
airflow-scheduler:
<<: *airflow-common
command: scheduler
healthcheck:
test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
airflow-master-worker:
<<: *airflow-common
command: airflow celery worker -q main,default
healthcheck:
# yamllint disable rule:line-length
test:
- "CMD-SHELL"
- 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "worker-master@$$(hostname)"'
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
environment:
<<: *airflow-common-env
# Required to handle warm shutdown of the celery workers properly
# See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
DUMB_INIT_SETSID: 0
AIRFLOW__CELERY__WORKER_QUEUES: "main,default"
AIRFLOW__CELERY__WORKER_TAGS: "master"
AIRFLOW__CELERY__WORKER_CONCURRENCY: "16"
AIRFLOW__CELERY__WORKER_PREFETCH_MULTIPLIER: "1"
AIRFLOW__CELERY__TASK_ACKS_LATE: "True"
AIRFLOW__CELERY__OPERATION_TIMEOUT: "2.0"
AIRFLOW__CELERY__WORKER_NAME: "worker-master@%h"
AIRFLOW__CELERY__WORKER_MAX_TASKS_PER_CHILD: "100"
# Max memory per child process before it's recycled. Helps prevent memory leaks.
# 256MB is sufficient for master worker tasks. DL workers use a higher limit.
AIRFLOW__CELERY__WORKER_MAX_MEMORY_PER_CHILD: "262144" # 256MB
hostname: ${{ '{' }}HOSTNAME{{ '}' }}
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
airflow-triggerer:
<<: *airflow-common
command: triggerer
hostname: ${{ '{' }}HOSTNAME{{ '}' }}
environment:
<<: *airflow-common-env
PYTHONASYNCIODEBUG: "1"
healthcheck:
test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${{ '{' }}HOSTNAME{{ '}' }}"']
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
airflow-regression-runner:
<<: *airflow-common
entrypoint: ""
container_name: airflow-regression-runner
command: ["tail", "-f", "/dev/null"]
hostname: ${{ '{' }}HOSTNAME{{ '}' }}
environment:
<<: *airflow-common-env
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
airflow-init:
<<: *airflow-common
depends_on:
<<: *airflow-common-depends-on
redis-proxy-account-clear:
condition: service_completed_successfully
entrypoint: /bin/bash
# yamllint disable rule:line-length
command:
- -c
- |
# This container runs as root and is responsible for initializing the environment.
# It sets permissions on mounted directories to ensure the 'airflow' user (running with AIRFLOW_UID)
# can write to them. This is crucial for logs, dags, and plugins.
echo "Creating scheduler & dag processor log directories..."
mkdir -p /opt/airflow/logs/scheduler /opt/airflow/logs/dag_processor_manager
echo "Initializing permissions for Airflow directories..."
chown -R "${{ '{' }}AIRFLOW_UID{{ '}' }}:0" /opt/airflow/dags /opt/airflow/logs /opt/airflow/plugins /opt/airflow/config /opt/airflow/downloadfiles /opt/airflow/addfiles /opt/airflow/inputfiles
echo "Setting group-writable and setgid permissions on logs directory..."
find /opt/airflow/logs -type d -exec chmod g+rws {} +
find /opt/airflow/logs -type f -exec chmod g+rw {} +
echo "Permissions set."
# Install curl and setup MinIO connection
echo "Installing curl and setting up MinIO connection..."
apt-get update -yqq && apt-get install -yqq curl
echo "MinIO connection setup complete."
if [[ -z "${{ '{' }}AIRFLOW_UID{{ '}' }}" ]]; then
echo
echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
echo "If you are on Linux, you SHOULD follow the instructions below to set "
echo "AIRFLOW_UID environment variable, otherwise files will be owned by root."
echo "For other operating systems you can get rid of the warning with manually created .env file:"
echo " See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user"
echo
fi
# This container's job is to initialize the database, create a user, and import connections.
# Wait for db to be ready.
airflow db check --retry 30 --retry-delay 5
# Initialize the database if needed
echo "Initializing Airflow database..."
airflow db init
echo "Database initialization complete."
# Run database migrations.
echo "Running database migrations..."
airflow db upgrade
echo "Database migrations complete."
# Create the admin user if it doesn't exist.
# The '|| true' prevents the script from failing if the user already exists.
echo "Checking for and creating admin user..."
airflow users create \
--username "admin" \
--password "${{ '{' }}AIRFLOW_ADMIN_PASSWORD:-admin_pwd_X9yZ3aB1cE5dF7gH{{ '}' }}" \
--firstname Admin \
--lastname User \
--role Admin \
--email admin@example.com || true
echo "Admin user check/creation complete."
# Create/update the redis_default connection to ensure password is correct
echo "Creating/updating redis_default connection..."
airflow connections add 'redis_default' \
--conn-uri "redis://:${{ '{' }}REDIS_PASSWORD{{ '}' }}@redis:6379/0" \
|| echo "Failed to add redis_default connection, but continuing."
echo "Redis connection setup complete."
# Import connections from any .json file in the config directory.
echo "Searching for connection files in /opt/airflow/config..."
if [ -d "/opt/airflow/config" ] && [ -n "$(ls -A /opt/airflow/config/*.json 2>/dev/null)" ]; then
for conn_file in /opt/airflow/config/*.json; do
if [ -f "$$conn_file" ]; then
# Exclude files that are not meant to be Airflow connections.
if [ "$(basename "$$conn_file")" = "camoufox_endpoints.json" ]; then
echo "Skipping '$$conn_file' as it is not an Airflow connection file."
continue
fi
echo "Importing connections from $$conn_file"
airflow connections import "$$conn_file" || echo "Failed to import $$conn_file, but continuing."
fi
done
else
echo "No connection files found to import, or /opt/airflow/config is empty/missing."
fi
echo "Connection import process complete."
# yamllint enable rule:line-length
environment:
<<: *airflow-common-env
_AIRFLOW_DB_MIGRATE: 'true'
_AIRFLOW_WWW_USER_CREATE: 'false' # Set to false as we handle it manually
user: "0:0"
airflow-cli:
<<: *airflow-common
profiles:
- debug
environment:
<<: *airflow-common-env
CONNECTION_CHECK_MAX_COUNT: "0"
# Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
command:
- bash
- -c
- airflow
flower:
<<: *airflow-common
command: celery flower
ports:
- "5555:5555"
environment:
<<: *airflow-common-env
FLOWER_BASIC_AUTH: "flower:${{ '{' }}FLOWER_PASSWORD{{ '}' }}"
healthcheck:
test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
docker-socket-proxy:
profiles:
- disabled
image: tecnativa/docker-socket-proxy:0.1.1
networks:
- proxynet
environment:
CONTAINERS: 1
IMAGES: 1
AUTH: 1
POST: 1
privileged: true
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
restart: always
volumes:
redis-data:
networks:
proxynet:
name: airflow_proxynet
external: true

View File

@ -1,144 +0,0 @@
name: ytdlp-ops
services:
bgutil-provider:
image: brainicism/bgutil-ytdlp-pot-provider
container_name: bgutil-provider
init: true
ports:
- "4416:4416"
restart: unless-stopped
networks:
- proxynet
context-prepper:
image: busybox:latest
restart: "no"
volumes:
- ./context:/app/context
networks:
- proxynet
command:
- "/bin/sh"
- "-c"
- |
set -e
CONTEXT_BASE_DIR="/app/context"
TIMESTAMP_DIR="$${CONTEXT_BASE_DIR}/context-data_$$(date +%Y%m%d_%H%M%S)"
mkdir -p "$${TIMESTAMP_DIR}"
ln -sfn "$${TIMESTAMP_DIR}" "$${CONTEXT_BASE_DIR}/context-data"
echo "Context prepper finished. Data will be in: $${TIMESTAMP_DIR}"
envoy:
image: envoyproxy/envoy:v1.29-latest
{% if service_role != 'management' %}
container_name: envoy-thrift-lb-${HOSTNAME}
{% else %}
container_name: envoy-thrift-lb
{% endif %}
restart: unless-stopped
volumes:
# Mount the generated config file from the host
- ./configs/envoy.yaml:/etc/envoy/envoy.yaml:ro
ports:
# This is the single public port for all Thrift traffic
- "${ENVOY_PORT:-9080}:${ENVOY_PORT:-9080}"
# Expose the admin port for debugging
- "${ENVOY_ADMIN_PORT:-9901}:${ENVOY_ADMIN_PORT:-9901}"
networks:
- proxynet
# This service depends on ytdlp-ops-service, which in turn waits for camoufox.
depends_on:
- ytdlp-ops-service
ytdlp-ops-service:
image: pangramia/ytdlp-ops-server:latest # Don't comment out or remove, build is performed externally
# container_name is omitted; Docker will use the service name for DNS.
# This service depends on the camoufox-group service, which ensures all camoufox
# instances are started before this service.
depends_on:
context-prepper:
condition: service_completed_successfully
# Ports are no longer exposed directly. Envoy will connect to them on the internal network.
# entrypoint:
# - /bin/sh
# - -c
# - |
# set -e
# echo "[$(date)] Updating yt-dlp to latest nightly master..."
# python3 -m pip install -U --pre "yt-dlp[default]" --upgrade-strategy eager --force-reinstall --no-cache-dir
# echo "[$(date)] yt-dlp updated to:"
# yt-dlp --version
# echo "[$(date)] Starting original entrypoint..."
# exec /usr/local/bin/docker-entrypoint.sh "$$@"
env_file:
- ./.env # Path is relative to the project directory
volumes:
- ./context:/app/context
- ./logs/yt-dlp-ops/communication_logs:/app/logs/yt-dlp-ops/communication_logs
{% if service_role != 'management' %}
# Mount the generated endpoints file to make it available to the server
- ./configs/camoufox_endpoints.json:/app/config/camoufox_endpoints.json:ro
{% endif %}
# Mount the plugin source code for live updates without rebuilding the image.
# Assumes the plugin source is in a 'bgutil-ytdlp-pot-provider' directory
# next to your docker-compose.yaml file.
#- ./bgutil-ytdlp-pot-provider:/app/bgutil-ytdlp-pot-provider
networks:
- proxynet
command:
# --- Parameters for ALL service roles ---
- "--port"
- "${YTDLP_BASE_PORT:-9090}"
- "--timeout"
- "${YTDLP_TIMEOUT:-600}"
- "--workers"
- "${YTDLP_WORKERS:-3}"
- "--verbose"
- "--server-identity"
- "${SERVER_IDENTITY:-ytdlp-ops-airflow-service}"
- "--redis-host"
- "${MASTER_HOST_IP:-redis}"
- "--redis-port"
- "${REDIS_PORT:-52909}"
- "--redis-password"
- "${REDIS_PASSWORD}"
- "--service-role"
- "{{ service_role }}"
# --- S3 Logging Parameters ---
#- "--s3-endpoint-url"
#- "${S3_ENDPOINT_URL}"
#- "--s3-access-key-id"
#- "${S3_ACCESS_KEY_ID}"
#- "--s3-secret-access-key"
#- "${S3_SECRET_ACCESS_KEY}"
#- "--s3-region-name"
#- "${S3_REGION_NAME}"
{% if service_role is defined and service_role != 'management' %}
# --- Parameters for worker/all-in-one roles ONLY ---
- "--script-dir"
- "/app"
- "--context-dir"
- "/app/context/context-data"
- "--clean-context-dir"
- "--clients"
- "${YT_CLIENTS:-web,mweb,ios,android}"
- "--proxies"
- "${CAMOUFOX_PROXIES}"
- "--camoufox-endpoints-file"
- "/app/config/camoufox_endpoints.json"
- "--print-tokens"
- "--stop-if-no-proxy"
- "--comms-log-root-dir"
- "/app/logs/yt-dlp-ops/communication_logs"
- "--bgutils-no-innertube"
- "--visitor-rotation-threshold"
- "250"
{% endif %}
restart: unless-stopped
pull_policy: always
networks:
proxynet:
name: airflow_proxynet
external: true

View File

@ -1,69 +0,0 @@
# THIS FILE IS AUTO-GENERATED BY generate_envoy_config.py
# DO NOT EDIT MANUALLY.
#
# It contains the service definitions for the camoufox instances
# and adds the necessary dependencies to the main services.
services:
{% for proxy in camoufox_proxies %}
{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %}
{% set container_base_port = camoufox_port + loop.index0 * worker_count %}
{% set host_base_port = container_base_port %}
camoufox-{{ proxy_port }}-{{ loop.index }}:
build:
context: ../camoufox
dockerfile: Dockerfile
args:
VNC_PASSWORD: "{{ vnc_password }}"
image: camoufox:latest
container_name: ytdlp-ops-camoufox-{{ proxy_port }}-{{ loop.index }}-1
restart: unless-stopped
shm_size: '2gb' # Mitigates browser crashes due to shared memory limitations
ports:
- "{{ host_base_port }}-{{ host_base_port + worker_count - 1 }}:{{ container_base_port }}-{{ container_base_port + worker_count - 1 }}"
environment:
- DISPLAY=:99
- MOZ_HEADLESS_STACKSIZE=2097152
- CAMOUFOX_MAX_MEMORY_MB=2048
- CAMOUFOX_MAX_CONCURRENT_CONTEXTS=8
- CAMOUFOX_RESTART_THRESHOLD_MB=1500
volumes:
- /tmp/.X11-unix:/tmp/.X11-unix:rw
- camoufox-data-{{ proxy_port }}-{{ loop.index }}:/app/context-data
- camoufox-browser-cache:/root/.cache/ms-playwright # Persist browser binaries
command: [
"--ws-host", "0.0.0.0",
"--port", "{{ container_base_port }}",
"--num-instances", "{{ worker_count }}",
"--ws-path", "mypath",
"--proxy-url", "{{ proxy.url }}",
"--headless",
"--monitor-resources",
"--memory-restart-threshold", "1800",
"--preferences", "layers.acceleration.disabled=true,dom.ipc.processCount=2,media.memory_cache_max_size=102400,browser.cache.memory.capacity=102400"
]
deploy:
resources:
limits:
memory: 2.5G
logging:
driver: "json-file"
options:
max-size: "100m"
max-file: "3"
networks:
- proxynet
{% endfor %}
volumes:
{% for proxy in camoufox_proxies %}
{% set proxy_port = _get_port_from_proxy_url(proxy.url) | int %}
camoufox-data-{{ proxy_port }}-{{ loop.index }}:
{% endfor %}
{% if camoufox_proxies %}
camoufox-browser-cache:
{% endif %}
networks:
proxynet:
name: airflow_proxynet
external: true

View File

@ -1,13 +0,0 @@
version: '3.8'
services:
config-generator:
image: python:3.12-slim
working_dir: /app
env_file:
- ./.env
volumes:
# Mount the entire project directory to access scripts and write output files
- .:/app
command: >
sh -c "pip install jinja2 && python3 /app/generate_envoy_config.py"

View File

@ -1,54 +0,0 @@
# Jinja2 template for Envoy configuration
admin:
address:
socket_address:
address: 0.0.0.0
port_value: {{ envoy_admin_port }}
static_resources:
listeners:
# Listener for ytdlp-ops Thrift traffic
- name: ytdlp_ops_listener
address:
socket_address:
address: 0.0.0.0
port_value: {{ envoy_port }}
filter_chains:
- filters:
- name: envoy.filters.network.thrift_proxy
typed_config:
"@type": type.googleapis.com/envoy.extensions.filters.network.thrift_proxy.v3.ThriftProxy
stat_prefix: thrift_ingress
transport: FRAMED
protocol: BINARY
route_config:
name: local_route
routes:
- match:
method_name: ""
route:
cluster: ytdlp_ops_cluster
clusters:
# Cluster for the ytdlp-ops workers
- name: ytdlp_ops_cluster
connect_timeout: 5s
type: {{ envoy_cluster_type }}
lb_policy: ROUND_ROBIN
health_checks:
- timeout: 1s
interval: 5s
unhealthy_threshold: 3
healthy_threshold: 2
tcp_health_check: {}
load_assignment:
cluster_name: ytdlp_ops_cluster
endpoints:
- lb_endpoints:
{% for i in range(worker_count) %}
- endpoint:
address:
socket_address:
address: {{ backend_address }}
port_value: {{ base_port + i }}
{% endfor %}

View File

@ -1,35 +0,0 @@
events {
worker_connections 1024;
}
http {
upstream minio_servers {
server 172.17.0.1:9001;
}
upstream minio_console_servers {
server 172.17.0.1:9002;
}
server {
listen 80;
location / {
proxy_pass http://minio_servers;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
}
server {
listen 81;
location / {
proxy_pass http://minio_console_servers;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
}
}

BIN
airflow/dags/.DS_Store vendored

Binary file not shown.

View File

@ -1,163 +0,0 @@
# YTDLP Airflow DAGs: Architecture and Description
This document describes the architecture and purpose of the DAGs used to download videos from YouTube. The system is built around a continuous, self-sustaining loop for parallel, fault-tolerant processing.
## Main processing loop
Processing is carried out by two main DAGs working in tandem: an orchestrator and a worker.
### `ytdlp_ops_orchestrator` (the "ignition system")
- **Purpose:** This DAG acts as the "ignition system" that starts processing. It is triggered manually to launch a specified number of parallel worker loops.
- **How it works:**
    - It does **not** process URLs itself.
    - Its sole job is to trigger the configured number of `ytdlp_ops_worker_per_url` DAG runs.
    - It passes all required configuration (account pool, Redis connection, etc.) to the workers.
### `ytdlp_ops_worker_per_url` (self-sustaining worker)
- **Purpose:** This DAG processes a single URL and is designed to run in a continuous loop.
- **How it works:**
    1. **Start:** The initial run is triggered by `ytdlp_ops_orchestrator`.
    2. **Fetching a task:** The worker pops one URL from the `_inbox` queue in Redis. If the queue is empty, the worker finishes and its processing "lane" stops.
    3. **Processing:** It calls the `ytdlp-ops-server` service to obtain `info.json` and a proxy, then downloads the video.
    4. **Continue or stop:**
        - **On success:** It triggers a new run of itself, creating a continuous loop that processes the next URL (see the sketch below).
        - **On failure:** The loop is broken (if `stop_on_failure` is set to `True`), stopping this processing "lane". This prevents a single problematic URL or account from halting the whole system.
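A minimal sketch of the self-sustaining loop described above, in Airflow 2.x terms. This is illustrative only: the `_q_<queue>` run_id convention comes from `custom_task_hooks.py`, while the exact task wiring and any extra `conf` passed along are assumptions.

```python
from airflow.operators.trigger_dagrun import TriggerDagRunOperator

# On success, the worker triggers the next iteration of itself.
# The '_q_<queue>' suffix is re-appended so task_instance_mutation_hook
# keeps pinning the new run's tasks to the same dedicated worker queue.
trigger_self_run = TriggerDagRunOperator(
    task_id="trigger_self_run",
    trigger_dag_id="ytdlp_ops_worker_per_url",
    # ts_nodash makes each run_id unique per iteration.
    trigger_run_id="loop__{{ ts_nodash }}_q_{{ run_id.split('_q_')[-1] }}",
    wait_for_completion=False,
)
```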
## Management DAGs
### `ytdlp_mgmt_proxy_account`
- **Purpose:** The main tool for monitoring and managing the state of the resources used by `ytdlp-ops-server`.
- **Features:**
    - **Status overview:** Shows the current status of every proxy and account (e.g. `ACTIVE`, `BANNED`, `RESTING`).
    - **Proxy management:** Manually ban, unban, or reset the status of a proxy.
    - **Account management:** Manually ban or unban accounts.
### `ytdlp_mgmt_queues`
- **Purpose:** Provides a set of tools for managing the Redis queues used by the processing pipeline.
- **Features (via the `action` parameter):**
    - `add_videos`: Add one or more YouTube URLs to the queue.
    - `clear_queue`: Clear (delete) the specified Redis key.
    - `list_contents`: View the contents of a Redis key (list or hash).
    - `check_status`: Check the overall state of the queues (type, size).
    - `requeue_failed`: Move all URLs from the `_fail` queue back to the `_inbox` queue for reprocessing (see the sketch below).
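Conceptually, `requeue_failed` boils down to a few Redis operations. A hedged sketch (the `ytdlp_urls` prefix and key layout are assumptions; the real DAG takes them from its parameters):

```python
import os
import redis

r = redis.Redis(host="redis", port=6379,
                password=os.environ["REDIS_PASSWORD"], decode_responses=True)

prefix = "ytdlp_urls"  # assumed queue prefix, configured per deployment
# _fail is a hash of url -> error details; _inbox is a plain list.
for url in r.hkeys(f"{prefix}_fail"):
    r.rpush(f"{prefix}_inbox", url)  # put the URL back in line
    r.hdel(f"{prefix}_fail", url)    # and clear its failure record
```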
## Resource Management Strategy (Proxies and Accounts)
The system uses an adaptive strategy for managing the lifecycle and state of accounts and proxies in order to maximize the success rate and minimize bans.
- **Account lifecycle ("cooldown"):**
  - To prevent "burnout", accounts automatically enter a resting state (`RESTING`) after a period of intensive use.
  - When the rest period expires, they automatically return to `ACTIVE` and become available to workers again.
- **Smart ban strategy:**
  - **Ban the account first:** On a serious error (e.g. `BOT_DETECTED`), the system penalizes **only the account** that caused the failure. The proxy keeps working.
  - **Sliding-window proxy ban:** A proxy is banned automatically only if it shows **systematic failures with DIFFERENT accounts** within a short time window, which is a reliable indicator that the proxy itself is the problem (see the sketch below).
- **Monitoring:**
  - The `ytdlp_mgmt_proxy_account` DAG is the main monitoring tool. It shows the current status of every resource, including the time remaining until banned or resting accounts become active again.
  - The execution graph of the `ytdlp_ops_worker_per_url` DAG now explicitly shows steps such as `assign_account`, `get_token`, `ban_account`, and `retry_get_token`, which makes debugging more transparent.
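The server-side implementation is not shown in this document; the sketch below illustrates one way such a sliding-window rule could work, using a Redis sorted set keyed by account id. The key name, window length, and threshold are assumptions.

```python
# Minimal sketch of a sliding-window ban rule: ban a proxy only after it
# fails with several DIFFERENT accounts inside a recent time window.
import time

import redis

WINDOW_SECONDS = 300           # look at the last 5 minutes (illustrative)
DISTINCT_ACCOUNTS_TO_BAN = 3   # illustrative threshold

def record_failure_and_check(client: redis.Redis, proxy_url: str,
                             account_id: str) -> bool:
    """Record one failure; return True if the proxy should now be banned."""
    key = f"proxy_failures:{proxy_url}"  # hypothetical key name
    now = time.time()
    # Member = account id, score = timestamp: repeated failures of the same
    # account only refresh its score, so zcard counts DISTINCT accounts.
    client.zadd(key, {account_id: now})
    client.zremrangebyscore(key, "-inf", now - WINDOW_SECONDS)
    return client.zcard(key) >= DISTINCT_ACCOUNTS_TO_BAN
```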
## External Services
### `ytdlp-ops-server` (Thrift Service)
- **Purpose:** An external service that provides authentication data (tokens, cookies, proxies) for downloading videos.
- **Interaction:** The worker DAG (`ytdlp_ops_worker_per_url`) calls this service before starting a download to obtain the data `yt-dlp` needs.
## Worker DAG Logic (`ytdlp_ops_worker_per_url`)
This DAG is the workhorse of the system. It is designed as a self-sustaining loop that processes one URL per run.
### Tasks and their purpose:
- **`pull_url_from_redis`**: Pops one URL from the `_inbox` queue in Redis. If the queue is empty, the DAG finishes with the `skipped` status, stopping that processing "line".
- **`assign_account`**: Selects an account for the job. It reuses the account that succeeded in the previous run of its "line" (account affinity); on the first run it picks a random account.
- **`get_token`**: The core task. It calls `ytdlp-ops-server` to obtain `info.json`.
- **`handle_bannable_error_branch`**: If `get_token` fails with a bannable error, this branching task decides what happens next based on the `on_bannable_failure` policy.
- **`ban_account_and_prepare_for_retry`**: If the policy allows a retry, this task bans the failed account and selects a new one for the retry.
- **`retry_get_token`**: Makes a second attempt to obtain the token with the new account.
- **`ban_second_account_and_proxy`**: If the second attempt also fails, this task bans the second account and the proxy that was used.
- **`download_and_probe`**: If `get_token` (or `retry_get_token`) succeeded, this task uses `yt-dlp` to download the media and `ffmpeg` to verify the integrity of the downloaded file.
- **`mark_url_as_success`**: If `download_and_probe` succeeded, this task records the result in the `_result` hash in Redis.
- **`handle_generic_failure`**: If any core task fails with an unrecoverable error, this task writes detailed error information to the `_fail` hash in Redis.
- **`decide_what_to_do_next`**: A branching task that runs after success or failure and decides whether the loop should continue.
- **`trigger_self_run`**: The task that actually triggers the next DAG run, creating the continuous loop (sketched below).
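A minimal sketch of `trigger_self_run`, assuming the worker DAG is declared with `render_template_as_native_obj=True` so the current run's `conf` is forwarded as a real dict rather than its string form:

```python
# Inside the worker DAG's `with DAG(...)` block (illustrative placement).
from airflow.operators.trigger_dagrun import TriggerDagRunOperator

trigger_self_run = TriggerDagRunOperator(
    task_id="trigger_self_run",
    trigger_dag_id="ytdlp_ops_worker_per_url",   # the DAG re-triggers itself
    conf="{{ dag_run.conf }}",                   # carry this line's config forward
    trigger_rule="none_failed_min_one_success",  # keep looping only on a healthy line
)
```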
## Worker Management (Pause/Resume)
The system provides a mechanism for "cooling down", i.e. temporarily pausing, a worker. This is useful for maintenance, safely shutting down a machine, or reducing cluster load without generating errors.
### How it works
The mechanism is based on a lock file created on the worker node via Ansible.
1. **Pause:** An administrator runs an Ansible playbook that creates an empty `AIRFLOW.PREVENT_URL_PULL.lock` file in the worker's working directory (`/srv/airflow_dl_worker`).
2. **Check:** The `ytdlp_ops_dispatcher` DAG, which is responsible for distributing URLs, checks for this file before taking a new task from Redis.
3. **Skipping the task:** If the file exists, the dispatcher logs that the worker is paused and finishes its task with the `skipped` status. This prevents the worker from picking up new URLs without affecting tasks that are already running (a minimal sketch of the check follows).
4. **Resume:** The administrator runs another Ansible playbook that renames the lock file (appending a timestamp), thereby "unlocking" the worker. On its next run the dispatcher will not find the file and will continue normally.
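The check itself can be pictured as below; only the lock-file path comes from the text above, and the function name is hypothetical.

```python
# Minimal sketch of the dispatcher's pause check.
from pathlib import Path

from airflow.exceptions import AirflowSkipException

LOCK_FILE = Path("/srv/airflow_dl_worker/AIRFLOW.PREVENT_URL_PULL.lock")

def ensure_worker_not_paused() -> None:
    """End the task with state `skipped` when the worker is paused."""
    if LOCK_FILE.exists():
        raise AirflowSkipException(f"Worker paused: {LOCK_FILE} exists.")
```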
### Management Commands
Dedicated Ansible playbooks are used to manage the worker state. Run the commands from the project root directory.
**Pause a worker:**
(Replace `"hostname"` with a host name from your inventory file)
```bash
ansible-playbook -i ansible/inventory.ini ansible/playbooks/pause_worker.yml --limit "hostname"
```
**Resume a worker:**
```bash
ansible-playbook -i ansible/inventory.ini ansible/playbooks/resume_worker.yml --limit "hostname"
```
## Pinning Workers to Specific Machines (Worker Pinning / Affinity)
To guarantee that all tasks related to one specific URL run on the same machine (worker), the system combines three components: the Orchestrator, the Dispatcher, and a custom Airflow hook.
### 1. `ytdlp_ops_orchestrator` (Orchestrator)
- **Role:** Initiates processing.
- **Action:** On launch it creates several `ytdlp_ops_dispatcher` DAG runs, each intended to process a single URL.
- **Parameter passing:** The orchestrator forwards its configuration parameters (e.g. `account_pool`, `redis_conn_id`, `service_ip`) to every dispatcher run.
### 2. `ytdlp_ops_dispatcher` (Dispatcher)
- **Role:** The main mechanism that establishes the pinning.
- **Action:**
  1. **Gets a URL:** Pops one URL from the Redis queue (`_inbox`).
  2. **Identifies the worker:** Uses `socket.gethostname()` to determine the name of the machine (worker) it is currently running on.
  3. **Builds the queue name:** Creates a unique queue name for that worker, e.g. `queue-dl-dl-worker-1`.
  4. **Triggers the worker DAG:** Launches a `ytdlp_ops_worker_per_url` run, passing it:
     * The pulled `url_to_process`.
     * The generated `worker_queue` name via the `conf` parameter.
     * All other parameters received from the orchestrator.
- **Key point:** This is the step that binds a specific URL to the specific worker where its processing started (see the sketch below).
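As an illustration, the dispatcher's pinning step could be sketched as follows; apart from `socket.gethostname()` and the DAG id, the task and parameter names are assumptions.

```python
# Minimal sketch of the dispatcher's pinning step.
import socket

from airflow.api.common.trigger_dag import trigger_dag
from airflow.decorators import task

@task
def dispatch_url(url_to_process: str, base_conf: dict) -> None:
    hostname = socket.gethostname()        # the machine this task runs on
    worker_queue = f"queue-dl-{hostname}"  # e.g. "queue-dl-dl-worker-1"
    trigger_dag(
        dag_id="ytdlp_ops_worker_per_url",
        conf={
            **base_conf,                   # everything from the orchestrator
            "url_to_process": url_to_process,
            "worker_queue": worker_queue,  # consumed by the mutation hook
        },
    )
```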
### 3. `task_instance_mutation_hook` (Task Mutation Hook)
- **Location:** `airflow/config/custom_task_hooks.py`
- **Role:** The mechanism that makes *all* tasks of the worker DAG execute on the right machine.
- **How it works:**
  1. **Registration:** The hook is registered in the Airflow configuration and is invoked before *every* task runs.
  2. **DAG ID check:** The hook checks whether the task (`TaskInstance`) belongs to the `ytdlp_ops_worker_per_url` DAG.
  3. **Extracting `conf`:** If so, it safely extracts the `conf` from the `DagRun` associated with the task.
  4. **Queue override:**
     * If `conf` contains the `worker_queue` key (true for every run initiated by the dispatcher), the hook *overrides* the task's default queue with that value.
     * The Airflow scheduler therefore places the task in exactly the queue that the target worker listens to.
  5. **Fallback:** If `worker_queue` is absent (e.g. the DAG was triggered manually), the task falls back to the default `queue-dl` queue.
- **Key point:** This hook guarantees that *all subsequent tasks* within a single `ytdlp_ops_worker_per_url` run (e.g. `get_token`, `download_and_probe`, `mark_url_as_success`) execute on the same worker that originally received the URL in the dispatcher (a minimal sketch follows).
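The hook's shape can be pictured as below; this is a sketch of the behavior described above, not the actual contents of `custom_task_hooks.py`, and it assumes the hook is exposed to Airflow as the `task_instance_mutation_hook` cluster policy (e.g. via `airflow_local_settings.py`).

```python
# Minimal sketch of the queue-pinning cluster policy.
from airflow.models.taskinstance import TaskInstance

def task_instance_mutation_hook(ti: TaskInstance) -> None:
    if ti.dag_id != "ytdlp_ops_worker_per_url":
        return
    dag_run = ti.get_dagrun()
    conf = (dag_run.conf or {}) if dag_run else {}
    # Pin the task to the worker that originally pulled this URL,
    # or fall back to the shared queue for manual runs.
    ti.queue = conf.get("worker_queue", "queue-dl")
```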
### Summary
The `Orchestrator -> Dispatcher -> Hook` combination effectively implements worker affinity:
1. The **Orchestrator** starts the process.
2. The **Dispatcher** binds a specific URL to a specific worker by determining its hostname and passing it as `worker_queue` to the worker DAG.
3. The **Hook** guarantees that all worker DAG tasks run in the queue belonging to that worker.
This lets the system use each worker's local resources (e.g. cache, temporary files) efficiently and predictably while processing every individual URL.

View File

@ -1,23 +0,0 @@
import socket
import logging
logger = logging.getLogger(__name__)
def get_ip_address():
"""
Get the primary IP address of the host.
This is used by Airflow workers to advertise their IP for log serving,
ensuring the webserver can reach them in a multi-host environment.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
# This doesn't even have to be reachable
s.connect(('10.255.255.255', 1))
ip_address = s.getsockname()[0]
logger.info(f"Determined host IP address as: {ip_address}")
except Exception as e:
logger.warning(f"Could not determine IP address, falling back to 127.0.0.1. Error: {e}")
ip_address = '127.0.0.1'
finally:
s.close()
return ip_address

View File

@ -1,86 +0,0 @@
import logging
import time
import requests
from datetime import datetime
from airflow.decorators import task
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.models.variable import Variable
logger = logging.getLogger(__name__)
# Get the master host IP from an Airflow variable, which is set via the .env file.
# This allows the default health check target to be dynamic based on cluster.yml.
DEFAULT_MASTER_IP = Variable.get("MASTER_HOST_IP", default_var="127.0.0.1")
with DAG(
dag_id='proxy_health_check',
start_date=datetime(2023, 1, 1),
schedule=None,
catchup=False,
tags=['monitoring', 'proxy'],
doc_md="""
### Proxy Health Check DAG
This DAG runs a continuous loop to check a target URL through a SOCKS5 proxy.
It is designed for monitoring proxy connectivity and performance. Once triggered, it runs indefinitely
until the DAG run is manually stopped.
**Parameters:**
- `target_url`: The URL to check. Defaults to the internal nginx service.
- `socks5_host`: The SOCKS5 proxy host. For Docker, `host.docker.internal` often works to target the host machine.
- `socks5_port`: The SOCKS5 proxy port.
- `check_interval_seconds`: How often to run the check.
- `latency_threshold_seconds`: A warning will be logged if the request takes longer than this.
- `timeout_seconds`: The timeout for the web request.
""",
params={
'target_url': Param(f'http://{DEFAULT_MASTER_IP}:8888', type='string', description="The URL to check. Defaults to the master node's nginx healthcheck service."),
'socks5_host': Param('sslocal-rust-1087', type='string', description="SOCKS5 proxy host. Use 'host.docker.internal' for Docker host."),
'socks5_port': Param(1087, type='integer', description="SOCKS5 proxy port."),
'check_interval_seconds': Param(25, type='integer', description="Seconds to wait between checks."),
'latency_threshold_seconds': Param(2, type='integer', description="Log a warning if latency exceeds this threshold."),
'timeout_seconds': Param(10, type='integer', description="Request timeout in seconds."),
},
) as dag:
@task
def run_proxy_check_loop(**context):
"""
Continuously checks a URL through a SOCKS5 proxy and logs if latency is high.
This task will run indefinitely until the DAG run is manually stopped or fails.
"""
params = context['params']
target_url = params['target_url']
proxy_host = params['socks5_host']
proxy_port = params['socks5_port']
interval = params['check_interval_seconds']
threshold = params['latency_threshold_seconds']
timeout = params['timeout_seconds']
proxy_url = f"socks5h://{proxy_host}:{proxy_port}"
proxies = {
'http': proxy_url,
'https': proxy_url,
}
logger.info(f"Starting proxy health check loop. Target: {target_url}, Proxy: {proxy_url}, Interval: {interval}s, Threshold: {threshold}s")
while True:
start_time = time.time()
try:
response = requests.get(target_url, proxies=proxies, timeout=timeout)
response.raise_for_status()
latency = time.time() - start_time
if latency > threshold:
logger.warning(f"High latency detected! Latency: {latency:.2f}s, Threshold: {threshold}s, Target: {target_url}")
except requests.exceptions.RequestException as e:
latency = time.time() - start_time
logger.error(f"Proxy check failed for {target_url} via {proxy_url}. Latency: {latency:.2f}s. Error: {e}")
time.sleep(interval)
run_proxy_check_loop()

View File

@ -1,636 +0,0 @@
# -*- coding: utf-8 -*-
"""
Regression testing script for the ytdlp-ops system.
This script orchestrates a regression test by:
1. Populating a Redis queue with video URLs from an input file.
2. Triggering the `ytdlp_ops_orchestrator` Airflow DAG to start processing.
3. Monitoring the progress of the processing for a specified duration.
4. Generating a report of any failures.
5. Optionally cleaning up the Redis queues after the test.
"""
import argparse
import csv
import json
import logging
import os
import re
import requests
import subprocess
import signal
import sys
import time
from datetime import datetime, timedelta
from pathlib import Path
import redis
from tabulate import tabulate
# It's safe to import these as the script runs in the same container as Airflow
# where the yt_ops_services package is installed.
try:
from yt_ops_services.client_utils import get_thrift_client, format_timestamp
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
except ImportError:
logging.error("Could not import Thrift modules. Ensure this script is run in the 'airflow-regression-runner' container.")
sys.exit(1)
# --- Configuration ---
logging.basicConfig(
level=logging.INFO,
format="[%(asctime)s] [%(levelname)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
INTERRUPTED = False
def signal_handler(sig, frame):
"""Handles Ctrl+C interruption."""
global INTERRUPTED
if not INTERRUPTED:
logging.warning("Ctrl+C detected. Initiating graceful shutdown...")
INTERRUPTED = True
else:
logging.warning("Second Ctrl+C detected. Forcing exit.")
sys.exit(1)
# --- Helper Functions ---
def _get_redis_client(redis_url: str):
"""Gets a Redis client from a URL."""
try:
# from_url is the modern way to connect and handles password auth
client = redis.from_url(redis_url, decode_responses=True)
client.ping()
logging.info(f"Successfully connected to Redis at {client.connection_pool.connection_kwargs.get('host')}:{client.connection_pool.connection_kwargs.get('port')}")
return client
except redis.exceptions.ConnectionError as e:
logging.error(f"Failed to connect to Redis: {e}")
sys.exit(1)
except Exception as e:
logging.error(f"An unexpected error occurred while connecting to Redis: {e}")
sys.exit(1)
def _get_webserver_url():
"""
Determines the Airflow webserver URL, prioritizing MASTER_HOST_IP from .env.
"""
master_host_ip = os.getenv("MASTER_HOST_IP")
if master_host_ip:
url = f"http://{master_host_ip}:8080"
logging.info(f"Using MASTER_HOST_IP for webserver URL: {url}")
return url
# Fallback to AIRFLOW_WEBSERVER_URL or the default service name
url = os.getenv("AIRFLOW_WEBSERVER_URL", "http://airflow-webserver:8080")
logging.info(f"Using default webserver URL: {url}")
return url
def _normalize_to_url(item: str) -> str | None:
"""
Validates if an item is a recognizable YouTube URL or video ID,
and normalizes it to a standard watch URL format.
"""
if not item:
return None
video_id_pattern = r"^[a-zA-Z0-9_-]{11}$"
if re.match(video_id_pattern, item):
return f"https://www.youtube.com/watch?v={item}"
url_patterns = [r"(?:v=|\/v\/|youtu\.be\/|embed\/|shorts\/)([a-zA-Z0-9_-]{11})"]
for pattern in url_patterns:
match = re.search(pattern, item)
if match:
return f"https://www.youtube.com/watch?v={match.group(1)}"
logging.warning(f"Could not recognize '{item}' as a valid YouTube URL or video ID.")
return None
def _read_input_file(file_path: str) -> list[str]:
"""Reads video IDs/URLs from a file (CSV or JSON list)."""
path = Path(file_path)
if not path.is_file():
logging.error(f"Input file not found: {file_path}")
sys.exit(1)
content = path.read_text(encoding='utf-8')
# Try parsing as JSON list first
if content.strip().startswith('['):
try:
data = json.loads(content)
if isinstance(data, list):
logging.info(f"Successfully parsed {file_path} as a JSON list.")
return [str(item) for item in data]
except json.JSONDecodeError:
logging.warning("File looks like JSON but failed to parse. Will try treating as CSV/text.")
# Fallback to CSV/text (one item per line)
items = []
# Use io.StringIO to handle the content as a file for the csv reader
from io import StringIO
# Sniff to see if it has a header
try:
has_header = csv.Sniffer().has_header(content)
except csv.Error:
has_header = False # Not a CSV, treat as plain text
reader = csv.reader(StringIO(content))
if has_header:
next(reader) # Skip header row
for row in reader:
if row:
items.append(row[0].strip()) # Assume the ID/URL is in the first column
logging.info(f"Successfully parsed {len(items)} items from {file_path} as CSV/text.")
return items
def _get_api_auth():
"""Gets Airflow API credentials from environment variables."""
username = os.getenv("AIRFLOW_ADMIN_USERNAME", "admin")
password = os.getenv("AIRFLOW_ADMIN_PASSWORD")
if not password:
logging.error("AIRFLOW_ADMIN_PASSWORD not found in environment. Cannot interact with API.")
return None, None
return username, password
def _pause_dag(dag_id: str, is_paused: bool = True):
"""Pauses or unpauses an Airflow DAG via the REST API."""
logging.info(f"Attempting to {'pause' if is_paused else 'unpause'} DAG: {dag_id}...")
username, password = _get_api_auth()
if not username:
return
webserver_url = _get_webserver_url()
endpoint = f"{webserver_url}/api/v1/dags/{dag_id}"
payload = {"is_paused": is_paused}
try:
response = requests.patch(endpoint, auth=(username, password), json=payload, timeout=30)
response.raise_for_status()
logging.info(f"Successfully {'paused' if is_paused else 'unpaused'} DAG '{dag_id}'.")
except requests.exceptions.RequestException as e:
logging.error(f"Failed to {'pause' if is_paused else 'unpause'} DAG '{dag_id}': {e}")
if e.response is not None:
logging.error(f"Response: {e.response.text}")
def _fail_running_dag_runs(dag_id: str):
"""Finds all running DAG runs for a given DAG and marks them as failed."""
logging.info(f"Attempting to fail all running instances of DAG '{dag_id}'...")
username, password = _get_api_auth()
if not username:
return
webserver_url = _get_webserver_url()
list_endpoint = f"{webserver_url}/api/v1/dags/{dag_id}/dagRuns?state=running"
try:
# Get running DAGs
response = requests.get(list_endpoint, auth=(username, password), timeout=30)
response.raise_for_status()
running_runs = response.json().get("dag_runs", [])
if not running_runs:
logging.info(f"No running DAG runs found for '{dag_id}'.")
return
logging.info(f"Found {len(running_runs)} running DAG run(s) to fail.")
for run in running_runs:
dag_run_id = run["dag_run_id"]
update_endpoint = f"{webserver_url}/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}"
payload = {"state": "failed"}
try:
update_response = requests.patch(update_endpoint, auth=(username, password), json=payload, timeout=30)
update_response.raise_for_status()
logging.info(f" - Successfully marked DAG run '{dag_run_id}' as failed.")
except requests.exceptions.RequestException as e:
logging.error(f" - Failed to mark DAG run '{dag_run_id}' as failed: {e}")
except requests.exceptions.RequestException as e:
logging.error(f"Failed to list running DAG runs for '{dag_id}': {e}")
if e.response is not None:
logging.error(f"Response: {e.response.text}")
# --- Core Logic Functions ---
def step_0_populate_queue(redis_client, queue_name: str, input_file: str):
"""Reads URLs from a file and populates the Redis inbox queue."""
logging.info("--- Step 0: Populating Redis Queue ---")
raw_items = _read_input_file(input_file)
if not raw_items:
logging.error("No items found in the input file. Aborting.")
sys.exit(1)
valid_urls = []
for item in raw_items:
url = _normalize_to_url(item)
if url and url not in valid_urls:
valid_urls.append(url)
if not valid_urls:
logging.error("No valid YouTube URLs or IDs were found in the input file. Aborting.")
sys.exit(1)
inbox_queue = f"{queue_name}_inbox"
logging.info(f"Adding {len(valid_urls)} unique and valid URLs to Redis queue '{inbox_queue}'...")
with redis_client.pipeline() as pipe:
for url in valid_urls:
pipe.rpush(inbox_queue, url)
pipe.execute()
logging.info(f"Successfully populated queue. Total items in '{inbox_queue}': {redis_client.llen(inbox_queue)}")
return len(valid_urls)
def step_1_trigger_orchestrator(args: argparse.Namespace):
"""Triggers the ytdlp_ops_orchestrator DAG using the Airflow REST API."""
logging.info("--- Step 1: Triggering Orchestrator DAG via REST API ---")
# Get API details from environment variables
webserver_url = _get_webserver_url()
api_endpoint = f"{webserver_url}/api/v1/dags/ytdlp_ops_orchestrator/dagRuns"
# Default admin user is 'admin'
username = os.getenv("AIRFLOW_ADMIN_USERNAME", "admin")
password = os.getenv("AIRFLOW_ADMIN_PASSWORD")
if not password:
logging.error("AIRFLOW_ADMIN_PASSWORD not found in environment. Please set it in your .env file.")
sys.exit(1)
# Construct the configuration for the DAG run
conf = {
"total_workers": args.workers,
"workers_per_bunch": args.workers_per_bunch,
"clients": args.client,
}
payload = {
"conf": conf
}
logging.info(f"Triggering DAG at endpoint: {api_endpoint}")
try:
response = requests.post(
api_endpoint,
auth=(username, password),
json=payload,
timeout=30 # 30 second timeout
)
response.raise_for_status() # Raises an HTTPError for bad responses (4xx or 5xx)
logging.info("Successfully triggered the orchestrator DAG.")
logging.debug(f"Airflow API response:\n{response.json()}")
except requests.exceptions.RequestException as e:
logging.error("Failed to trigger the orchestrator DAG via REST API.")
logging.error(f"Error: {e}")
if e.response is not None:
logging.error(f"Response status code: {e.response.status_code}")
logging.error(f"Response text: {e.response.text}")
sys.exit(1)
def step_2_monitor_progress(args: argparse.Namespace, redis_client, queue_name: str, total_urls: int, run_time_min: int, interval_min: int, show_status: bool):
"""Monitors the Redis queues for the duration of the test."""
logging.info("--- Step 2: Monitoring Progress ---")
end_time = datetime.now() + timedelta(minutes=run_time_min)
inbox_q = f"{queue_name}_inbox"
progress_q = f"{queue_name}_progress"
result_q = f"{queue_name}_result"
fail_q = f"{queue_name}_fail"
while datetime.now() < end_time and not INTERRUPTED:
try:
inbox_len = redis_client.llen(inbox_q)
progress_len = redis_client.hlen(progress_q)
result_len = redis_client.hlen(result_q)
fail_len = redis_client.hlen(fail_q)
processed = result_len + fail_len
success_len = 0
if result_len > 0:
# This is inefficient but gives a more accurate success count
results = redis_client.hgetall(result_q)
success_len = sum(1 for v in results.values() if '"status": "success"' in v)
logging.info(
f"Progress: {processed}/{total_urls} | "
f"Success: {success_len} | Failed: {fail_len} | "
f"In Progress: {progress_len} | Inbox: {inbox_len}"
)
if show_status:
# This function now connects directly to services to get status
get_system_status(args, redis_client)
except Exception as e:
logging.error(f"Error while querying Redis for progress: {e}")
# Wait for the interval, but check for interruption every second
# for a more responsive shutdown.
wait_until = time.time() + interval_min * 60
while time.time() < wait_until and not INTERRUPTED:
# Check if we are past the main end_time
if datetime.now() >= end_time:
break
time.sleep(1)
if INTERRUPTED:
logging.info("Monitoring interrupted.")
else:
logging.info("Monitoring period has ended.")
# --- System Status Functions (Direct Connect) ---
def _list_proxy_statuses(client, server_identity=None):
"""Lists proxy statuses by connecting directly to the Thrift service."""
logging.info(f"--- Proxy Statuses (Server: {server_identity or 'ALL'}) ---")
try:
statuses = client.getProxyStatus(server_identity)
if not statuses:
logging.info("No proxy statuses found.")
return
status_list = []
headers = ["Server", "Proxy URL", "Status", "Success", "Failures", "Last Success", "Last Failure"]
for s in statuses:
status_list.append({
"Server": s.serverIdentity, "Proxy URL": s.proxyUrl, "Status": s.status,
"Success": s.successCount, "Failures": s.failureCount,
"Last Success": format_timestamp(s.lastSuccessTimestamp),
"Last Failure": format_timestamp(s.lastFailureTimestamp),
})
logging.info("\n" + tabulate(status_list, headers='keys', tablefmt='grid'))
except (PBServiceException, PBUserException) as e:
logging.error(f"Failed to get proxy statuses: {e.message}")
except Exception as e:
logging.error(f"An unexpected error occurred while getting proxy statuses: {e}", exc_info=True)
def _list_account_statuses(client, redis_client, account_id=None):
"""Lists account statuses from Thrift, enriched with live Redis data."""
logging.info(f"--- Account Statuses (Account: {account_id or 'ALL'}) ---")
try:
statuses = client.getAccountStatus(accountId=account_id, accountPrefix=None)
if not statuses:
logging.info("No account statuses found.")
return
status_list = []
for s in statuses:
status_str = s.status
if 'RESTING' in status_str:
try:
expiry_ts_bytes = redis_client.hget(f"account_status:{s.accountId}", "resting_until")
if expiry_ts_bytes:
expiry_ts = float(expiry_ts_bytes)
now = datetime.now().timestamp()
if now < expiry_ts:
remaining_seconds = int(expiry_ts - now)
status_str = f"RESTING ({remaining_seconds}s left)"
except Exception:
pass # Ignore if parsing fails
last_success = float(s.lastSuccessTimestamp) if s.lastSuccessTimestamp else 0
last_failure = float(s.lastFailureTimestamp) if s.lastFailureTimestamp else 0
last_activity = max(last_success, last_failure)
status_list.append({
"Account ID": s.accountId, "Status": status_str, "Success": s.successCount,
"Failures": s.failureCount, "Last Success": format_timestamp(s.lastSuccessTimestamp),
"Last Failure": format_timestamp(s.lastFailureTimestamp), "Last Proxy": s.lastUsedProxy or "N/A",
"_last_activity": last_activity,
})
status_list.sort(key=lambda item: item.get('_last_activity', 0), reverse=True)
for item in status_list:
del item['_last_activity']
logging.info("\n" + tabulate(status_list, headers='keys', tablefmt='grid'))
except (PBServiceException, PBUserException) as e:
logging.error(f"Failed to get account statuses: {e.message}")
except Exception as e:
logging.error(f"An unexpected error occurred while getting account statuses: {e}", exc_info=True)
def _list_client_statuses(redis_client):
"""Lists client statistics from Redis."""
logging.info("--- Client Statuses ---")
try:
stats_key = "client_stats"
all_stats_raw = redis_client.hgetall(stats_key)
if not all_stats_raw:
logging.info("No client stats found in Redis.")
return
status_list = []
for client, stats_json in all_stats_raw.items():
try:
stats = json.loads(stats_json)
def format_latest(data):
if not data: return "N/A"
ts = format_timestamp(data.get('timestamp'))
url = data.get('url', 'N/A')
video_id_match = re.search(r'v=([a-zA-Z0-9_-]{11})', url)
video_id = video_id_match.group(1) if video_id_match else 'N/A'
return f"{ts} ({video_id})"
status_list.append({
"Client": client, "Success": stats.get('success_count', 0),
"Failures": stats.get('failure_count', 0),
"Last Success": format_latest(stats.get('latest_success')),
"Last Failure": format_latest(stats.get('latest_failure')),
})
except (json.JSONDecodeError, AttributeError):
status_list.append({"Client": client, "Success": "ERROR", "Failures": "ERROR", "Last Success": "Parse Error", "Last Failure": "Parse Error"})
status_list.sort(key=lambda item: item.get('Client', ''))
logging.info("\n" + tabulate(status_list, headers='keys', tablefmt='grid'))
except Exception as e:
logging.error(f"An unexpected error occurred while getting client statuses: {e}", exc_info=True)
def get_system_status(args: argparse.Namespace, redis_client):
"""Connects to services and prints status tables."""
logging.info("--- Getting System Status ---")
client, transport = None, None
try:
client, transport = get_thrift_client(args.management_host, args.management_port)
_list_proxy_statuses(client)
_list_account_statuses(client, redis_client)
_list_client_statuses(redis_client)
except Exception as e:
logging.error(f"Could not get system status: {e}")
finally:
if transport and transport.isOpen():
transport.close()
def step_3_generate_report(redis_client, queue_name: str, report_file: str | None):
"""Generates a CSV report of failed items."""
logging.info("--- Step 3: Generating Report ---")
fail_q = f"{queue_name}_fail"
failed_items = redis_client.hgetall(fail_q)
if not failed_items:
logging.info("No items found in the fail queue. No report will be generated.")
return
logging.info(f"Found {len(failed_items)} failed items. Writing to report...")
report_data = []
for url, data_json in failed_items.items():
try:
data = json.loads(data_json)
error_details = data.get('error_details', {})
report_data.append({
'url': url,
'video_id': _normalize_to_url(url).split('v=')[-1] if _normalize_to_url(url) else 'N/A',
'error_message': error_details.get('error_message', 'N/A'),
'error_code': error_details.get('error_code', 'N/A'),
'proxy_url': error_details.get('proxy_url', 'N/A'),
'timestamp': datetime.fromtimestamp(data.get('end_time', 0)).isoformat(),
})
except (json.JSONDecodeError, AttributeError):
report_data.append({'url': url, 'video_id': 'N/A', 'error_message': 'Could not parse error data', 'error_code': 'PARSE_ERROR', 'proxy_url': 'N/A', 'timestamp': 'N/A'})
if report_file:
try:
with open(report_file, 'w', newline='', encoding='utf-8') as f:
writer = csv.DictWriter(f, fieldnames=report_data[0].keys())
writer.writeheader()
writer.writerows(report_data)
logging.info(f"Successfully wrote report to {report_file}")
except IOError as e:
logging.error(f"Could not write report to file {report_file}: {e}")
else:
# Print to stdout if no file is specified
logging.info("--- Failure Report (stdout) ---")
for item in report_data:
logging.info(f"URL: {item['url']}, Error: {item['error_code']} - {item['error_message']}")
logging.info("--- End of Report ---")
def handle_interruption(redis_client, queue_name, report_file):
"""Graceful shutdown logic for when the script is interrupted."""
logging.warning("--- Interruption Detected: Starting Shutdown Procedure ---")
# 1. Pause DAGs
_pause_dag("ytdlp_ops_orchestrator")
_pause_dag("ytdlp_ops_dispatcher")
# 2. Fail running per_url jobs
_fail_running_dag_runs("ytdlp_ops_worker_per_url")
# 3. Generate report
logging.info("Generating final report due to interruption...")
step_3_generate_report(redis_client, queue_name, report_file)
# Also print to stdout if a file was specified, so user sees it immediately
if report_file:
logging.info("Printing report to stdout as well...")
step_3_generate_report(redis_client, queue_name, None)
def step_4_cleanup_queues(redis_client, queue_name: str):
"""Cleans up the Redis queues used by the test."""
logging.info("--- Step 4: Cleaning Up Queues ---")
queues_to_delete = [
f"{queue_name}_inbox",
f"{queue_name}_progress",
f"{queue_name}_result",
f"{queue_name}_fail",
]
logging.warning(f"This will delete the following Redis keys: {queues_to_delete}")
deleted_count = redis_client.delete(*queues_to_delete)
logging.info(f"Cleanup complete. Deleted {deleted_count} key(s).")
def main():
"""Main function to parse arguments and run the regression test."""
# Register the signal handler for Ctrl+C
signal.signal(signal.SIGINT, signal_handler)
parser = argparse.ArgumentParser(description="Run a regression test for the ytdlp-ops system.")
# Environment
parser.add_argument("--redis-host", type=str, default="redis", help="Hostname or IP address of the Redis server. Defaults to 'redis' for in-container execution.")
parser.add_argument("--management-host", type=str, default=os.getenv("MANAGEMENT_SERVICE_HOST", "envoy-thrift-lb"), help="Hostname of the management Thrift service.")
parser.add_argument("--management-port", type=int, default=int(os.getenv("MANAGEMENT_SERVICE_PORT", 9080)), help="Port of the management Thrift service.")
# Test Configuration
parser.add_argument("--client", type=str, required=True, help="Client persona to test (e.g., 'mweb').")
parser.add_argument("--workers", type=int, required=True, help="Total number of worker loops to start.")
parser.add_argument("--workers-per-bunch", type=int, default=1, help="Number of workers per bunch.")
parser.add_argument("--run-time-min", type=int, required=True, help="How long to let the test run, in minutes.")
parser.add_argument("--input-file", type=str, help="Path to a file containing video IDs/URLs. If not provided, the existing queue will be used.")
# Monitoring & Reporting
parser.add_argument("--progress-interval-min", type=int, default=2, help="How often to query and print progress, in minutes.")
parser.add_argument("--report-file", type=str, help="Path to a CSV file to write the list of failed URLs to.")
parser.add_argument("--show-status", action="store_true", help="If set, show proxy and account statuses during progress monitoring.")
# Actions
parser.add_argument("--cleanup", action="store_true", help="If set, clear the Redis queues after the test completes.")
parser.add_argument("--skip-populate", action="store_true", help="If set, skip populating the queue (assumes it's already populated).")
parser.add_argument("--skip-trigger", action="store_true", help="If set, skip triggering the orchestrator (assumes it's already running).")
args = parser.parse_args()
# --- Setup ---
redis_password = os.getenv("REDIS_PASSWORD")
if not redis_password:
logging.error("REDIS_PASSWORD not found in environment. Please set it in your .env file.")
sys.exit(1)
# Use the provided redis-host, defaulting to 'redis' for in-container execution
redis_url = f"redis://:{redis_password}@{args.redis_host}:6379/0"
redis_client = _get_redis_client(redis_url)
queue_name = "video_queue" # Hardcoded for now, could be an arg
total_urls = 0
# --- Execution ---
if not args.skip_populate:
if args.input_file:
total_urls = step_0_populate_queue(redis_client, queue_name, args.input_file)
else:
logging.info("No input file provided, using existing queue.")
total_urls = redis_client.llen(f"{queue_name}_inbox")
if total_urls == 0:
logging.warning("Queue is empty and no input file was provided. The test may not have any work to do.")
else:
total_urls = redis_client.llen(f"{queue_name}_inbox")
logging.info(f"Skipping population. Found {total_urls} URLs in the inbox.")
if not args.skip_trigger:
step_1_trigger_orchestrator(args)
else:
logging.info("Skipping orchestrator trigger.")
step_2_monitor_progress(args, redis_client, queue_name, total_urls, args.run_time_min, args.progress_interval_min, args.show_status)
if INTERRUPTED:
handle_interruption(redis_client, queue_name, args.report_file)
else:
step_3_generate_report(redis_client, queue_name, args.report_file)
if args.cleanup:
step_4_cleanup_queues(redis_client, queue_name)
if INTERRUPTED:
logging.warning("Regression test script finished due to user interruption.")
sys.exit(130) # Standard exit code for Ctrl+C
else:
logging.info("Regression test script finished.")
if __name__ == "__main__":
main()

View File

@ -1,10 +0,0 @@
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2024 rl <rl@rlmbp>
#
# Distributed under terms of the MIT license.
"""
Airflow DAG Utilities
"""

View File

@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2024 rl <rl@rlmbp>
#
# Distributed under terms of the MIT license.
"""
Redis utility functions for Airflow DAGs.
"""
from airflow.exceptions import AirflowException
from airflow.providers.redis.hooks.redis import RedisHook
import logging
import redis
logger = logging.getLogger(__name__)
def _get_redis_client(redis_conn_id):
"""Gets a Redis client connection using RedisHook."""
try:
hook = RedisHook(redis_conn_id=redis_conn_id)
client = hook.get_conn()
client.ping()
logger.info(f"Successfully connected to Redis using connection '{redis_conn_id}'.")
return client
except redis.exceptions.AuthenticationError:
logger.error(f"Redis authentication failed for connection '{redis_conn_id}'. Check password.")
raise AirflowException(f"Redis authentication failed for '{redis_conn_id}'.")
except Exception as e:
logger.error(f"Failed to get Redis client for connection '{redis_conn_id}': {e}")
raise AirflowException(f"Redis connection failed for '{redis_conn_id}': {e}")

View File

@ -1,890 +0,0 @@
"""
DAG to manage the state of proxies and accounts used by the ytdlp-ops-server.
"""
from __future__ import annotations
import logging
import json
import re
import time
from datetime import datetime
import socket
from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.models.dagbag import DagBag
from airflow.models.dagrun import DagRun
from airflow.models.param import Param
from airflow.models.taskinstance import TaskInstance
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow.models.variable import Variable
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.session import create_session
# Configure logging
logger = logging.getLogger(__name__)
# Import and apply Thrift exceptions patch for Airflow compatibility
try:
from thrift_exceptions_patch import patch_thrift_exceptions
patch_thrift_exceptions()
logger.info("Applied Thrift exceptions patch for Airflow compatibility.")
except ImportError:
logger.warning("Could not import thrift_exceptions_patch. Compatibility may be affected.")
except Exception as e:
logger.error(f"Error applying Thrift exceptions patch: {e}")
# Thrift imports
try:
from pangramia.yt.exceptions.ttypes import PBServiceException, PBUserException
from yt_ops_services.client_utils import get_thrift_client, format_timestamp
except ImportError as e:
logger.critical(f"Could not import Thrift modules: {e}. Ensure yt_ops_services package is installed correctly.")
# Fail DAG parsing if thrift modules are not available
raise
DEFAULT_MANAGEMENT_SERVICE_IP = Variable.get("MANAGEMENT_SERVICE_HOST", default_var="172.17.0.1")
DEFAULT_MANAGEMENT_SERVICE_PORT = Variable.get("MANAGEMENT_SERVICE_PORT", default_var=9080)
DEFAULT_REDIS_CONN_ID = "redis_default"
# Version tracking for debugging
DAG_VERSION = "1.7.1" # Updated to handle Redis configuration errors
# Helper function to connect to Redis, similar to other DAGs
def _get_redis_client(redis_conn_id: str):
"""Gets a Redis client from an Airflow connection."""
try:
# Use the imported RedisHook
redis_hook = RedisHook(redis_conn_id=redis_conn_id)
# get_conn returns a redis.Redis client
return redis_hook.get_conn()
except Exception as e:
logger.error(f"Failed to connect to Redis using connection '{redis_conn_id}': {e}")
# Use the imported AirflowException
raise AirflowException(f"Redis connection failed: {e}")
def _list_proxy_statuses(client, server_identity):
"""Lists the status of proxies."""
logger.info(f"Listing proxy statuses for server: {server_identity or 'ALL'}")
logger.info("NOTE: Proxy statuses are read from server's internal state via Thrift service")
try:
statuses = client.getProxyStatus(server_identity)
except PBServiceException as e:
if "Redis is not configured for this server" in e.message:
logger.error(f"Redis not configured on server: {e.message}")
print(f"\nERROR: Server configuration issue - {e.message}\n")
print("This server does not have Redis configured for proxy management.\n")
return
else:
# Re-raise if it's a different PBServiceException
raise
except Exception as e:
logger.error(f"Unexpected error getting proxy statuses: {e}", exc_info=True)
print(f"\nERROR: Unexpected error getting proxy statuses: {e}\n")
return
if not statuses:
logger.info("No proxy statuses found.")
return
from tabulate import tabulate
status_list = []
# This is forward-compatible: it checks for new attributes before using them.
has_extended_info = hasattr(statuses[0], 'recentAccounts') or hasattr(statuses[0], 'recentMachines')
headers = ["Server", "Proxy URL", "Status", "Success", "Failures", "Last Success", "Last Failure"]
if has_extended_info:
headers.extend(["Recent Accounts", "Recent Machines"])
for s in statuses:
status_item = {
"Server": s.serverIdentity,
"Proxy URL": s.proxyUrl,
"Status": s.status,
"Success": s.successCount,
"Failures": s.failureCount,
"Last Success": format_timestamp(s.lastSuccessTimestamp),
"Last Failure": format_timestamp(s.lastFailureTimestamp),
}
if has_extended_info:
recent_accounts = getattr(s, 'recentAccounts', [])
recent_machines = getattr(s, 'recentMachines', [])
status_item["Recent Accounts"] = "\n".join(recent_accounts) if recent_accounts else "N/A"
status_item["Recent Machines"] = "\n".join(recent_machines) if recent_machines else "N/A"
status_list.append(status_item)
print("\n--- Proxy Statuses ---")
# The f-string with a newline ensures the table starts on a new line in the logs.
print(f"\n{tabulate(status_list, headers='keys', tablefmt='grid')}")
print("----------------------\n")
if not has_extended_info:
logger.warning("Server does not seem to support 'recentAccounts' or 'recentMachines' fields yet.")
print("NOTE: To see Recent Accounts/Machines, the server's `getProxyStatus` method must be updated to return these fields.")
def _list_account_statuses(client, account_id, redis_conn_id):
"""Lists the status of accounts, enriching with live data from Redis."""
logger.info(f"Listing account statuses for account: {account_id or 'ALL'}")
logger.info("NOTE: Account statuses are read from the Thrift service and enriched with live data from Redis.")
redis_client = None
try:
redis_client = _get_redis_client(redis_conn_id)
logger.info("Successfully connected to Redis to fetch detailed account status.")
except Exception as e:
logger.warning(f"Could not connect to Redis to get detailed status. Will show basic status. Error: {e}")
redis_client = None
try:
# The thrift method takes accountId (specific) or accountPrefix.
# If account_id is provided, we use it. If not, we get all by leaving both params as None.
statuses = client.getAccountStatus(accountId=account_id, accountPrefix=None)
if not statuses:
print("\n--- Account Statuses ---\nNo account statuses found.\n------------------------\n")
return
from tabulate import tabulate
status_list = []
for s in statuses:
status_str = s.status
# If an account is resting, get the live countdown from Redis for accuracy.
if redis_client and 'RESTING' in status_str:
try:
status_key = f"account_status:{s.accountId}"
# The server stores resting expiry time in 'resting_until'.
expiry_ts_bytes = redis_client.hget(status_key, "resting_until")
if expiry_ts_bytes:
expiry_ts = float(expiry_ts_bytes)
now = datetime.now().timestamp()
if now >= expiry_ts:
status_str = "ACTIVE (was RESTING)"
else:
remaining_seconds = int(expiry_ts - now)
if remaining_seconds > 3600:
status_str = f"RESTING (active in {remaining_seconds // 3600}h {remaining_seconds % 3600 // 60}m)"
elif remaining_seconds > 60:
status_str = f"RESTING (active in {remaining_seconds // 60}m {remaining_seconds % 60}s)"
else:
status_str = f"RESTING (active in {remaining_seconds}s)"
except Exception as e:
logger.warning(f"Could not parse resting time for {s.accountId} from Redis: {e}. Using server status.")
# Determine the last activity timestamp for sorting
last_success = float(s.lastSuccessTimestamp) if s.lastSuccessTimestamp else 0
last_failure = float(s.lastFailureTimestamp) if s.lastFailureTimestamp else 0
last_activity = max(last_success, last_failure)
status_item = {
"Account ID": s.accountId,
"Status": status_str,
"Success": s.successCount,
"Failures": s.failureCount,
"Last Success": format_timestamp(s.lastSuccessTimestamp),
"Last Failure": format_timestamp(s.lastFailureTimestamp),
"Last Proxy": s.lastUsedProxy or "N/A",
"Last Machine": s.lastUsedMachine or "N/A",
"_last_activity": last_activity, # Add a temporary key for sorting
}
status_list.append(status_item)
# Sort the list by the last activity timestamp in descending order
status_list.sort(key=lambda item: item.get('_last_activity', 0), reverse=True)
# Remove the temporary sort key before printing
for item in status_list:
del item['_last_activity']
print("\n--- Account Statuses ---")
# The f-string with a newline ensures the table starts on a new line in the logs.
print(f"\n{tabulate(status_list, headers='keys', tablefmt='grid')}")
print("------------------------\n")
except (PBServiceException, PBUserException) as e:
logger.error(f"Failed to get account statuses: {e.message}", exc_info=True)
print(f"\nERROR: Could not retrieve account statuses. Server returned: {e.message}\n")
except Exception as e:
logger.error(f"An unexpected error occurred while getting account statuses: {e}", exc_info=True)
print(f"\nERROR: An unexpected error occurred: {e}\n")
def _list_client_statuses(redis_conn_id):
"""Lists the status of different client types from Redis."""
logger.info("Listing client statuses from Redis key 'client_stats'")
try:
redis_client = _get_redis_client(redis_conn_id)
stats_key = "client_stats"
all_stats_raw = redis_client.hgetall(stats_key)
if not all_stats_raw:
print("\n--- Client Statuses ---\nNo client stats found in Redis.\n-----------------------\n")
return
from tabulate import tabulate
status_list = []
for client_bytes, stats_json_bytes in all_stats_raw.items():
client_name = client_bytes.decode('utf-8')
try:
stats = json.loads(stats_json_bytes.decode('utf-8'))
def format_latest(data):
if not data: return "N/A"
ts = format_timestamp(data.get('timestamp'))
url = data.get('url') or 'N/A'
machine = data.get('machine_id', 'N/A')
video_id_match = re.search(r'v=([a-zA-Z0-9_-]{11})', url)
video_id = video_id_match.group(1) if video_id_match else 'N/A'
return f"{ts}\nMachine: {machine}\nVideo ID: {video_id}"
status_item = {
"Client": client_name,
"Success": stats.get('success_count', 0),
"Failures": stats.get('failure_count', 0),
"Last Success": format_latest(stats.get('latest_success')),
"Last Failure": format_latest(stats.get('latest_failure')),
}
status_list.append(status_item)
except (json.JSONDecodeError, AttributeError) as e:
logger.error(f"Could not parse stats for client '{client_name}': {e}")
status_list.append({
"Client": client_name, "Success": "ERROR", "Failures": "ERROR",
"Last Success": "Could not parse data", "Last Failure": "Could not parse data"
})
status_list.sort(key=lambda item: item.get('Client', ''))
print("\n--- Client Statuses ---")
print(f"\n{tabulate(status_list, headers='keys', tablefmt='grid')}")
print("-----------------------\n")
except Exception as e:
logger.error(f"An unexpected error occurred while getting client statuses: {e}", exc_info=True)
print(f"\nERROR: An unexpected error occurred: {e}\n")
def _list_activity_counters(redis_conn_id: str):
"""Lists current activity rates for proxies and accounts from Redis."""
logger.info("Listing activity counters from Redis keys 'activity:per_proxy:*' and 'activity:per_account:*'")
try:
redis_client = _get_redis_client(redis_conn_id)
from tabulate import tabulate
now = time.time()
def process_keys(pattern, entity_name):
keys = redis_client.scan_iter(pattern)
status_list = []
for key_bytes in keys:
key = key_bytes.decode('utf-8')
entity_id = key.split(':', 2)[-1]
# Clean up old entries before counting
redis_client.zremrangebyscore(key, '-inf', now - 3660) # Clean up > 1hr old
count_1m = redis_client.zcount(key, now - 60, now)
count_5m = redis_client.zcount(key, now - 300, now)
count_1h = redis_client.zcount(key, now - 3600, now)
if count_1h == 0: # Don't show entities with no recent activity
continue
status_list.append({
entity_name: entity_id,
"Activity (Last 1m)": count_1m,
"Activity (Last 5m)": count_5m,
"Activity (Last 1h)": count_1h,
})
status_list.sort(key=lambda item: item.get(entity_name, ''))
print(f"\n--- {entity_name} Activity Counters ---")
if not status_list:
print(f"No recent activity found for {entity_name.lower()}s.")
else:
print(f"\n{tabulate(status_list, headers='keys', tablefmt='grid')}")
print("-----------------------------------\n")
process_keys("activity:per_proxy:*", "Proxy URL")
process_keys("activity:per_account:*", "Account ID")
except Exception as e:
logger.error(f"An unexpected error occurred while getting activity counters: {e}", exc_info=True)
print(f"\nERROR: An unexpected error occurred: {e}\n")
def manage_system_callable(**context):
"""Main callable to interact with the system management endpoints."""
# Log version for debugging
logger.info(f"Running ytdlp_mgmt_proxy_account DAG version {DAG_VERSION}")
params = context["params"]
entity = params["entity"]
action = params["action"]
# For Thrift actions, use the new management host/port
if entity not in ["activity_counters"]:
host = params["management_host"]
port = params["management_port"]
else:
host, port = None, None # Not needed for meta actions
server_identity = params.get("server_identity")
proxy_url = params.get("proxy_url")
account_id = params.get("account_id")
# --- Validate Action/Entity Combination and Parameters ---
valid_actions = {
"proxy": ["list_with_status", "ban", "unban", "ban_all", "unban_all", "delete_from_redis"],
"account": ["list_with_status", "ban", "unban", "unban_all", "delete_from_redis"],
"client": ["list_with_status", "delete_from_redis"],
"accounts_and_proxies": ["list_with_status", "ban", "unban", "ban_all", "unban_all", "delete_from_redis"],
"activity_counters": ["list_with_status"],
}
if action not in valid_actions.get(entity, []):
raise ValueError(
f"The action '{action}' is not valid for entity '{entity}'.\n"
f"Valid actions for '{entity}' are: {', '.join(valid_actions.get(entity, ['None']))}."
)
# Validate required parameters for the chosen action
if entity == "proxy":
if action in ["ban", "unban"] and not server_identity:
raise ValueError(f"A 'server_identity' is required for proxy action '{action}'.")
if action in ["ban", "unban"] and not proxy_url:
raise ValueError(f"A 'proxy_url' is required for proxy action '{action}'.")
if entity == "account":
if action in ["ban", "unban"] and not account_id:
raise ValueError(f"An 'account_id' is required for account action '{action}'.")
# --- Handle Activity Counter action ---
if entity == "activity_counters":
if action == "list_with_status":
_list_activity_counters(params["redis_conn_id"])
return # End execution
else:
raise ValueError(f"Action '{action}' is not valid for entity 'activity_counters'. Only 'list_with_status' is supported.")
# Handle Thrift-based deletion actions
if action == "delete_from_redis":
client, transport = None, None
try:
client, transport = get_thrift_client(host, port)
if entity == "proxy":
proxy_url = params.get("proxy_url")
server_identity = params.get("server_identity")
if proxy_url and server_identity:
logger.info(f"Deleting proxy '{proxy_url}' for server '{server_identity}' from Redis via Thrift service...")
result = client.deleteProxyFromRedis(proxy_url, server_identity)
if result:
print(f"\nSuccessfully deleted proxy '{proxy_url}' for server '{server_identity}' from Redis.\n")
else:
print(f"\nFailed to delete proxy '{proxy_url}' for server '{server_identity}' from Redis.\n")
else:
logger.info("Deleting all proxies from Redis via Thrift service...")
# If server_identity is provided, delete all proxies for that server
# If server_identity is None, delete all proxies for ALL servers
result = client.deleteAllProxiesFromRedis(server_identity)
if server_identity:
print(f"\nSuccessfully deleted all proxies for server '{server_identity}' from Redis. Count: {result}\n")
else:
print(f"\nSuccessfully deleted all proxies from Redis across ALL servers. Count: {result}\n")
elif entity == "account":
account_id = params.get("account_id")
if account_id:
logger.info(f"Deleting account '{account_id}' from Redis via Thrift service...")
result = client.deleteAccountFromRedis(account_id)
if result:
print(f"\nSuccessfully deleted account '{account_id}' from Redis.\n")
else:
print(f"\nFailed to delete account '{account_id}' from Redis.\n")
else:
logger.info("Deleting all accounts from Redis via Thrift service...")
# If account_id is provided as prefix, delete all accounts with that prefix
# If account_id is None, delete all accounts
account_prefix = params.get("account_id")
result = client.deleteAllAccountsFromRedis(account_prefix)
if account_prefix:
print(f"\nSuccessfully deleted all accounts with prefix '{account_prefix}' from Redis. Count: {result}\n")
else:
print(f"\nSuccessfully deleted all accounts from Redis. Count: {result}\n")
elif entity == "accounts_and_proxies":
# Delete accounts
account_prefix = params.get("account_id") # Repurpose account_id param as an optional prefix
logger.info("Deleting accounts from Redis via Thrift service...")
account_result = client.deleteAllAccountsFromRedis(account_prefix)
if account_prefix:
print(f"\nSuccessfully deleted {account_result} account keys with prefix '{account_prefix}' from Redis.\n")
else:
print(f"\nSuccessfully deleted {account_result} account keys from Redis.\n")
# Delete proxies
server_identity = params.get("server_identity")
logger.info("Deleting proxies from Redis via Thrift service...")
proxy_result = client.deleteAllProxiesFromRedis(server_identity)
if server_identity:
print(f"\nSuccessfully deleted {proxy_result} proxy keys for server '{server_identity}' from Redis.\n")
else:
print(f"\nSuccessfully deleted {proxy_result} proxy keys from Redis across ALL servers.\n")
elif entity == "client":
logger.info("Deleting all client stats from Redis...")
redis_client = _get_redis_client(params["redis_conn_id"])
result = redis_client.delete("client_stats")
if result > 0:
print(f"\nSuccessfully deleted 'client_stats' key from Redis.\n")
else:
print(f"\nKey 'client_stats' not found in Redis. Nothing to delete.\n")
except (PBServiceException, PBUserException) as e:
logger.error(f"Thrift error performing delete action: {e.message}", exc_info=True)
print(f"\nERROR: Thrift service error: {e.message}\n")
raise
except Exception as e:
logger.error(f"Error performing delete action: {e}", exc_info=True)
print(f"\nERROR: An unexpected error occurred: {e}\n")
raise
finally:
if transport and transport.isOpen():
transport.close()
logger.info("Thrift connection closed.")
return # End execution for this action
client, transport = None, None
try:
client, transport = get_thrift_client(host, port)
if entity == "client":
if action == "list_with_status":
_list_client_statuses(params["redis_conn_id"])
elif entity == "proxy":
if action == "list_with_status":
_list_proxy_statuses(client, server_identity)
elif action == "ban":
if not proxy_url: raise ValueError("A 'proxy_url' is required.")
logger.info(f"Banning proxy '{proxy_url}' for server '{server_identity}'...")
client.banProxy(proxy_url, server_identity)
print(f"Successfully sent request to ban proxy '{proxy_url}'.")
elif action == "unban":
if not proxy_url: raise ValueError("A 'proxy_url' is required.")
logger.info(f"Unbanning proxy '{proxy_url}' for server '{server_identity}'...")
client.unbanProxy(proxy_url, server_identity)
print(f"Successfully sent request to unban proxy '{proxy_url}'.")
elif action == "ban_all":
if server_identity:
logger.info(f"Banning all proxies for server '{server_identity}'...")
client.banAllProxies(server_identity)
print(f"Successfully sent request to ban all proxies for '{server_identity}'.")
else:
logger.info("No server_identity provided. Banning all proxies for ALL servers...")
all_statuses = client.getProxyStatus(None)
if not all_statuses:
print("\nNo proxy statuses found for any server. Nothing to ban.\n")
return
all_server_identities = sorted(list(set(s.serverIdentity for s in all_statuses)))
logger.info(f"Found {len(all_server_identities)} server identities: {all_server_identities}")
print(f"Found {len(all_server_identities)} server identities. Sending ban request for each...")
success_count = 0
fail_count = 0
for identity in all_server_identities:
try:
client.banAllProxies(identity)
logger.info(f" - Sent ban_all for '{identity}'.")
success_count += 1
except Exception as e:
logger.error(f" - Failed to ban all proxies for '{identity}': {e}")
fail_count += 1
print(f"\nSuccessfully sent ban_all requests for {success_count} server identities.")
if fail_count > 0:
print(f"Failed to send ban_all requests for {fail_count} server identities. See logs for details.")
elif action == "unban_all":
if server_identity:
logger.info(f"Unbanning all proxy statuses for server '{server_identity}'...")
client.resetAllProxyStatuses(server_identity)
print(f"Successfully sent request to unban all proxy statuses for '{server_identity}'.")
else:
logger.info("No server_identity provided. Unbanning all proxies for ALL servers...")
all_statuses = client.getProxyStatus(None)
if not all_statuses:
print("\nNo proxy statuses found for any server. Nothing to unban.\n")
return
all_server_identities = sorted(list(set(s.serverIdentity for s in all_statuses)))
logger.info(f"Found {len(all_server_identities)} server identities: {all_server_identities}")
print(f"Found {len(all_server_identities)} server identities. Sending unban request for each...")
success_count = 0
fail_count = 0
for identity in all_server_identities:
try:
client.resetAllProxyStatuses(identity)
logger.info(f" - Sent unban_all for '{identity}'.")
success_count += 1
except Exception as e:
logger.error(f" - Failed to unban all proxies for '{identity}': {e}")
fail_count += 1
print(f"\nSuccessfully sent unban_all requests for {success_count} server identities.")
if fail_count > 0:
print(f"Failed to send unban_all requests for {fail_count} server identities. See logs for details.")
elif entity == "account":
if action == "list_with_status":
_list_account_statuses(client, account_id, params["redis_conn_id"])
elif action == "ban":
if not account_id: raise ValueError("An 'account_id' is required.")
reason = f"Manual ban from Airflow mgmt DAG by {socket.gethostname()}"
logger.info(f"Banning account '{account_id}'...")
client.banAccount(accountId=account_id, reason=reason)
print(f"Successfully sent request to ban account '{account_id}'.")
elif action == "unban":
if not account_id: raise ValueError("An 'account_id' is required.")
reason = f"Manual un-ban from Airflow mgmt DAG by {socket.gethostname()}"
logger.info(f"Unbanning account '{account_id}'...")
# Fetch status to get current success count before unbanning
statuses = client.getAccountStatus(accountId=account_id, accountPrefix=None)
if not statuses:
raise AirflowException(f"Account '{account_id}' not found.")
current_success_count = statuses[0].successCount or 0
client.unbanAccount(accountId=account_id, reason=reason)
print(f"Successfully sent request to unban account '{account_id}'.")
# Set the success_count_at_activation to baseline the account
redis_client = _get_redis_client(params["redis_conn_id"])
redis_client.hset(f"account_status:{account_id}", "success_count_at_activation", current_success_count)
logger.info(f"Set 'success_count_at_activation' for '{account_id}' to {current_success_count}.")
elif action == "unban_all":
account_prefix = account_id # Repurpose account_id param as an optional prefix
logger.info(f"Unbanning all account statuses to ACTIVE (prefix: '{account_prefix or 'ALL'}')...")
all_statuses = client.getAccountStatus(accountId=None, accountPrefix=account_prefix)
if not all_statuses:
print(f"No accounts found with prefix '{account_prefix or 'ALL'}' to unban.")
return
accounts_to_unban = [s.accountId for s in all_statuses]
account_map = {s.accountId: s for s in all_statuses}
redis_client = _get_redis_client(params["redis_conn_id"])
logger.info(f"Found {len(accounts_to_unban)} accounts to unban.")
print(f"Found {len(accounts_to_unban)} accounts. Sending unban request for each...")
unban_count = 0
fail_count = 0
for acc_id in accounts_to_unban:
try:
reason = f"Manual unban_all from Airflow mgmt DAG by {socket.gethostname()}"
client.unbanAccount(accountId=acc_id, reason=reason)
logger.info(f" - Sent unban for '{acc_id}'.")
# Also set the success_count_at_activation to baseline the account
current_success_count = account_map[acc_id].successCount or 0
redis_client.hset(f"account_status:{acc_id}", "success_count_at_activation", current_success_count)
logger.info(f" - Set 'success_count_at_activation' for '{acc_id}' to {current_success_count}.")
unban_count += 1
except Exception as e:
logger.error(f" - Failed to unban account '{acc_id}': {e}")
fail_count += 1
print(f"\nSuccessfully sent unban requests for {unban_count} accounts.")
if fail_count > 0:
print(f"Failed to send unban requests for {fail_count} accounts. See logs for details.")
# Optionally, list statuses again to confirm
print("\n--- Listing statuses after unban_all ---")
_list_account_statuses(client, account_prefix, params["redis_conn_id"])
elif entity == "accounts_and_proxies":
if action == "list_with_status":
print("\n--- Listing statuses for Proxies, Accounts, and Clients ---")
_list_proxy_statuses(client, server_identity)
_list_account_statuses(client, account_id, params["redis_conn_id"])
_list_client_statuses(params["redis_conn_id"])
return # End execution for list_with_status
print(f"\n--- Performing action '{action}' on BOTH Proxies and Accounts ---")
# --- Proxy Action ---
try:
print("\n-- Running Proxy Action --")
if action == "list_with_status":
_list_proxy_statuses(client, server_identity)
elif action == "ban":
if not proxy_url: raise ValueError("A 'proxy_url' is required.")
logger.info(f"Banning proxy '{proxy_url}' for server '{server_identity}'...")
client.banProxy(proxy_url, server_identity)
print(f"Successfully sent request to ban proxy '{proxy_url}'.")
elif action == "unban":
if not proxy_url: raise ValueError("A 'proxy_url' is required.")
logger.info(f"Unbanning proxy '{proxy_url}' for server '{server_identity}'...")
client.unbanProxy(proxy_url, server_identity)
print(f"Successfully sent request to unban proxy '{proxy_url}'.")
elif action == "ban_all":
if server_identity:
logger.info(f"Banning all proxies for server '{server_identity}'...")
client.banAllProxies(server_identity)
print(f"Successfully sent request to ban all proxies for '{server_identity}'.")
else:
logger.info("No server_identity provided. Banning all proxies for ALL servers...")
all_statuses = client.getProxyStatus(None)
if not all_statuses:
print("\nNo proxy statuses found for any server. Nothing to ban.\n")
else:
all_server_identities = sorted(list(set(s.serverIdentity for s in all_statuses)))
logger.info(f"Found {len(all_server_identities)} server identities: {all_server_identities}")
print(f"Found {len(all_server_identities)} server identities. Sending ban request for each...")
success_count = 0
fail_count = 0
for identity in all_server_identities:
try:
client.banAllProxies(identity)
logger.info(f" - Sent ban_all for '{identity}'.")
success_count += 1
except Exception as e:
logger.error(f" - Failed to ban all proxies for '{identity}': {e}")
fail_count += 1
print(f"\nSuccessfully sent ban_all requests for {success_count} server identities.")
if fail_count > 0:
print(f"Failed to send ban_all requests for {fail_count} server identities. See logs for details.")
elif action == "unban_all":
if server_identity:
logger.info(f"Unbanning all proxy statuses for server '{server_identity}'...")
client.resetAllProxyStatuses(server_identity)
print(f"Successfully sent request to unban all proxy statuses for '{server_identity}'.")
else:
logger.info("No server_identity provided. Unbanning all proxies for ALL servers...")
all_statuses = client.getProxyStatus(None)
if not all_statuses:
print("\nNo proxy statuses found for any server. Nothing to unban.\n")
else:
all_server_identities = sorted(list(set(s.serverIdentity for s in all_statuses)))
logger.info(f"Found {len(all_server_identities)} server identities: {all_server_identities}")
print(f"Found {len(all_server_identities)} server identities. Sending unban request for each...")
success_count = 0
fail_count = 0
for identity in all_server_identities:
try:
client.resetAllProxyStatuses(identity)
logger.info(f" - Sent unban_all for '{identity}'.")
success_count += 1
except Exception as e:
logger.error(f" - Failed to unban all proxies for '{identity}': {e}")
fail_count += 1
print(f"\nSuccessfully sent unban_all requests for {success_count} server identities.")
if fail_count > 0:
print(f"Failed to send unban_all requests for {fail_count} server identities. See logs for details.")
except Exception as proxy_e:
logger.error(f"Error during proxy action '{action}': {proxy_e}", exc_info=True)
print(f"\nERROR during proxy action: {proxy_e}")
# --- Account Action ---
try:
print("\n-- Running Account Action --")
if action == "list_with_status":
_list_account_statuses(client, account_id, params["redis_conn_id"])
elif action == "ban":
if not account_id: raise ValueError("An 'account_id' is required.")
reason = f"Manual ban from Airflow mgmt DAG by {socket.gethostname()}"
logger.info(f"Banning account '{account_id}'...")
client.banAccount(accountId=account_id, reason=reason)
print(f"Successfully sent request to ban account '{account_id}'.")
elif action == "unban":
if not account_id: raise ValueError("An 'account_id' is required.")
reason = f"Manual un-ban from Airflow mgmt DAG by {socket.gethostname()}"
logger.info(f"Unbanning account '{account_id}'...")
# Fetch status to get current success count before unbanning
statuses = client.getAccountStatus(accountId=account_id, accountPrefix=None)
if not statuses:
logger.warning(f"Account '{account_id}' not found. Skipping account unban.")
else:
current_success_count = statuses[0].successCount or 0
client.unbanAccount(accountId=account_id, reason=reason)
print(f"Successfully sent request to unban account '{account_id}'.")
# Set the success_count_at_activation to baseline the account
redis_client = _get_redis_client(params["redis_conn_id"])
redis_client.hset(f"account_status:{account_id}", "success_count_at_activation", current_success_count)
logger.info(f"Set 'success_count_at_activation' for '{account_id}' to {current_success_count}.")
elif action == "unban_all":
account_prefix = account_id # Repurpose account_id param as an optional prefix
logger.info(f"Unbanning all account statuses to ACTIVE (prefix: '{account_prefix or 'ALL'}')...")
all_statuses = client.getAccountStatus(accountId=None, accountPrefix=account_prefix)
if not all_statuses:
print(f"No accounts found with prefix '{account_prefix or 'ALL'}' to unban.")
else:
accounts_to_unban = [s.accountId for s in all_statuses]
account_map = {s.accountId: s for s in all_statuses}
redis_client = _get_redis_client(params["redis_conn_id"])
logger.info(f"Found {len(accounts_to_unban)} accounts to unban.")
print(f"Found {len(accounts_to_unban)} accounts. Sending unban request for each...")
unban_count = 0
fail_count = 0
for acc_id in accounts_to_unban:
try:
reason = f"Manual unban_all from Airflow mgmt DAG by {socket.gethostname()}"
client.unbanAccount(accountId=acc_id, reason=reason)
logger.info(f" - Sent unban for '{acc_id}'.")
# Also set the success_count_at_activation to baseline the account
current_success_count = account_map[acc_id].successCount or 0
redis_client.hset(f"account_status:{acc_id}", "success_count_at_activation", current_success_count)
logger.info(f" - Set 'success_count_at_activation' for '{acc_id}' to {current_success_count}.")
unban_count += 1
except Exception as e:
logger.error(f" - Failed to unban account '{acc_id}': {e}")
fail_count += 1
print(f"\nSuccessfully sent unban requests for {unban_count} accounts.")
if fail_count > 0:
print(f"Failed to send unban requests for {fail_count} accounts. See logs for details.")
# Optionally, list statuses again to confirm
print("\n--- Listing statuses after unban_all ---")
_list_account_statuses(client, account_prefix, params["redis_conn_id"])
except Exception as account_e:
logger.error(f"Error during account action '{action}': {account_e}", exc_info=True)
print(f"\nERROR during account action: {account_e}")
elif entity == "all":
if action == "list_with_status":
print("\nListing all entities...")
_list_proxy_statuses(client, server_identity)
_list_account_statuses(client, account_id, params["redis_conn_id"])
except (PBServiceException, PBUserException) as e:
logger.error(f"Thrift error performing action '{action}': {e.message}", exc_info=True)
raise
except NotImplementedError as e:
logger.error(f"Feature not implemented: {e}", exc_info=True)
raise
except Exception as e:
logger.error(f"Error performing action '{action}': {e}", exc_info=True)
raise
finally:
if transport and transport.isOpen():
transport.close()
logger.info("Thrift connection closed.")
with DAG(
dag_id="ytdlp_mgmt_proxy_account",
default_args={"queue": "queue-mgmt"},
start_date=days_ago(1),
schedule=None,
catchup=False,
tags=["ytdlp", "mgmt", "master"],
doc_md="""
### YT-DLP Proxy and Account Manager DAG
This DAG provides tools to manage the state of proxies and accounts used by the `ytdlp-ops-server`.
Select an `entity` and an `action` to perform.
**IMPORTANT NOTE ABOUT DATA SOURCES:**
- **Proxy Statuses**: Read from the server's internal state via Thrift service calls.
- **Account Statuses**: Read from the Thrift service, and then enriched with live cooldown data directly from Redis.
**IMPORTANT NOTE ABOUT PROXY MANAGEMENT:**
- Proxies are managed by the server's internal state through Thrift methods.
- There is NO direct Redis manipulation for proxies; they are managed entirely by the server.
- To properly manage proxies, use the Thrift service methods (ban, unban, etc.).
""",
params={
"management_host": Param(DEFAULT_MANAGEMENT_SERVICE_IP, type="string", title="Management Service Host", description="The hostname or IP of the management service. Can be a Docker container name (e.g., 'envoy-thrift-lb') if on the same network."),
"management_port": Param(DEFAULT_MANAGEMENT_SERVICE_PORT, type="integer", title="Management Service Port", description="The port of the dedicated management service."),
"entity": Param(
"accounts_and_proxies",
type="string",
enum=["account", "proxy", "client", "accounts_and_proxies", "activity_counters"],
description="The type of entity to manage.",
),
"action": Param(
"list_with_status",
type="string",
enum=["list_with_status", "ban", "unban", "ban_all", "unban_all", "delete_from_redis"],
description="""The management action to perform.
---
#### Actions for `entity: proxy`
- `list_with_status`: View status of all proxies, optionally filtered by `server_identity`.
- `ban`: Ban a specific proxy for a given `server_identity`. Requires `proxy_url`.
- `unban`: Un-ban a specific proxy. Requires `proxy_url`.
- `ban_all`: Sets the status of all proxies for a given `server_identity` (or all servers) to `BANNED`.
- `unban_all`: Resets the status of all proxies for a given `server_identity` (or all servers) to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes proxy status from Redis via Thrift service. This permanently removes the proxy from being tracked by the system. If `proxy_url` and `server_identity` are provided, it deletes a single proxy. If only `server_identity` is provided, it deletes all proxies for that server. If neither is provided, it deletes ALL proxies across all servers.
#### Actions for `entity: account`
- `list_with_status`: View status of all accounts, optionally filtered by `account_id` (as a prefix).
- `ban`: Ban a specific account. Requires `account_id`.
- `unban`: Un-ban a specific account. Requires `account_id`.
- `unban_all`: Sets the status of all accounts (or those matching a prefix in `account_id`) to `ACTIVE`.
- `delete_from_redis`: **(Destructive)** Deletes account status from Redis via Thrift service. This permanently removes the account from being tracked by the system. If `account_id` is provided, it deletes that specific account. If `account_id` is provided as a prefix, it deletes all accounts matching that prefix. If `account_id` is empty, it deletes ALL accounts.
#### Actions for `entity: client`
- `list_with_status`: View success/failure statistics for each client type.
- `delete_from_redis`: **(Destructive)** Deletes all client stats from Redis.
#### Actions for `entity: activity_counters`
- `list_with_status`: View current activity rates (ops/min, ops/hr) for proxies and accounts.
#### Actions for `entity: accounts_and_proxies`
- This entity performs the selected action on **both** proxies and accounts where applicable.
- `list_with_status`: View statuses for both proxies and accounts.
- `ban`: Ban a specific proxy AND a specific account. Requires `proxy_url`, `server_identity`, and `account_id`.
- `unban`: Un-ban a specific proxy AND a specific account. Requires `proxy_url`, `server_identity`, and `account_id`.
- `ban_all`: Ban all proxies for a `server_identity` (or all servers). Does not affect accounts.
- `unban_all`: Un-ban all proxies for a `server_identity` (or all servers) AND all accounts (optionally filtered by `account_id` as a prefix).
- `delete_from_redis`: Deletes both account and proxy status from Redis via Thrift service. For accounts, if `account_id` is provided as a prefix, it deletes all accounts matching that prefix. If `account_id` is empty, it deletes ALL accounts. For proxies, if `server_identity` is provided, it deletes all proxies for that server. If `server_identity` is empty, it deletes ALL proxies across all servers.
""",
),
"server_identity": Param(
None,
type=["null", "string"],
description="The identity of the server instance (for proxy management). Leave blank to list all or delete all proxies.",
),
"proxy_url": Param(
None,
type=["null", "string"],
description="The proxy URL to act upon (e.g., 'socks5://host:port').",
),
"account_id": Param(
None,
type=["null", "string"],
description="The account ID to act upon. For `unban_all` or `delete_from_redis` on accounts, this can be an optional prefix. Leave blank to delete all accounts.",
),
"redis_conn_id": Param(
DEFAULT_REDIS_CONN_ID,
type="string",
title="Redis Connection ID",
description="The Airflow connection ID for the Redis server (used for 'delete_from_redis' and for fetching detailed account status).",
),
},
) as dag:
system_management_task = PythonOperator(
task_id="system_management_task",
python_callable=manage_system_callable,
)
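# Illustrative CLI trigger for this DAG (the --conf payload mirrors the
# params defined above; the values are examples only):
#   airflow dags trigger ytdlp_mgmt_proxy_account \
#       --conf '{"entity": "proxy", "action": "list_with_status"}'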

View File

@ -1,959 +0,0 @@
# -*- coding: utf-8 -*-
"""
Airflow DAG for manually adding YouTube URLs or Video IDs to a Redis queue.
"""
from __future__ import annotations
import json
import logging
import re
from typing import List, Optional
import csv
import os
from datetime import datetime
from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.models.dagrun import DagRun
from airflow.models.param import Param
from airflow.models.taskinstance import TaskInstance
from airflow.operators.python import PythonOperator, BranchPythonOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.bash import BashOperator
from airflow.providers.celery.executors.celery_executor import app as celery_app
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.dates import days_ago
from airflow.models.variable import Variable
from airflow.utils.session import create_session
import requests
# Configure logging
logger = logging.getLogger(__name__)
# Default settings
DEFAULT_REDIS_CONN_ID = "redis_default"
DEFAULT_QUEUE_NAME = "video_queue"
DEFAULT_QUEUE_TO_CLEAR = 'PLEASE_SPECIFY_QUEUE_TO_CLEAR'
DEFAULT_URL_LISTS_DIR = '/opt/airflow/inputfiles'
# --- Helper Functions ---
def _get_redis_client(redis_conn_id: str):
"""Gets a Redis client from an Airflow connection."""
try:
redis_hook = RedisHook(redis_conn_id=redis_conn_id)
return redis_hook.get_conn()
except Exception as e:
logger.error(f"Failed to connect to Redis using connection '{redis_conn_id}': {e}")
raise AirflowException(f"Redis connection failed: {e}")
def _get_predefined_url_lists():
"""Returns a static list of predefined URL list files."""
# This is a static list to ensure options are always visible in the UI,
# even if the files don't exist on the filesystem at parse time.
# The DAG will check for the file's existence at runtime.
predefined_files = [
'urls.dh128.json',
'urls.ixbt2045.json',
'urls.news1000.json',
'urls.rt100.json',
'urls.rt250_01.txt',
'urls.rt250_02.txt',
'urls.rt250_03.txt',
'urls.rt250_04.txt',
'urls.rt250_05.txt',
'urls.rt250_06.txt',
'urls.rt250_07.txt',
'urls.rt250_08.txt',
'urls.rt250_11.txt',
'urls.rt250_12.txt',
'urls.rt250_13.txt',
'urls.rt250_14.txt',
'urls.rt250_15.txt',
'urls.rt250_16.txt',
'urls.rt250_17.txt',
'urls.rt250_18.txt',
'urls.rt3700.txt',
'urls.sky28.json',
'urls.sky3.json',
'urls.tq46.json',
]
return ['None'] + sorted(predefined_files)
def _get_urls_from_source(**params) -> List[str]:
"""
Determines the source of video inputs based on the 'input_source' param and returns a list of raw items.
"""
input_source = params.get("input_source", "manual")
predefined_list = params.get("predefined_url_list")
file_path_or_url = params.get("url_list_file_path")
manual_inputs = params.get("video_inputs")
# Source 1: Predefined file
if input_source == 'predefined_file':
if not predefined_list or predefined_list == 'None':
raise AirflowException("Input source is 'predefined_file', but no file was selected from the list.")
default_path = DEFAULT_URL_LISTS_DIR
url_lists_dir = Variable.get('YTDLP_URL_LISTS_DIR', default_var=default_path)
file_path = os.path.join(url_lists_dir, predefined_list)
logger.info(f"Loading URLs from predefined file: {file_path}")
if not os.path.exists(file_path):
raise AirflowException(f"Selected predefined file does not exist: {file_path}")
with open(file_path, 'r', encoding='utf-8') as f:
if predefined_list.lower().endswith('.json'):
logger.info(f"Parsing '{predefined_list}' as a JSON file.")
try:
data = json.load(f)
if not isinstance(data, list):
raise AirflowException(f"JSON file '{predefined_list}' must contain a list of strings.")
return [str(item) for item in data]
except json.JSONDecodeError:
raise AirflowException(f"Failed to parse JSON from file: {predefined_list}")
elif predefined_list.lower().endswith('.txt'):
logger.info(f"Parsing '{predefined_list}' as a text file (one URL per line).")
return [line.strip() for line in f if line.strip()]
else:
raise AirflowException(f"Unsupported file type for predefined file: '{predefined_list}'. Must be .json or .txt.")
# Source 2: File path or URL
elif input_source == 'file_path_or_url':
if not file_path_or_url:
raise AirflowException("Input source is 'file_path_or_url', but no path/URL was provided.")
logger.info(f"Loading URLs from provided path/URL: {file_path_or_url}")
content = ""
if file_path_or_url.startswith(('http://', 'https://')):
try:
response = requests.get(file_path_or_url, timeout=30)
response.raise_for_status()
content = response.text
except requests.RequestException as e:
raise AirflowException(f"Failed to fetch URL list from '{file_path_or_url}': {e}")
else: # Assume local file path
if not os.path.exists(file_path_or_url):
raise AirflowException(f"Provided file path does not exist: {file_path_or_url}")
with open(file_path_or_url, 'r', encoding='utf-8') as f:
content = f.read()
try:
data = json.loads(content)
if not isinstance(data, list):
raise AirflowException("JSON content from path/URL must contain a list of strings.")
return [str(item) for item in data]
except json.JSONDecodeError:
raise AirflowException(f"Failed to parse JSON from path/URL: {file_path_or_url}")
# Source 3: Manual input
elif input_source == 'manual':
if not manual_inputs:
logger.info("Input source is 'manual', but no inputs were provided. Nothing to do.")
return []
logger.info("Loading URLs from manual input.")
return parse_video_inputs(manual_inputs)
else:
logger.warning(f"No valid input source selected or no data provided for the selected source. Nothing to do.")
return []
def parse_video_inputs(input_str: str) -> List[str]:
"""Parses a flexible string of video inputs into a list of individual items."""
if not input_str or not isinstance(input_str, str):
return []
input_str = input_str.strip()
# 1. Try to parse as a JSON array
if input_str.startswith("[") and input_str.endswith("]"):
try:
items = json.loads(input_str)
if isinstance(items, list):
logger.info("Successfully parsed input as a JSON array.")
return [str(item).strip() for item in items]
except json.JSONDecodeError:
logger.warning("Input looked like a JSON array but failed to parse. Treating as a comma-separated string.")
# 2. Treat as a comma-separated string
items = [item.strip() for item in input_str.split(",")]
# 3. Clean up quotes and extra whitespace from each item
cleaned_items = []
for item in items:
if item.startswith(('"', "'")) and item.endswith(('"', "'")):
item = item[1:-1]
if item: # Only add non-empty items
cleaned_items.append(item.strip())
return cleaned_items
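# Illustrative behaviour of parse_video_inputs (inputs are examples):
#   parse_video_inputs('["a", "b"]')   -> ['a', 'b']       (JSON array)
#   parse_video_inputs("a, 'b' , c")   -> ['a', 'b', 'c']  (comma-separated, quotes stripped)
#   parse_video_inputs("")             -> []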
def normalize_to_url(item: str) -> Optional[str]:
"""
Validates if an item is a recognizable YouTube URL or video ID,
and normalizes it to a standard watch URL format.
"""
if not item:
return None
# Regex for a standard 11-character YouTube video ID
video_id_pattern = r"^[a-zA-Z0-9_-]{11}$"
# Check if the item itself is a video ID
if re.match(video_id_pattern, item):
video_id = item
return f"https://www.youtube.com/watch?v={video_id}"
# Comprehensive regex to extract video ID from various URL formats
# Covers: watch, youtu.be, shorts, embed, /v/
url_patterns = [
r"(?:v=|\/v\/|youtu\.be\/|embed\/|shorts\/)([a-zA-Z0-9_-]{11})"
]
for pattern in url_patterns:
match = re.search(pattern, item)
if match:
video_id = match.group(1)
return f"https://www.youtube.com/watch?v={video_id}"
logger.warning(f"Could not recognize '{item}' as a valid YouTube URL or video ID.")
return None
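# Illustrative behaviour of normalize_to_url (the video ID is an example):
#   normalize_to_url("dQw4w9WgXcQ")                   -> "https://www.youtube.com/watch?v=dQw4w9WgXcQ"
#   normalize_to_url("https://youtu.be/dQw4w9WgXcQ")  -> "https://www.youtube.com/watch?v=dQw4w9WgXcQ"
#   normalize_to_url("https://www.youtube.com/shorts/dQw4w9WgXcQ") -> same watch URL
#   normalize_to_url("not a video")                   -> None (warning logged)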
def dump_redis_data_to_csv(redis_client, dump_dir, patterns):
"""Dumps data from Redis keys matching patterns to separate CSV files in a timestamped directory."""
timestamp_dir = datetime.now().strftime('%Y%m%d_%H%M%S')
full_dump_path = os.path.join(dump_dir, timestamp_dir)
os.makedirs(full_dump_path, exist_ok=True)
logger.info(f"Created dump directory: {full_dump_path}")
for pattern in patterns:
if not pattern: continue
# Sanitize pattern for filename
sanitized_pattern = re.sub(r'[^a-zA-Z0-9_-]', '_', pattern)
timestamp_file = datetime.now().strftime('%Y%m%d')
dump_file_name = f'redis_dump_{sanitized_pattern}_{timestamp_file}.csv'
dump_file_path = os.path.join(full_dump_path, dump_file_name)
logger.info(f"Dumping keys matching '{pattern}' to {dump_file_path}")
try:
with open(dump_file_path, 'w', newline='', encoding='utf-8') as csvfile:
writer = csv.writer(csvfile)
writer.writerow(['key', 'type', 'field_or_index', 'value'])
keys_found = 0
for key_bytes in redis_client.scan_iter(pattern):
key = key_bytes.decode('utf-8')
keys_found += 1
key_type = redis_client.type(key).decode('utf-8')
if key_type == 'hash':
for field, value in redis_client.hgetall(key).items():
writer.writerow([key, key_type, field.decode('utf-8'), value.decode('utf-8')])
elif key_type == 'list':
for index, value in enumerate(redis_client.lrange(key, 0, -1)):
writer.writerow([key, key_type, index, value.decode('utf-8')])
elif key_type == 'set':
for member in redis_client.smembers(key):
writer.writerow([key, key_type, None, member.decode('utf-8')])
elif key_type == 'string':
value = redis_client.get(key)
if value:
writer.writerow([key, key_type, None, value.decode('utf-8')])
if keys_found > 0:
logger.info(f"Successfully dumped {keys_found} keys for pattern '{pattern}' to {dump_file_path}")
else:
logger.info(f"No keys found for pattern '{pattern}'. Empty CSV file created at {dump_file_path}")
except Exception as e:
logger.error(f"Failed to dump Redis data for pattern '{pattern}': {e}", exc_info=True)
raise AirflowException(f"Failed to dump Redis data for pattern '{pattern}': {e}")
def clear_queue_callable(**context):
"""
Dumps Redis data to CSV and/or clears specified Redis keys based on selection.
The `_skipped` queue is for videos that are unavailable due to external reasons (e.g., private, removed).
"""
params = context['params']
ti = context['task_instance']
logger.info(f"Task '{ti.task_id}' running on queue '{ti.queue}'.")
redis_conn_id = params['redis_conn_id']
queue_system = params.get('queue_system', 'v1_monolithic')
queue_base_names_to_clear = []
if queue_system == 'v1_monolithic':
queue_base_names_to_clear.append(params['queue_base_name'])
elif queue_system.startswith('v2_'):
# For v2, clear both auth and dl queues for a complete clear.
queue_base_names_to_clear.extend(['queue2_auth', 'queue2_dl'])
else:
raise ValueError(f"Invalid queue_system: {queue_system}")
logger.info(f"Operating on queue system '{queue_system}' with base names: {queue_base_names_to_clear}.")
queues_to_clear_options = params.get('queues_to_clear_options', [])
confirm_clear = params.get('confirm_clear', False)
dump_queues = params['dump_queues']
dump_dir = context['templates_dict']['dump_dir']
dump_patterns = params['dump_patterns'].split(',') if params.get('dump_patterns') else []
if not confirm_clear:
message = "Action is 'clear_queue', but 'Confirm Deletion' was not checked. Aborting to prevent accidental data loss."
logger.error(message)
raise AirflowException(message)
# If no queues are selected, default to clearing all of them.
if not queues_to_clear_options:
logger.warning("No specific queues selected to clear. Defaulting to '_all'.")
queues_to_clear_options = ['_all']
redis_client = _get_redis_client(redis_conn_id)
if dump_queues and dump_patterns:
logger.info("Dumping is enabled. Performing dump before clearing.")
dump_redis_data_to_csv(redis_client, dump_dir, dump_patterns)
all_suffixes = ['_inbox', '_fail', '_result', '_progress', '_skipped']
keys_to_delete = set()
for queue_base_name in queue_base_names_to_clear:
if '_all' in queues_to_clear_options:
logger.info(f"'_all' option selected. Clearing all standard queues for base '{queue_base_name}'.")
for suffix in all_suffixes:
keys_to_delete.add(f"{queue_base_name}{suffix}")
else:
for suffix in queues_to_clear_options:
if suffix in all_suffixes:
keys_to_delete.add(f"{queue_base_name}{suffix}")
if not keys_to_delete:
logger.warning("No valid queue suffixes were selected. Nothing to delete.")
return
logger.info(f"Attempting to clear {len(keys_to_delete)} Redis key(s): {sorted(list(keys_to_delete))}")
try:
deleted_count = redis_client.delete(*keys_to_delete)
logger.info(f"Successfully sent delete command for {len(keys_to_delete)} key(s). Redis reported {deleted_count} deleted.")
except Exception as e:
logger.error(f"Failed to clear Redis keys: {e}", exc_info=True)
raise AirflowException(f"Failed to clear Redis keys: {e}")
def list_contents_callable(**context):
"""Lists the contents of the specified Redis key(s) (list or hash)."""
params = context['params']
ti = context['task_instance']
logger.info(f"Task '{ti.task_id}' running on queue '{ti.queue}'.")
redis_conn_id = params['redis_conn_id']
queues_to_list_str = params.get('queue_to_list')
max_items = params.get('max_items', 10)
if not queues_to_list_str:
raise ValueError("Parameter 'queue_to_list' cannot be empty.")
queues_to_list = [q.strip() for q in queues_to_list_str.split(',') if q.strip()]
if not queues_to_list:
logger.info("No valid queue names provided in 'queue_to_list'. Nothing to do.")
return
logger.info(f"Attempting to list contents for {len(queues_to_list)} Redis key(s): {queues_to_list}")
redis_client = _get_redis_client(redis_conn_id)
for queue_to_list in queues_to_list:
# Add a newline for better separation in logs
logger.info(f"\n--- Listing contents of Redis key '{queue_to_list}' (max: {max_items}) ---")
try:
key_type_bytes = redis_client.type(queue_to_list)
key_type = key_type_bytes.decode('utf-8') # Decode type
if key_type == 'list':
list_length = redis_client.llen(queue_to_list)
items_to_fetch = min(max_items, list_length)
contents_bytes = redis_client.lrange(queue_to_list, -items_to_fetch, -1)
contents = [item.decode('utf-8') for item in contents_bytes]
contents.reverse()
logger.info(f"--- Contents of Redis List '{queue_to_list}' ---")
logger.info(f"Total items in list: {list_length}")
if contents:
logger.info(f"Showing most recent {len(contents)} item(s):")
for i, item in enumerate(contents):
logger.info(f" [recent_{i}]: {item}")
if list_length > len(contents):
logger.info(f" ... ({list_length - len(contents)} older items not shown)")
logger.info(f"--- End of List Contents ---")
elif key_type == 'hash':
hash_size = redis_client.hlen(queue_to_list)
if hash_size > max_items * 2:
logger.warning(f"Hash '{queue_to_list}' has {hash_size} fields, which is large. Listing might be slow or incomplete. Consider using redis-cli HSCAN.")
contents_bytes = redis_client.hgetall(queue_to_list)
contents = {k.decode('utf-8'): v.decode('utf-8') for k, v in contents_bytes.items()}
logger.info(f"--- Contents of Redis Hash '{queue_to_list}' ---")
logger.info(f"Total fields in hash: {hash_size}")
if contents:
logger.info(f"Showing up to {max_items} item(s):")
item_count = 0
for key, value in contents.items():
if item_count >= max_items:
logger.info(f" ... (stopped listing after {max_items} items of {hash_size})")
break
try:
parsed_value = json.loads(value)
pretty_value = json.dumps(parsed_value, indent=2)
logger.info(f" '{key}':\n{pretty_value}")
except json.JSONDecodeError:
logger.info(f" '{key}': {value}")
item_count += 1
logger.info(f"--- End of Hash Contents ---")
elif key_type == 'none':
logger.info(f"Redis key '{queue_to_list}' does not exist.")
else:
logger.info(f"Redis key '{queue_to_list}' is of type '{key_type}'. Listing contents for this type is not implemented.")
except Exception as e:
logger.error(f"Failed to list contents of Redis key '{queue_to_list}': {e}", exc_info=True)
# Continue to the next key in the list instead of failing the whole task
def check_status_callable(**context):
"""
Checks the status (type and size) of all standard Redis queues for a given base name.
The `_skipped` queue is for videos that are unavailable due to external reasons (e.g., private, removed).
"""
params = context['params']
ti = context['task_instance']
logger.info(f"Task '{ti.task_id}' running on queue '{ti.queue}'.")
redis_conn_id = params['redis_conn_id']
queue_system = params.get('queue_system', 'v1_monolithic')
queue_base_names_to_check = []
if queue_system == 'v1_monolithic':
queue_base_names_to_check.append(params.get('queue_base_name', DEFAULT_QUEUE_NAME))
elif queue_system.startswith('v2_'):
# For v2, always check both auth and dl queues for a complete picture.
queue_base_names_to_check.extend(['queue2_auth', 'queue2_dl'])
else:
raise ValueError(f"Invalid queue_system: {queue_system}")
queue_suffixes = ['_inbox', '_progress', '_result', '_fail', '_skipped']
logger.info(f"--- Checking Status for Queue System: '{queue_system}' ---")
try:
redis_client = _get_redis_client(redis_conn_id)
for queue_name in queue_base_names_to_check:
logger.info(f"--- Base Name: '{queue_name}' ---")
for suffix in queue_suffixes:
queue_to_check = f"{queue_name}{suffix}"
key_type = redis_client.type(queue_to_check).decode('utf-8')
size = 0
if key_type == 'list':
size = redis_client.llen(queue_to_check)
elif key_type == 'hash':
size = redis_client.hlen(queue_to_check)
if key_type != 'none':
logger.info(f" - Queue '{queue_to_check}': Type='{key_type.upper()}', Size={size}")
else:
logger.info(f" - Queue '{queue_to_check}': Does not exist.")
logger.info(f"--- End of Status Check ---")
except Exception as e:
logger.error(f"Failed to check queue status for system '{queue_system}': {e}", exc_info=True)
raise AirflowException(f"Failed to check queue status: {e}")
def requeue_failed_callable(**context):
"""
Copies all URLs from the fail hash to the inbox list and optionally clears the fail hash.
Adapts behavior for v1 and v2 queue systems.
"""
params = context['params']
ti = context['task_instance']
logger.info(f"Task '{ti.task_id}' running on queue '{ti.queue}'.")
redis_conn_id = params['redis_conn_id']
clear_fail_queue = params['clear_fail_queue_after_requeue']
queue_system = params.get('queue_system', 'v1_monolithic')
fail_queue_name = ""
inbox_queue_name = ""
if queue_system == 'v1_monolithic':
queue_name = params['queue_base_name']
fail_queue_name = f"{queue_name}_fail"
inbox_queue_name = f"{queue_name}_inbox"
elif queue_system == 'v2_separated_auth':
fail_queue_name = "queue2_auth_fail"
inbox_queue_name = "queue2_auth_inbox"
elif queue_system == 'v2_separated_dl':
fail_queue_name = "queue2_dl_fail"
# DL failures must be re-authenticated, so they go back to the auth inbox.
inbox_queue_name = "queue2_auth_inbox"
else:
raise ValueError(f"Invalid queue_system: {queue_system}")
logger.info(f"Requeuing failed URLs from '{fail_queue_name}' to '{inbox_queue_name}' (system: {queue_system}).")
redis_client = _get_redis_client(redis_conn_id)
try:
# The fail queue is a hash. The keys are the URLs.
failed_urls_bytes = redis_client.hkeys(fail_queue_name)
if not failed_urls_bytes:
logger.info(f"Fail queue '{fail_queue_name}' is empty. Nothing to requeue.")
return
failed_urls = [url.decode('utf-8') for url in failed_urls_bytes]
logger.info(f"Found {len(failed_urls)} URLs to requeue:")
for url in failed_urls:
logger.info(f" - {url}")
# Add URLs to the inbox list
if failed_urls:
with redis_client.pipeline() as pipe:
pipe.rpush(inbox_queue_name, *failed_urls)
if clear_fail_queue:
pipe.delete(fail_queue_name)
pipe.execute()
final_list_length = redis_client.llen(inbox_queue_name)
success_message = (
f"Successfully requeued {len(failed_urls)} URLs to '{inbox_queue_name}'. "
f"The list now contains {final_list_length} items."
)
logger.info(success_message)
if clear_fail_queue:
logger.info(f"Successfully cleared fail queue '{fail_queue_name}'.")
else:
logger.info(f"Fail queue '{fail_queue_name}' was not cleared as per configuration.")
except Exception as e:
logger.error(f"Failed to requeue failed URLs: {e}", exc_info=True)
raise AirflowException(f"Failed to requeue failed URLs: {e}")
def purge_celery_queue_callable(**context):
"""
Purges messages from the specified Celery queues using the Airflow Celery app.
This is more reliable than shelling out to `celery purge` as it uses the same
app context and broker connection as the workers.
"""
params = context['params']
if not params.get('confirm_purge'):
raise AirflowException("'Confirm Purge' is not checked. Aborting to prevent accidental data loss.")
queues_to_purge_str = params.get('celery_queue_to_purge')
if not queues_to_purge_str:
raise AirflowException("No Celery queues specified to purge.")
queues = [q.strip() for q in queues_to_purge_str.split(',') if q.strip()]
logger.info(f"Attempting to purge {len(queues)} Celery queue(s): {queues}")
logger.info(f"Using broker: {celery_app.conf.broker_url}")
purged_counts = {}
with celery_app.connection_for_read() as conn:
with conn.channel() as channel:
for queue in queues:
try:
message_count = channel.queue_purge(queue)
purged_counts[queue] = message_count
logger.info(f"Purged {message_count} messages from queue '{queue}'.")
except Exception as e:
# This can happen if the queue doesn't exist on the broker.
# kombu might raise an operational error.
logger.error(f"Failed to purge queue '{queue}': {e}", exc_info=True)
purged_counts[queue] = f"ERROR: {e}"
logger.info("--- Celery Purge Summary ---")
for queue, result in purged_counts.items():
logger.info(f" - {queue}: {result}")
logger.info("--- Purge complete. ---")
def clear_dag_runs_callable(**context):
"""
Deletes DAG run history and associated task instances from the database.
"""
params = context['params']
dag_id = params.get("dag_id_to_manage")
clear_scope = params.get("clear_scope")
log_target = f"DAG '{dag_id}'" if dag_id != "ALL_DAGS" else "ALL DAGS (except ytdlp_mgmt_queues)"
logger.info(f"Attempting to delete DagRuns for {log_target} with scope '{clear_scope}'.")
with create_session() as session:
dag_run_query = session.query(DagRun)
if dag_id == "ALL_DAGS":
dag_run_query = dag_run_query.filter(DagRun.dag_id != 'ytdlp_mgmt_queues')
else:
dag_run_query = dag_run_query.filter(DagRun.dag_id == dag_id)
if clear_scope == "last_run":
if dag_id == "ALL_DAGS":
raise AirflowException("Cannot clear 'last_run' for ALL_DAGS. Please select a specific DAG.")
last_run = dag_run_query.order_by(DagRun.execution_date.desc()).first()
if not last_run:
logger.info(f"No runs found for DAG '{dag_id}'. Nothing to delete.")
print(f"\nNo runs found for DAG '{dag_id}'.\n")
return
logger.warning(f"Deleting last DagRun for DAG '{dag_id}' (run_id: {last_run.run_id}, execution_date: {last_run.execution_date}). This will also delete its task instances.")
session.delete(last_run)
deleted_count = 1
else: # all_runs
logger.warning(f"Deleting ALL DagRuns and associated TaskInstances for {log_target}. This will remove all history from the UI.")
ti_query = session.query(TaskInstance)
if dag_id == "ALL_DAGS":
ti_query = ti_query.filter(TaskInstance.dag_id != 'ytdlp_mgmt_queues')
else:
ti_query = ti_query.filter(TaskInstance.dag_id == dag_id)
ti_deleted_count = ti_query.delete(synchronize_session=False)
logger.info(f"Deleted {ti_deleted_count} TaskInstance records for {log_target}.")
deleted_count = dag_run_query.delete(synchronize_session=False)
# The session is committed automatically by the `with create_session()` context manager.
logger.info(f"Successfully deleted {deleted_count} DagRun(s) for {log_target}.")
print(f"\nSuccessfully deleted {deleted_count} DagRun(s) for {log_target}.\n")
def add_videos_to_queue_callable(**context):
"""
Parses video inputs from manual text, a predefined file, or a file path/URL,
normalizes them to URLs, and adds them to a Redis queue.
"""
params = context["params"]
ti = context['task_instance']
logger.info(f"Task '{ti.task_id}' running on queue '{ti.queue}'.")
queue_system = params.get('queue_system', 'v1_monolithic')
if queue_system.startswith('v2_'):
# For v2 systems, raw URLs are always added to the auth queue.
queue_name = 'queue2_auth'
logger.info(f"Queue system is '{queue_system}'. Adding URLs to '{queue_name}_inbox'.")
else:
queue_name = params["queue_base_name"]
redis_conn_id = params["redis_conn_id"]
dry_run = params["dry_run"]
# This function will get the list of strings from the correct source based on precedence
raw_items = _get_urls_from_source(**params)
if not raw_items:
logger.info("No video inputs found from any source. Nothing to do.")
return
valid_urls = []
for item in raw_items:
url = normalize_to_url(item)
if url and url not in valid_urls:
valid_urls.append(url)
elif not url:
logger.warning(f"Skipping invalid input item: '{item}'")
if not valid_urls:
raise AirflowException("No valid YouTube URLs or IDs were found in the provided input.")
logger.info(f"Found {len(valid_urls)} valid and unique URLs to add to the queue:")
for url in valid_urls:
logger.info(f" - {url}")
if dry_run:
logger.info("Dry run is enabled. Skipping Redis operation.")
print(f"\n[DRY RUN] Would have added {len(valid_urls)} URLs to the Redis list '{queue_name}_inbox'.")
return
# --- Add to Redis ---
try:
redis_client = _get_redis_client(redis_conn_id)
inbox_queue = f"{queue_name}_inbox"
# Use a pipeline for atomic and efficient addition
with redis_client.pipeline() as pipe:
for url in valid_urls:
pipe.rpush(inbox_queue, url)
pipe.execute()
final_list_length = redis_client.llen(inbox_queue)
success_message = (
f"Successfully added {len(valid_urls)} URLs to Redis list '{inbox_queue}'. "
f"The list now contains {final_list_length} items."
)
logger.info(success_message)
except Exception as e:
logger.error(f"Failed to add URLs to Redis queue '{inbox_queue}': {e}", exc_info=True)
raise AirflowException(f"Failed to add URLs to Redis: {e}")
# --- DAG Definition ---
with DAG(
dag_id="ytdlp_mgmt_queues",
default_args={
"owner": "airflow",
"start_date": days_ago(1),
"retries": 0,
"queue": "queue-mgmt",
},
schedule=None,
catchup=False,
tags=["ytdlp", "mgmt", "master"],
doc_md="""
### YT-DLP Queue Management
This DAG provides a set of tools to manage Redis queues used by the YTDLP processing pipeline.
Select an `action` to perform when triggering the DAG.
**Actions:**
- `add_videos`: Add one or more YouTube videos to a queue. You can provide input manually, select a predefined file from the server, or provide a path/URL to a file.
- `clear_queue`: Dump and/or delete a specific Redis key.
- `list_contents`: View the contents of a Redis key (list or hash).
- `check_status`: Check the overall status of the queues.
- `requeue_failed`: Copy all URLs from the `_fail` hash to the `_inbox` list and clear the `_fail` hash.
- `purge_celery_queue`: **(Destructive)** Removes all tasks from a specified Celery worker queue (e.g., `queue-dl`). This is useful for clearing out a backlog of tasks that were queued before a dispatcher was paused.
- `clear_dag_runs`: **(Destructive)** Deletes DAG run history and associated task instances from the database, removing them from the UI.
""",
params={
"action": Param(
"list_contents",
type="string",
enum=["add_videos", "clear_queue", "list_contents", "check_status", "requeue_failed", "inspect_celery_cluster", "purge_celery_queue", "clear_dag_runs"],
title="Action",
description="The management action to perform.",
),
"queue_system": Param(
"v1_monolithic",
type="string",
enum=["v1_monolithic", "v2_separated_auth", "v2_separated_dl"],
title="Queue System",
description="Select the target queue system to manage. This choice affects which queues are targeted by actions.",
),
"queue_base_name": Param(
DEFAULT_QUEUE_NAME,
type="string",
title="Queue Base Name (v1 only)",
description="Base name for queues. Only used when 'Queue System' is 'v1_monolithic'.",
),
# --- Params for 'add_videos' ---
"input_source": Param(
"predefined_file",
type="string",
enum=["manual", "predefined_file", "file_path_or_url"],
title="[add_videos] Video Input Source",
description="Choose how to provide the video URLs. This choice determines which of the following parameters is used.",
),
"video_inputs": Param(
None,
type=["null", "string"],
title="[add_videos] 1. Manual Input",
description="Used if 'Input Source' is 'manual'. Paste a single item, a comma-separated list, or a JSON array of YouTube URLs or Video IDs.",
),
"predefined_url_list": Param(
"None",
type="string",
enum=_get_predefined_url_lists(),
title="[add_videos] 2. Predefined File",
description=(
"Used if 'Input Source' is 'predefined_file'. Select a JSON file from the server's URL list directory "
f"(defined by Airflow Variable 'YTDLP_URL_LISTS_DIR', defaults to '{DEFAULT_URL_LISTS_DIR}')."
),
),
"url_list_file_path": Param(
None,
type=["null", "string"],
title="[add_videos] 3. File Path or URL",
description="Used if 'Input Source' is 'file_path_or_url'. Enter a local file path (on the Airflow worker) or a remote URL to a JSON file containing a list of URLs/IDs.",
),
"dry_run": Param(
False,
type="boolean",
title="[add_videos] Dry Run",
description="If True, validate inputs without adding them to the queue.",
),
# --- Params for 'clear_queue' ---
"queues_to_clear_options": Param(
None,
type=["null", "array"],
title="[clear_queue] Queues to Clear",
description="Select which standard queues to clear. '_all' clears all four. If left empty, it defaults to '_all'.",
items={
"type": "string",
"enum": ["_inbox", "_fail", "_result", "_progress", "_skipped", "_all"],
}
),
"confirm_clear": Param(
False,
type="boolean",
title="[clear_queue] Confirm Deletion",
description="Must be set to True to execute the 'clear_queue' action. This is a destructive operation.",
),
"dump_queues": Param(
True,
type="boolean",
title="[clear_queue] Dump Data",
description="If True, dump data before clearing.",
),
"dump_dir": Param(
None,
type=["null", "string"],
title="[clear_queue] Dump Directory",
description="Base directory to save CSV dump files. Supports Jinja. If empty, defaults to Airflow variable 'YTDLP_REDIS_DUMP_DIR' or '/opt/airflow/dumps'.",
),
"dump_patterns": Param(
'ytdlp:*,video_queue_*',
type="string",
title="[clear_queue] Dump Patterns",
description="Comma-separated list of key patterns to dump.",
),
# --- Params for 'list_contents' ---
"queue_to_list": Param(
'video_queue_inbox,queue2_auth_inbox,queue2_dl_inbox,queue2_dl_result',
type="string",
title="[list_contents] Queues to List",
description="Comma-separated list of exact Redis key names to list.",
),
"max_items": Param(
10,
type="integer",
title="[list_contents] Max Items to List",
description="Maximum number of items to show.",
),
# --- Params for 'requeue_failed' ---
"clear_fail_queue_after_requeue": Param(
True,
type="boolean",
title="[requeue_failed] Clear Fail Queue",
description="If True, deletes the `_fail` hash after requeueing items.",
),
# --- Params for 'purge_celery_queue' ---
"celery_queue_to_purge": Param(
"queue-dl,queue-auth",
type="string",
title="[purge_celery_queue] Celery Queues to Purge",
description="Comma-separated list of Celery queue names to purge from the broker. This is a destructive action.",
),
"confirm_purge": Param(
False,
type="boolean",
title="[purge_celery_queue] Confirm Purge",
description="Must be set to True to execute the 'purge_celery_queue' action. This is a destructive operation that removes all tasks from the specified Celery queue(s).",
),
# --- Params for 'clear_dag_runs' ---
"dag_id_to_manage": Param(
"ALL_DAGS",
type="string",
enum=["ALL_DAGS", "ytdlp_ops_v01_orchestrator", "ytdlp_ops_v01_dispatcher", "ytdlp_ops_v01_worker_per_url", "ytdlp_ops_v02_orchestrator_auth", "ytdlp_ops_v02_dispatcher_auth", "ytdlp_ops_v02_worker_per_url_auth", "ytdlp_ops_v02_orchestrator_dl", "ytdlp_ops_v02_dispatcher_dl", "ytdlp_ops_v02_worker_per_url_dl"],
title="[clear_dag_runs] DAG ID",
description="The DAG ID to perform the action on. Select 'ALL_DAGS' to clear history for all DAGs.",
),
"clear_scope": Param(
"all_runs",
type="string",
enum=["last_run", "all_runs"],
title="[clear_dag_runs] Clear Scope",
description="For 'clear_dag_runs' action, specifies the scope of runs to clear.",
),
# --- Common Params ---
"redis_conn_id": Param(
DEFAULT_REDIS_CONN_ID,
type="string",
title="Redis Connection ID",
),
},
) as dag:
branch_on_action = BranchPythonOperator(
task_id="branch_on_action",
python_callable=lambda **context: f"action_{context['params']['action']}",
)
action_add_videos = PythonOperator(
task_id="action_add_videos",
python_callable=add_videos_to_queue_callable,
)
action_clear_queue = PythonOperator(
task_id="action_clear_queue",
python_callable=clear_queue_callable,
templates_dict={'dump_dir': "{{ params.dump_dir or var.value.get('YTDLP_REDIS_DUMP_DIR', '/opt/airflow/dumps') }}"},
)
action_list_contents = PythonOperator(
task_id="action_list_contents",
python_callable=list_contents_callable,
)
action_check_status = PythonOperator(
task_id="action_check_status",
python_callable=check_status_callable,
)
action_requeue_failed = PythonOperator(
task_id="action_requeue_failed",
python_callable=requeue_failed_callable,
)
action_inspect_celery_cluster = BashOperator(
task_id="action_inspect_celery_cluster",
bash_command="""
# Get the broker URL from Airflow config
BROKER_URL=$(airflow config get-value celery broker_url)
echo "--- Inspecting Celery Cluster (Broker: $BROKER_URL) ---"
echo ""
echo "--- Active Queues (shows queues with consumers) ---"
celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect active_queues
echo ""
echo "--- Worker Stats (shows connected workers) ---"
celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect stats
echo ""
echo "--- Active Tasks (tasks currently running) ---"
celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect active
echo ""
echo "--- Reserved Tasks (tasks prefetched by workers) ---"
celery -A airflow.providers.celery.executors.celery_executor.app -b "$BROKER_URL" inspect reserved
""",
)
action_purge_celery_queue = PythonOperator(
task_id="action_purge_celery_queue",
python_callable=purge_celery_queue_callable,
)
action_clear_dag_runs = PythonOperator(
task_id="action_clear_dag_runs",
python_callable=clear_dag_runs_callable,
)
# --- Wire up tasks ---
branch_on_action >> [
action_add_videos,
action_clear_queue,
action_list_contents,
action_check_status,
action_requeue_failed,
action_inspect_celery_cluster,
action_purge_celery_queue,
action_clear_dag_runs,
]
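# Illustrative CLI trigger for this DAG (param values are examples drawn
# from the enums above):
#   airflow dags trigger ytdlp_mgmt_queues \
#       --conf '{"action": "check_status", "queue_system": "v2_separated_auth"}'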

View File

@ -1,258 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright © 2024 rl
#
# Distributed under terms of the MIT license.
"""
Maintenance DAG for managing the lifecycle of ytdlp-ops accounts.
This DAG is responsible for:
- Un-banning accounts whose ban duration has expired.
- Transitioning accounts from RESTING to ACTIVE after their cooldown period.
- Transitioning accounts from ACTIVE to RESTING after their active duration.
This logic was previously handled inside the ytdlp-ops-server and has been
moved here to give the orchestrator full control over account state.
"""
from __future__ import annotations
import logging
import time
from datetime import datetime, timedelta
from airflow.decorators import task
from airflow.models import Variable
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.utils.dates import days_ago
# Import utility functions and Thrift modules
from utils.redis_utils import _get_redis_client
from pangramia.yt.management import YTManagementService
from thrift.protocol import TBinaryProtocol
from thrift.transport import TSocket, TTransport
# Configure logging
logger = logging.getLogger(__name__)
# Default settings from Airflow Variables or hardcoded fallbacks
DEFAULT_REDIS_CONN_ID = 'redis_default'
DEFAULT_MANAGEMENT_SERVICE_IP = Variable.get("MANAGEMENT_SERVICE_HOST", default_var="172.17.0.1")
DEFAULT_MANAGEMENT_SERVICE_PORT = Variable.get("MANAGEMENT_SERVICE_PORT", default_var=9080)
DEFAULT_ARGS = {
'owner': 'airflow',
'retries': 1,
'retry_delay': 30,
'queue': 'queue-mgmt',
}
# --- Helper Functions ---
def _get_thrift_client(host, port, timeout=60):
"""Helper to create and connect a Thrift client."""
transport = TSocket.TSocket(host, port)
transport.setTimeout(timeout * 1000)
transport = TTransport.TFramedTransport(transport)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = YTManagementService.Client(protocol)
transport.open()
logger.info(f"Connected to Thrift server at {host}:{port}")
return client, transport
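# Usage sketch (the defaults below are defined at the top of this module):
#   client, transport = _get_thrift_client(DEFAULT_MANAGEMENT_SERVICE_IP,
#                                          int(DEFAULT_MANAGEMENT_SERVICE_PORT))
#   try:
#       statuses = client.getAccountStatus(accountId=None, accountPrefix=None)
#   finally:
#       if transport and transport.isOpen():
#           transport.close()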
@task
def manage_account_states(**context):
"""
Fetches all account statuses and performs necessary state transitions
based on time durations configured in the DAG parameters.
"""
params = context['params']
requests_limit = params['account_requests_limit']
cooldown_duration_s = params['account_cooldown_duration_min'] * 60
ban_duration_s = params['account_ban_duration_hours'] * 3600
host = DEFAULT_MANAGEMENT_SERVICE_IP
port = int(DEFAULT_MANAGEMENT_SERVICE_PORT)
redis_conn_id = DEFAULT_REDIS_CONN_ID
logger.info(f"Starting account maintenance. Service: {host}:{port}, Redis: {redis_conn_id}")
logger.info(f"Using limits: Requests={requests_limit}, Cooldown={params['account_cooldown_duration_min']}m, Ban={params['account_ban_duration_hours']}h")
client, transport = None, None
try:
client, transport = _get_thrift_client(host, port)
redis_client = _get_redis_client(redis_conn_id)
logger.info(f"--- Step 1: Fetching all account statuses from the ytdlp-ops-server at {host}:{port}... ---")
all_accounts = client.getAccountStatus(accountId=None, accountPrefix=None)
logger.info(f"Found {len(all_accounts)} total accounts to process.")
accounts_to_unban = []
accounts_to_activate = []
accounts_to_rest = []
now_ts = int(time.time())
for acc in all_accounts:
# Thrift can return 0 for unset integer fields.
# The AccountStatus thrift object is missing status_changed_timestamp and active_since_timestamp.
# We use available timestamps as proxies.
last_failure_ts = int(acc.lastFailureTimestamp or 0)
last_success_ts = int(acc.lastSuccessTimestamp or 0)
last_usage_ts = max(last_failure_ts, last_success_ts)
if acc.status == "BANNED" and last_failure_ts > 0:
time_since_ban = now_ts - last_failure_ts
if time_since_ban >= ban_duration_s:
accounts_to_unban.append(acc.accountId)
else:
remaining_s = ban_duration_s - time_since_ban
logger.info(f"Account {acc.accountId} is BANNED. Time until unban: {timedelta(seconds=remaining_s)}")
elif acc.status == "RESTING" and last_usage_ts > 0:
time_since_rest = now_ts - last_usage_ts
if time_since_rest >= cooldown_duration_s:
accounts_to_activate.append(acc.accountId)
else:
remaining_s = cooldown_duration_s - time_since_rest
logger.info(f"Account {acc.accountId} is RESTING. Time until active: {timedelta(seconds=remaining_s)}")
elif acc.status == "ACTIVE":
# For ACTIVE -> RESTING, check how many requests have been made since activation.
count_at_activation_raw = redis_client.hget(f"account_status:{acc.accountId}", "success_count_at_activation")
if count_at_activation_raw is not None:
count_at_activation = int(count_at_activation_raw)
current_success_count = acc.successCount or 0
requests_made = current_success_count - count_at_activation
if requests_made >= requests_limit:
logger.info(f"Account {acc.accountId} reached request limit ({requests_made}/{requests_limit}). Moving to RESTING.")
accounts_to_rest.append(acc.accountId)
else:
requests_remaining = requests_limit - requests_made
logger.info(f"Account {acc.accountId} is ACTIVE. Requests until rest: {requests_remaining}/{requests_limit}")
else:
# This is a fallback for accounts that were activated before this logic was deployed.
# We can activate them "fresh" by setting their baseline count now.
logger.info(f"Account {acc.accountId} is ACTIVE but has no 'success_count_at_activation'. Setting it now.")
redis_client.hset(f"account_status:{acc.accountId}", "success_count_at_activation", acc.successCount or 0)
logger.info("--- Step 2: Analyzing accounts for state transitions ---")
logger.info(f"Found {len(accounts_to_unban)} accounts with expired bans to un-ban.")
logger.info(f"Found {len(accounts_to_activate)} accounts with expired rest periods to activate.")
logger.info(f"Found {len(accounts_to_rest)} accounts with expired active periods to put to rest.")
# --- Perform State Transitions ---
# 1. Un-ban accounts via Thrift call
logger.info("--- Step 3: Processing un-bans ---")
if accounts_to_unban:
logger.info(f"Un-banning {len(accounts_to_unban)} accounts: {accounts_to_unban}")
account_map = {acc.accountId: acc for acc in all_accounts}
for acc_id in accounts_to_unban:
try:
client.unbanAccount(acc_id, "Automatic un-ban by Airflow maintenance DAG.")
logger.info(f"Successfully un-banned account '{acc_id}'.")
# Set the activation count to baseline the account immediately after un-banning.
key = f"account_status:{acc_id}"
current_success_count = account_map[acc_id].successCount or 0
redis_client.hset(key, "success_count_at_activation", current_success_count)
logger.info(f"Set 'success_count_at_activation' for un-banned account '{acc_id}' to {current_success_count}.")
except Exception as e:
logger.error(f"Failed to un-ban account '{acc_id}': {e}")
else:
logger.info("No accounts to un-ban.")
# 2. Activate resting accounts via direct Redis write
logger.info("--- Step 4: Processing activations ---")
if accounts_to_activate:
logger.info(f"Activating {len(accounts_to_activate)} accounts: {accounts_to_activate}")
now_ts = int(time.time())
account_map = {acc.accountId: acc for acc in all_accounts}
with redis_client.pipeline() as pipe:
for acc_id in accounts_to_activate:
key = f"account_status:{acc_id}"
current_success_count = account_map[acc_id].successCount or 0
pipe.hset(key, "status", "ACTIVE")
pipe.hset(key, "active_since_timestamp", now_ts)
pipe.hset(key, "status_changed_timestamp", now_ts)
pipe.hset(key, "success_count_at_activation", current_success_count)
pipe.execute()
logger.info("Finished activating accounts.")
else:
logger.info("No accounts to activate.")
# 3. Rest active accounts via direct Redis write
logger.info("--- Step 5: Processing rests ---")
if accounts_to_rest:
logger.info(f"Putting {len(accounts_to_rest)} accounts to rest: {accounts_to_rest}")
now_ts = int(time.time())
with redis_client.pipeline() as pipe:
for acc_id in accounts_to_rest:
key = f"account_status:{acc_id}"
pipe.hset(key, "status", "RESTING")
pipe.hset(key, "status_changed_timestamp", now_ts)
pipe.hdel(key, "success_count_at_activation")
pipe.execute()
logger.info("Finished putting accounts to rest.")
else:
logger.info("No accounts to put to rest.")
logger.info("--- Account maintenance run complete. ---")
finally:
if transport and transport.isOpen():
transport.close()
with DAG(
dag_id='ytdlp_ops_account_maintenance',
default_args=DEFAULT_ARGS,
schedule='*/5 * * * *', # Run every 5 minutes
start_date=days_ago(1),
catchup=False,
tags=['ytdlp', 'maintenance'],
doc_md="""
### YT-DLP Account Maintenance: Time-Based State Transitions
This DAG is the central authority for automated, **time-based** state management for ytdlp-ops accounts.
It runs periodically to fetch the status of all accounts and applies its own logic to determine if an account's state should change based on configurable time durations.
The thresholds are defined as DAG parameters and can be configured via the Airflow UI:
- **Requests Limit**: How many successful requests an account can perform before it needs to rest.
- **Cooldown Duration**: How long an account must rest before it can be used again.
- **Ban Duration**: How long a ban lasts before the account is automatically un-banned.
---
#### Separation of Concerns: Time vs. Errors
It is critical to understand that this DAG handles only time-based state changes; error-based banning is handled by the worker DAGs during URL processing, according to their failure policies. This separation ensures that maintenance is predictable and based on timers, while acute, error-driven actions are handled immediately by the workers that encounter them.
---
#### State Transitions Performed by This DAG:
On each run, this DAG fetches the raw status and timestamps for all accounts and performs the following checks:
1. **Un-banning (`BANNED` -> `ACTIVE`)**:
- **Condition**: An account has been in the `BANNED` state for longer than the configured `account_ban_duration_hours`.
- **Action**: The DAG calls the `unbanAccount` service endpoint to lift the ban.
2. **Activation (`RESTING` -> `ACTIVE`)**:
- **Condition**: An account has been in the `RESTING` state for longer than the configured `account_cooldown_duration_min`.
- **Action**: The DAG updates the account's status to `ACTIVE` directly in Redis.
3. **Resting (`ACTIVE` -> `RESTING`)**:
- **Condition**: An account has performed at least `account_requests_limit` successful requests since it was last activated.
- **Action**: The DAG updates the account's status to `RESTING` directly in Redis.
This process gives full control over time-based account lifecycle management to the Airflow orchestrator.
""",
params={
'account_requests_limit': Param(250, type="integer", description="Number of successful requests an account can make before it is rested. Default is 250."),
'account_cooldown_duration_min': Param(60, type="integer", description="Duration in minutes an account must rest ('pause') before being activated again. Default is 60 minutes (1 hour)."),
'account_ban_duration_hours': Param(24, type="integer", description="Duration in hours an account stays banned before it can be un-banned."),
}
) as dag:
manage_account_states()
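The maintenance task above reads and writes per-account hashes in Redis (`account_status:<accountId>` with fields such as `status`, `status_changed_timestamp`, and `success_count_at_activation`). For debugging state transitions, a minimal inspection sketch, assuming a directly reachable Redis instance (host/port are illustrative; the DAG itself obtains its client via an Airflow connection):

```python
# Minimal sketch: dump the account state hashes this DAG manipulates.
# Connection details are illustrative assumptions, not taken from the DAG.
import time

import redis

r = redis.Redis(host="localhost", port=6379, db=0)

for key in r.scan_iter("account_status:*"):
    fields = {k.decode(): v.decode() for k, v in r.hgetall(key).items()}
    changed = int(fields.get("status_changed_timestamp") or 0)
    age_s = int(time.time()) - changed if changed else None
    print(
        f"{key.decode()}: status={fields.get('status')}, "
        f"seconds_in_state={age_s}, "
        f"success_count_at_activation={fields.get('success_count_at_activation')}"
    )
```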

View File

@ -1,105 +0,0 @@
# -*- coding: utf-8 -*-
"""
DAG to dispatch work to ytdlp_ops_worker_per_url DAGs.
It pulls a URL from Redis and triggers a worker with a pinned queue.
"""
from __future__ import annotations
import logging
import os
import socket
from datetime import timedelta
from airflow.decorators import task
from airflow.exceptions import AirflowSkipException
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.api.common.trigger_dag import trigger_dag
from airflow.utils.dates import days_ago
from utils.redis_utils import _get_redis_client
logger = logging.getLogger(__name__)
DEFAULT_QUEUE_NAME = 'video_queue'
DEFAULT_REDIS_CONN_ID = 'redis_default'
@task(queue='queue-dl')
def dispatch_url_to_worker(**context):
"""
Pulls one URL from Redis, determines the current worker's dedicated queue,
and triggers the main worker DAG to process the URL on that specific queue.
"""
ti = context['task_instance']
logger.info(f"Dispatcher task '{ti.task_id}' running on queue '{ti.queue}'.")
# --- Check for worker pause lock file ---
# This path must be consistent with the Ansible playbook.
lock_file_path = '/opt/airflow/inputfiles/AIRFLOW.PREVENT_URL_PULL.lockfile'
hostname = socket.gethostname()
if os.path.exists(lock_file_path):
logger.info(f"Worker '{hostname}' is paused. Lock file found at '{lock_file_path}'. Skipping URL pull.")
raise AirflowSkipException(f"Worker '{hostname}' is paused.")
else:
logger.info(f"Worker '{hostname}' is active (no lock file found at '{lock_file_path}'). Proceeding to pull URL.")
params = context['params']
redis_conn_id = params['redis_conn_id']
queue_name = params['queue_name']
inbox_queue = f"{queue_name}_inbox"
logger.info(f"Attempting to pull one URL from Redis queue '{inbox_queue}'...")
client = _get_redis_client(redis_conn_id)
url_bytes = client.lpop(inbox_queue)
if not url_bytes:
logger.info("Redis queue is empty. No work to dispatch. Skipping task.")
raise AirflowSkipException("Redis queue is empty. No work to dispatch.")
url_to_process = url_bytes.decode('utf-8')
logger.info(f"Pulled URL '{url_to_process}' from the queue.")
# Determine the worker-specific queue for affinity (hostname already resolved for the lock-file check above)
worker_queue = f"queue-dl-{hostname}"
logger.info(f"Running on worker '{hostname}'. Dispatching job to its dedicated queue '{worker_queue}'.")
# The orchestrator passes all its params, which we will pass through to the worker.
# We add the specific URL and the determined worker queue to the configuration.
conf_to_pass = {**params, 'url_to_process': url_to_process, 'worker_queue': worker_queue}
# Embed the worker queue in the run_id to avoid DB race conditions in the mutation hook.
# The hook will parse the queue name from the run_id itself.
run_id = f"worker_run_{context['dag_run'].run_id}_{context['ts_nodash']}_q_{worker_queue}"
logger.info(f"Triggering 'ytdlp_ops_v01_worker_per_url' with run_id '{run_id}'")
trigger_dag(
dag_id='ytdlp_ops_v01_worker_per_url',
run_id=run_id,
conf=conf_to_pass,
replace_microseconds=False
)
with DAG(
dag_id='ytdlp_ops_v01_dispatcher',
default_args={'owner': 'airflow', 'retries': 0},
schedule=None, # This DAG is only triggered by the orchestrator.
start_date=days_ago(1),
catchup=False,
tags=['ytdlp', 'worker', 'dispatcher'],
doc_md="""
### YT-DLP URL Dispatcher
This DAG is responsible for dispatching a single URL to a worker with a pinned queue.
1. It pulls a single URL from the Redis `_inbox` queue.
2. It runs on the generic `queue-dl` to find any available worker.
3. It determines the worker's hostname and constructs a dedicated queue name (e.g., `queue-dl-dl-worker-1`).
4. It triggers the `ytdlp_ops_v01_worker_per_url` DAG, passing the URL and the dedicated queue name in the configuration.
This dispatcher-led affinity, combined with the `task_instance_mutation_hook` cluster policy, ensures that all subsequent processing for that URL happens on the same machine.
The `ytdlp_ops_v01_orchestrator` is used to trigger a batch of these dispatcher runs.
""",
# All params are passed through from the orchestrator
render_template_as_native_obj=True,
) as dag:
dispatch_url_to_worker()
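The `task_instance_mutation_hook` mentioned in the docs above lives outside this file (in each worker's `airflow_local_settings.py`). A minimal sketch of how such a cluster policy could parse the `_q_<queue>` suffix this dispatcher embeds in the run_id; this is an assumption about the hook's implementation, not a copy of it:

```python
# Hypothetical airflow_local_settings.py -- pins a task instance to the queue
# that the dispatcher embedded in its run_id via the '_q_<queue>' suffix.
def task_instance_mutation_hook(task_instance):
    run_id = getattr(task_instance, "run_id", "") or ""
    if "_q_" in run_id:
        # e.g. 'worker_run_..._q_queue-dl-dl-worker-1' -> 'queue-dl-dl-worker-1'
        task_instance.queue = run_id.rsplit("_q_", 1)[1]
```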

View File

@ -1,382 +0,0 @@
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2024 rl <rl@rlmbp>
#
# Distributed under terms of the MIT license.
"""
DAG to orchestrate ytdlp_ops_dispatcher DAG runs based on a defined policy.
It fetches URLs from a Redis queue and launches dispatchers in controlled bunches,
which in turn trigger workers with affinity.
"""
from airflow import DAG
from airflow.exceptions import AirflowException, AirflowSkipException
from airflow.operators.python import PythonOperator
from airflow.models.param import Param
from airflow.models.variable import Variable
from airflow.utils.dates import days_ago
from airflow.api.common.trigger_dag import trigger_dag
from airflow.models.dagrun import DagRun
from airflow.models.dag import DagModel
from datetime import timedelta, datetime
import logging
import random
import time
import json
# Import utility functions
from utils.redis_utils import _get_redis_client
# Import Thrift modules for proxy status check
from pangramia.yt.tokens_ops import YTTokenOpService
from thrift.protocol import TBinaryProtocol
from thrift.transport import TSocket, TTransport
# Configure logging
logger = logging.getLogger(__name__)
# Default settings
DEFAULT_QUEUE_NAME = 'video_queue'
DEFAULT_REDIS_CONN_ID = 'redis_default'
DEFAULT_TOTAL_WORKERS = 1
DEFAULT_WORKERS_PER_BUNCH = 1
DEFAULT_WORKER_DELAY_S = 1
DEFAULT_BUNCH_DELAY_S = 1
DEFAULT_YT_AUTH_SERVICE_IP = Variable.get("YT_AUTH_SERVICE_IP", default_var="172.17.0.1")
DEFAULT_YT_AUTH_SERVICE_PORT = int(Variable.get("YT_AUTH_SERVICE_PORT", default_var=9080))  # cast: Variable.get returns a string when the variable is set
# --- Helper Functions ---
def _check_application_queue(redis_client, queue_base_name: str) -> int:
"""Checks and logs the length of the application's inbox queue."""
inbox_queue_name = f"{queue_base_name}_inbox"
logger.info(f"--- Checking Application Work Queue ---")
try:
q_len = redis_client.llen(inbox_queue_name)
logger.info(f"Application work queue '{inbox_queue_name}' has {q_len} item(s).")
return q_len
except Exception as e:
logger.error(f"Failed to check application queue '{inbox_queue_name}': {e}", exc_info=True)
return -1 # Indicate an error
def _inspect_celery_queues(redis_client, queue_names: list):
"""Inspects Celery queues in Redis and logs their status."""
logger.info("--- Inspecting Celery Queues in Redis ---")
for queue_name in queue_names:
try:
q_len = redis_client.llen(queue_name)
logger.info(f"Queue '{queue_name}': Length = {q_len}")
if q_len > 0:
logger.info(f"Showing up to 10 tasks in '{queue_name}':")
# Fetch up to 10 items from the start of the list (queue)
items_bytes = redis_client.lrange(queue_name, 0, 9)
for i, item_bytes in enumerate(items_bytes):
try:
# Celery tasks are JSON-encoded strings
task_data = json.loads(item_bytes.decode('utf-8'))
# Pretty print for readability in logs
pretty_task_data = json.dumps(task_data, indent=2)
logger.info(f" Task {i+1}:\n{pretty_task_data}")
except (json.JSONDecodeError, UnicodeDecodeError) as e:
logger.warning(f" Task {i+1}: Could not decode/parse task data. Error: {e}. Raw: {item_bytes!r}")
except Exception as e:
logger.error(f"Failed to inspect queue '{queue_name}': {e}", exc_info=True)
logger.info("--- End of Queue Inspection ---")
# --- Main Orchestration Callable ---
def orchestrate_workers_ignition_callable(**context):
"""
Main orchestration logic. Triggers a specified number of dispatcher DAGs
to initiate self-sustaining processing loops.
"""
params = context['params']
ti = context['task_instance']
logger.info(f"Orchestrator task '{ti.task_id}' running on queue '{ti.queue}'.")
logger.info("Starting dispatcher ignition sequence.")
dispatcher_dag_id = 'ytdlp_ops_v01_dispatcher'
worker_queue = 'queue-dl'
app_queue_name = params['queue_name']
logger.info(f"Running in v1 (monolithic) mode. Dispatcher DAG: '{dispatcher_dag_id}', Worker Queue: '{worker_queue}'")
dag_model = DagModel.get_dagmodel(dispatcher_dag_id)
if dag_model and dag_model.is_paused:
logger.warning(f"Dispatcher DAG '{dispatcher_dag_id}' is paused. Skipping dispatcher ignition.")
raise AirflowSkipException(f"Dispatcher DAG '{dispatcher_dag_id}' is paused.")
total_workers = int(params['total_workers'])
workers_per_bunch = int(params['workers_per_bunch'])
# --- Input Validation ---
if total_workers <= 0:
logger.warning(f"'total_workers' is {total_workers}. No workers will be started. Skipping ignition.")
raise AirflowSkipException(f"No workers to start (total_workers={total_workers}).")
if workers_per_bunch <= 0:
logger.error(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}. Aborting.")
raise AirflowException(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}.")
# --- End Input Validation ---
worker_delay = int(params['delay_between_workers_s'])
bunch_delay = int(params['delay_between_bunches_s'])
# Create a list of worker numbers to trigger
worker_indices = list(range(total_workers))
bunches = [worker_indices[i:i + workers_per_bunch] for i in range(0, len(worker_indices), workers_per_bunch)]
# --- Inspect Queues before starting ---
try:
redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
redis_client = _get_redis_client(redis_conn_id)
# First, check the application queue for work
app_queue_len = _check_application_queue(redis_client, app_queue_name)
if params.get('skip_if_queue_empty') and app_queue_len == 0:
logger.info("'skip_if_queue_empty' is True and application queue is empty. Skipping worker ignition.")
raise AirflowSkipException("Application work queue is empty.")
# Then, inspect the target Celery queue for debugging
_inspect_celery_queues(redis_client, [worker_queue])
except AirflowSkipException:
raise # Re-raise to let Airflow handle the skip
except Exception as e:
logger.error(f"Could not inspect queues due to an error: {e}. Continuing with ignition sequence.")
# --- End of Inspection ---
logger.info(f"Plan: Triggering {total_workers} total dispatcher runs in {len(bunches)} bunches. Each run will attempt to process one URL.")
dag_run_id = context['dag_run'].run_id
total_triggered = 0
# --- Generate a consistent timestamped prefix for this orchestrator run ---
# This ensures all workers spawned from this run use the same set of accounts.
final_account_pool_prefix = params['account_pool']
if params.get('prepend_client_to_account') and params.get('account_pool_size') is not None:
clients_str = params.get('clients', '')
primary_client = clients_str.split(',')[0].strip() if clients_str else 'unknown'
# Use a timestamp from the orchestrator's run for consistency
timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
final_account_pool_prefix = f"{params['account_pool']}_{timestamp}_{primary_client}"
logger.info(f"Generated consistent account prefix for this run: '{final_account_pool_prefix}'")
for i, bunch in enumerate(bunches):
logger.info(f"--- Triggering Bunch {i+1}/{len(bunches)} (contains {len(bunch)} dispatcher(s)) ---")
for j, _ in enumerate(bunch):
# Create a unique run_id for each dispatcher run
run_id = f"dispatched_{dag_run_id}_{total_triggered}"
# Pass all orchestrator params to the dispatcher, which will then pass them to the worker.
conf_to_pass = {p: params[p] for p in params}
# Override account_pool with the generated prefix
conf_to_pass['account_pool'] = final_account_pool_prefix
logger.info(f"Triggering dispatcher {j+1}/{len(bunch)} in bunch {i+1} (run {total_triggered + 1}/{total_workers}) (Run ID: {run_id})")
logger.debug(f"Full conf for dispatcher run {run_id}: {conf_to_pass}")
trigger_dag(
dag_id=dispatcher_dag_id,
run_id=run_id,
conf=conf_to_pass,
replace_microseconds=False
)
total_triggered += 1
# Delay between dispatches in a bunch
if j < len(bunch) - 1:
logger.info(f"Waiting {worker_delay}s before next dispatcher in bunch...")
time.sleep(worker_delay)
# Delay between bunches
if i < len(bunches) - 1:
logger.info(f"--- Bunch {i+1} triggered. Waiting {bunch_delay}s before next bunch... ---")
time.sleep(bunch_delay)
logger.info(f"--- Ignition sequence complete. Total dispatcher runs triggered: {total_triggered}. ---")
# --- Final Queue Inspection ---
final_check_delay = 30 # seconds
logger.info(f"Waiting {final_check_delay}s for a final queue status check to see if workers picked up tasks...")
time.sleep(final_check_delay)
try:
redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
redis_client = _get_redis_client(redis_conn_id)
# Log connection details for debugging broker mismatch issues
conn_kwargs = redis_client.connection_pool.connection_kwargs
logger.info(f"Final check using Redis connection '{redis_conn_id}': "
f"host={conn_kwargs.get('host')}, "
f"port={conn_kwargs.get('port')}, "
f"db={conn_kwargs.get('db')}")
_inspect_celery_queues(redis_client, [worker_queue])
logger.info("Final queue inspection complete. If queues are not empty, workers have not picked up tasks yet. "
"If queues are empty, workers have started processing.")
except Exception as e:
logger.error(f"Could not perform final queue inspection: {e}. This does not affect worker ignition.")
# =============================================================================
# DAG Definition
# =============================================================================
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=1),
'start_date': days_ago(1),
}
with DAG(
dag_id='ytdlp_ops_v01_orchestrator',
default_args=default_args,
schedule=None, # This DAG runs only when triggered.
max_active_runs=1, # Only one ignition process should run at a time.
catchup=False,
description='Ignition system for ytdlp_ops_v01_dispatcher DAGs. Starts self-sustaining worker loops via dispatchers.',
doc_md="""
### YT-DLP v1 (Monolithic) Worker Ignition System
This DAG acts as an "ignition system" to start one or more self-sustaining worker loops for the **v1 monolithic worker**.
It does **not** process URLs itself. Its only job is to trigger a specified number of `ytdlp_ops_v01_dispatcher` DAGs,
which in turn pull URLs and trigger `ytdlp_ops_v01_worker_per_url` with worker affinity.
#### How it Works:
1. **Manual Trigger:** You manually trigger this DAG with parameters defining how many dispatcher loops to start (`total_workers`), in what configuration (`workers_per_bunch`, delays).
2. **Ignition:** The orchestrator triggers the initial set of dispatcher DAGs in a "fire-and-forget" manner, passing all its configuration parameters to them.
3. **Completion:** Once all initial dispatchers have been triggered, the orchestrator's job is complete.
The dispatchers then take over, each pulling a URL, determining affinity, and triggering a worker DAG.
#### Client Selection (`clients` parameter):
The `clients` parameter determines which YouTube client persona is used for token generation. Different clients have different capabilities and requirements.
**Supported Clients:**
| Client | Visitor ID | Player poToken | GVS poToken | Cookies Support | Notes |
| ---------------- | ------------ | -------------- | ------------ | --------------- | ------------------------------------------------------------------ |
| `tv` | Required | Not Required | Not Required | Supported | All formats may have DRM if you request too much. |
| `web_safari` | Required | Required | Required* | Supported | *Provides HLS (m3u8) formats which may not require a GVS token. |
| `mweb` | Required | Required | Required | Supported | |
| `web_camoufox` | Required | Required | Required | Supported | Camoufox variant of `web`. |
**Untested / Not Recommended Clients:**
| Client | Visitor ID | Player poToken | GVS poToken | Cookies Support | Notes |
| ---------------- | ------------ | -------------- | ------------ | --------------- | ------------------------------------------------------------------ |
| `web` | Required | Required | Required | Supported | Only SABR formats available. |
| `tv_simply` | Required | Not Required | Not Required | Not Supported | |
| `tv_embedded` | Required | Not Required | Not Required | Supported | Requires account cookies for most videos. |
| `web_embedded` | Required | Not Required | Not Required | Supported | Only for embeddable videos. |
| `web_music` | Required | Required | Required | Supported | |
| `web_creator` | Required | Required | Required | Supported | Requires account cookies. |
| `android` | Required | Required | Required | Not Supported | |
| `android_vr` | Required | Not Required | Not Required | Not Supported | YouTube Kids videos are not available. |
| `ios` | Required | Required | Required | Not Supported | |
Other `_camoufox` variants are also available but untested.
""",
tags=['ytdlp', 'mgmt', 'master'],
params={
# --- Ignition Control Parameters ---
'total_workers': Param(DEFAULT_TOTAL_WORKERS, type="integer", description="Total number of dispatcher loops to start."),
'workers_per_bunch': Param(DEFAULT_WORKERS_PER_BUNCH, type="integer", description="Number of dispatchers to start in each bunch."),
'delay_between_workers_s': Param(DEFAULT_WORKER_DELAY_S, type="integer", description="Delay in seconds between starting each dispatcher within a bunch."),
'delay_between_bunches_s': Param(DEFAULT_BUNCH_DELAY_S, type="integer", description="Delay in seconds between starting each bunch."),
'skip_if_queue_empty': Param(False, type="boolean", title="[Ignition Control] Skip if Queue Empty", description="If True, the orchestrator will not start any dispatchers if the application's work queue is empty."),
# --- Worker Passthrough Parameters ---
'on_auth_failure': Param(
'proceed_loop_under_manual_inspection',
type="string",
enum=['stop_loop', 'retry_with_new_account', 'retry_without_ban', 'proceed_loop_under_manual_inspection'],
title="[Worker Param] On Authentication Failure Policy",
description="Policy for a worker when a bannable authentication error occurs. "
"'stop_loop': Ban the account, mark URL as failed, and stop the worker's loop. "
"'retry_with_new_account': Ban the failed account, retry ONCE with a new account. If retry fails, ban the second account and stop. "
"'retry_without_ban': If a connection error (e.g. SOCKS timeout) occurs, retry with a new account but do NOT ban the first account/proxy. If retry fails, stop the loop without banning. "
"'proceed_loop_under_manual_inspection': (Default) **BEWARE: MANUAL SUPERVISION REQUIRED.** Marks the URL as failed but continues the processing loop. Use this only when you can manually intervene."
),
'on_download_failure': Param(
'proceed_loop',
type="string",
enum=['stop_loop', 'proceed_loop', 'retry_with_new_token'],
title="[Worker Param] On Download Failure Policy",
description="Policy for a worker when a download or probe error occurs. "
"'stop_loop': Mark URL as failed and stop the worker's loop. "
"'proceed_loop': (Default) Mark URL as failed but continue the processing loop with a new URL. "
"'retry_with_new_token': Attempt to get a new token with a new account and retry the download once. If it fails again, proceed loop."
),
'request_params_json': Param('{}', type="string", title="[Worker Param] Request Params JSON", description="JSON string with per-request parameters to override server defaults. Can be a full JSON object or comma-separated key=value pairs (e.g., 'session_params.location=DE,ytdlp_params.skip_cache=true')."),
'language_code': Param('en-US', type="string", title="[Worker Param] Language Code", description="The language code (e.g., 'en-US', 'de-DE') to use for the YouTube request headers."),
'queue_name': Param(DEFAULT_QUEUE_NAME, type="string", description="[Worker Param] Base name for Redis queues."),
'redis_conn_id': Param(DEFAULT_REDIS_CONN_ID, type="string", description="[Worker Param] Airflow Redis connection ID."),
'clients': Param(
'tv_simply',
type="string",
title="[Worker Param] Clients",
description="[Worker Param] Comma-separated list of clients for token generation. Full list: web, web_safari, web_embedded, web_music, web_creator, mweb, web_camoufox, web_safari_camoufox, web_embedded_camoufox, web_music_camoufox, web_creator_camoufox, mweb_camoufox, android, android_music, android_creator, android_vr, ios, ios_music, ios_creator, tv, tv_simply, tv_embedded. See DAG documentation for details."
),
'account_pool': Param('ytdlp_account', type="string", description="[Worker Param] Account pool prefix or comma-separated list."),
'account_pool_size': Param(10, type=["integer", "null"], description="[Worker Param] If using a prefix for 'account_pool', this specifies the number of accounts to generate (e.g., 10 for 'prefix_01' through 'prefix_10'). Required when using a prefix."),
'prepend_client_to_account': Param(True, type="boolean", title="[Worker Param] Prepend Client to Account", description="If True, prepends client and timestamp to account names in prefix mode. Format: prefix_YYYYMMDDHHMMSS_client_XX."),
'service_ip': Param(DEFAULT_YT_AUTH_SERVICE_IP, type="string", description="[Worker Param] IP of the ytdlp-ops-server. Default is from Airflow variable YT_AUTH_SERVICE_IP or hardcoded."),
'service_port': Param(DEFAULT_YT_AUTH_SERVICE_PORT, type="integer", description="[Worker Param] Port of the Envoy load balancer. Default is from Airflow variable YT_AUTH_SERVICE_PORT or hardcoded."),
'machine_id': Param("ytdlp-ops-airflow-service", type="string", description="[Worker Param] Identifier for the client machine."),
'assigned_proxy_url': Param(None, type=["string", "null"], title="[Worker Param] Assigned Proxy URL", description="A specific proxy URL to use for the request, overriding the server's proxy pool logic."),
'auto_create_new_accounts_on_exhaustion': Param(True, type="boolean", description="[Worker Param] If True and all accounts in a prefix-based pool are exhausted, create a new one automatically."),
# --- Download Control Parameters ---
'delay_between_formats_s': Param(15, type="integer", title="[Worker Param] Delay Between Formats (s)", description="Delay in seconds between downloading each format when multiple formats are specified. A 22s wait may be effective for batch downloads, while 6-12s may suffice if cookies are refreshed regularly."),
'yt_dlp_test_mode': Param(False, type="boolean", title="[Worker Param] yt-dlp Test Mode", description="If True, runs yt-dlp with --test flag (dry run without downloading)."),
'skip_probe': Param(True, type="boolean", title="[Worker Param] Skip Probe", description="If True, skips the ffmpeg probe of downloaded files."),
'yt_dlp_cleanup_mode': Param(False, type="boolean", title="[Worker Param] yt-dlp Cleanup Mode", description="If True, creates a .empty file and deletes the original media file after successful download and probe."),
'socket_timeout': Param(15, type="integer", title="[Worker Param] Socket Timeout", description="Timeout in seconds for socket operations."),
'download_format': Param(
'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best',
type="string",
title="[Worker Param] Download Format",
description="Custom yt-dlp format string. Common presets: [1] 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best' (Default, best quality MP4). [2] '18-dashy/18,140-dashy/140,133-dashy/134-dashy/136-dashy/137-dashy/250-dashy/298-dashy/299-dashy' (Legacy formats). [3] '299-dashy/298-dashy/250-dashy/137-dashy/136-dashy/135-dashy/134-dashy/133-dashy' (High-framerate formats)."
),
'downloader': Param(
'cli',
type="string",
enum=['py', 'aria-rpc', 'cli'],
title="[Worker Param] Download Tool",
description="Choose the download tool to use: 'py' (native python, recommended), 'aria-rpc' (send to aria2c daemon), 'cli' (legacy yt-dlp wrapper)."
),
'aria_host': Param('172.17.0.1', type="string", title="[Worker Param] Aria2c Host", description="For 'aria-rpc' downloader: Host of the aria2c RPC server. Can be set via Airflow Variable 'YTDLP_ARIA_HOST'."),
'aria_port': Param(6800, type="integer", title="[Worker Param] Aria2c Port", description="For 'aria-rpc' downloader: Port of the aria2c RPC server. Can be set via Airflow Variable 'YTDLP_ARIA_PORT'."),
'aria_secret': Param('SQGCQPLVFQIASMPNPOJYLVGJYLMIDIXDXAIXOTX', type="string", title="[Worker Param] Aria2c Secret", description="For 'aria-rpc' downloader: Secret token. Can be set via Airflow Variable 'YTDLP_ARIA_SECRET'."),
'yt_dlp_extra_args': Param(
'',
type=["string", "null"],
title="[Worker Param] Extra yt-dlp arguments",
),
}
) as dag:
orchestrate_task = PythonOperator(
task_id='start_worker_loops',
python_callable=orchestrate_workers_ignition_callable,
queue='queue-mgmt',
)
orchestrate_task.doc_md = """
### Start Worker Loops
This is the main task that executes the ignition policy.
- It triggers `ytdlp_ops_dispatcher` DAGs according to the batch settings.
- It passes all its parameters down to the dispatchers, which will use them to trigger workers.
"""

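The ignition plan above is plain list slicing. A quick worked illustration of the bunching arithmetic, using hypothetical values `total_workers=5` and `workers_per_bunch=2`:

```python
# Same slicing as orchestrate_workers_ignition_callable, with toy values.
total_workers, workers_per_bunch = 5, 2
worker_indices = list(range(total_workers))
bunches = [
    worker_indices[i:i + workers_per_bunch]
    for i in range(0, len(worker_indices), workers_per_bunch)
]
assert bunches == [[0, 1], [2, 3], [4]]  # 3 bunches, last one partial
```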
File diff suppressed because it is too large

View File

@ -1,98 +0,0 @@
# -*- coding: utf-8 -*-
"""
DAG to dispatch work to ytdlp_ops_worker_per_url_auth DAGs.
It pulls a URL from Redis and triggers an auth worker with a pinned queue.
"""
from __future__ import annotations
import logging
import os
import socket
from datetime import timedelta
from airflow.decorators import task
from airflow.exceptions import AirflowSkipException
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.api.common.trigger_dag import trigger_dag
from airflow.utils.dates import days_ago
from utils.redis_utils import _get_redis_client
logger = logging.getLogger(__name__)
DEFAULT_QUEUE_NAME = 'queue2_auth'
DEFAULT_REDIS_CONN_ID = 'redis_default'
@task(queue='queue-auth')
def dispatch_url_to_auth_worker(**context):
"""
Pulls one URL from Redis, determines the current worker's dedicated queue,
and triggers the auth worker DAG to process the URL on that specific queue.
"""
ti = context['task_instance']
logger.info(f"Auth Dispatcher task '{ti.task_id}' running on queue '{ti.queue}'.")
# --- Check for worker pause lock file ---
lock_file_path = '/opt/airflow/inputfiles/AIRFLOW.PREVENT_URL_PULL.lockfile'
hostname = socket.gethostname()
if os.path.exists(lock_file_path):
logger.info(f"Worker '{hostname}' is paused. Lock file found at '{lock_file_path}'. Skipping URL pull.")
raise AirflowSkipException(f"Worker '{hostname}' is paused.")
else:
logger.info(f"Worker '{hostname}' is active (no lock file found at '{lock_file_path}'). Proceeding to pull URL.")
params = context['params']
redis_conn_id = params['redis_conn_id']
queue_name = params['queue_name']
inbox_queue = f"{queue_name}_inbox"
logger.info(f"Attempting to pull one URL from Redis queue '{inbox_queue}'...")
client = _get_redis_client(redis_conn_id)
url_bytes = client.lpop(inbox_queue)
if not url_bytes:
logger.info("Redis auth inbox queue is empty. No work to dispatch. Skipping task.")
raise AirflowSkipException("Redis auth inbox queue is empty. No work to dispatch.")
url_to_process = url_bytes.decode('utf-8')
logger.info(f"Pulled URL '{url_to_process}' from the queue.")
# Determine the worker-specific queue for affinity (hostname already resolved for the lock-file check above)
worker_queue = f"queue-auth-{hostname}"
logger.info(f"Running on worker '{hostname}'. Dispatching job to its dedicated queue '{worker_queue}'.")
conf_to_pass = {**params, 'url_to_process': url_to_process, 'worker_queue': worker_queue}
run_id = f"worker_run_auth_{context['dag_run'].run_id}_{context['ts_nodash']}_q_{worker_queue}"
logger.info(f"Triggering 'ytdlp_ops_v02_worker_per_url_auth' with run_id '{run_id}'")
trigger_dag(
dag_id='ytdlp_ops_v02_worker_per_url_auth',
run_id=run_id,
conf=conf_to_pass,
replace_microseconds=False
)
with DAG(
dag_id='ytdlp_ops_v02_dispatcher_auth',
default_args={'owner': 'airflow', 'retries': 0},
schedule=None,
start_date=days_ago(1),
catchup=False,
tags=['ytdlp', 'worker', 'dispatcher', 'auth'],
is_paused_upon_creation=True,
doc_md="""
### YT-DLP Auth URL Dispatcher
This DAG dispatches a single URL to an auth worker with a pinned queue.
It pulls from the `queue2_auth_inbox` Redis queue and triggers the `ytdlp_ops_v02_worker_per_url_auth` DAG.
""",
render_template_as_native_obj=True,
params={
'queue_name': Param(DEFAULT_QUEUE_NAME, type='string', title='Queue Name', description='The base name of the Redis queue to pull URLs from.'),
'redis_conn_id': Param(DEFAULT_REDIS_CONN_ID, type='string', title='Redis Connection ID'),
},
) as dag:
dispatch_url_to_auth_worker()
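Work for this dispatcher arrives as plain URL strings on the `queue2_auth_inbox` Redis list. A minimal seeding sketch; the host/port are illustrative assumptions, since the DAG resolves Redis through its Airflow connection ID:

```python
# Minimal sketch: enqueue URLs for the auth dispatcher to pull.
import redis

r = redis.Redis(host="localhost", port=6379, db=0)
urls = ["https://www.youtube.com/watch?v=dQw4w9WgXcQ"]
# The dispatcher consumes with LPOP, so RPUSH preserves FIFO order.
r.rpush("queue2_auth_inbox", *urls)
print(r.llen("queue2_auth_inbox"), "item(s) queued")
```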

View File

@ -1,89 +0,0 @@
# -*- coding: utf-8 -*-
"""
DAG to dispatch download jobs to ytdlp_ops_worker_per_url_dl DAGs.
It pulls a job payload from Redis and triggers a download worker.
"""
from __future__ import annotations
import logging
import os
import socket
from datetime import timedelta
from airflow.decorators import task
from airflow.exceptions import AirflowSkipException
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.api.common.trigger_dag import trigger_dag
from airflow.utils.dates import days_ago
from utils.redis_utils import _get_redis_client
logger = logging.getLogger(__name__)
DEFAULT_QUEUE_NAME = 'queue2_dl'
DEFAULT_REDIS_CONN_ID = 'redis_default'
@task(queue='queue-dl')
def dispatch_job_to_dl_worker(**context):
"""
Pulls one job payload from Redis, determines the current worker's dedicated queue,
and triggers the download worker DAG to process the job on that specific queue.
"""
ti = context['task_instance']
logger.info(f"Download Dispatcher task '{ti.task_id}' running on queue '{ti.queue}'.")
params = context['params']
redis_conn_id = params['redis_conn_id']
queue_name = params['queue_name']
inbox_queue = f"{queue_name}_inbox"
logger.info(f"Attempting to pull one job from Redis queue '{inbox_queue}'...")
client = _get_redis_client(redis_conn_id)
job_bytes = client.lpop(inbox_queue)
if not job_bytes:
logger.info("Redis download inbox queue is empty. No work to dispatch. Skipping task.")
raise AirflowSkipException("Redis download inbox queue is empty. No work to dispatch.")
job_data_str = job_bytes.decode('utf-8')
logger.info(f"Pulled job from the queue.")
# Determine the worker-specific queue for affinity
hostname = socket.gethostname()
worker_queue = f"queue-dl-{hostname}"
logger.info(f"Running on worker '{hostname}'. Dispatching job to its dedicated queue '{worker_queue}'.")
conf_to_pass = {**params, 'job_data': job_data_str, 'worker_queue': worker_queue}
run_id = f"worker_run_dl_{context['dag_run'].run_id}_{context['ts_nodash']}_q_{worker_queue}"
logger.info(f"Triggering 'ytdlp_ops_v02_worker_per_url_dl' with run_id '{run_id}'")
trigger_dag(
dag_id='ytdlp_ops_v02_worker_per_url_dl',
run_id=run_id,
conf=conf_to_pass,
replace_microseconds=False
)
with DAG(
dag_id='ytdlp_ops_v02_dispatcher_dl',
default_args={'owner': 'airflow', 'retries': 0},
schedule=None,
start_date=days_ago(1),
catchup=False,
tags=['ytdlp', 'worker', 'dispatcher', 'download'],
is_paused_upon_creation=True,
doc_md="""
### YT-DLP Download Job Dispatcher
This DAG dispatches a single download job to a download worker with a pinned queue.
It pulls a JSON payload from the `queue2_dl_inbox` Redis queue and triggers the `ytdlp_ops_v02_worker_per_url_dl` DAG.
""",
render_template_as_native_obj=True,
params={
'queue_name': Param(DEFAULT_QUEUE_NAME, type='string', title='Queue Name', description='The base name of the Redis queue to pull job payloads from.'),
'redis_conn_id': Param(DEFAULT_REDIS_CONN_ID, type='string', title='Redis Connection ID'),
},
) as dag:
dispatch_job_to_dl_worker()
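Unlike the auth dispatcher, this one pulls an opaque JSON job payload produced by the auth worker; the payload schema is not defined in this file. A purely hypothetical seeding sketch with illustrative field names only:

```python
# Hypothetical job payload for queue2_dl_inbox -- the real schema comes from
# ytdlp_ops_v02_worker_per_url_auth and may differ from these field names.
import json

import redis

job = {
    "url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ",  # illustrative
    "account_id": "ytdlp_account_01",                      # illustrative
}
r = redis.Redis(host="localhost", port=6379, db=0)
r.rpush("queue2_dl_inbox", json.dumps(job))
```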

View File

@ -1,305 +0,0 @@
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2024 rl <rl@rlmbp>
#
# Distributed under terms of the MIT license.
"""
DAG to orchestrate ytdlp_ops_dispatcher_v2_auth DAG runs based on a defined policy.
"""
from airflow import DAG
from airflow.exceptions import AirflowException, AirflowSkipException
from airflow.operators.python import PythonOperator
from airflow.models.param import Param
from airflow.models.variable import Variable
from airflow.utils.dates import days_ago
from airflow.api.common.trigger_dag import trigger_dag
from airflow.models.dagrun import DagRun
from airflow.models.dag import DagModel
from datetime import timedelta, datetime
import logging
import random
import time
import json
# Import utility functions
from utils.redis_utils import _get_redis_client
# Import Thrift modules for proxy status check
from pangramia.yt.tokens_ops import YTTokenOpService
from thrift.protocol import TBinaryProtocol
from thrift.transport import TSocket, TTransport
# Configure logging
logger = logging.getLogger(__name__)
# Default settings
DEFAULT_REDIS_CONN_ID = 'redis_default'
DEFAULT_TOTAL_WORKERS = 8
DEFAULT_WORKERS_PER_BUNCH = 1
DEFAULT_WORKER_DELAY_S = 1
DEFAULT_BUNCH_DELAY_S = 1
DEFAULT_YT_AUTH_SERVICE_IP = Variable.get("YT_AUTH_SERVICE_IP", default_var="172.17.0.1")
DEFAULT_YT_AUTH_SERVICE_PORT = int(Variable.get("YT_AUTH_SERVICE_PORT", default_var=9080))  # cast: Variable.get returns a string when the variable is set
# --- Helper Functions ---
def _check_application_queue(redis_client, queue_base_name: str) -> int:
"""Checks and logs the length of the application's inbox queue."""
inbox_queue_name = f"{queue_base_name}_inbox"
logger.info(f"--- Checking Application Work Queue ---")
try:
q_len = redis_client.llen(inbox_queue_name)
logger.info(f"Application work queue '{inbox_queue_name}' has {q_len} item(s).")
return q_len
except Exception as e:
logger.error(f"Failed to check application queue '{inbox_queue_name}': {e}", exc_info=True)
return -1 # Indicate an error
def _inspect_celery_queues(redis_client, queue_names: list):
"""Inspects Celery queues in Redis and logs their status."""
logger.info("--- Inspecting Celery Queues in Redis ---")
for queue_name in queue_names:
try:
q_len = redis_client.llen(queue_name)
logger.info(f"Queue '{queue_name}': Length = {q_len}")
if q_len > 0:
logger.info(f"Showing up to 10 tasks in '{queue_name}':")
# Fetch up to 10 items from the start of the list (queue)
items_bytes = redis_client.lrange(queue_name, 0, 9)
for i, item_bytes in enumerate(items_bytes):
try:
# Celery tasks are JSON-encoded strings
task_data = json.loads(item_bytes.decode('utf-8'))
# Pretty print for readability in logs
pretty_task_data = json.dumps(task_data, indent=2)
logger.info(f" Task {i+1}:\n{pretty_task_data}")
except (json.JSONDecodeError, UnicodeDecodeError) as e:
logger.warning(f" Task {i+1}: Could not decode/parse task data. Error: {e}. Raw: {item_bytes!r}")
except Exception as e:
logger.error(f"Failed to inspect queue '{queue_name}': {e}", exc_info=True)
logger.info("--- End of Queue Inspection ---")
# --- Main Orchestration Callable ---
def orchestrate_workers_ignition_callable(**context):
"""
Main orchestration logic. Triggers a specified number of dispatcher DAGs
to initiate self-sustaining processing loops.
"""
params = context['params']
ti = context['task_instance']
logger.info(f"Orchestrator task '{ti.task_id}' running on queue '{ti.queue}'.")
logger.info("Starting dispatcher ignition sequence.")
dispatcher_dag_id = 'ytdlp_ops_v02_dispatcher_auth'
worker_queue = 'queue-auth'
app_queue_name = 'queue2_auth'
logger.info(f"Running in v2 (auth) mode. Dispatcher DAG: '{dispatcher_dag_id}', Worker Queue: '{worker_queue}'")
dag_model = DagModel.get_dagmodel(dispatcher_dag_id)
if dag_model and dag_model.is_paused:
logger.warning(f"Dispatcher DAG '{dispatcher_dag_id}' is paused. Skipping dispatcher ignition.")
raise AirflowSkipException(f"Dispatcher DAG '{dispatcher_dag_id}' is paused.")
total_workers = int(params['total_workers'])
workers_per_bunch = int(params['workers_per_bunch'])
# --- Input Validation ---
if total_workers <= 0:
logger.warning(f"'total_workers' is {total_workers}. No workers will be started. Skipping ignition.")
raise AirflowSkipException(f"No workers to start (total_workers={total_workers}).")
if workers_per_bunch <= 0:
logger.error(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}. Aborting.")
raise AirflowException(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}.")
# --- End Input Validation ---
worker_delay = int(params['delay_between_workers_s'])
bunch_delay = int(params['delay_between_bunches_s'])
# Create a list of worker numbers to trigger
worker_indices = list(range(total_workers))
bunches = [worker_indices[i:i + workers_per_bunch] for i in range(0, len(worker_indices), workers_per_bunch)]
# --- Inspect Queues before starting ---
try:
redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
redis_client = _get_redis_client(redis_conn_id)
# First, check the application queue for work
app_queue_len = _check_application_queue(redis_client, app_queue_name)
if params.get('skip_if_queue_empty') and app_queue_len == 0:
logger.info("'skip_if_queue_empty' is True and application queue is empty. Skipping worker ignition.")
raise AirflowSkipException("Application work queue is empty.")
# Then, inspect the target Celery queue for debugging
_inspect_celery_queues(redis_client, [worker_queue])
except AirflowSkipException:
raise # Re-raise to let Airflow handle the skip
except Exception as e:
logger.error(f"Could not inspect queues due to an error: {e}. Continuing with ignition sequence.")
# --- End of Inspection ---
logger.info(f"Plan: Triggering {total_workers} total dispatcher runs in {len(bunches)} bunches. Each run will attempt to process one URL.")
dag_run_id = context['dag_run'].run_id
total_triggered = 0
# --- Generate a consistent timestamped prefix for this orchestrator run ---
# This ensures all workers spawned from this run use the same set of accounts.
final_account_pool_prefix = params['account_pool']
if params.get('prepend_client_to_account') and params.get('account_pool_size') is not None:
clients_str = params.get('clients', '')
primary_client = clients_str.split(',')[0].strip() if clients_str else 'unknown'
# Use a timestamp from the orchestrator's run for consistency
timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
final_account_pool_prefix = f"{params['account_pool']}_{timestamp}_{primary_client}"
logger.info(f"Generated consistent account prefix for this run: '{final_account_pool_prefix}'")
for i, bunch in enumerate(bunches):
logger.info(f"--- Triggering Bunch {i+1}/{len(bunches)} (contains {len(bunch)} dispatcher(s)) ---")
for j, _ in enumerate(bunch):
# Create a unique run_id for each dispatcher run
run_id = f"dispatched_{dag_run_id}_{total_triggered}"
# Pass all orchestrator params to the dispatcher, which will then pass them to the worker.
conf_to_pass = {p: params[p] for p in params}
# Override account_pool with the generated prefix
conf_to_pass['account_pool'] = final_account_pool_prefix
logger.info(f"Triggering dispatcher {j+1}/{len(bunch)} in bunch {i+1} (run {total_triggered + 1}/{total_workers}) (Run ID: {run_id})")
logger.debug(f"Full conf for dispatcher run {run_id}: {conf_to_pass}")
trigger_dag(
dag_id=dispatcher_dag_id,
run_id=run_id,
conf=conf_to_pass,
replace_microseconds=False
)
total_triggered += 1
# Delay between dispatches in a bunch
if j < len(bunch) - 1:
logger.info(f"Waiting {worker_delay}s before next dispatcher in bunch...")
time.sleep(worker_delay)
# Delay between bunches
if i < len(bunches) - 1:
logger.info(f"--- Bunch {i+1} triggered. Waiting {bunch_delay}s before next bunch... ---")
time.sleep(bunch_delay)
logger.info(f"--- Ignition sequence complete. Total dispatcher runs triggered: {total_triggered}. ---")
# --- Final Queue Inspection ---
final_check_delay = 30 # seconds
logger.info(f"Waiting {final_check_delay}s for a final queue status check to see if workers picked up tasks...")
time.sleep(final_check_delay)
try:
redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
redis_client = _get_redis_client(redis_conn_id)
# Log connection details for debugging broker mismatch issues
conn_kwargs = redis_client.connection_pool.connection_kwargs
logger.info(f"Final check using Redis connection '{redis_conn_id}': "
f"host={conn_kwargs.get('host')}, "
f"port={conn_kwargs.get('port')}, "
f"db={conn_kwargs.get('db')}")
_inspect_celery_queues(redis_client, [worker_queue])
logger.info("Final queue inspection complete. If queues are not empty, workers have not picked up tasks yet. "
"If queues are empty, workers have started processing.")
except Exception as e:
logger.error(f"Could not perform final queue inspection: {e}. This does not affect worker ignition.")
# =============================================================================
# DAG Definition
# =============================================================================
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=1),
'start_date': days_ago(1),
}
with DAG(
dag_id='ytdlp_ops_v02_orchestrator_auth',
default_args=default_args,
schedule=None, # This DAG runs only when triggered.
max_active_runs=1, # Only one ignition process should run at a time.
catchup=False,
description='Ignition system for ytdlp_ops_v02_dispatcher_auth DAGs.',
doc_md="""
### YT-DLP v2 (Auth) Worker Ignition System
This DAG acts as an "ignition system" to start one or more self-sustaining worker loops for the **v2 authentication worker**.
It triggers `ytdlp_ops_v02_dispatcher_auth` DAGs, which pull raw URLs from `queue2_auth_inbox` and trigger `ytdlp_ops_v02_worker_per_url_auth` workers.
""",
tags=['ytdlp', 'mgmt', 'master'],
params={
# --- Ignition Control Parameters ---
'total_workers': Param(DEFAULT_TOTAL_WORKERS, type="integer", description="Total number of dispatcher loops to start."),
'workers_per_bunch': Param(DEFAULT_WORKERS_PER_BUNCH, type="integer", description="Number of dispatchers to start in each bunch."),
'delay_between_workers_s': Param(DEFAULT_WORKER_DELAY_S, type="integer", description="Delay in seconds between starting each dispatcher within a bunch."),
'delay_between_bunches_s': Param(DEFAULT_BUNCH_DELAY_S, type="integer", description="Delay in seconds between starting each bunch."),
'skip_if_queue_empty': Param(False, type="boolean", title="[Ignition Control] Skip if Queue Empty", description="If True, the orchestrator will not start any dispatchers if the application's work queue is empty."),
# --- Worker Passthrough Parameters ---
'on_bannable_failure': Param(
'proceed_loop_under_manual_inspection',
type="string",
enum=['stop_loop', 'retry_with_new_account', 'retry_without_ban', 'retry_and_ban_account_only', 'retry_on_connection_error', 'proceed_loop_under_manual_inspection', 'stop_loop_on_auth_proceed_on_download_error'],
title="[Worker Param] On Bannable Failure Policy",
description="Policy for a worker when a bannable error occurs. "
"'stop_loop': Ban the account, mark URL as failed, and stop the worker's loop on any failure (auth or download). "
"'retry_with_new_account': Ban the failed account, retry ONCE with a new account. If retry fails, ban the second account and proxy, then stop. "
"'retry_on_connection_error': If a connection error (e.g. SOCKS timeout) occurs, retry with a new account but do NOT ban the first account/proxy. If retry fails, stop the loop without banning. "
"'proceed_loop_under_manual_inspection': (Default) **BEWARE: MANUAL SUPERVISION REQUIRED.** Marks the URL as failed but continues the processing loop. Use this only when you can manually intervene by pausing the dispatcher DAG or creating a lock file (`/opt/airflow/inputfiles/AIRFLOW.PREVENT_URL_PULL.lockfile`) to prevent a runaway failure loop. "
"'stop_loop_on_auth_proceed_on_download_error': Stops the loop on an authentication/token error (like 'stop_loop'), but continues the loop on a download/probe error (like 'proceed_loop_under_manual_inspection')."
),
'request_params_json': Param('{}', type="string", title="[Worker Param] Request Params JSON", description="JSON string with per-request parameters to override server defaults. Can be a full JSON object or comma-separated key=value pairs (e.g., 'session_params.location=DE,ytdlp_params.skip_cache=true')."),
'language_code': Param('en-US', type="string", title="[Worker Param] Language Code", description="The language code (e.g., 'en-US', 'de-DE') to use for the YouTube request headers."),
'redis_conn_id': Param(DEFAULT_REDIS_CONN_ID, type="string", description="[Worker Param] Airflow Redis connection ID."),
'clients': Param(
'tv_simply',
type="string",
title="[Worker Param] Clients",
description="[Worker Param] Comma-separated list of clients for token generation. Full list: web, web_safari, web_embedded, web_music, web_creator, mweb, web_camoufox, web_safari_camoufox, web_embedded_camoufox, web_music_camoufox, web_creator_camoufox, mweb_camoufox, android, android_music, android_creator, android_vr, ios, ios_music, ios_creator, tv, tv_simply, tv_embedded. See DAG documentation for details."
),
'account_pool': Param('ytdlp_account', type="string", description="[Worker Param] Account pool prefix or comma-separated list."),
'account_pool_size': Param(10, type=["integer", "null"], description="[Worker Param] If using a prefix for 'account_pool', this specifies the number of accounts to generate (e.g., 10 for 'prefix_01' through 'prefix_10'). Required when using a prefix."),
'prepend_client_to_account': Param(True, type="boolean", title="[Worker Param] Prepend Client to Account", description="If True, prepends client and timestamp to account names in prefix mode. Format: prefix_YYYYMMDDHHMMSS_client_XX."),
'service_ip': Param(DEFAULT_YT_AUTH_SERVICE_IP, type="string", description="[Worker Param] IP of the ytdlp-ops-server. Default is from Airflow variable YT_AUTH_SERVICE_IP or hardcoded."),
'service_port': Param(DEFAULT_YT_AUTH_SERVICE_PORT, type="integer", description="[Worker Param] Port of the Envoy load balancer. Default is from Airflow variable YT_AUTH_SERVICE_PORT or hardcoded."),
'machine_id': Param("ytdlp-ops-airflow-service", type="string", description="[Worker Param] Identifier for the client machine."),
'assigned_proxy_url': Param(None, type=["string", "null"], title="[Worker Param] Assigned Proxy URL", description="If provided, forces the token service to use this specific proxy for the request."),
'auto_create_new_accounts_on_exhaustion': Param(True, type="boolean", description="[Worker Param] If True and all accounts in a prefix-based pool are exhausted, create a new one automatically."),
}
) as dag:
orchestrate_task = PythonOperator(
task_id='start_worker_loops',
python_callable=orchestrate_workers_ignition_callable,
queue='queue-mgmt',
)
orchestrate_task.doc_md = """
### Start Worker Loops
This is the main task that executes the ignition policy.
- It triggers `ytdlp_ops_v02_dispatcher_auth` DAGs according to the batch settings.
- It passes all its parameters down to the dispatchers, which will use them to trigger workers.
"""

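The `request_params_json` passthrough parameter accepts either a full JSON object or comma-separated dotted `key=value` pairs. The actual parser lives in the worker and is not shown here; a minimal sketch of how the dotted-pair form could expand into nested dicts, under that assumption:

```python
# Sketch: expand 'a.b=1,c.d=true' into nested dicts; JSON objects pass through.
import json


def parse_request_params(raw: str) -> dict:
    raw = raw.strip()
    if not raw or raw == "{}":
        return {}
    if raw.startswith("{"):
        return json.loads(raw)  # full JSON object form
    result: dict = {}
    for pair in raw.split(","):
        dotted_key, _, value = pair.partition("=")
        node = result
        *parents, leaf = dotted_key.strip().split(".")
        for part in parents:
            node = node.setdefault(part, {})
        node[leaf] = value.strip()
    return result


print(parse_request_params("session_params.location=DE,ytdlp_params.skip_cache=true"))
# -> {'session_params': {'location': 'DE'}, 'ytdlp_params': {'skip_cache': 'true'}}
```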
View File

@ -1,293 +0,0 @@
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2024 rl <rl@rlmbp>
#
# Distributed under terms of the MIT license.
"""
DAG to orchestrate ytdlp_ops_dispatcher_v2_dl DAG runs based on a defined policy.
"""
from airflow import DAG
from airflow.exceptions import AirflowException, AirflowSkipException
from airflow.operators.python import PythonOperator
from airflow.models.param import Param
from airflow.models.variable import Variable
from airflow.utils.dates import days_ago
from airflow.api.common.trigger_dag import trigger_dag
from airflow.models.dagrun import DagRun
from airflow.models.dag import DagModel
from datetime import timedelta
import logging
import random
import time
import json
# Import utility functions
from utils.redis_utils import _get_redis_client
# Import Thrift modules for proxy status check
from pangramia.yt.tokens_ops import YTTokenOpService
from thrift.protocol import TBinaryProtocol
from thrift.transport import TSocket, TTransport
# Configure logging
logger = logging.getLogger(__name__)
# Default settings
DEFAULT_REDIS_CONN_ID = 'redis_default'
DEFAULT_TOTAL_WORKERS = 8
DEFAULT_WORKERS_PER_BUNCH = 1
DEFAULT_WORKER_DELAY_S = 1
DEFAULT_BUNCH_DELAY_S = 1
# --- Helper Functions ---
def _check_application_queue(redis_client, queue_base_name: str) -> int:
"""Checks and logs the length of the application's inbox queue."""
inbox_queue_name = f"{queue_base_name}_inbox"
logger.info(f"--- Checking Application Work Queue ---")
try:
q_len = redis_client.llen(inbox_queue_name)
logger.info(f"Application work queue '{inbox_queue_name}' has {q_len} item(s).")
return q_len
except Exception as e:
logger.error(f"Failed to check application queue '{inbox_queue_name}': {e}", exc_info=True)
return -1 # Indicate an error
def _inspect_celery_queues(redis_client, queue_names: list):
"""Inspects Celery queues in Redis and logs their status."""
logger.info("--- Inspecting Celery Queues in Redis ---")
for queue_name in queue_names:
try:
q_len = redis_client.llen(queue_name)
logger.info(f"Queue '{queue_name}': Length = {q_len}")
if q_len > 0:
logger.info(f"Showing up to 10 tasks in '{queue_name}':")
# Fetch up to 10 items from the start of the list (queue)
items_bytes = redis_client.lrange(queue_name, 0, 9)
for i, item_bytes in enumerate(items_bytes):
try:
# Celery tasks are JSON-encoded strings
task_data = json.loads(item_bytes.decode('utf-8'))
# Pretty print for readability in logs
pretty_task_data = json.dumps(task_data, indent=2)
logger.info(f" Task {i+1}:\n{pretty_task_data}")
except (json.JSONDecodeError, UnicodeDecodeError) as e:
logger.warning(f" Task {i+1}: Could not decode/parse task data. Error: {e}. Raw: {item_bytes!r}")
except Exception as e:
logger.error(f"Failed to inspect queue '{queue_name}': {e}", exc_info=True)
logger.info("--- End of Queue Inspection ---")
# --- Main Orchestration Callable ---
def orchestrate_workers_ignition_callable(**context):
"""
Main orchestration logic. Triggers a specified number of dispatcher DAGs
to initiate self-sustaining processing loops.
"""
params = context['params']
ti = context['task_instance']
logger.info(f"Orchestrator task '{ti.task_id}' running on queue '{ti.queue}'.")
logger.info("Starting dispatcher ignition sequence.")
dispatcher_dag_id = 'ytdlp_ops_v02_dispatcher_dl'
worker_queue = 'queue-dl'
app_queue_name = 'queue2_dl'
logger.info(f"Running in v2 (download) mode. Dispatcher DAG: '{dispatcher_dag_id}', Worker Queue: '{worker_queue}'")
dag_model = DagModel.get_dagmodel(dispatcher_dag_id)
if dag_model and dag_model.is_paused:
logger.warning(f"Dispatcher DAG '{dispatcher_dag_id}' is paused. Skipping dispatcher ignition.")
raise AirflowSkipException(f"Dispatcher DAG '{dispatcher_dag_id}' is paused.")
total_workers = int(params['total_workers'])
workers_per_bunch = int(params['workers_per_bunch'])
# --- Input Validation ---
if total_workers <= 0:
logger.warning(f"'total_workers' is {total_workers}. No workers will be started. Skipping ignition.")
raise AirflowSkipException(f"No workers to start (total_workers={total_workers}).")
if workers_per_bunch <= 0:
logger.error(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}. Aborting.")
raise AirflowException(f"'workers_per_bunch' must be a positive integer, but got {workers_per_bunch}.")
# --- End Input Validation ---
worker_delay = int(params['delay_between_workers_s'])
bunch_delay = int(params['delay_between_bunches_s'])
# Create a list of worker numbers to trigger
worker_indices = list(range(total_workers))
bunches = [worker_indices[i:i + workers_per_bunch] for i in range(0, len(worker_indices), workers_per_bunch)]
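# Example: total_workers=8, workers_per_bunch=3 -> bunches = [[0, 1, 2], [3, 4, 5], [6, 7]]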
# --- Inspect Queues before starting ---
try:
redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
redis_client = _get_redis_client(redis_conn_id)
# First, check the application queue for work
app_queue_len = _check_application_queue(redis_client, app_queue_name)
if params.get('skip_if_queue_empty') and app_queue_len == 0:
logger.info("'skip_if_queue_empty' is True and application queue is empty. Skipping worker ignition.")
raise AirflowSkipException("Application work queue is empty.")
# Then, inspect the target Celery queue for debugging
_inspect_celery_queues(redis_client, [worker_queue])
except AirflowSkipException:
raise # Re-raise to let Airflow handle the skip
except Exception as e:
logger.error(f"Could not inspect queues due to an error: {e}. Continuing with ignition sequence.")
# --- End of Inspection ---
logger.info(f"Plan: Triggering {total_workers} total dispatcher runs in {len(bunches)} bunches. Each run will attempt to process one URL.")
dag_run_id = context['dag_run'].run_id
total_triggered = 0
for i, bunch in enumerate(bunches):
logger.info(f"--- Triggering Bunch {i+1}/{len(bunches)} (contains {len(bunch)} dispatcher(s)) ---")
for j, _ in enumerate(bunch):
# Create a unique run_id for each dispatcher run
run_id = f"dispatched_{dag_run_id}_{total_triggered}"
# Pass all orchestrator params to the dispatcher, which will then pass them to the worker.
conf_to_pass = {p: params[p] for p in params}
logger.info(f"Triggering dispatcher {j+1}/{len(bunch)} in bunch {i+1} (run {total_triggered + 1}/{total_workers}) (Run ID: {run_id})")
logger.debug(f"Full conf for dispatcher run {run_id}: {conf_to_pass}")
trigger_dag(
dag_id=dispatcher_dag_id,
run_id=run_id,
conf=conf_to_pass,
replace_microseconds=False
)
total_triggered += 1
# Delay between dispatches in a bunch
if j < len(bunch) - 1:
logger.info(f"Waiting {worker_delay}s before next dispatcher in bunch...")
time.sleep(worker_delay)
# Delay between bunches
if i < len(bunches) - 1:
logger.info(f"--- Bunch {i+1} triggered. Waiting {bunch_delay}s before next bunch... ---")
time.sleep(bunch_delay)
logger.info(f"--- Ignition sequence complete. Total dispatcher runs triggered: {total_triggered}. ---")
# --- Final Queue Inspection ---
final_check_delay = 30 # seconds
logger.info(f"Waiting {final_check_delay}s for a final queue status check to see if workers picked up tasks...")
time.sleep(final_check_delay)
try:
redis_conn_id = params.get('redis_conn_id', DEFAULT_REDIS_CONN_ID)
redis_client = _get_redis_client(redis_conn_id)
# Log connection details for debugging broker mismatch issues
conn_kwargs = redis_client.connection_pool.connection_kwargs
logger.info(f"Final check using Redis connection '{redis_conn_id}': "
f"host={conn_kwargs.get('host')}, "
f"port={conn_kwargs.get('port')}, "
f"db={conn_kwargs.get('db')}")
_inspect_celery_queues(redis_client, [worker_queue])
logger.info("Final queue inspection complete. If queues are not empty, workers have not picked up tasks yet. "
"If queues are empty, workers have started processing.")
except Exception as e:
logger.error(f"Could not perform final queue inspection: {e}. This does not affect worker ignition.")
# =============================================================================
# DAG Definition
# =============================================================================
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=1),
'start_date': days_ago(1),
}
with DAG(
dag_id='ytdlp_ops_v02_orchestrator_dl',
default_args=default_args,
schedule=None, # This DAG runs only when triggered.
max_active_runs=1, # Only one ignition process should run at a time.
catchup=False,
description='Ignition system for ytdlp_ops_v02_dispatcher_dl DAGs.',
doc_md="""
### YT-DLP v2 (Download) Worker Ignition System
This DAG acts as an "ignition system" to start one or more self-sustaining worker loops for the **v2 download worker**.
It triggers `ytdlp_ops_v02_dispatcher_dl` DAGs, which pull job payloads from `queue2_dl_inbox` and trigger `ytdlp_ops_v02_worker_per_url_dl` workers.
""",
tags=['ytdlp', 'mgmt', 'master'],
params={
# --- Ignition Control Parameters ---
'total_workers': Param(DEFAULT_TOTAL_WORKERS, type="integer", description="Total number of dispatcher loops to start."),
'workers_per_bunch': Param(DEFAULT_WORKERS_PER_BUNCH, type="integer", description="Number of dispatchers to start in each bunch."),
'delay_between_workers_s': Param(DEFAULT_WORKER_DELAY_S, type="integer", description="Delay in seconds between starting each dispatcher within a bunch."),
'delay_between_bunches_s': Param(DEFAULT_BUNCH_DELAY_S, type="integer", description="Delay in seconds between starting each bunch."),
'skip_if_queue_empty': Param(False, type="boolean", title="[Ignition Control] Skip if Queue Empty", description="If True, the orchestrator will not start any dispatchers if the application's work queue is empty."),
'redis_conn_id': Param(DEFAULT_REDIS_CONN_ID, type="string", description="[Worker Param] Airflow Redis connection ID."),
'clients': Param('mweb,web_camoufox,tv', type="string", title="[Worker Param] Clients", description="Comma-separated list of clients for token generation. e.g. mweb,tv,web_camoufox"),
# --- Download Control Parameters ---
'delay_between_formats_s': Param(15, type="integer", title="[Worker Param] Delay Between Formats (s)", description="Delay in seconds between downloading each format when multiple formats are specified. A 22s wait may be effective for batch downloads, while 6-12s may suffice if cookies are refreshed regularly."),
'yt_dlp_test_mode': Param(False, type="boolean", title="[Worker Param] yt-dlp Test Mode", description="If True, runs yt-dlp with --test flag (dry run without downloading)."),
'skip_probe': Param(True, type="boolean", title="[Worker Param] Skip Probe", description="If True, skips the ffmpeg probe of downloaded files."),
'yt_dlp_cleanup_mode': Param(False, type="boolean", title="[Worker Param] yt-dlp Cleanup Mode", description="If True, creates a .empty file and deletes the original media file after successful download and probe."),
'fragment_retries': Param(2, type="integer", title="[Worker Param] Fragment Retries", description="Number of retries for a fragment before giving up."),
'limit_rate': Param('5M', type=["string", "null"], title="[Worker Param] Limit Rate", description="Download speed limit (e.g., 50K, 4.2M)."),
'socket_timeout': Param(15, type="integer", title="[Worker Param] Socket Timeout", description="Timeout in seconds for socket operations."),
'min_sleep_interval': Param(5, type="integer", title="[Worker Param] Min Sleep Interval", description="Minimum time to sleep between downloads (seconds)."),
'max_sleep_interval': Param(10, type="integer", title="[Worker Param] Max Sleep Interval", description="Maximum time to sleep between downloads (seconds)."),
'download_format': Param(
'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best',
type="string",
title="[Worker Param] Download Format",
description="Custom yt-dlp format string. Common presets: [1] 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best' (Default, best quality MP4). [2] '18-dashy/18,140-dashy/140,133-dashy/134-dashy/136-dashy/137-dashy/250-dashy/298-dashy/299-dashy' (Legacy formats). [3] '299-dashy/298-dashy/250-dashy/137-dashy/136-dashy/135-dashy/134-dashy/133-dashy' (High-framerate formats)."
),
'downloader': Param(
'cli',
type="string",
enum=['py', 'aria-rpc', 'cli'],
title="[Worker Param] Download Tool",
description="Choose the download tool to use: 'py' (native python, recommended), 'aria-rpc' (send to aria2c daemon), 'cli' (legacy yt-dlp wrapper)."
),
'aria_host': Param('172.17.0.1', type="string", title="[Worker Param] Aria2c Host", description="For 'aria-rpc' downloader: Host of the aria2c RPC server. Can be set via Airflow Variable 'YTDLP_ARIA_HOST'."),
'aria_port': Param(6800, type="integer", title="[Worker Param] Aria2c Port", description="For 'aria-rpc' downloader: Port of the aria2c RPC server. Can be set via Airflow Variable 'YTDLP_ARIA_PORT'."),
'aria_secret': Param('SQGCQPLVFQIASMPNPOJYLVGJYLMIDIXDXAIXOTX', type="string", title="[Worker Param] Aria2c Secret", description="For 'aria-rpc' downloader: Secret token. Can be set via Airflow Variable 'YTDLP_ARIA_SECRET'."),
'yt_dlp_extra_args': Param(
'--no-part --restrict-filenames',
type=["string", "null"],
title="[Worker Param] Extra yt-dlp arguments",
description="Extra command-line arguments for yt-dlp during download."
),
}
) as dag:
orchestrate_task = PythonOperator(
task_id='start_worker_loops',
python_callable=orchestrate_workers_ignition_callable,
queue='queue-mgmt',
)
orchestrate_task.doc_md = """
### Start Worker Loops
This is the main task that executes the ignition policy.
- It triggers `ytdlp_ops_v02_dispatcher_dl` DAGs according to the batch settings.
- It passes all its parameters down to the dispatchers, which will use them to trigger workers.
"""

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,447 +0,0 @@
# -*- coding: utf-8 -*-
"""
DAG to upload completed video directories to an S3-compatible service.
This DAG creates one long-running task for each configured S3 worker.
"""
from __future__ import annotations
import logging
import os
import shutil
import subprocess
import time
from datetime import datetime, timedelta
from airflow.decorators import task
from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.models.param import Param
from airflow.models.variable import Variable
from airflow.operators.dummy import DummyOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.utils.dates import days_ago
logger = logging.getLogger(__name__)
DEFAULT_ARGS = {
'owner': 'airflow',
'retries': 1,
'retry_delay': timedelta(minutes=1),
}
BASE_DOWNLOAD_PATH = '/opt/airflow/downloadfiles'
VIDEOS_PATH = os.path.join(BASE_DOWNLOAD_PATH, 'videos')
READY_PATH = os.path.join(VIDEOS_PATH, 'ready')
def run_s3_upload_batch(**context):
"""
This function runs in a continuous loop to check for completed video directories and upload them to S3.
If no videos are found, it sleeps for a configurable interval before checking again.
Dry run mode is non-destructive and will pause briefly after checking to prevent tight loops.
"""
params = context['params']
ti = context['task_instance']
# Log the configured execution timeout for debugging purposes.
# This helps verify that the timeout setting from the DAG file is being applied.
timeout_delta = ti.task.execution_timeout
logger.info(f"Task is configured with execution_timeout: {timeout_delta}")
concurrency = params['concurrency']
mode = params['mode']
dry_run = params['dry_run']
sleep_interval_min = params['sleep_if_no_videos_min']
sleep_interval_sec = sleep_interval_min * 60
s3_conn_id = params['s3_conn_id']
s3_bucket = params['s3_bucket_name']
s3_access_key_id = None
s3_secret_access_key = None
s3_endpoint = None
s3_region = None
config_source = "Unknown"
profile_name = "rusonyx"
# --- Attempt 1: Get S3 Configuration from Airflow Connection ---
if s3_conn_id:
try:
logger.info(f"Attempting to load S3 configuration from Airflow connection '{s3_conn_id}'.")
s3_hook = S3Hook(aws_conn_id=s3_conn_id)
s3_conn = s3_hook.get_connection(s3_conn_id)
s3_access_key_id = s3_conn.login
s3_secret_access_key = s3_conn.password
s3_endpoint = s3_conn.host
extra_config = s3_conn.extra_dejson
s3_region = extra_config.get('region_name')
if not all([s3_access_key_id, s3_secret_access_key, s3_endpoint, s3_region]):
logger.warning("S3 connection from Airflow is missing one or more required fields (excluding bucket). Will attempt to fall back to environment variables.")
s3_access_key_id = s3_secret_access_key = s3_endpoint = s3_region = None # Reset all
else:
config_source = f"Airflow Connection '{s3_conn_id}'"
profile_name = "rusonyx-airflow"
except Exception as e:
logger.warning(f"Failed to load S3 configuration from Airflow connection '{s3_conn_id}': {e}. Will attempt to fall back to environment variables.")
# --- Attempt 2: Fallback to Environment Variables ---
if not all([s3_access_key_id, s3_secret_access_key, s3_endpoint, s3_region]):
try:
logger.info("Attempting to load S3 configuration from environment variables as a fallback.")
s3_access_key_id = os.environ['S3_DELIVERY_AWS_ACCESS_KEY_ID']
s3_secret_access_key = os.environ['S3_DELIVERY_AWS_SECRET_ACCESS_KEY']
s3_endpoint = os.environ['S3_DELIVERY_ENDPOINT']
s3_region = os.environ['S3_DELIVERY_AWS_REGION']
if not all([s3_access_key_id, s3_secret_access_key, s3_endpoint, s3_region]):
raise ValueError("One or more S3 configuration environment variables are empty (excluding bucket).")
config_source = "Environment Variables"
profile_name = "rusonyx"
except (KeyError, ValueError) as e:
logger.error(f"Having problems reading S3 configuration from environment variables: {e}", exc_info=True)
raise AirflowException("S3 configuration is missing. Could not load from Airflow connection or environment variables.")
if not s3_bucket:
raise AirflowException("S3 bucket name is not specified in DAG parameters.")
s3_destination = f"s3://{s3_bucket}/"
logger.info(f"Starting S3 upload loop. Watching source '{READY_PATH}' for delivery to '{s3_destination}'.")
logger.info(f"Mode: {mode}, Dry Run: {dry_run}, Idle Sleep: {sleep_interval_min} min")
logger.info(f"S3 Config loaded from {config_source}: Endpoint='{s3_endpoint}', Bucket='{s3_bucket}', Region='{s3_region}', Profile='{profile_name}'")
# --- Write credentials to file for s5cmd profile ---
aws_credentials_path = os.path.expanduser("~/.aws/credentials")
aws_config_path = os.path.expanduser("~/.aws/config")
try:
os.makedirs(os.path.dirname(aws_credentials_path), exist_ok=True)
with open(aws_credentials_path, 'w') as f:
f.write(f"[{profile_name}]\n")
f.write(f"aws_access_key_id = {s3_access_key_id}\n")
f.write(f"aws_secret_access_key = {s3_secret_access_key}\n")
logger.info(f"Wrote credentials for profile '{profile_name}' to {aws_credentials_path}")
with open(aws_config_path, 'w') as f:
f.write(f"[profile {profile_name}]\n")
f.write(f"region = {s3_region}\n")
logger.info(f"Wrote config for profile '{profile_name}' to {aws_config_path}")
except Exception as e:
logger.error(f"Failed to write AWS credentials/config file: {e}", exc_info=True)
raise AirflowException(f"Failed to write AWS credentials/config file: {e}")
while True:
logger.info("--- Starting new S3 upload cycle ---")
# --- Dry Run Logic (Non-destructive) ---
if dry_run:
logger.info("[DRY RUN] Checking for completed video batches...")
if not os.path.exists(READY_PATH):
logger.info(f"[DRY RUN] Source directory '{READY_PATH}' does not exist. Nothing to upload.")
else:
now = datetime.now()
wait_minutes = params['batch_completion_wait_min']
cutoff_time = now - timedelta(minutes=wait_minutes)
rounded_minute = (cutoff_time.minute // 10) * 10
cutoff_batch_ts = cutoff_time.strftime('%Y%m%dT%H') + f"{rounded_minute:02d}"
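# Example: now=10:57 with wait_minutes=10 -> cutoff_time=10:47, rounded_minute=40,
# so cutoff_batch_ts ends in "T1040" and every batch folder up to 10:40 is eligible.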
logger.info(f"[DRY RUN] Current time is {now.strftime('%H:%M:%S')}. With a {wait_minutes} min wait, processing batches up to and including '{cutoff_batch_ts}'.")
all_video_dirs_to_process = []
processed_batch_dirs = set()
all_batch_dirs = sorted([d for d in os.listdir(READY_PATH) if os.path.isdir(os.path.join(READY_PATH, d))])
for ts_dir in all_batch_dirs:
if ts_dir > cutoff_batch_ts:
continue
batch_dir_path = os.path.join(READY_PATH, ts_dir)
video_dirs_in_batch = [os.path.join(batch_dir_path, d) for d in os.listdir(batch_dir_path) if os.path.isdir(os.path.join(batch_dir_path, d))]
if video_dirs_in_batch:
all_video_dirs_to_process.extend(video_dirs_in_batch)
processed_batch_dirs.add(batch_dir_path)
else:
logger.info(f"[DRY RUN] Batch directory '{batch_dir_path}' is empty. Would remove it.")
if all_video_dirs_to_process:
logger.info(f"[DRY RUN] Found {len(all_video_dirs_to_process)} total video director(y/ies) in {len(processed_batch_dirs)} batch(es) to process.")
# Construct and log the command that would be run
cmd = [
's5cmd', '--endpoint-url', s3_endpoint, '--log', 'debug', '--no-verify-ssl',
'--use-list-objects-v1', '--profile', profile_name, '--stat',
'--numworkers', str(concurrency), 'run'
]
cmd_str = ' '.join(cmd)
# Construct the commands to be piped
commands_to_pipe = '\n'.join([f"cp \"{dir_path}\" \"{s3_destination}\"" for dir_path in all_video_dirs_to_process])
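# Illustrative piped line (hypothetical video directory name, default bucket 'videos'):
#   cp "/opt/airflow/downloadfiles/videos/ready/20241122T1040/video_abc" "s3://videos/"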
logger.info(f"[DRY RUN] The following command would be executed:\n{cmd_str}")
logger.info(f"[DRY RUN] The following commands would be piped to stdin:\n{commands_to_pipe}")
if mode == 'mv':
logger.info(f"[DRY RUN] Mode is 'mv'. Would delete {len(processed_batch_dirs)} source batch directories after successful upload.")
# Pause briefly in dry-run mode if videos are found to avoid a fast, noisy loop.
dry_run_pause_s = 10
logger.info(f"[DRY RUN] Pausing for {dry_run_pause_s} seconds to prevent rapid re-listing of the same files (this is a short, fixed pause for dry-run only).")
time.sleep(dry_run_pause_s)
continue # Go to the start of the next cycle
else:
logger.info("[DRY RUN] No completed video batches found.")
# If in dry-run and no videos are found, sleep for the main interval.
logger.info(f"[DRY RUN] Sleeping for {sleep_interval_min} minute(s)...")
time.sleep(sleep_interval_sec)
continue
# --- Normal Operation Logic (Destructive) ---
work_done_in_cycle = False
try:
# --- 1. Find all videos to upload from all completed batches ---
if not os.path.exists(READY_PATH):
logger.info(f"Ready directory '{READY_PATH}' does not exist. Nothing to upload.")
else:
now = datetime.now()
wait_minutes = params['batch_completion_wait_min']
cutoff_time = now - timedelta(minutes=wait_minutes)
rounded_minute = (cutoff_time.minute // 10) * 10
cutoff_batch_ts = cutoff_time.strftime('%Y%m%dT%H') + f"{rounded_minute:02d}"
logger.info(f"Current time is {now.strftime('%H:%M:%S')}. With a {wait_minutes} min wait, processing batches up to and including '{cutoff_batch_ts}'.")
all_video_dirs_to_process = []
processed_batch_dirs = set()
all_batch_dirs = sorted([d for d in os.listdir(READY_PATH) if os.path.isdir(os.path.join(READY_PATH, d))])
for ts_dir in all_batch_dirs:
if ts_dir > cutoff_batch_ts:
continue # This batch is not old enough to be processed
batch_dir_path = os.path.join(READY_PATH, ts_dir)
video_dirs_in_batch = [os.path.join(batch_dir_path, d) for d in os.listdir(batch_dir_path) if os.path.isdir(os.path.join(batch_dir_path, d))]
if not video_dirs_in_batch:
logger.info(f"Batch directory '{batch_dir_path}' is empty. Removing it.")
try:
os.rmdir(batch_dir_path)
except OSError as e:
logger.error(f"Could not remove empty batch directory {batch_dir_path}: {e}")
continue # Move to the next batch
all_video_dirs_to_process.extend(video_dirs_in_batch)
processed_batch_dirs.add(batch_dir_path)
# --- 2. Upload All Found Videos in a Single Batch Command ---
if all_video_dirs_to_process:
work_done_in_cycle = True
logger.info(f"Found {len(all_video_dirs_to_process)} total video director(y/ies) in {len(processed_batch_dirs)} batch(es) to upload.")
cmd = [
's5cmd', '--endpoint-url', s3_endpoint, '--log', 'debug', '--no-verify-ssl',
'--use-list-objects-v1', '--profile', profile_name, '--stat',
'--numworkers', str(concurrency), 'run'
]
cmd_str = ' '.join(cmd)
# Construct the commands to be piped to stdin
commands_to_pipe = '\n'.join([f"cp \"{dir_path}\" \"{s3_destination}\"" for dir_path in all_video_dirs_to_process])
logger.info(f"Executing s5cmd batch command:\n{cmd_str}")
logger.info(f"Piping {len(all_video_dirs_to_process)} 'cp' commands to stdin.")
upload_start_time = time.time()
process = subprocess.run(cmd, check=True, capture_output=True, text=True, input=commands_to_pipe)
upload_duration = time.time() - upload_start_time
logger.info(f"s5cmd STDOUT: {process.stdout}")
if process.stderr:
logger.info(f"s5cmd STDERR: {process.stderr}")
logger.info(f"Upload command completed successfully in {upload_duration:.2f} seconds.")
logger.info(f"Successfully copied {len(all_video_dirs_to_process)} director(y/ies) to S3.")
# --- 3. Cleanup ---
if mode == 'mv':
logger.info(f"Mode is 'mv'. Cleaning up {len(processed_batch_dirs)} source batch director(y/ies).")
cleanup_start_time = time.time()
# Create a temporary empty directory to use as a source for rsync deletion
empty_dir_for_rsync = os.path.join(READY_PATH, f"__empty_{int(time.time())}")
os.makedirs(empty_dir_for_rsync, exist_ok=True)
try:
for batch_dir_path in processed_batch_dirs:
try:
# Use rsync with an empty source to efficiently delete the contents of the batch directory
# The trailing slash on both source and destination is important.
rsync_cmd = [
'rsync',
'-a', '--delete',
f'{empty_dir_for_rsync}/',
f'{batch_dir_path}/'
]
subprocess.run(rsync_cmd, check=True, capture_output=True, text=True)
# After the contents are deleted, remove the now-empty directory
os.rmdir(batch_dir_path)
logger.info(f"Successfully removed {batch_dir_path}")
except Exception as cleanup_e:
logger.error(f"Failed to remove directory {batch_dir_path}: {cleanup_e}", exc_info=True)
if isinstance(cleanup_e, subprocess.CalledProcessError):
logger.error(f"rsync STDERR: {cleanup_e.stderr}")
finally:
# Clean up the temporary empty directory
shutil.rmtree(empty_dir_for_rsync)
cleanup_duration = time.time() - cleanup_start_time
logger.info(f"Cleanup complete in {cleanup_duration:.2f} seconds.")
else: # mode == 'cp'
logger.info(f"Mode is 'cp'. Source directories will be left for inspection.")
if not work_done_in_cycle:
logger.info(f"No completed video batches found in '{READY_PATH}'.")
except Exception as e:
logger.error(f"An error occurred during the S3 upload cycle: {e}", exc_info=True)
if isinstance(e, subprocess.CalledProcessError):
logger.error(f"s5cmd STDERR: {e.stderr}")
# On error, we do NOT clean up, to allow for investigation and retries.
# The failed directories will be picked up in the next cycle.
# Treat errors as "no work done" to trigger sleep and prevent fast failure loops
work_done_in_cycle = False
# --- Loop Control ---
if not work_done_in_cycle:
logger.info(f"No work done in this cycle. Sleeping for {sleep_interval_min} minute(s)...")
time.sleep(sleep_interval_sec)
else:
logger.info("Work was completed in this cycle. Checking for more immediately.")
with DAG(
dag_id='ytdlp_s3_uploader',
default_args=DEFAULT_ARGS,
schedule=None,
start_date=days_ago(1),
catchup=False,
tags=['ytdlp', 's3', 'upload'],
doc_md="""### S3 Uploader DAG
1. This DAG creates one dynamic uploader task per worker machine, with a task name that identifies the machine (e.g., `upload_batch_on_dl001`).
2. Ansible updates an Airflow Variable named `s3_worker_hostnames` with a JSON list of all active uploader workers (typically dlXXX machines). Each worker listens to its own queue (e.g., `queue-dl-dl001`).
3. The scheduler reads the variable whenever it re-parses this DAG file (a pause/resume cycle forces a re-parse) and creates one dynamic task per worker. This allows for easy inspection of per-worker logs and status from the Airflow UI.
4. Each dynamic task watches a shared folder (`/opt/airflow/downloadfiles/videos/ready`). Download workers place completed videos into timestamped sub-folders (e.g., `20241122T1050`). The uploader processes these 10-minute batches, copying them to S3 with `s5cmd` and then deleting the source directories. This design avoids race conditions and improves performance.
#### Why use 10-minute batch folders?
While an `mv` command (atomic on the same filesystem) is sufficient to ensure a single video directory is complete when it appears in the `ready` folder, the batching system solves higher-level concurrency and efficiency problems in a high-throughput environment.
- **Concurrency Management**: The uploader needs to process a discrete *set* of videos. By working on batches from a *previous* time window (e.g., uploading the `10:40` batch after `10:50`), it guarantees that no new files will be added to that batch while it's being processed. This creates a clean, reliable unit of work and prevents the uploader from missing videos that are moved in while it's compiling its list.
- **Bulk Operation Efficiency**: It is far more efficient to upload hundreds of videos in a single bulk command than one by one. The batching system allows videos to accumulate, and the uploader sends them all to S3 in one highly optimized `s5cmd run` command. Similarly, after a successful upload, the uploader can delete the single parent batch directory, which is much faster than deleting hundreds of individual video folders.
- **Continuous Operation**: The uploader task is a long-running loop. If processing a batch takes longer than 10 minutes (e.g., due to a large volume of videos or slow network), the uploader will continue working on that batch until it is complete. It only sleeps when it has processed all available completed batches and is waiting for new ones to become ready.
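An illustrative `ready` layout (hypothetical video directory names; the path is this DAG's default):
```text
/opt/airflow/downloadfiles/videos/ready/
├── 20241122T1040/   <- closed 10-minute batch, eligible for upload
│   ├── video_abc/
│   └── video_def/
└── 20241122T1050/   <- current window, still receiving videos
```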
#### Cleanup Method: `rsync` vs `shutil.rmtree`
The cleanup process uses the `rsync` empty-folder trick to delete the contents of the batch directory before removing the directory itself. This is a deliberate performance optimization. The command is effectively: `rsync -a --delete /path/to/empty/ /path/to/delete/`.
- Python's `shutil.rmtree` can be slow as it makes an individual `os.remove()` system call for every file.
- The `rsync` method is a well-known and highly efficient alternative for this scenario, as `rsync` is a mature C program optimized for these operations. More details on this performance difference can be found here: https://stackoverflow.com/questions/5470939/why-is-shutil-rmtree-so-slow
""",
params={
'mode': Param(
'mv', type="string", enum=['cp', 'mv'], title="Operation Mode",
description="`mv` (move): After a successful upload, the temporary batch directory is deleted. This is the standard behavior. `cp` (copy): The temporary batch directory is left intact for debugging; it will be cleaned up on the next run."
),
'dry_run': Param(
True, type="boolean", title="Dry Run",
description="If True, the DAG will perform all steps except the actual upload and cleanup. `s5cmd` will be run with `--dry-run`, and the final directory removal will be skipped. Log messages will indicate what would have happened."
),
'concurrency': Param(10, type="integer", title="s5cmd Concurrency"),
'sleep_if_no_videos_min': Param(5, type="integer", title="Sleep if Idle (minutes)", description="How many minutes the task should sleep if no videos are found to upload. This should be less than any external timeout (e.g., Celery's worker_proc_timeout)."),
'batch_completion_wait_min': Param(0, type="integer", title="Batch Completion Wait (minutes)", description="How many minutes to wait after a 10-minute batch window closes before considering it for upload. Default is 0, which processes the current batch immediately. A value of 10 restores the old behavior of waiting for the next 10-minute window."),
's3_conn_id': Param('s3_delivery_connection', type="string", title="S3 Connection ID", description="The Airflow connection ID for the S3-compatible storage. If this connection is invalid or missing, the task will fall back to environment variables."),
's3_bucket_name': Param(
'videos',
type="string",
title="S3 Bucket Name",
description="The name of the S3 bucket to upload to. Common values are 'videos' or 'videos-prod'."
),
}
) as dag:
# Dynamically create one task per S3 worker hostname
# IMPORTANT: The tasks are created when this DAG file is parsed by the Airflow Scheduler.
# If you add/change the 's3_worker_hostnames' Airflow Variable, you may need to
# wait a few minutes for the scheduler to re-parse the file and update the tasks.
# Forcing a re-parse can be done by pausing and un-pausing the DAG in the UI.
s3_worker_hostnames = [] # Initialize to be safe
try:
# The variable should be a JSON list of strings, e.g., ["s3-001", "s3-002"]
s3_worker_hostnames = Variable.get("s3_worker_hostnames", deserialize_json=True, default_var=[])
logger.info(f"DAG 'ytdlp_s3_uploader' successfully loaded s3_worker_hostnames variable. Value: {s3_worker_hostnames}")
if not isinstance(s3_worker_hostnames, list):
logger.error(f"Airflow Variable 's3_worker_hostnames' is not a valid JSON list. Value: {s3_worker_hostnames}")
s3_worker_hostnames = [] # Reset to empty to prevent errors
except Exception as e:
logger.error(
f"Could not read or parse Airflow Variable 's3_worker_hostnames'. "
f"Please create it in the Airflow UI as a JSON list of your S3 worker hostnames (e.g., [\"s3-001\"]). "
f"No S3 worker tasks will be created. Error: {e}",
exc_info=True
)
s3_worker_hostnames = []
@task(task_id='check_s3_worker_configuration')
def check_s3_worker_configuration_callable():
"""Logs the current value of the s3_worker_hostnames variable at runtime for debugging."""
logger.info("--- S3 Worker Configuration Check (at runtime) ---")
try:
hostnames = Variable.get("s3_worker_hostnames", deserialize_json=True, default_var=None)
if hostnames is None:
logger.error("Airflow Variable 's3_worker_hostnames' is not defined.")
logger.info("Please create it in the Airflow UI (Admin -> Variables) as a JSON list of strings, e.g., [\"s3-worker-01\"]")
elif not isinstance(hostnames, list):
logger.error(f"Airflow Variable 's3_worker_hostnames' is not a valid JSON list. Current value: {hostnames}")
elif not hostnames:
logger.warning("Airflow Variable 's3_worker_hostnames' is defined but is an empty list []. No worker tasks will be run.")
else:
logger.info(f"Successfully read 's3_worker_hostnames'. It contains {len(hostnames)} worker(s): {hostnames}")
logger.info("If you see this task but no worker tasks in the UI, it means the DAG did not find these workers when it was parsed by the scheduler.")
logger.info("This can happen due to caching. Please wait a few minutes for the scheduler to re-parse the DAG file, or pause/un-pause the DAG.")
except Exception as e:
logger.error(f"An error occurred while trying to read the 's3_worker_hostnames' variable at runtime: {e}", exc_info=True)
logger.info("--- End of Configuration Check ---")
check_s3_worker_configuration_task = check_s3_worker_configuration_callable()
check_s3_worker_configuration_task.doc_md = """
### S3 Worker Configuration Check
This task runs at the start of every DAG run to check the `s3_worker_hostnames` Airflow Variable.
The dynamic worker tasks are created based on this variable *at the time the DAG is parsed by the scheduler*.
**Check the logs for this task to see the current value of the variable as read at runtime.** This can help diagnose why worker tasks may not have been created.
If the logs show the variable is correct but you don't see the worker tasks in the UI, you may need to wait for the scheduler to re-parse the DAG file. You can force this by pausing and un-pausing the DAG.
"""
if s3_worker_hostnames:
worker_tasks = []
for hostname in s3_worker_hostnames:
# Sanitize hostname for task_id
task_id_hostname = hostname.replace('.', '_')
# Create a task for each worker, pinned to its specific queue
upload_task = task(
task_id=f'upload_batch_on_{task_id_hostname}',
queue=f'queue-s3-{hostname}',
execution_timeout=timedelta(days=1),
)(run_s3_upload_batch)()
worker_tasks.append(upload_task)
check_s3_worker_configuration_task >> worker_tasks

View File

@ -1,287 +0,0 @@
#!/usr/bin/env python3
import os
import sys
import json
import re
try:
from jinja2 import Environment, FileSystemLoader
except ImportError:
print("FATAL: jinja2 is not installed. Please run 'pip install jinja2'.", file=sys.stderr)
sys.exit(1)
import logging
import ipaddress
from typing import Optional
# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
def is_ip_address(address: str) -> bool:
"""Checks if a given string is a valid IP address (IPv4 or IPv6)."""
if not address:
return False
try:
ipaddress.ip_address(address)
return True
except ValueError:
return False
def load_dotenv(dotenv_path):
"""
Loads environment variables from a .env file.
Does not override existing environment variables from the system.
"""
if not os.path.exists(dotenv_path):
logging.warning(f".env file not found at {dotenv_path}. Using system environment variables or defaults.")
return False
try:
with open(dotenv_path) as f:
for line in f:
line = line.strip()
if line and not line.startswith('#') and '=' in line:
key, value = line.split('=', 1)
key = key.strip()
value = value.strip()
# Remove surrounding quotes which are common in .env files
# Handle both single and double quotes
if (value.startswith('"') and value.endswith('"')) or \
(value.startswith("'") and value.endswith("'")):
value = value[1:-1]
# os.environ only takes strings
value = str(value)
if key not in os.environ:
os.environ[key] = value
logging.info(f"Successfully loaded variables from {dotenv_path}")
return True
except Exception as e:
logging.error(f"Failed to read or parse {dotenv_path}: {e}")
return False
def _get_port_from_proxy_url(url: str) -> Optional[str]:
"""Extracts the port from a proxy URL string."""
if not url or not isinstance(url, str):
return None
match = re.search(r':(\d+)$', url.strip())
return match.group(1) if match else None
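# Example: _get_port_from_proxy_url("socks5://user:pass@10.0.0.1:1080") -> "1080";
# a URL without a trailing numeric port (e.g. "http://proxy") returns None.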
def expand_env_vars(value: str) -> str:
"""
Expands environment variables in a string, including default values.
Supports ${VAR} and ${VAR:-default}.
"""
if not isinstance(value, str):
return value
# Regex to find ${VAR:-default} or ${VAR}
pattern = re.compile(r'\$\{(?P<var>\w+)(?::-(?P<default>.*?))?\}')
def replacer(match):
var_name = match.group('var')
default_value = match.group('default')
# Get value from os.environ, or use default, or empty string
return os.getenv(var_name, default_value if default_value is not None else '')
return pattern.sub(replacer, value)
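# Examples (hypothetical variable names):
#   expand_env_vars("socks5://${PROXY_HOST:-127.0.0.1}:${PROXY_PORT:-1080}")
#     -> "socks5://127.0.0.1:1080" when neither variable is set,
#     -> "socks5://10.0.0.5:1080"  when only PROXY_HOST=10.0.0.5 is exported.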
def generate_configs():
"""
Generates envoy.yaml, docker-compose.camoufox.yaml, and camoufox_endpoints.json
from Jinja2 templates and environment variables.
"""
try:
# --- Setup Paths ---
# The script runs from /app. Configs and templates are in /app/configs.
project_root = os.path.dirname(os.path.abspath(__file__)) # This will be /app
configs_dir = os.path.join(project_root, 'configs')
# Load .env from the project root ONLY - no fallback
dotenv_path = os.path.join(project_root, '.env')
logging.info(f"Looking for .env file at: {dotenv_path}")
if os.path.exists(dotenv_path):
if load_dotenv(dotenv_path):
logging.info(f"Using .env file from: {dotenv_path}")
else:
logging.error(f"Failed to load .env file from: {dotenv_path}")
sys.exit(1)
else:
logging.warning(f".env file not found at {dotenv_path}. Using system environment variables or defaults.")
# --- Common Configuration ---
ytdlp_workers_str = os.getenv('YTDLP_WORKERS', '3').strip()
try:
# Handle empty string case by defaulting to 3, otherwise convert to int.
worker_count = int(ytdlp_workers_str) if ytdlp_workers_str else 3
except (ValueError, TypeError):
logging.warning(f"Invalid value for YTDLP_WORKERS: '{ytdlp_workers_str}'. Defaulting to 3.")
worker_count = 3
if worker_count == 0:
worker_count = os.cpu_count() or 1
logging.info(f"YTDLP_WORKERS is 0, auto-detected {worker_count} CPU cores for worker and camoufox config.")
# The templates are in the 'configs' directory.
env = Environment(loader=FileSystemLoader(configs_dir), trim_blocks=True, lstrip_blocks=True)
# Make the helper function available to Jinja2 templates
env.globals['_get_port_from_proxy_url'] = _get_port_from_proxy_url
# Get service role from environment to determine what to generate
# Ensure we strip any remaining quotes that might have slipped through
service_role = os.getenv('service_role', 'management')
# Additional stripping of quotes for robustness
if (service_role.startswith('"') and service_role.endswith('"')) or \
(service_role.startswith("'") and service_role.endswith("'")):
service_role = service_role[1:-1]
logging.info(f"Service role for generation: '{service_role}'")
# --- Camoufox Configuration (only for worker/all-in-one roles) ---
logging.info("--- Camoufox (Remote Browser) Configuration ---")
camoufox_proxies = []
expanded_camoufox_proxies_str = ""
if service_role != 'management':
logging.info("--- Generating Camoufox (Remote Browser) Configuration ---")
camoufox_proxies_str = os.getenv('CAMOUFOX_PROXIES')
if not camoufox_proxies_str:
logging.warning("CAMOUFOX_PROXIES environment variable not set. No camoufox instances will be generated.")
else:
# Expand environment variables within the string before splitting
expanded_camoufox_proxies_str = expand_env_vars(camoufox_proxies_str)
logging.info(f"Expanded CAMOUFOX_PROXIES from '{camoufox_proxies_str}' to '{expanded_camoufox_proxies_str}'")
camoufox_proxies = [{'url': p.strip()} for p in expanded_camoufox_proxies_str.split(',') if p.strip()]
logging.info(f"Found {len(camoufox_proxies)} proxy/proxies for Camoufox.")
logging.info(f"Each Camoufox instance will support {worker_count} concurrent browser sessions.")
logging.info(f"Total browser sessions supported on this worker: {len(camoufox_proxies) * worker_count}")
vnc_password = os.getenv('VNC_PASSWORD', 'supersecret')
base_vnc_port = int(os.getenv('CAMOUFOX_BASE_VNC_PORT', 5901))
camoufox_port = int(os.getenv('CAMOUFOX_PORT', 12345))
camoufox_backend_prefix = os.getenv('CAMOUFOX_BACKEND_PREFIX', 'camoufox-')
# --- Generate docker-compose.camoufox.yaml ---
compose_template = env.get_template('docker-compose.camoufox.yaml.j2')
compose_output_file = os.path.join(configs_dir, 'docker-compose.camoufox.yaml')
camoufox_config_data = {
'camoufox_proxies': camoufox_proxies,
'vnc_password': vnc_password,
'camoufox_port': camoufox_port,
'worker_count': worker_count,
}
rendered_compose_config = compose_template.render(camoufox_config_data)
with open(compose_output_file, 'w') as f:
f.write(rendered_compose_config)
logging.info(f"Successfully generated {compose_output_file} with {len(camoufox_proxies)} camoufox service(s).")
logging.info("This docker-compose file defines the remote browser services, one for each proxy.")
logging.info("----------------------------------------------------------")
# --- Generate camoufox_endpoints.json ---
endpoints_map = {}
for i, proxy in enumerate(camoufox_proxies):
proxy_port = _get_port_from_proxy_url(proxy['url'])
if proxy_port:
# Use the correct container name pattern that matches the docker-compose template
# The container name in the template is: ytdlp-ops-camoufox-{{ proxy_port }}-{{ loop.index }}-1
container_name = f"ytdlp-ops-camoufox-{proxy_port}-{i+1}-1"
container_base_port = camoufox_port + i * worker_count
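# Each container gets a contiguous block of worker_count ports, e.g. with the defaults
# camoufox_port=12345 and worker_count=3: proxy 0 -> 12345-12347, proxy 1 -> 12348-12350.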
endpoints = []
for j in range(worker_count):
port = container_base_port + j
endpoints.append(f"ws://{container_name}:{port}/mypath")
endpoints_map[proxy_port] = {
"ws_endpoints": endpoints
}
else:
logging.warning(f"Could not extract port from proxy URL: {proxy['url']}. Skipping for endpoint map.")
endpoints_data = {"endpoints": endpoints_map}
# The camoufox directory is at the root of the project context, not under 'airflow'.
# camoufox_dir = os.path.join(project_root, 'camoufox')
# os.makedirs(camoufox_dir, exist_ok=True)
endpoints_output_file = os.path.join(configs_dir, 'camoufox_endpoints.json')
with open(endpoints_output_file, 'w') as f:
json.dump(endpoints_data, f, indent=2)
logging.info(f"Successfully generated {endpoints_output_file} with {len(endpoints_map)} port-keyed endpoint(s).")
logging.info("This file maps each proxy to a list of WebSocket endpoints for Camoufox.")
logging.info("The token_generator uses this map to connect to the correct remote browser.")
else:
logging.info("Skipping Camoufox configuration generation.")
# --- Generate docker-compose-ytdlp-ops.yaml ---
ytdlp_ops_template = env.get_template('docker-compose-ytdlp-ops.yaml.j2')
ytdlp_ops_output_file = os.path.join(configs_dir, 'docker-compose-ytdlp-ops.yaml')
# Combine all proxies (camoufox and general) into a single string for the server.
all_proxies = []
# Track if we have any explicit proxy configuration
has_explicit_proxies = False
# Add camoufox proxies if they exist
if expanded_camoufox_proxies_str:
camoufox_proxy_list = [p.strip() for p in expanded_camoufox_proxies_str.split(',') if p.strip()]
all_proxies.extend(camoufox_proxy_list)
if camoufox_proxy_list:
has_explicit_proxies = True
logging.info(f"Added {len(camoufox_proxy_list)} camoufox proxies: {camoufox_proxy_list}")
combined_proxies_str = ",".join(all_proxies)
logging.info(f"Combined proxy string for ytdlp-ops-service: '{combined_proxies_str}'")
ytdlp_ops_config_data = {
'combined_proxies_str': combined_proxies_str,
'service_role': service_role,
'camoufox_proxies': camoufox_proxies,
}
rendered_ytdlp_ops_config = ytdlp_ops_template.render(ytdlp_ops_config_data)
with open(ytdlp_ops_output_file, 'w') as f:
f.write(rendered_ytdlp_ops_config)
logging.info(f"Successfully generated {ytdlp_ops_output_file}")
# --- Envoy Configuration ---
envoy_port = int(os.getenv('ENVOY_PORT', 9080))
base_port = int(os.getenv('YTDLP_BASE_PORT', 9090))
envoy_admin_port = int(os.getenv('ENVOY_ADMIN_PORT', 9901))
# For local dev, ENVOY_BACKEND_ADDRESS is set to 127.0.0.1. For Docker, it's unset, so we default to the service name.
backend_address = os.getenv('ENVOY_BACKEND_ADDRESS', 'ytdlp-ops-service')
# Use STATIC for IP addresses, and STRICT_DNS for anything else (hostnames).
envoy_cluster_type = 'STATIC' if is_ip_address(backend_address) else 'STRICT_DNS'
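# Example: backend_address='127.0.0.1' -> STATIC (fixed IP endpoint);
# backend_address='ytdlp-ops-service' -> STRICT_DNS, so Envoy re-resolves the
# Docker service name via DNS.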
# --- Generate envoy.yaml ---
envoy_template = env.get_template('envoy.yaml.j2')
# Output envoy.yaml to the configs directory, where other generated files are.
envoy_output_file = os.path.join(configs_dir, 'envoy.yaml')
logging.info("--- Generating Envoy Configuration ---")
logging.info(f"Envoy will listen on public port: {envoy_port}")
logging.info(f"It will load balance requests across {worker_count} internal gRPC endpoints of the 'ytdlp-ops-service'.")
logging.info(f"The backend service is located at: '{backend_address}' (type: {envoy_cluster_type})")
envoy_config_data = {
'envoy_port': envoy_port,
'worker_count': worker_count,
'base_port': base_port,
'envoy_admin_port': envoy_admin_port,
'backend_address': backend_address,
'envoy_cluster_type': envoy_cluster_type,
}
rendered_envoy_config = envoy_template.render(envoy_config_data)
with open(envoy_output_file, 'w') as f:
f.write(rendered_envoy_config)
logging.info(f"Successfully generated {envoy_output_file}")
logging.info("--- Configuration Generation Complete ---")
except Exception as e:
logging.error(f"Failed to generate configurations: {e}", exc_info=True)
sys.exit(1)
if __name__ == '__main__':
generate_configs()

View File

@ -1,130 +0,0 @@
[
"https://www.youtube.com/watch?v=EH81MQiDyFs",
"https://www.youtube.com/watch?v=YwC2VtRFBPs",
"https://www.youtube.com/watch?v=keSo7x42Xis",
"https://www.youtube.com/watch?v=K6OlxDi1cws",
"https://www.youtube.com/watch?v=eIYjjvR_k6w",
"https://www.youtube.com/watch?v=CprKmvtw-TE",
"https://www.youtube.com/watch?v=4vB1bDJ8dvA",
"https://www.youtube.com/watch?v=kJcvr693bjI",
"https://www.youtube.com/watch?v=NPQz5Hn6XKM",
"https://www.youtube.com/watch?v=DCo-7dCw2OY",
"https://www.youtube.com/watch?v=Q0996ndUMxU",
"https://www.youtube.com/watch?v=IxbFckR3yIc",
"https://www.youtube.com/watch?v=xt5QQgEqVzs",
"https://www.youtube.com/watch?v=L9pzC26i3BU",
"https://www.youtube.com/watch?v=YlkzSAqV0jE",
"https://www.youtube.com/watch?v=v9ZxQw3NQA8",
"https://www.youtube.com/watch?v=EB_eBvRsGqM",
"https://www.youtube.com/watch?v=xJ4PHYU3oY4",
"https://www.youtube.com/watch?v=kHf-eCb7q2I",
"https://www.youtube.com/watch?v=q3hNcqo5qdY",
"https://www.youtube.com/watch?v=097ujVv38LU",
"https://www.youtube.com/watch?v=VYnzo8xa_dw",
"https://www.youtube.com/watch?v=2y690c69yb4",
"https://www.youtube.com/watch?v=R_JiPanFbEs",
"https://www.youtube.com/watch?v=_VF9sk-IjOE",
"https://www.youtube.com/watch?v=01yS1dPQsZc",
"https://www.youtube.com/watch?v=0xW7slvHwiU",
"https://www.youtube.com/watch?v=qeeC7i5HTpU",
"https://www.youtube.com/watch?v=McvQBwZ_MfY",
"https://www.youtube.com/watch?v=ssQ456jGiKs",
"https://www.youtube.com/watch?v=Xz84juOdgVY",
"https://www.youtube.com/watch?v=6jw_rFi75YA",
"https://www.youtube.com/watch?v=XVtwjyQESLI",
"https://www.youtube.com/watch?v=GCuRuMZG2CU",
"https://www.youtube.com/watch?v=SLGT3nSHjKY",
"https://www.youtube.com/watch?v=KfXZckcDnwc",
"https://www.youtube.com/watch?v=krlijOR_314",
"https://www.youtube.com/watch?v=c5TIIXZTWYU",
"https://www.youtube.com/watch?v=xbFlak2wDPU",
"https://www.youtube.com/watch?v=ESiCVT43y4M",
"https://www.youtube.com/watch?v=9K-8HK9NGPo",
"https://www.youtube.com/watch?v=AXfq7U9EHHY",
"https://www.youtube.com/watch?v=oWGeLLFTwhk",
"https://www.youtube.com/watch?v=dGTid_QDq3M",
"https://www.youtube.com/watch?v=s2GdkHY7e74",
"https://www.youtube.com/watch?v=EYRnywNSHfM",
"https://www.youtube.com/watch?v=8QcanJptlFs",
"https://www.youtube.com/watch?v=8_B0MrjTDqw",
"https://www.youtube.com/watch?v=2LealZ7TTlY",
"https://www.youtube.com/watch?v=dtBosQzUqDs",
"https://www.youtube.com/watch?v=PuQwOWigWVA",
"https://www.youtube.com/watch?v=LOlVXM27ap8",
"https://www.youtube.com/watch?v=JtgKbx6nm7I",
"https://www.youtube.com/watch?v=owFxod3Pe70",
"https://www.youtube.com/watch?v=dmBpn2ZjNW4",
"https://www.youtube.com/watch?v=7Do8GAKRFsw",
"https://www.youtube.com/watch?v=7oysSz1unf0",
"https://www.youtube.com/watch?v=Z4Wn7qrR0nU",
"https://www.youtube.com/watch?v=wvgwnY0x6wo",
"https://www.youtube.com/watch?v=qUGZg985hqA",
"https://www.youtube.com/watch?v=pWvyocl7dhI",
"https://www.youtube.com/watch?v=BMzSz3aiBFU",
"https://www.youtube.com/watch?v=mgOGXUctR8U",
"https://www.youtube.com/watch?v=1rIhg0Z-Ylo",
"https://www.youtube.com/watch?v=K4hj2aQ8vCM",
"https://www.youtube.com/watch?v=jzMt0J7eohg",
"https://www.youtube.com/watch?v=LeYfSHB1zZw",
"https://www.youtube.com/watch?v=hBS3QbVFHQk",
"https://www.youtube.com/watch?v=2mBdZZm8Syo",
"https://www.youtube.com/watch?v=zaZE_AHeRIc",
"https://www.youtube.com/watch?v=DBod4x5OZsM",
"https://www.youtube.com/watch?v=lNYnMLhMMNc",
"https://www.youtube.com/watch?v=Feo_5sWRjY0",
"https://www.youtube.com/watch?v=tYWLm75nibA",
"https://www.youtube.com/watch?v=xx1HYybZDH0",
"https://www.youtube.com/watch?v=EyIY0BKYIrA",
"https://www.youtube.com/watch?v=BfAoe4GbKt4",
"https://www.youtube.com/watch?v=qmizxZdHB7A",
"https://www.youtube.com/watch?v=7K73KytWJR4",
"https://www.youtube.com/watch?v=hPyi-EnO_Dw",
"https://www.youtube.com/watch?v=M4Gp7eMj2IQ",
"https://www.youtube.com/watch?v=rPOOnshXEOk",
"https://www.youtube.com/watch?v=fmOB4FNj4MM",
"https://www.youtube.com/watch?v=UgwjPBJ-iyA",
"https://www.youtube.com/watch?v=tInqj66fkxc",
"https://www.youtube.com/watch?v=tok-jMC1V0E",
"https://www.youtube.com/watch?v=2IuaROF1pMs",
"https://www.youtube.com/watch?v=Ak5JpqBA5No",
"https://www.youtube.com/watch?v=A_yH2vzq7CY",
"https://www.youtube.com/watch?v=4nzsI5fxdlA",
"https://www.youtube.com/watch?v=1FfwsJInFOM",
"https://www.youtube.com/watch?v=uRjJbkgf_3I",
"https://www.youtube.com/watch?v=HMjduefTG4E",
"https://www.youtube.com/watch?v=Cw9hUSFppnw",
"https://www.youtube.com/watch?v=vrobF1L3BJ8",
"https://www.youtube.com/watch?v=tIiVUsKPCEY",
"https://www.youtube.com/watch?v=7qprIRCTX6A",
"https://www.youtube.com/watch?v=HREKaNF7TT8",
"https://www.youtube.com/watch?v=xlIgqZ1sW5A",
"https://www.youtube.com/watch?v=6_uA0osze4w",
"https://www.youtube.com/watch?v=jarbK6tvflw",
"https://www.youtube.com/watch?v=RWmeSE312FA",
"https://www.youtube.com/watch?v=hhI7lAonIrU",
"https://www.youtube.com/watch?v=4k23-uYPObU",
"https://www.youtube.com/watch?v=rIxiOD0dA3w",
"https://www.youtube.com/watch?v=Ry-_mpn3Pe8",
"https://www.youtube.com/watch?v=m-H4fOb1o2Q",
"https://www.youtube.com/watch?v=NhGxI_tgSwI",
"https://www.youtube.com/watch?v=VTslivtVfAI",
"https://www.youtube.com/watch?v=huSCDYe04Fk",
"https://www.youtube.com/watch?v=LF82qA5a05E",
"https://www.youtube.com/watch?v=kHaHsbFg28M",
"https://www.youtube.com/watch?v=NKDFri_kL94",
"https://www.youtube.com/watch?v=BPIlpDQwWqA",
"https://www.youtube.com/watch?v=UTCAshkc8qk",
"https://www.youtube.com/watch?v=EkUtGGKaX_I",
"https://www.youtube.com/watch?v=tuLyfqdpYxU",
"https://www.youtube.com/watch?v=snxBL-8IGCA",
"https://www.youtube.com/watch?v=Mo9m8EdR8_Y",
"https://www.youtube.com/watch?v=5nBipdnGAbU",
"https://www.youtube.com/watch?v=sLs6vp5TH_w",
"https://www.youtube.com/watch?v=OYM5PrQtT34",
"https://www.youtube.com/watch?v=FX3wjgGWn1s",
"https://www.youtube.com/watch?v=1FfwsJInFOM",
"https://www.youtube.com/watch?v=osWMBc6h5Rs",
"https://www.youtube.com/watch?v=aojc0sLBm5Y",
"https://www.youtube.com/watch?v=akf_6pAx024",
"https://www.youtube.com/watch?v=SgSkvKpAxMQ"
]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,101 +0,0 @@
[
"https://www.youtube.com/watch?v=Y0WQdA4srb0",
"https://www.youtube.com/watch?v=uFyraEVj848",
"https://www.youtube.com/watch?v=VxPx0Qjgbos",
"https://www.youtube.com/watch?v=FuKOn-_rfeE",
"https://www.youtube.com/watch?v=mn9t5eOs30c",
"https://www.youtube.com/watch?v=7YOE0GEUrVo",
"https://www.youtube.com/watch?v=4L8kv6qVTfY",
"https://www.youtube.com/watch?v=7WSEWOft4Y4",
"https://www.youtube.com/watch?v=bmDsn0_1-f0",
"https://www.youtube.com/watch?v=IILtHOqYndA",
"https://www.youtube.com/watch?v=tyGqbWBjSWE",
"https://www.youtube.com/watch?v=3tgZTpkZQkQ",
"https://www.youtube.com/watch?v=JJH-CkjiQWI",
"https://www.youtube.com/watch?v=4hLWn4hHKNM",
"https://www.youtube.com/watch?v=IFwr6QGxoJo",
"https://www.youtube.com/watch?v=Fj-NKUoMbmI",
"https://www.youtube.com/watch?v=zvoxV3wLjFE",
"https://www.youtube.com/watch?v=EcC4CIyUI2Q",
"https://www.youtube.com/watch?v=jtjiTuTKCT4",
"https://www.youtube.com/watch?v=am28qDtXLLU",
"https://www.youtube.com/watch?v=WNVW86YBkMg",
"https://www.youtube.com/watch?v=kG51upknRCw",
"https://www.youtube.com/watch?v=E-HpdWghf2U",
"https://www.youtube.com/watch?v=GuaAOc9ZssE",
"https://www.youtube.com/watch?v=r1JkW0zfPOA",
"https://www.youtube.com/watch?v=OBYmpN8uAag",
"https://www.youtube.com/watch?v=0HuGAMKHXD4",
"https://www.youtube.com/watch?v=eDmdalDaPdU",
"https://www.youtube.com/watch?v=ZjDR1XMd904",
"https://www.youtube.com/watch?v=HGrsrP4idE8",
"https://www.youtube.com/watch?v=l-J_J7YFDYY",
"https://www.youtube.com/watch?v=Kr5rl0935K4",
"https://www.youtube.com/watch?v=KgK4bu9O384",
"https://www.youtube.com/watch?v=BDq3_y4mXYo",
"https://www.youtube.com/watch?v=slRiaDz12m8",
"https://www.youtube.com/watch?v=iX1oWEsHh0A",
"https://www.youtube.com/watch?v=0zJcsxB6-UU",
"https://www.youtube.com/watch?v=NTOokrCHzJA",
"https://www.youtube.com/watch?v=CXYXqQ-VuYo",
"https://www.youtube.com/watch?v=xaxZtPTEraU",
"https://www.youtube.com/watch?v=wX1wNCPZdE8",
"https://www.youtube.com/watch?v=DOt7ckIGN4Y",
"https://www.youtube.com/watch?v=bncasw-Z4Ow",
"https://www.youtube.com/watch?v=nbVWfXlo7kQ",
"https://www.youtube.com/watch?v=Uu6DmhonkEE",
"https://www.youtube.com/watch?v=HGWigeoSMvA",
"https://www.youtube.com/watch?v=rjbLCaC9yFE",
"https://www.youtube.com/watch?v=Uew7f09gW4o",
"https://www.youtube.com/watch?v=uzc-jLt65mY",
"https://www.youtube.com/watch?v=ZX7qnLuAsMU",
"https://www.youtube.com/watch?v=ZlSgDvCP5UI",
"https://www.youtube.com/watch?v=RmGIid7Yctw",
"https://www.youtube.com/watch?v=u9g0_eR5gEk",
"https://www.youtube.com/watch?v=wu9Cw905NUU",
"https://www.youtube.com/watch?v=cNhQVoY5V5Q",
"https://www.youtube.com/watch?v=I63iJNKOb8I",
"https://www.youtube.com/watch?v=3G5ceoSK6jg",
"https://www.youtube.com/watch?v=JF4TbV940PM",
"https://www.youtube.com/watch?v=0yGaVHfmGa0",
"https://www.youtube.com/watch?v=r8cgtI_ZQIY",
"https://www.youtube.com/watch?v=OcG3-r98XEM",
"https://www.youtube.com/watch?v=w7hooOUEMQI",
"https://www.youtube.com/watch?v=yipW8SF5Gxk",
"https://www.youtube.com/watch?v=LH4PqRiuxts",
"https://www.youtube.com/watch?v=IfAsA3ezUqQ",
"https://www.youtube.com/watch?v=5cUg8I0yps4",
"https://www.youtube.com/watch?v=lCea6bQj3eg",
"https://www.youtube.com/watch?v=5Ie0MAv4XCY",
"https://www.youtube.com/watch?v=57eomGPy1PU",
"https://www.youtube.com/watch?v=TEnk3OfU8Gc",
"https://www.youtube.com/watch?v=1uA4xXlDhvE",
"https://www.youtube.com/watch?v=aXF8ijpn4bM",
"https://www.youtube.com/watch?v=3vKmCDomyJ8",
"https://www.youtube.com/watch?v=z7jLEWJ59uY",
"https://www.youtube.com/watch?v=0TTsKnyH6EY",
"https://www.youtube.com/watch?v=PcqA6Y1RfVQ",
"https://www.youtube.com/watch?v=f1Ar3ydryqc",
"https://www.youtube.com/watch?v=N2nLayOIjxM",
"https://www.youtube.com/watch?v=Cziyx9qaYVM",
"https://www.youtube.com/watch?v=RTJCbIJ294w",
"https://www.youtube.com/watch?v=GC1FB-bZTvA",
"https://www.youtube.com/watch?v=kKYv5uLBSFk",
"https://www.youtube.com/watch?v=jfQHlnNeKzw",
"https://www.youtube.com/watch?v=J7e8PRu9kSU",
"https://www.youtube.com/watch?v=UoHf6pdy0oE",
"https://www.youtube.com/watch?v=JOwNcwSupXs",
"https://www.youtube.com/watch?v=gxwk-bb78-U",
"https://www.youtube.com/watch?v=_lrDwiK544A",
"https://www.youtube.com/watch?v=6i8BVQ9GE1g",
"https://www.youtube.com/watch?v=8c_l9D1qyKY",
"https://www.youtube.com/watch?v=KFCr5BdjFB8",
"https://www.youtube.com/watch?v=orEvHn7lL4A",
"https://www.youtube.com/watch?v=6BhGJxrp8P4",
"https://www.youtube.com/watch?v=n2t8beFnhyA",
"https://www.youtube.com/watch?v=GJzZ2-f_k30",
"https://www.youtube.com/watch?v=oId850O591s",
"https://www.youtube.com/watch?v=f2XmdQdwppw",
"https://www.youtube.com/watch?v=iWM_oe-JY_k",
"https://www.youtube.com/watch?v=GHEDWE9LjRY"
]

View File

@ -1,236 +0,0 @@
https://www.youtube.com/watch?v=W_ovLaoQ5pI
https://www.youtube.com/watch?v=KnDAl7BqOq0
https://www.youtube.com/watch?v=Ixwy1_Y1wPE
https://www.youtube.com/watch?v=oYHN2vgcpls
https://www.youtube.com/watch?v=WwYtzky4mjs
https://www.youtube.com/watch?v=dc7-aDyAeL4
https://www.youtube.com/watch?v=XQDrjYTFVfE
https://www.youtube.com/watch?v=F0s9IJR4CN4
https://www.youtube.com/watch?v=qrZ7RVXHdzo
https://www.youtube.com/watch?v=gwOjcuexMWU
https://www.youtube.com/watch?v=B4gVYr_9XP4
https://www.youtube.com/watch?v=9bjUH5xViPE
https://www.youtube.com/watch?v=kbVZoqBfjCo
https://www.youtube.com/watch?v=hWXnlqOatS8
https://www.youtube.com/watch?v=0izQuMMfIIo
https://www.youtube.com/watch?v=1x32zwHrg8s
https://www.youtube.com/watch?v=uzU1fxHS2dU
https://www.youtube.com/watch?v=qckyrafr4W4
https://www.youtube.com/watch?v=tuvMl-Gbs1E
https://www.youtube.com/watch?v=uByH48D79KI
https://www.youtube.com/watch?v=qJ9qia29Kwk
https://www.youtube.com/watch?v=17DIjWMYBNs
https://www.youtube.com/watch?v=Ex9z0iIVluU
https://www.youtube.com/watch?v=DJP67SSAM9A
https://www.youtube.com/watch?v=OUe2oNykheg
https://www.youtube.com/watch?v=CgpuKqQl1tU
https://www.youtube.com/watch?v=4RNiyOZexag
https://www.youtube.com/watch?v=d4d8B-axCJU
https://www.youtube.com/watch?v=AMkVydmdLDU
https://www.youtube.com/watch?v=s6D1xf3VCCs
https://www.youtube.com/watch?v=ns1u61Zrzzk
https://www.youtube.com/watch?v=Ysqesg9CQ94
https://www.youtube.com/watch?v=_VvNRtc3K0w
https://www.youtube.com/watch?v=zt-wA67nfJA
https://www.youtube.com/watch?v=bProjpkgXn8
https://www.youtube.com/watch?v=8Gb8ccijtxI
https://www.youtube.com/watch?v=iFJJ_7_SUns
https://www.youtube.com/watch?v=9HBlnMlGSpI
https://www.youtube.com/watch?v=ioTF6x9mhz0
https://www.youtube.com/watch?v=k1pdrHKb3P4
https://www.youtube.com/watch?v=wprhAP6S7MY
https://www.youtube.com/watch?v=WFKKd_o89wI
https://www.youtube.com/watch?v=w59IixVPPUs
https://www.youtube.com/watch?v=9XeYW0l7JKU
https://www.youtube.com/watch?v=HGSIM6WiIMo
https://www.youtube.com/watch?v=SGXZJ8z8QHg
https://www.youtube.com/watch?v=IsqDx71KLyo
https://www.youtube.com/watch?v=NXOHkWf_sTY
https://www.youtube.com/watch?v=ICYe8ZSy0LQ
https://www.youtube.com/watch?v=GMKd0gjPCsU
https://www.youtube.com/watch?v=fbZYCKUCJs4
https://www.youtube.com/watch?v=br1jufv9rW0
https://www.youtube.com/watch?v=Y_flpHI4uEw
https://www.youtube.com/watch?v=FlVTAEpf53s
https://www.youtube.com/watch?v=fN6olULFwTA
https://www.youtube.com/watch?v=EsXZIyx9fmc
https://www.youtube.com/watch?v=s-TQlPdyoiE
https://www.youtube.com/watch?v=GyczQSFPTfQ
https://www.youtube.com/watch?v=ER1qbc1pMzs
https://www.youtube.com/watch?v=ceyVCBX-wGc
https://www.youtube.com/watch?v=BcNvEGIWolk
https://www.youtube.com/watch?v=15idHcwUAfk
https://www.youtube.com/watch?v=WrC6SRasFnU
https://www.youtube.com/watch?v=lBzdu8tLe-I
https://www.youtube.com/watch?v=T7U3BOIwVNc
https://www.youtube.com/watch?v=o9wi6Tt4Z3w
https://www.youtube.com/watch?v=rDKH2dhVE_c
https://www.youtube.com/watch?v=hgzkDHhImoI
https://www.youtube.com/watch?v=3Nz1Vym36ak
https://www.youtube.com/watch?v=TCwcSAZkppc
https://www.youtube.com/watch?v=btS5Kzy_zB0
https://www.youtube.com/watch?v=csimc-rbrhE
https://www.youtube.com/watch?v=TQymjBogMlw
https://www.youtube.com/watch?v=iMkXhj31UKQ
https://www.youtube.com/watch?v=T0Gdsnmffro
https://www.youtube.com/watch?v=2z6HSIypJ_w
https://www.youtube.com/watch?v=oX_3TTyEnKk
https://www.youtube.com/watch?v=89yqiHuBvQQ
https://www.youtube.com/watch?v=eLympVyGQQI
https://www.youtube.com/watch?v=Gy67TZyPn2Q
https://www.youtube.com/watch?v=qw8437Em-3k
https://www.youtube.com/watch?v=z7Y8KiT7bPk
https://www.youtube.com/watch?v=ojJhtn1mEGg
https://www.youtube.com/watch?v=D1Cic7Uc0ns
https://www.youtube.com/watch?v=-63519KiUqM
https://www.youtube.com/watch?v=qJfHfBR20-g
https://www.youtube.com/watch?v=QwbNv-bG9oA
https://www.youtube.com/watch?v=nwfiyKx9x18
https://www.youtube.com/watch?v=L3aPsthTBW8
https://www.youtube.com/watch?v=zjfCX6bpUFg
https://www.youtube.com/watch?v=2hdR_bpvjDM
https://www.youtube.com/watch?v=leoxUrT9DJg
https://www.youtube.com/watch?v=fongJ-r7Uac
https://www.youtube.com/watch?v=vas0e7e8bmI
https://www.youtube.com/watch?v=MRrUq0oqFmo
https://www.youtube.com/watch?v=lgWMCCYXqGg
https://www.youtube.com/watch?v=hwqXFvpujRs
https://www.youtube.com/watch?v=V3ZWVvrmPvw
https://www.youtube.com/watch?v=gP2QkIJaQHE
https://www.youtube.com/watch?v=7U1Q4tscmUU
https://www.youtube.com/watch?v=T5oO9HYyT_8
https://www.youtube.com/watch?v=N7St23woljA
https://www.youtube.com/watch?v=_iWSQ3XD_eQ
https://www.youtube.com/watch?v=ev1urHanjCo
https://www.youtube.com/watch?v=ebvVkT_gHOQ
https://www.youtube.com/watch?v=IUrylOsLD6A
https://www.youtube.com/watch?v=aJ2DWpOhM98
https://www.youtube.com/watch?v=HldN0Atn5LA
https://www.youtube.com/watch?v=f6eY71i7TfI
https://www.youtube.com/watch?v=m1A5aOGYGM8
https://www.youtube.com/watch?v=US0in27JPv4
https://www.youtube.com/watch?v=SWAWTrsXH5E
https://www.youtube.com/watch?v=0K6F8TJowCw
https://www.youtube.com/watch?v=O-uAmbRDCjQ
https://www.youtube.com/watch?v=TdqayW3Yhus
https://www.youtube.com/watch?v=mZ1Gieg2PbU
https://www.youtube.com/watch?v=D7DoRpB_p7g
https://www.youtube.com/watch?v=y2j03DYoC9k
https://www.youtube.com/watch?v=H6UwY_jvIkg
https://www.youtube.com/watch?v=jPHdMovcsno
https://www.youtube.com/watch?v=Ui9ioQhlYB8
https://www.youtube.com/watch?v=16A9rW-bYOw
https://www.youtube.com/watch?v=0HjKLqPZlk8
https://www.youtube.com/watch?v=KFcUjf9pJzE
https://www.youtube.com/watch?v=qslMthxSRWU
https://www.youtube.com/watch?v=Jt7haujk3sk
https://www.youtube.com/watch?v=MJK7NX0E2_4
https://www.youtube.com/watch?v=OMdjNk3aQdk
https://www.youtube.com/watch?v=n4HCQrbYc_w
https://www.youtube.com/watch?v=3gFC-igZPr8
https://www.youtube.com/watch?v=aqS6aduySeo
https://www.youtube.com/watch?v=ylLsucs0PRY
https://www.youtube.com/watch?v=-hIPIMAAk9E
https://www.youtube.com/watch?v=SHjD3xOkWac
https://www.youtube.com/watch?v=FxiWcpW1hOc
https://www.youtube.com/watch?v=s-wpgAK-fzg
https://www.youtube.com/watch?v=82XT7UQbF-w
https://www.youtube.com/watch?v=4bsjF-d9ODc
https://www.youtube.com/watch?v=LPfOH1_9gYU
https://www.youtube.com/watch?v=ITD6zT6SNZo
https://www.youtube.com/watch?v=mBSP343k7Xk
https://www.youtube.com/watch?v=SpXgj9PI1FI
https://www.youtube.com/watch?v=xkiqMGZEYbc
https://www.youtube.com/watch?v=ph2UXTChSsw
https://www.youtube.com/watch?v=Lq0cra_cqLc
https://www.youtube.com/watch?v=W1SS9Yt4PNI
https://www.youtube.com/watch?v=yV5g1sufBVI
https://www.youtube.com/watch?v=y7jNwdmysbo
https://www.youtube.com/watch?v=3i4Q9EgSuA8
https://www.youtube.com/watch?v=-j7C5MfDXrA
https://www.youtube.com/watch?v=BkSglsAO7-w
https://www.youtube.com/watch?v=05dqwprWsnc
https://www.youtube.com/watch?v=b2xwjdv5nxY
https://www.youtube.com/watch?v=06RshyLtUic
https://www.youtube.com/watch?v=Mm1DH0lAtQs
https://www.youtube.com/watch?v=OfxsLW1ZUsk
https://www.youtube.com/watch?v=_AZs4CG7CbA
https://www.youtube.com/watch?v=RzZRssOgH7A
https://www.youtube.com/watch?v=Mqe8ZhqIISI
https://www.youtube.com/watch?v=6w0qYD46Afo
https://www.youtube.com/watch?v=YVtFh0283YU
https://www.youtube.com/watch?v=jCCH685ldpg
https://www.youtube.com/watch?v=Ut1_9Ma9fZg
https://www.youtube.com/watch?v=pjV_fCpJgLc
https://www.youtube.com/watch?v=a4NtRH9sZLk
https://www.youtube.com/watch?v=XjVj9wipu70
https://www.youtube.com/watch?v=CWMhNCPMXeI
https://www.youtube.com/watch?v=MC9YYtWLadQ
https://www.youtube.com/watch?v=7yI1tP5oWQw
https://www.youtube.com/watch?v=wxh9zh-ygig
https://www.youtube.com/watch?v=Ul3JY18tiJ0
https://www.youtube.com/watch?v=suj-r9RCMGY
https://www.youtube.com/watch?v=AbL6ZGOjc_M
https://www.youtube.com/watch?v=VGjHw351GdU
https://www.youtube.com/watch?v=32W16gzB3E8
https://www.youtube.com/watch?v=PTujVtVZD-c
https://www.youtube.com/watch?v=SFbnmsMa_i0
https://www.youtube.com/watch?v=X_FpFtE9mGM
https://www.youtube.com/watch?v=g6xy8KXaxDE
https://www.youtube.com/watch?v=UO6BR3rXpHs
https://www.youtube.com/watch?v=-a14SXc9ERk
https://www.youtube.com/watch?v=Hl73pJhS1Jk
https://www.youtube.com/watch?v=wXgLpByOcos
https://www.youtube.com/watch?v=FH3nQzkKc08
https://www.youtube.com/watch?v=wFYAbEfajd0
https://www.youtube.com/watch?v=zcyM1HzB4OY
https://www.youtube.com/watch?v=cLXZ9INHYyI
https://www.youtube.com/watch?v=Iq7Sjbcw5Ek
https://www.youtube.com/watch?v=kgX5IcFE2HE
https://www.youtube.com/watch?v=Dnq4p0BZ1zA
https://www.youtube.com/watch?v=bitMfdK4mAE
https://www.youtube.com/watch?v=HWqLp-gGOPw
https://www.youtube.com/watch?v=g6DFwFhfqSE
https://www.youtube.com/watch?v=81VXi1v_6Gg
https://www.youtube.com/watch?v=sCf09W7u_as
https://www.youtube.com/watch?v=MIRmwYQ0pnw
https://www.youtube.com/watch?v=dT0PJCDY-WY
https://www.youtube.com/watch?v=Tia6NFKI29c
https://www.youtube.com/watch?v=ZEH4XMI2gi0
https://www.youtube.com/watch?v=JFC-hkuLwz8
https://www.youtube.com/watch?v=xfjVsnGk92M
https://www.youtube.com/watch?v=aeF2hf_R2h0
https://www.youtube.com/watch?v=D_j_k8nWY3g
https://www.youtube.com/watch?v=lvO8Dq1yORA
https://www.youtube.com/watch?v=tvovFzmiF6E
https://www.youtube.com/watch?v=KUsI9cxtJPU
https://www.youtube.com/watch?v=vgLVkrAnBLI
https://www.youtube.com/watch?v=M_ofMDC-FEQ
https://www.youtube.com/watch?v=O4gqva5ROqw
https://www.youtube.com/watch?v=DTgN-m3lAY4
https://www.youtube.com/watch?v=bqgdyYcM4_Q
https://www.youtube.com/watch?v=hNCpvI-d6Fk
https://www.youtube.com/watch?v=va96DIxvE44
https://www.youtube.com/watch?v=ZOwqc5DjkDk
https://www.youtube.com/watch?v=1i6UAXkjy9A
https://www.youtube.com/watch?v=gbaxWdn_Uq0
https://www.youtube.com/watch?v=ygjE8I2k5m8
https://www.youtube.com/watch?v=U6TUDhJ4KF4
https://www.youtube.com/watch?v=-OSZqBAF-ck
https://www.youtube.com/watch?v=MXTnUTdBLaU
https://www.youtube.com/watch?v=38JAI9MIprU
https://www.youtube.com/watch?v=VE7TqzCQypI
https://www.youtube.com/watch?v=TdkXjlJiQq4
https://www.youtube.com/watch?v=TTQ6N9GNeGo
https://www.youtube.com/watch?v=JnTv_K8ah0E
https://www.youtube.com/watch?v=kNI1tYegCZY
https://www.youtube.com/watch?v=Tc3A1vJf4Rg
https://www.youtube.com/watch?v=xBvINP1ddSo
https://www.youtube.com/watch?v=p-Z-cx-43eA
https://www.youtube.com/watch?v=0sqjAko-vgI
https://www.youtube.com/watch?v=W0i1_RaLrho
https://www.youtube.com/watch?v=2G3RFYBcHds
https://www.youtube.com/watch?v=Yirxj1qPBnU
https://www.youtube.com/watch?v=_e9JfXsM9ks
https://www.youtube.com/watch?v=V-UlKut8NbU
https://www.youtube.com/watch?v=C7D19AiYG4c

View File

@@ -1,237 +0,0 @@
https://www.youtube.com/watch?v=Qj_u_rAgqDU
https://www.youtube.com/watch?v=ftObtlgqcFM
https://www.youtube.com/watch?v=wa02oKjBgvA
https://www.youtube.com/watch?v=yPjuAumM7g8
https://www.youtube.com/watch?v=lpGq2LRqwAk
https://www.youtube.com/watch?v=jeD7tHfacHw
https://www.youtube.com/watch?v=CIxVCg71xZo
https://www.youtube.com/watch?v=gyKj3b-MbAQ
https://www.youtube.com/watch?v=yTCAjBGiUTg
https://www.youtube.com/watch?v=rNtMzj0thHg
https://www.youtube.com/watch?v=eDf4QdHkM5A
https://www.youtube.com/watch?v=KF7elMYIiyk
https://www.youtube.com/watch?v=tfoyuy5jQoc
https://www.youtube.com/watch?v=ZoXnm8dR3VU
https://www.youtube.com/watch?v=eGmo34B_OVo
https://www.youtube.com/watch?v=OR4pk7e4KbA
https://www.youtube.com/watch?v=cSVcsJ6jK4Y
https://www.youtube.com/watch?v=Ye1_bZGicWU
https://www.youtube.com/watch?v=n98hMqGK16k
https://www.youtube.com/watch?v=gXmfUJhgvAg
https://www.youtube.com/watch?v=ZHaZsBQzycY
https://www.youtube.com/watch?v=SfEQCvh6OmI
https://www.youtube.com/watch?v=xqe3MAkyGVc
https://www.youtube.com/watch?v=mc6BvRRyN8M
https://www.youtube.com/watch?v=wzPxLW7GZr0
https://www.youtube.com/watch?v=zLJd9PBomIA
https://www.youtube.com/watch?v=DlLER38zpq4
https://www.youtube.com/watch?v=lg9hBws5KS4
https://www.youtube.com/watch?v=pSgRbPFNgj4
https://www.youtube.com/watch?v=gX8tm4sP1qY
https://www.youtube.com/watch?v=-2EYqmuGLLM
https://www.youtube.com/watch?v=kKTq3Ndpu7E
https://www.youtube.com/watch?v=KvsOV5hHnq4
https://www.youtube.com/watch?v=DPuK9pasFDA
https://www.youtube.com/watch?v=zQVSEIb4uJ4
https://www.youtube.com/watch?v=AuupjeyKLnw
https://www.youtube.com/watch?v=-iDaJ1KO8A0
https://www.youtube.com/watch?v=OheF39Zcees
https://www.youtube.com/watch?v=kqjOVTQlGrI
https://www.youtube.com/watch?v=QcLD4KdJkKA
https://www.youtube.com/watch?v=bHngc3m0Xdk
https://www.youtube.com/watch?v=Ti8ZnrOD5_0
https://www.youtube.com/watch?v=dIUNVVnFC0U
https://www.youtube.com/watch?v=6Mc1Q7Ii55c
https://www.youtube.com/watch?v=kwxDk9nT9J4
https://www.youtube.com/watch?v=B9WBEPkNf-w
https://www.youtube.com/watch?v=1Pt5Zrakvdg
https://www.youtube.com/watch?v=dsiu7kXFBI8
https://www.youtube.com/watch?v=AQiIRKhgFHE
https://www.youtube.com/watch?v=geFIHaBoKaY
https://www.youtube.com/watch?v=QA4a9Db8m88
https://www.youtube.com/watch?v=OIFU4k1f0Ec
https://www.youtube.com/watch?v=2iSBnEwWwjo
https://www.youtube.com/watch?v=VcWAOEoue1Y
https://www.youtube.com/watch?v=rdR7_4da4Js
https://www.youtube.com/watch?v=hOkvzOkipaM
https://www.youtube.com/watch?v=wLOR5mlx7VY
https://www.youtube.com/watch?v=PFcDLcK_zcY
https://www.youtube.com/watch?v=cP8Q7DIl3nI
https://www.youtube.com/watch?v=x_pJksDZSzU
https://www.youtube.com/watch?v=BeOF0c-EzIQ
https://www.youtube.com/watch?v=7PQ2uRSFd94
https://www.youtube.com/watch?v=v_ZmsRnDmsw
https://www.youtube.com/watch?v=2Y-9Rznk8ug
https://www.youtube.com/watch?v=U6flSitpCM0
https://www.youtube.com/watch?v=VVDf4mcyPAw
https://www.youtube.com/watch?v=m8jrjn64MVk
https://www.youtube.com/watch?v=rq8chzZeDpo
https://www.youtube.com/watch?v=e9c6Is5-XYM
https://www.youtube.com/watch?v=SxTrAm_2oT8
https://www.youtube.com/watch?v=tSy7g2s9_eo
https://www.youtube.com/watch?v=zxs7UeUJr0s
https://www.youtube.com/watch?v=FvryEetPxrI
https://www.youtube.com/watch?v=o9qn_UHBKQ0
https://www.youtube.com/watch?v=PBNpVOwoXLY
https://www.youtube.com/watch?v=PpMPvuSX1CY
https://www.youtube.com/watch?v=dqjASGYlWRU
https://www.youtube.com/watch?v=DGfo_K6NTwo
https://www.youtube.com/watch?v=WpUpTVFW3S4
https://www.youtube.com/watch?v=dCgjywvszFE
https://www.youtube.com/watch?v=FuW3lMJF2zA
https://www.youtube.com/watch?v=bKaU95ceeUw
https://www.youtube.com/watch?v=Ynwqt_R3faM
https://www.youtube.com/watch?v=td70vUbqAgw
https://www.youtube.com/watch?v=9ZwKVhtzFM4
https://www.youtube.com/watch?v=xAvwjZxkp_s
https://www.youtube.com/watch?v=FlwadWqd9jY
https://www.youtube.com/watch?v=grosmlJJpOQ
https://www.youtube.com/watch?v=8tj04EuSuR8
https://www.youtube.com/watch?v=bCdkBP6nYrY
https://www.youtube.com/watch?v=9BbMwzKy7pY
https://www.youtube.com/watch?v=0A55FZ5R0MI
https://www.youtube.com/watch?v=S7Z5XDc5X3I
https://www.youtube.com/watch?v=sWO5gY7UbKM
https://www.youtube.com/watch?v=UrkAAASpCis
https://www.youtube.com/watch?v=iAiQBMEeeV4
https://www.youtube.com/watch?v=XnRekopCpZ0
https://www.youtube.com/watch?v=bnstqG8YJ-E
https://www.youtube.com/watch?v=dk5UlOS6IYI
https://www.youtube.com/watch?v=uelzEzmIhh0
https://www.youtube.com/watch?v=gq1pUYxILOc
https://www.youtube.com/watch?v=OgXkB9S_GmA
https://www.youtube.com/watch?v=mvGD7RRehaI
https://www.youtube.com/watch?v=s3df_PR0x7Y
https://www.youtube.com/watch?v=mRDmsxKQurs
https://www.youtube.com/watch?v=bhKN_KOeWhI
https://www.youtube.com/watch?v=EStYpTS-TRU
https://www.youtube.com/watch?v=357YonN45w0
https://www.youtube.com/watch?v=UGJfPbOpiCA
https://www.youtube.com/watch?v=1F6uYuHgOdI
https://www.youtube.com/watch?v=PvsqLRbCJlA
https://www.youtube.com/watch?v=P96IOk9mQgk
https://www.youtube.com/watch?v=M5EqG9d-3Ug
https://www.youtube.com/watch?v=R6eDBa6UjmY
https://www.youtube.com/watch?v=CZvUQEU2cvs
https://www.youtube.com/watch?v=giazbLbDdv0
https://www.youtube.com/watch?v=JFoI12_47ck
https://www.youtube.com/watch?v=q5dZ396lYbk
https://www.youtube.com/watch?v=McPkR_D7zI0
https://www.youtube.com/watch?v=774oBwazxHw
https://www.youtube.com/watch?v=-_5AipO_dfw
https://www.youtube.com/watch?v=MnlU4BPrLuk
https://www.youtube.com/watch?v=24LuuQH4hnc
https://www.youtube.com/watch?v=e4ivBc0l7Ok
https://www.youtube.com/watch?v=S4ff7HgfULA
https://www.youtube.com/watch?v=AxhJcEndmjs
https://www.youtube.com/watch?v=NdeCQFd2blY
https://www.youtube.com/watch?v=xgl4ltsE_8E
https://www.youtube.com/watch?v=in5xKqvxrAk
https://www.youtube.com/watch?v=TAU_0EpXBgQ
https://www.youtube.com/watch?v=3DbMqaactuU
https://www.youtube.com/watch?v=BWuY55TfChs
https://www.youtube.com/watch?v=41ecD9culo4
https://www.youtube.com/watch?v=kYV8Q5UpDTw
https://www.youtube.com/watch?v=wOQlIDXHkD4
https://www.youtube.com/watch?v=vMXVse5OuFI
https://www.youtube.com/watch?v=Fem5C3R60Sg
https://www.youtube.com/watch?v=OhySOErdxjM
https://www.youtube.com/watch?v=KC09gbct8u4
https://www.youtube.com/watch?v=bJ4vPNkjRdE
https://www.youtube.com/watch?v=RvBrUzLugjA
https://www.youtube.com/watch?v=QuNxtlXivBk
https://www.youtube.com/watch?v=yz6OjqZfdLM
https://www.youtube.com/watch?v=CoOLkzZCcGE
https://www.youtube.com/watch?v=FecXRY-8IPw
https://www.youtube.com/watch?v=KUQBqesn-6M
https://www.youtube.com/watch?v=NAF9kveijEA
https://www.youtube.com/watch?v=hl4j6E-ICco
https://www.youtube.com/watch?v=yg5tpHvElvM
https://www.youtube.com/watch?v=ZddCBXb10hw
https://www.youtube.com/watch?v=hTKjjdN8MGQ
https://www.youtube.com/watch?v=X9hHQaYj7Lo
https://www.youtube.com/watch?v=P0B-L66ffLw
https://www.youtube.com/watch?v=5mQ4hc8Uvn8
https://www.youtube.com/watch?v=KaPSeF592h0
https://www.youtube.com/watch?v=xerdSyr3sSU
https://www.youtube.com/watch?v=ZW2jcFuHdhA
https://www.youtube.com/watch?v=ek3TqzF-KVE
https://www.youtube.com/watch?v=sMT8I7qIoZs
https://www.youtube.com/watch?v=hPvr-qxf52s
https://www.youtube.com/watch?v=__Ier-gibdA
https://www.youtube.com/watch?v=Wqs0Im26Bfg
https://www.youtube.com/watch?v=auk6LFmPgC8
https://www.youtube.com/watch?v=OEX2aUVFYNI
https://www.youtube.com/watch?v=i_suF4tWuj4
https://www.youtube.com/watch?v=Gmy9pDH26do
https://www.youtube.com/watch?v=ktXSBjJdd5Q
https://www.youtube.com/watch?v=p3kzKLus9yg
https://www.youtube.com/watch?v=tB2l4wTK4OE
https://www.youtube.com/watch?v=gm4XxHSJePc
https://www.youtube.com/watch?v=uy7z2ywGb8c
https://www.youtube.com/watch?v=OmfINsA961s
https://www.youtube.com/watch?v=8impJJlnKS8
https://www.youtube.com/watch?v=aKCzJoP2bsY
https://www.youtube.com/watch?v=Q0jNgwJDXYk
https://www.youtube.com/watch?v=ratCJH1TN9Y
https://www.youtube.com/watch?v=kpiCo2tDedQ
https://www.youtube.com/watch?v=Jxsj2VSYp_I
https://www.youtube.com/watch?v=FeS1TqWJLqE
https://www.youtube.com/watch?v=xJJnQWo50lA
https://www.youtube.com/watch?v=FMqeu-2OCC8
https://www.youtube.com/watch?v=wHaVTysBL9U
https://www.youtube.com/watch?v=lfmVNlorAV8
https://www.youtube.com/watch?v=mD1d0YLwbHQ
https://www.youtube.com/watch?v=BZHKlc3N_wA
https://www.youtube.com/watch?v=7X4vxF9V9PE
https://www.youtube.com/watch?v=s_ftU_N-KAc
https://www.youtube.com/watch?v=LMXj3C2JhdA
https://www.youtube.com/watch?v=iq6sC58oSMo
https://www.youtube.com/watch?v=ZV3e4CtYltc
https://www.youtube.com/watch?v=TBB6xBg7isY
https://www.youtube.com/watch?v=majq3tuDPlg
https://www.youtube.com/watch?v=A62-iVYtkvg
https://www.youtube.com/watch?v=oH-hzXI7RzE
https://www.youtube.com/watch?v=OqAu24YGNKM
https://www.youtube.com/watch?v=YcgFu0urTjo
https://www.youtube.com/watch?v=L_qDQ2WALdc
https://www.youtube.com/watch?v=76nZ2RSxxik
https://www.youtube.com/watch?v=s4mnCMUrMV0
https://www.youtube.com/watch?v=eAhZel9fdcE
https://www.youtube.com/watch?v=TXchNmKFu8I
https://www.youtube.com/watch?v=KqLLKx7jJxM
https://www.youtube.com/watch?v=wBkH4Sho9Uw
https://www.youtube.com/watch?v=3UeYut9Nm3E
https://www.youtube.com/watch?v=rNHZh5931hA
https://www.youtube.com/watch?v=fU6GFD3wNDs
https://www.youtube.com/watch?v=WAFiutRXPHU
https://www.youtube.com/watch?v=d9PoN3qbkUA
https://www.youtube.com/watch?v=jjbVZ6fPReI
https://www.youtube.com/watch?v=avHoMxrGh3c
https://www.youtube.com/watch?v=zxdhR5cBKYA
https://www.youtube.com/watch?v=XoK7nSXYmgQ
https://www.youtube.com/watch?v=ZhzwfaYrcvc
https://www.youtube.com/watch?v=-cHTdfy6CUI
https://www.youtube.com/watch?v=hJ6se5Ms3ko
https://www.youtube.com/watch?v=Zxfcj4uc0h4
https://www.youtube.com/watch?v=yTG5zrbbxmg
https://www.youtube.com/watch?v=EH8BsC2MKNY
https://www.youtube.com/watch?v=fGXTLaO7aPo
https://www.youtube.com/watch?v=p2jo-VXkzr4
https://www.youtube.com/watch?v=DN47veER2K0
https://www.youtube.com/watch?v=h3dMZC3V_mA
https://www.youtube.com/watch?v=4KBB_CxKN6M
https://www.youtube.com/watch?v=nVoSg1NfPrE
https://www.youtube.com/watch?v=GHzS1ogWdMI
https://www.youtube.com/watch?v=r6Q8GLUGWY4
https://www.youtube.com/watch?v=-t0U70j9DHY
https://www.youtube.com/watch?v=gHYAwsSXsNI
https://www.youtube.com/watch?v=XTGlxwURgJo
https://www.youtube.com/watch?v=Dj-Zrmh_a54
https://www.youtube.com/watch?v=GNrt-iNaKvQ
https://www.youtube.com/watch?v=vrvYFPHxVMg
https://www.youtube.com/watch?v=bdlZlk0wvvo
https://www.youtube.com/watch?v=qd789Zfq5iU
https://www.youtube.com/watch?v=G4h-B9lI_vA
https://www.youtube.com/watch?v=HtJIvuVRR_s
https://www.youtube.com/watch?v=eiB4V7hSqa4

View File

@@ -1,237 +0,0 @@
https://www.youtube.com/watch?v=B_Ay09BH2qU
https://www.youtube.com/watch?v=jA-64XSM2s4
https://www.youtube.com/watch?v=IzPqkrsEPSc
https://www.youtube.com/watch?v=pHJvLpyb2tA
https://www.youtube.com/watch?v=S3t3wvksx9U
https://www.youtube.com/watch?v=YdmaCruUVDM
https://www.youtube.com/watch?v=zlnoM_Le0C4
https://www.youtube.com/watch?v=ppoa5SJDmA0
https://www.youtube.com/watch?v=fD9Jz4GuSY4
https://www.youtube.com/watch?v=DGzLoTmx6JQ
https://www.youtube.com/watch?v=dovFWM5KjLU
https://www.youtube.com/watch?v=ZvlXe9HEQXQ
https://www.youtube.com/watch?v=-69wodyyiVw
https://www.youtube.com/watch?v=ymd-9Mlusbg
https://www.youtube.com/watch?v=5NM8qn6Hz20
https://www.youtube.com/watch?v=aZLq0ODPkJs
https://www.youtube.com/watch?v=E8idIfNUTGA
https://www.youtube.com/watch?v=9tLuJxoySL0
https://www.youtube.com/watch?v=TAOo-sTxYWw
https://www.youtube.com/watch?v=6vqRtTHv2l8
https://www.youtube.com/watch?v=u2-XFiAeZ0M
https://www.youtube.com/watch?v=GSevUSqfbKM
https://www.youtube.com/watch?v=ZAx17rgrhM4
https://www.youtube.com/watch?v=beW0wPoKU08
https://www.youtube.com/watch?v=ffi0bNkc7iw
https://www.youtube.com/watch?v=YN_bIXecRzk
https://www.youtube.com/watch?v=f5XA4-NaHfk
https://www.youtube.com/watch?v=41L2f8-Gp1E
https://www.youtube.com/watch?v=cyN4abf_cUQ
https://www.youtube.com/watch?v=kZwS6uracK0
https://www.youtube.com/watch?v=ssJ2YAl-W60
https://www.youtube.com/watch?v=Zn_ngvMUp8s
https://www.youtube.com/watch?v=of80gd4-_rU
https://www.youtube.com/watch?v=scj5d81nEWY
https://www.youtube.com/watch?v=24SMoy1JqxU
https://www.youtube.com/watch?v=SsqrVhVWBtQ
https://www.youtube.com/watch?v=qRuuDA3Oy1k
https://www.youtube.com/watch?v=E-II-TTGm1s
https://www.youtube.com/watch?v=a3kKG0hEbE4
https://www.youtube.com/watch?v=UUi3KUyAJVw
https://www.youtube.com/watch?v=0f4Tl-y1SHY
https://www.youtube.com/watch?v=rNWD8g2gYlU
https://www.youtube.com/watch?v=E5KbVk6kFo8
https://www.youtube.com/watch?v=9EQPXEvgaT0
https://www.youtube.com/watch?v=VijmHIURpAg
https://www.youtube.com/watch?v=XZVeeC2MFps
https://www.youtube.com/watch?v=MU1izPlV7mE
https://www.youtube.com/watch?v=YzvEiKysxfI
https://www.youtube.com/watch?v=S-zswgmxRWk
https://www.youtube.com/watch?v=irR7K8QC5Mw
https://www.youtube.com/watch?v=8fboEbvBP4U
https://www.youtube.com/watch?v=Ehi60JJR6K8
https://www.youtube.com/watch?v=unQ37i1fI3E
https://www.youtube.com/watch?v=Sl1xZIVwQzE
https://www.youtube.com/watch?v=EsgjQP8kc-4
https://www.youtube.com/watch?v=-CS0ojb2VjA
https://www.youtube.com/watch?v=9_1qSUWAtzM
https://www.youtube.com/watch?v=H5_guRjO7qc
https://www.youtube.com/watch?v=GEi5YyVLB5M
https://www.youtube.com/watch?v=kMdZXT_6Jmo
https://www.youtube.com/watch?v=Kw1KahIAPkI
https://www.youtube.com/watch?v=sCat14cTzYA
https://www.youtube.com/watch?v=oQeBFLY3WL4
https://www.youtube.com/watch?v=G0wp8-Il2RY
https://www.youtube.com/watch?v=PWlydfB627s
https://www.youtube.com/watch?v=aDUtimJ1GL8
https://www.youtube.com/watch?v=vdaLx-wJ118
https://www.youtube.com/watch?v=SggLcqRWUcU
https://www.youtube.com/watch?v=X_jKmYUcbmE
https://www.youtube.com/watch?v=DBi96HRvEug
https://www.youtube.com/watch?v=W-RrbD170uM
https://www.youtube.com/watch?v=z3JNl4ABEMU
https://www.youtube.com/watch?v=0vQu4oSMdQI
https://www.youtube.com/watch?v=s1Z1731q5e0
https://www.youtube.com/watch?v=Et1ErNdiqXI
https://www.youtube.com/watch?v=b8HdEnMG9Mw
https://www.youtube.com/watch?v=4Jn35uhyAdU
https://www.youtube.com/watch?v=7g8nStM_XbM
https://www.youtube.com/watch?v=fNYdM_UWg_I
https://www.youtube.com/watch?v=EA2TyYTpMlQ
https://www.youtube.com/watch?v=-gqWADvlZ44
https://www.youtube.com/watch?v=pjsMnKiyMmI
https://www.youtube.com/watch?v=-B8qoqH0i1Y
https://www.youtube.com/watch?v=nE-vyXDpjYU
https://www.youtube.com/watch?v=0Ok2fGNfbDU
https://www.youtube.com/watch?v=IXee870AMAs
https://www.youtube.com/watch?v=ye9lx_rbQ8o
https://www.youtube.com/watch?v=Ku8aWQaLIBQ
https://www.youtube.com/watch?v=junMQDztHck
https://www.youtube.com/watch?v=xngdVZtDh7I
https://www.youtube.com/watch?v=DARiF_HgHts
https://www.youtube.com/watch?v=jw7-9lEo1kc
https://www.youtube.com/watch?v=ODPGJPoga1A
https://www.youtube.com/watch?v=ZBybfxJyQuE
https://www.youtube.com/watch?v=mrPeOtu_6cU
https://www.youtube.com/watch?v=E3fxsRem3rA
https://www.youtube.com/watch?v=5Oa8Uec_eBg
https://www.youtube.com/watch?v=pcJxFJcHkuo
https://www.youtube.com/watch?v=Zl4Gbaun1pA
https://www.youtube.com/watch?v=EhKgh4N1AXc
https://www.youtube.com/watch?v=urGuSLPunlU
https://www.youtube.com/watch?v=4o0i3UFDJBA
https://www.youtube.com/watch?v=JXX3NQKvpIg
https://www.youtube.com/watch?v=EEwrQrEtivk
https://www.youtube.com/watch?v=FSxmGJBvYbU
https://www.youtube.com/watch?v=_eCnHwhXaTI
https://www.youtube.com/watch?v=xGtKzO7r0GI
https://www.youtube.com/watch?v=U5S9E6KMNns
https://www.youtube.com/watch?v=zF0yuAUI1F0
https://www.youtube.com/watch?v=6lEE8tAk8YE
https://www.youtube.com/watch?v=dXrLLUqzlCM
https://www.youtube.com/watch?v=hOLrUvPyF68
https://www.youtube.com/watch?v=aFwCB5VOk_c
https://www.youtube.com/watch?v=nKCjiJ5MJ9s
https://www.youtube.com/watch?v=il_9MesqVEw
https://www.youtube.com/watch?v=DhHrSXSJ9sQ
https://www.youtube.com/watch?v=vRshEolL8eM
https://www.youtube.com/watch?v=_UOmXO1t0ms
https://www.youtube.com/watch?v=_Lq0LKMTsTc
https://www.youtube.com/watch?v=1rZgsDAohi8
https://www.youtube.com/watch?v=rZl1NAjtlr8
https://www.youtube.com/watch?v=KFWegI-YGBw
https://www.youtube.com/watch?v=Nhqny-t2BoA
https://www.youtube.com/watch?v=rHf1PBmve8U
https://www.youtube.com/watch?v=Qhm3rTNuu1c
https://www.youtube.com/watch?v=_mGDcyryvuQ
https://www.youtube.com/watch?v=qo0cNivWHwI
https://www.youtube.com/watch?v=KjoN6pDVw7c
https://www.youtube.com/watch?v=hYLAZNqx9Sc
https://www.youtube.com/watch?v=Y4GEzEh4BDY
https://www.youtube.com/watch?v=SFfDx-SSDzo
https://www.youtube.com/watch?v=vt6I-SUokgs
https://www.youtube.com/watch?v=4Eqz9U1oEpE
https://www.youtube.com/watch?v=iBdn0aG6SCY
https://www.youtube.com/watch?v=5YGOwYF5zlE
https://www.youtube.com/watch?v=iqdS0qfA1iw
https://www.youtube.com/watch?v=S6SvIe3Kxa0
https://www.youtube.com/watch?v=0JV0SuPtWwU
https://www.youtube.com/watch?v=UB-YooM-NIY
https://www.youtube.com/watch?v=4f7uOAxYQKk
https://www.youtube.com/watch?v=ODYcEncY9Z8
https://www.youtube.com/watch?v=z1gsZhSRs_A
https://www.youtube.com/watch?v=lQHEDa6vDhk
https://www.youtube.com/watch?v=Y33t3LEoTlM
https://www.youtube.com/watch?v=SOk9ROkKPrA
https://www.youtube.com/watch?v=lxPy60KW3VY
https://www.youtube.com/watch?v=reT95LPQCoM
https://www.youtube.com/watch?v=jmrqWtANVm0
https://www.youtube.com/watch?v=3TfciDvpMOU
https://www.youtube.com/watch?v=HW677VglUgs
https://www.youtube.com/watch?v=DtYkKDkGrqo
https://www.youtube.com/watch?v=128YFZf8DGo
https://www.youtube.com/watch?v=KujWR5rPJ1o
https://www.youtube.com/watch?v=RZ6g7zRVaOA
https://www.youtube.com/watch?v=3L_yf8TO1P0
https://www.youtube.com/watch?v=RJuY4t_58Y0
https://www.youtube.com/watch?v=DLTlIVuawAE
https://www.youtube.com/watch?v=QfF9mpVq_14
https://www.youtube.com/watch?v=OqGD8MNJKnI
https://www.youtube.com/watch?v=O4BqK1cylmQ
https://www.youtube.com/watch?v=vu2FttWQKMg
https://www.youtube.com/watch?v=Yh2nT6crCiE
https://www.youtube.com/watch?v=dKbRkBvtohg
https://www.youtube.com/watch?v=D0eDZjIwAmI
https://www.youtube.com/watch?v=AD37qE7t0ck
https://www.youtube.com/watch?v=l3UU8A8JEE8
https://www.youtube.com/watch?v=GshhMRTjwZs
https://www.youtube.com/watch?v=-R-0EKCgXNY
https://www.youtube.com/watch?v=WhXJJQzmlTQ
https://www.youtube.com/watch?v=x0Fx3YGbvrs
https://www.youtube.com/watch?v=P5T1dXkG7-I
https://www.youtube.com/watch?v=7VO6E6Nj75c
https://www.youtube.com/watch?v=CivVo4AbbVo
https://www.youtube.com/watch?v=MKrMnu22z9c
https://www.youtube.com/watch?v=2YgNc05_Z7E
https://www.youtube.com/watch?v=6HJi1cg-gBE
https://www.youtube.com/watch?v=felrJtLc3UY
https://www.youtube.com/watch?v=U8HIIF-W3zE
https://www.youtube.com/watch?v=EYwNAObexJk
https://www.youtube.com/watch?v=iq4lpHbGQ60
https://www.youtube.com/watch?v=ANpWkCGe6Zk
https://www.youtube.com/watch?v=a7W0t-Q5cFs
https://www.youtube.com/watch?v=9oDYxrEfVzM
https://www.youtube.com/watch?v=okbFZMnxoGQ
https://www.youtube.com/watch?v=Npb88SaLOPc
https://www.youtube.com/watch?v=S6u-py6UlX4
https://www.youtube.com/watch?v=FXmNHhFOUuM
https://www.youtube.com/watch?v=q9p7i0Jb4rg
https://www.youtube.com/watch?v=6ZaG2I7mXcM
https://www.youtube.com/watch?v=ojDYVBeK_d4
https://www.youtube.com/watch?v=HisWu1kZcTI
https://www.youtube.com/watch?v=WXm5T0AWE04
https://www.youtube.com/watch?v=6mI2rvHbCQE
https://www.youtube.com/watch?v=hEYARh2flvc
https://www.youtube.com/watch?v=MVTmbHpeQwg
https://www.youtube.com/watch?v=DN7Pp-qdKY8
https://www.youtube.com/watch?v=nGLB3uIhvdA
https://www.youtube.com/watch?v=VDq2whjVOQ8
https://www.youtube.com/watch?v=2uFJkQJHX7s
https://www.youtube.com/watch?v=fRJrdKVfA4E
https://www.youtube.com/watch?v=JXtbeBL7iog
https://www.youtube.com/watch?v=1HUZpi6Kx5g
https://www.youtube.com/watch?v=j-UII0WaS-w
https://www.youtube.com/watch?v=HMjlEN2YgLg
https://www.youtube.com/watch?v=9TqFfzAzbNE
https://www.youtube.com/watch?v=GZSslRRYJg8
https://www.youtube.com/watch?v=sR123A-THRs
https://www.youtube.com/watch?v=bt3X8MJgJWo
https://www.youtube.com/watch?v=NXEmtBe3R2c
https://www.youtube.com/watch?v=Aw7KY5ryvNo
https://www.youtube.com/watch?v=iIyfFJZuxSs
https://www.youtube.com/watch?v=JR2R1yTcUyk
https://www.youtube.com/watch?v=ID2HSLcAKmE
https://www.youtube.com/watch?v=sxuixFTH4Y8
https://www.youtube.com/watch?v=4veS9jm-utw
https://www.youtube.com/watch?v=-bNr6-8xHhE
https://www.youtube.com/watch?v=mLscN6cV89k
https://www.youtube.com/watch?v=cZlMf1khndo
https://www.youtube.com/watch?v=FXWA9qKxhXM
https://www.youtube.com/watch?v=Gen3Ng42Md4
https://www.youtube.com/watch?v=AqYhwYiXmvM
https://www.youtube.com/watch?v=rUnmd1j4mkM
https://www.youtube.com/watch?v=b3C6X_DUwbs
https://www.youtube.com/watch?v=I7xIcND2oBs
https://www.youtube.com/watch?v=grkcZZo9nXE
https://www.youtube.com/watch?v=rF3gnarthUE
https://www.youtube.com/watch?v=ft-hzWHU9ac
https://www.youtube.com/watch?v=w4tasjBq9pI
https://www.youtube.com/watch?v=JiCFWNxWwAs
https://www.youtube.com/watch?v=DpaZOzMXHnI
https://www.youtube.com/watch?v=2OwwoTVQ7Uc
https://www.youtube.com/watch?v=MdqKCXXXRBs
https://www.youtube.com/watch?v=bZgOyRXBqMM
https://www.youtube.com/watch?v=dmAteOuRBfQ
https://www.youtube.com/watch?v=h_e-al5r4gk
https://www.youtube.com/watch?v=pi3N_wQS2n0
https://www.youtube.com/watch?v=4SD-hsZ2Cso

View File

@@ -1,235 +0,0 @@
https://www.youtube.com/watch?v=mtd2vwD7t5I
https://www.youtube.com/watch?v=0zj6do0S0rc
https://www.youtube.com/watch?v=sO_hQgkul_w
https://www.youtube.com/watch?v=p3nf7CuskI4
https://www.youtube.com/watch?v=Ck-RlFjl6UA
https://www.youtube.com/watch?v=GWg_lkXHg6A
https://www.youtube.com/watch?v=_I34dV-BhaA
https://www.youtube.com/watch?v=SWpRsRAAyXE
https://www.youtube.com/watch?v=uXZqwKp255E
https://www.youtube.com/watch?v=60Tt-BZ2QF4
https://www.youtube.com/watch?v=acBMw3OL_8k
https://www.youtube.com/watch?v=aZpKNjN-mnA
https://www.youtube.com/watch?v=eQ9LWl_NpxE
https://www.youtube.com/watch?v=QbVtkJI56x4
https://www.youtube.com/watch?v=0S4iduG2DoY
https://www.youtube.com/watch?v=5IAD-EFqgX4
https://www.youtube.com/watch?v=hVAanRQ6kmI
https://www.youtube.com/watch?v=GKMr408YsLI
https://www.youtube.com/watch?v=trrwQ2_mr10
https://www.youtube.com/watch?v=EN-mjnSO8rU
https://www.youtube.com/watch?v=ssujDv_dWCQ
https://www.youtube.com/watch?v=0gZCi6hZjqc
https://www.youtube.com/watch?v=ueATjovo5TM
https://www.youtube.com/watch?v=l5nzkdMPeTM
https://www.youtube.com/watch?v=ejxUiHoZqoo
https://www.youtube.com/watch?v=jK04XF1iK3g
https://www.youtube.com/watch?v=7M33poh57dg
https://www.youtube.com/watch?v=gwfCxfvMQuQ
https://www.youtube.com/watch?v=RxX_q9ra7XM
https://www.youtube.com/watch?v=5qFfbpA6hjg
https://www.youtube.com/watch?v=PbFKX8EZGAM
https://www.youtube.com/watch?v=Lc_SsKBQqMs
https://www.youtube.com/watch?v=unsrEUNXxGQ
https://www.youtube.com/watch?v=VKLI2iMY7v4
https://www.youtube.com/watch?v=ymgo_bReR9A
https://www.youtube.com/watch?v=DOruZP0mpNc
https://www.youtube.com/watch?v=TndhOZv6ejE
https://www.youtube.com/watch?v=Ex_5kLFxpHk
https://www.youtube.com/watch?v=5Oga5OYv-sU
https://www.youtube.com/watch?v=fzEcs1s1jNk
https://www.youtube.com/watch?v=kKjY7Pj4K3o
https://www.youtube.com/watch?v=-OTZxe3IZ0Y
https://www.youtube.com/watch?v=rnUxkSviq0w
https://www.youtube.com/watch?v=u9ob0AOX3DE
https://www.youtube.com/watch?v=HWplP9yPSrk
https://www.youtube.com/watch?v=bi4qdsDEQk4
https://www.youtube.com/watch?v=K-dvZrLMPFg
https://www.youtube.com/watch?v=05OF14NhSVQ
https://www.youtube.com/watch?v=56T4a3FoWxc
https://www.youtube.com/watch?v=dJr-KpDS86M
https://www.youtube.com/watch?v=d8lGr1e7rHc
https://www.youtube.com/watch?v=DoCiNSlYEuM
https://www.youtube.com/watch?v=pHhIGaECBIU
https://www.youtube.com/watch?v=jbNxu-lkamQ
https://www.youtube.com/watch?v=Mu3klSAn8_M
https://www.youtube.com/watch?v=GaqU6VbRfjA
https://www.youtube.com/watch?v=fq4Q90CjxFw
https://www.youtube.com/watch?v=oSXLaLC6zks
https://www.youtube.com/watch?v=Y8jW2jm6Rho
https://www.youtube.com/watch?v=uRAc3_UsXs8
https://www.youtube.com/watch?v=W1n5dvzGph0
https://www.youtube.com/watch?v=uWhiz9or3PA
https://www.youtube.com/watch?v=_A-3gkPVmc0
https://www.youtube.com/watch?v=huVwqWg2vM4
https://www.youtube.com/watch?v=_P6k6AdXts4
https://www.youtube.com/watch?v=OcMnLtgMHEY
https://www.youtube.com/watch?v=r87feQvLfLI
https://www.youtube.com/watch?v=39A9XqLgtsk
https://www.youtube.com/watch?v=f5D79JWJq20
https://www.youtube.com/watch?v=M6znxNv8TvI
https://www.youtube.com/watch?v=9rv04oK-PSo
https://www.youtube.com/watch?v=6KWq6lNjdQ8
https://www.youtube.com/watch?v=sZWHJvC847c
https://www.youtube.com/watch?v=Jzoc9dyaOZ8
https://www.youtube.com/watch?v=6253hY1D0oY
https://www.youtube.com/watch?v=TpRAHZun_wo
https://www.youtube.com/watch?v=EX8y8u2nTJQ
https://www.youtube.com/watch?v=ZEbobft1Awo
https://www.youtube.com/watch?v=SFe1RqTiAYc
https://www.youtube.com/watch?v=5KM9hc1V_qw
https://www.youtube.com/watch?v=G9B-P8msN74
https://www.youtube.com/watch?v=-3zkv6CKMfU
https://www.youtube.com/watch?v=Cckr3dhQOLI
https://www.youtube.com/watch?v=Gy2NB2ncaMU
https://www.youtube.com/watch?v=gN44tf6nx78
https://www.youtube.com/watch?v=ShdOISVUjHw
https://www.youtube.com/watch?v=Lm5IFC9ALfk
https://www.youtube.com/watch?v=_rZYSwKpLX0
https://www.youtube.com/watch?v=L92zNi0MNRM
https://www.youtube.com/watch?v=n0UoWuP9OiA
https://www.youtube.com/watch?v=vg335u2KZy4
https://www.youtube.com/watch?v=HlwRYcXoM-A
https://www.youtube.com/watch?v=DyaWG0Pff6w
https://www.youtube.com/watch?v=J4t265zWn04
https://www.youtube.com/watch?v=Z66kRbSH_uU
https://www.youtube.com/watch?v=oKuaW6z16EA
https://www.youtube.com/watch?v=vLN3Vy3BRDk
https://www.youtube.com/watch?v=pnDDrLoNjvE
https://www.youtube.com/watch?v=sfKcuh-4KXk
https://www.youtube.com/watch?v=MUV2tbTe-gk
https://www.youtube.com/watch?v=SeHGsjnTjGY
https://www.youtube.com/watch?v=4p1_h_-HEPs
https://www.youtube.com/watch?v=57yQ1qJhJe0
https://www.youtube.com/watch?v=17HNRtQyAGU
https://www.youtube.com/watch?v=C-bvICeWw_M
https://www.youtube.com/watch?v=kQ7hd-68au4
https://www.youtube.com/watch?v=s6o2AtE-kUI
https://www.youtube.com/watch?v=YoMMu1gBahs
https://www.youtube.com/watch?v=VNfm4RT431g
https://www.youtube.com/watch?v=rrYlQlJwkus
https://www.youtube.com/watch?v=oonEB-IPBaU
https://www.youtube.com/watch?v=Dcac2B3qvWg
https://www.youtube.com/watch?v=HZqKKzn2UFs
https://www.youtube.com/watch?v=AM-FGYVsoBc
https://www.youtube.com/watch?v=5OX7CLr4PLE
https://www.youtube.com/watch?v=HAWvUl5dyDk
https://www.youtube.com/watch?v=1eqqdP0K4Vg
https://www.youtube.com/watch?v=aDPG1-baku0
https://www.youtube.com/watch?v=smJio-3-LIM
https://www.youtube.com/watch?v=sBr1clm9xdI
https://www.youtube.com/watch?v=ULzPldjmoYM
https://www.youtube.com/watch?v=7tW7QBkGQrc
https://www.youtube.com/watch?v=FXmWRe2FQU8
https://www.youtube.com/watch?v=ZyJ6Mx_j8go
https://www.youtube.com/watch?v=8HeODJvg2cc
https://www.youtube.com/watch?v=0nQQpVvH01k
https://www.youtube.com/watch?v=7_Qw2VsZ_fE
https://www.youtube.com/watch?v=4rLiF8ROqgQ
https://www.youtube.com/watch?v=lSdrRidKmRI
https://www.youtube.com/watch?v=9BoyIFnQvlo
https://www.youtube.com/watch?v=YXxUEgD9g5E
https://www.youtube.com/watch?v=HOGUAI-kJFc
https://www.youtube.com/watch?v=67oQXgK7Vz8
https://www.youtube.com/watch?v=co8DCZR_0s4
https://www.youtube.com/watch?v=hb6j3L0RDaE
https://www.youtube.com/watch?v=6ZtkrtIPo4Q
https://www.youtube.com/watch?v=0LZmbG61eDY
https://www.youtube.com/watch?v=JOgjRV8K5lc
https://www.youtube.com/watch?v=LxYu4xXKfyw
https://www.youtube.com/watch?v=N8MJhB4L8v4
https://www.youtube.com/watch?v=n4bkV1s-PPY
https://www.youtube.com/watch?v=4Wa6XJEDYH4
https://www.youtube.com/watch?v=n4udGJAqe6c
https://www.youtube.com/watch?v=zFNTtZXUvhc
https://www.youtube.com/watch?v=FtvVf23ZDjY
https://www.youtube.com/watch?v=Aqv3MX-dHT4
https://www.youtube.com/watch?v=4uqg6MuwNc0
https://www.youtube.com/watch?v=f18SkV6hFvo
https://www.youtube.com/watch?v=ZAD5eCOWIS8
https://www.youtube.com/watch?v=0CUF13oxaVE
https://www.youtube.com/watch?v=yHiVYkb5eWo
https://www.youtube.com/watch?v=vyNo6dW7XrI
https://www.youtube.com/watch?v=EwMMdVKF22I
https://www.youtube.com/watch?v=6YpC-pGDRyQ
https://www.youtube.com/watch?v=y2NixaBN6xw
https://www.youtube.com/watch?v=TCyvJv6qv10
https://www.youtube.com/watch?v=ubsYhOEBmWI
https://www.youtube.com/watch?v=ul5Xai16hjE
https://www.youtube.com/watch?v=QKC8cerBW-s
https://www.youtube.com/watch?v=_v9sKsaDJ0s
https://www.youtube.com/watch?v=fJM0M7I4bmM
https://www.youtube.com/watch?v=dmeajYLmHOQ
https://www.youtube.com/watch?v=9DtM-zA1_0Y
https://www.youtube.com/watch?v=20NI7mGf0kE
https://www.youtube.com/watch?v=UfLuPlzcq1o
https://www.youtube.com/watch?v=7xT1b0AzI1Y
https://www.youtube.com/watch?v=SaiyvwevIno
https://www.youtube.com/watch?v=L8RX2_DVKqY
https://www.youtube.com/watch?v=K2VRYB9PSIY
https://www.youtube.com/watch?v=z1iBdDvIXKg
https://www.youtube.com/watch?v=TjRiRF8nXEo
https://www.youtube.com/watch?v=UKS0YrXoRdA
https://www.youtube.com/watch?v=did4sI6zq_A
https://www.youtube.com/watch?v=wf5-HRTKg-E
https://www.youtube.com/watch?v=nB6y1x4pbi4
https://www.youtube.com/watch?v=9JsoIuIxopQ
https://www.youtube.com/watch?v=DsuREQMVjNY
https://www.youtube.com/watch?v=z5GgUQak-us
https://www.youtube.com/watch?v=yiVAyD9ozCo
https://www.youtube.com/watch?v=roHXzgobmnw
https://www.youtube.com/watch?v=XaOHdTuxuPM
https://www.youtube.com/watch?v=PNlmaXsixvQ
https://www.youtube.com/watch?v=PCxRTU3rBTE
https://www.youtube.com/watch?v=e3BCGTYsDOs
https://www.youtube.com/watch?v=QRv90i58W_E
https://www.youtube.com/watch?v=33PovAKeH0E
https://www.youtube.com/watch?v=FlGvWltFCmA
https://www.youtube.com/watch?v=b-ffdH8Hg08
https://www.youtube.com/watch?v=Si0KHefiKaM
https://www.youtube.com/watch?v=2dgFiwdcorY
https://www.youtube.com/watch?v=cFVF-6pX9R0
https://www.youtube.com/watch?v=OUkHgcG2v2M
https://www.youtube.com/watch?v=gm9LqOfnZOI
https://www.youtube.com/watch?v=_UQO2LM1WPw
https://www.youtube.com/watch?v=LKwG21IEWUw
https://www.youtube.com/watch?v=S4hoDHN_wQY
https://www.youtube.com/watch?v=8MV8Yy5GImE
https://www.youtube.com/watch?v=oF2HTr_XgVM
https://www.youtube.com/watch?v=CH9JaEBW83Y
https://www.youtube.com/watch?v=pxjDuq3r4iI
https://www.youtube.com/watch?v=3dUJdTxxCiE
https://www.youtube.com/watch?v=HcfxxE__Xjc
https://www.youtube.com/watch?v=GqVZLE5yxtY
https://www.youtube.com/watch?v=FfrhxtShg5s
https://www.youtube.com/watch?v=j6_gXJ54OFA
https://www.youtube.com/watch?v=UZ-qB0EKdEM
https://www.youtube.com/watch?v=SOgvMA1G7_M
https://www.youtube.com/watch?v=S5abWlkirvU
https://www.youtube.com/watch?v=3MXfmTMJ_SI
https://www.youtube.com/watch?v=hT8KURdMMi4
https://www.youtube.com/watch?v=UoIr_9J12RY
https://www.youtube.com/watch?v=dPCNW1dFigg
https://www.youtube.com/watch?v=IWc4RArypJs
https://www.youtube.com/watch?v=ZghfaSSd3dQ
https://www.youtube.com/watch?v=H_YFW94w_FQ
https://www.youtube.com/watch?v=PJv9mk5hltM
https://www.youtube.com/watch?v=7XV_gsf8yow
https://www.youtube.com/watch?v=8NukH9vc2JI
https://www.youtube.com/watch?v=tlNi-QfnOTc
https://www.youtube.com/watch?v=fc77CExfyvA
https://www.youtube.com/watch?v=YDZPIg6K1X4
https://www.youtube.com/watch?v=BBrIcZ8VT4E
https://www.youtube.com/watch?v=yFGpm9oLs20
https://www.youtube.com/watch?v=G81ehUvbzrU
https://www.youtube.com/watch?v=NJARNmRC1NY
https://www.youtube.com/watch?v=LZNmPPirbFQ
https://www.youtube.com/watch?v=ZqZN7qNyQso
https://www.youtube.com/watch?v=X6bFHcU6u9w
https://www.youtube.com/watch?v=Ej4LIXsaLag
https://www.youtube.com/watch?v=7T0ntzJQAO8
https://www.youtube.com/watch?v=VwOk7Y7n_0k
https://www.youtube.com/watch?v=m5IERpTJLaw
https://www.youtube.com/watch?v=_irlrf9Qhl0
https://www.youtube.com/watch?v=sNeSgwgyjPA
https://www.youtube.com/watch?v=tVoFvkB2weE

View File

@@ -1,236 +0,0 @@
https://www.youtube.com/watch?v=p4T-uoIF_6Y
https://www.youtube.com/watch?v=qNZRvUK_ogw
https://www.youtube.com/watch?v=0nHJXFiFtnk
https://www.youtube.com/watch?v=wSwPs-6WdfY
https://www.youtube.com/watch?v=ONWWw-hSbsg
https://www.youtube.com/watch?v=lxM_a7xvYGc
https://www.youtube.com/watch?v=ENz1DI-xv08
https://www.youtube.com/watch?v=VYOpiHVztCE
https://www.youtube.com/watch?v=Sj1mr1vZ1zg
https://www.youtube.com/watch?v=fNfY53BPSmg
https://www.youtube.com/watch?v=vuuQKCbAIjk
https://www.youtube.com/watch?v=z3M-xV_oem4
https://www.youtube.com/watch?v=pfOYokoGVb0
https://www.youtube.com/watch?v=HejkqCQe0D8
https://www.youtube.com/watch?v=7jQUzT2M8X0
https://www.youtube.com/watch?v=kAA_7GG1LQI
https://www.youtube.com/watch?v=yRXo1l6gRX4
https://www.youtube.com/watch?v=PULxfbCW1bM
https://www.youtube.com/watch?v=Ow1zVOUxQ3E
https://www.youtube.com/watch?v=09_ieUyQD8s
https://www.youtube.com/watch?v=delIGj2LDts
https://www.youtube.com/watch?v=ir4fhBd9PW8
https://www.youtube.com/watch?v=lQ9smpWZ_dQ
https://www.youtube.com/watch?v=9XkO3mye-1w
https://www.youtube.com/watch?v=KVzjmF3sFxs
https://www.youtube.com/watch?v=0VusddXJIsI
https://www.youtube.com/watch?v=6_ssgMPsgbI
https://www.youtube.com/watch?v=VbZw9HweTeo
https://www.youtube.com/watch?v=00SImiNjHyM
https://www.youtube.com/watch?v=dO6JKS6wbAA
https://www.youtube.com/watch?v=wpqMNn1IhDw
https://www.youtube.com/watch?v=sSv3ihsOxvM
https://www.youtube.com/watch?v=6mFcIil6_z0
https://www.youtube.com/watch?v=Sgx-2mDqSzc
https://www.youtube.com/watch?v=A8AB7TpQxEs
https://www.youtube.com/watch?v=KS7eqksUg9o
https://www.youtube.com/watch?v=QF343yGE3CY
https://www.youtube.com/watch?v=T7XOVJRUa-0
https://www.youtube.com/watch?v=-D0ZbmuNZR4
https://www.youtube.com/watch?v=SZ5uMY2BnjI
https://www.youtube.com/watch?v=Wz2OGaWFY0E
https://www.youtube.com/watch?v=Wz6ofYXGc88
https://www.youtube.com/watch?v=gbG5N1WxE3Q
https://www.youtube.com/watch?v=htET1lBbmBI
https://www.youtube.com/watch?v=rKTMv0Em8XY
https://www.youtube.com/watch?v=73tgi-8FxGI
https://www.youtube.com/watch?v=io8FePn3Z6A
https://www.youtube.com/watch?v=SptD8UmCrRM
https://www.youtube.com/watch?v=WvcDTzX0vr8
https://www.youtube.com/watch?v=-_5rhG_EjTg
https://www.youtube.com/watch?v=1UAwlk6sRZo
https://www.youtube.com/watch?v=DCQmVzOifcg
https://www.youtube.com/watch?v=3cT4Sw_Dkhg
https://www.youtube.com/watch?v=pfdcN2FcKug
https://www.youtube.com/watch?v=JHD5Jt9J41U
https://www.youtube.com/watch?v=4WjBo5p6eTw
https://www.youtube.com/watch?v=Hl6AB6oViQs
https://www.youtube.com/watch?v=5XNQo4CC9dM
https://www.youtube.com/watch?v=Fnr1v1wYy60
https://www.youtube.com/watch?v=2sKHlRpOMMo
https://www.youtube.com/watch?v=Ou6UEeUGIDA
https://www.youtube.com/watch?v=x6pX2rT-SqM
https://www.youtube.com/watch?v=dINr88UYHgc
https://www.youtube.com/watch?v=S3UOmaM-PpU
https://www.youtube.com/watch?v=8pXy8bh0Q0Q
https://www.youtube.com/watch?v=jhJFYfacwAc
https://www.youtube.com/watch?v=Hy67pOxlTgQ
https://www.youtube.com/watch?v=64-sAuJ8K7Y
https://www.youtube.com/watch?v=xaGWnjP7NVQ
https://www.youtube.com/watch?v=vYu3tkfaEcc
https://www.youtube.com/watch?v=6kO7vguhv2M
https://www.youtube.com/watch?v=rZjpxCSbu9w
https://www.youtube.com/watch?v=F4SZ4kNkX0c
https://www.youtube.com/watch?v=MeMR02kR3gE
https://www.youtube.com/watch?v=hm9vnD7gsCQ
https://www.youtube.com/watch?v=MaKkOahuC78
https://www.youtube.com/watch?v=mc1FhpTls-A
https://www.youtube.com/watch?v=7XOxmV4ddNI
https://www.youtube.com/watch?v=b2o7a0D1ALs
https://www.youtube.com/watch?v=_oSutnfDUf8
https://www.youtube.com/watch?v=taiQxc18xk0
https://www.youtube.com/watch?v=kI6lgu9OL9I
https://www.youtube.com/watch?v=UNE-Himotz0
https://www.youtube.com/watch?v=uD70OQniXTQ
https://www.youtube.com/watch?v=vs72Ng_jodU
https://www.youtube.com/watch?v=yanyMgdNfWU
https://www.youtube.com/watch?v=sGtTSEDc_w8
https://www.youtube.com/watch?v=p-tCzjrh52Q
https://www.youtube.com/watch?v=PMQJ3dHi-JQ
https://www.youtube.com/watch?v=zIt66_3zYfM
https://www.youtube.com/watch?v=8mAMPRNl7ZQ
https://www.youtube.com/watch?v=ktFVOeYnwJI
https://www.youtube.com/watch?v=sKfIsBA_k60
https://www.youtube.com/watch?v=y2axw12xLlc
https://www.youtube.com/watch?v=yDPEq8ObtXg
https://www.youtube.com/watch?v=tjJX5oB4EF0
https://www.youtube.com/watch?v=WERFogub0MY
https://www.youtube.com/watch?v=JfN1S9Cil1I
https://www.youtube.com/watch?v=OUY9hCLQ6s8
https://www.youtube.com/watch?v=kpg0Q0KEvyU
https://www.youtube.com/watch?v=AxgSz2gh6WE
https://www.youtube.com/watch?v=sKRDjWd4KCw
https://www.youtube.com/watch?v=mBl2wOUv5RA
https://www.youtube.com/watch?v=-i_xeg7jssk
https://www.youtube.com/watch?v=OG2kKOmey4Q
https://www.youtube.com/watch?v=mbHsmGm0F8c
https://www.youtube.com/watch?v=pn2D3Wd5Lc4
https://www.youtube.com/watch?v=8MFvpgA6ylk
https://www.youtube.com/watch?v=BKC7rXf54-k
https://www.youtube.com/watch?v=Q7Q5ygmO3mU
https://www.youtube.com/watch?v=WwbqQ3TPeok
https://www.youtube.com/watch?v=_1GYGOaMCes
https://www.youtube.com/watch?v=jCJ-nEx2ko0
https://www.youtube.com/watch?v=nxQ8UPOk4tc
https://www.youtube.com/watch?v=13m9TxErPRI
https://www.youtube.com/watch?v=IRcFyWNZeAA
https://www.youtube.com/watch?v=UL6kx-t_xM8
https://www.youtube.com/watch?v=9gtn2fmvRlA
https://www.youtube.com/watch?v=xJjpIjfEPoc
https://www.youtube.com/watch?v=-sXNKkOsMNs
https://www.youtube.com/watch?v=883w-T9wHBs
https://www.youtube.com/watch?v=a1EQBbKRfoc
https://www.youtube.com/watch?v=b7_7qpLvKpQ
https://www.youtube.com/watch?v=r2abEcxai08
https://www.youtube.com/watch?v=VEzH8V7Tt80
https://www.youtube.com/watch?v=-Zt0PiwL0Wo
https://www.youtube.com/watch?v=5L_hjw0y9WU
https://www.youtube.com/watch?v=oLpS-692p_Y
https://www.youtube.com/watch?v=c0IiK5jpg_I
https://www.youtube.com/watch?v=JZcpGbm4FX4
https://www.youtube.com/watch?v=RVvUTayv2L8
https://www.youtube.com/watch?v=mKx9CJ2B-Us
https://www.youtube.com/watch?v=6AWS08oFmO4
https://www.youtube.com/watch?v=6VNQ91IAE1U
https://www.youtube.com/watch?v=YdITVnpEOiA
https://www.youtube.com/watch?v=z67zFn_E94g
https://www.youtube.com/watch?v=NduD3p7MugA
https://www.youtube.com/watch?v=aMPTcld2R50
https://www.youtube.com/watch?v=4rsm7Xjv_Hw
https://www.youtube.com/watch?v=cKKi-s6xvGQ
https://www.youtube.com/watch?v=fa8yHVDha9A
https://www.youtube.com/watch?v=2ocd6oVbfSk
https://www.youtube.com/watch?v=xd6BSNRjS9s
https://www.youtube.com/watch?v=kXPkpDzTKUI
https://www.youtube.com/watch?v=TeTxAhmjZDc
https://www.youtube.com/watch?v=pIsznwhTFzA
https://www.youtube.com/watch?v=TX2duib2HvQ
https://www.youtube.com/watch?v=2lSOhcBa1cE
https://www.youtube.com/watch?v=aBHO9vEqeg0
https://www.youtube.com/watch?v=6kNnsU01z6s
https://www.youtube.com/watch?v=AgXkWyfudCY
https://www.youtube.com/watch?v=63LvptDiyn0
https://www.youtube.com/watch?v=mpls4RO2Sew
https://www.youtube.com/watch?v=FQQOy3gK0aM
https://www.youtube.com/watch?v=iElhQSrK_gQ
https://www.youtube.com/watch?v=06rJ3VU5XuY
https://www.youtube.com/watch?v=f1uAhp8G2iY
https://www.youtube.com/watch?v=rEolrC_dWCc
https://www.youtube.com/watch?v=XQBl29RFtRw
https://www.youtube.com/watch?v=gGmKewwq_G4
https://www.youtube.com/watch?v=Ktv_YSp1T48
https://www.youtube.com/watch?v=1s8kzflWSuI
https://www.youtube.com/watch?v=Yt7Gtls0Rpc
https://www.youtube.com/watch?v=0xHg1q2dybA
https://www.youtube.com/watch?v=D9l-LOBMYTA
https://www.youtube.com/watch?v=hw7fxmn8lJk
https://www.youtube.com/watch?v=uvvAIuFptXw
https://www.youtube.com/watch?v=ds2JXnt5xXs
https://www.youtube.com/watch?v=xkWirP51bFU
https://www.youtube.com/watch?v=-jkmgcbf2AQ
https://www.youtube.com/watch?v=mH8Qrii8EOQ
https://www.youtube.com/watch?v=367pqLOIXcM
https://www.youtube.com/watch?v=3jJDsO137Wc
https://www.youtube.com/watch?v=RDGRu99HgWQ
https://www.youtube.com/watch?v=l4W1EeLdpAg
https://www.youtube.com/watch?v=NjZVHcoxgjY
https://www.youtube.com/watch?v=5UncrLkE6rA
https://www.youtube.com/watch?v=01rfz1uMDP0
https://www.youtube.com/watch?v=RN6a6FNGbUI
https://www.youtube.com/watch?v=WZIYfMpDXCY
https://www.youtube.com/watch?v=mvrpON9NTk4
https://www.youtube.com/watch?v=ywM_kpE6x08
https://www.youtube.com/watch?v=ygPlWXqHsig
https://www.youtube.com/watch?v=mAcwJmv-lCc
https://www.youtube.com/watch?v=IPFFvQDYaf4
https://www.youtube.com/watch?v=zurAzTl_h38
https://www.youtube.com/watch?v=fiauZn74bak
https://www.youtube.com/watch?v=uuiFxk428WI
https://www.youtube.com/watch?v=HqayuXwuL3w
https://www.youtube.com/watch?v=E6beeQe7NQA
https://www.youtube.com/watch?v=tKH2QqmekxA
https://www.youtube.com/watch?v=z1T1ObFO_P0
https://www.youtube.com/watch?v=pk_wGZjE4ZM
https://www.youtube.com/watch?v=I8AFTCfTCNU
https://www.youtube.com/watch?v=ef7YgE16fko
https://www.youtube.com/watch?v=XX9gQ5ogYM0
https://www.youtube.com/watch?v=jfwJxQHBuYQ
https://www.youtube.com/watch?v=19Rv6AuPQ2g
https://www.youtube.com/watch?v=0tIXPDwJves
https://www.youtube.com/watch?v=3YPohsVqHU0
https://www.youtube.com/watch?v=mfJXdvV92jI
https://www.youtube.com/watch?v=mSemBOoh2Yo
https://www.youtube.com/watch?v=0l20ILvAwt4
https://www.youtube.com/watch?v=qEYFI_z0K3E
https://www.youtube.com/watch?v=280uzhCRR7s
https://www.youtube.com/watch?v=UNJ3S8ivbTQ
https://www.youtube.com/watch?v=FJm_yAT4yDc
https://www.youtube.com/watch?v=skpWa0r4bUI
https://www.youtube.com/watch?v=W8PJbBAcyV0
https://www.youtube.com/watch?v=TmJS6aj3-dw
https://www.youtube.com/watch?v=k_w_TzpeZxI
https://www.youtube.com/watch?v=K_F3yqv7CJE
https://www.youtube.com/watch?v=QXmlNXcMySE
https://www.youtube.com/watch?v=A1vAAOVmbP0
https://www.youtube.com/watch?v=PaBqH6VD55E
https://www.youtube.com/watch?v=ucAoopysUAI
https://www.youtube.com/watch?v=vjshLjye-qE
https://www.youtube.com/watch?v=XgQBNjTQ3Is
https://www.youtube.com/watch?v=pF7-2QwXHKc
https://www.youtube.com/watch?v=jctNS45TWpU
https://www.youtube.com/watch?v=PKorRRGUyHw
https://www.youtube.com/watch?v=2v8TzB8-LoI
https://www.youtube.com/watch?v=3VatPXxfjaM
https://www.youtube.com/watch?v=omeKGD5FdIA
https://www.youtube.com/watch?v=u4zV6mXmmKs
https://www.youtube.com/watch?v=YYee20a-QqI
https://www.youtube.com/watch?v=SZ9mdXqtwd8
https://www.youtube.com/watch?v=zKYRTjUywaY
https://www.youtube.com/watch?v=-9e4GCOOmp8
https://www.youtube.com/watch?v=6xFLANoml_Q
https://www.youtube.com/watch?v=feMq5y8UB3Q
https://www.youtube.com/watch?v=qZ6tlyBMUqE
https://www.youtube.com/watch?v=fnNqGTWMYBM
https://www.youtube.com/watch?v=Ry8Z5solATY
https://www.youtube.com/watch?v=fnbiyF0buBU
https://www.youtube.com/watch?v=9WXa1LNNe2o

View File

@@ -1,239 +0,0 @@
https://www.youtube.com/watch?v=z8UZZZxFeUc
https://www.youtube.com/watch?v=JXaJXwwOJ4Y
https://www.youtube.com/watch?v=uIWXP5jcBv4
https://www.youtube.com/watch?v=bFyGBhVyiMo
https://www.youtube.com/watch?v=QTzobwv3Yw0
https://www.youtube.com/watch?v=8Jbm7SL7s74
https://www.youtube.com/watch?v=jdeZXWw3Nwo
https://www.youtube.com/watch?v=_KtVSsqSl4A
https://www.youtube.com/watch?v=j37CI3fhAwI
https://www.youtube.com/watch?v=IKoQFXPCVL4
https://www.youtube.com/watch?v=b136ue2BxBo
https://www.youtube.com/watch?v=_p2DNkef5RM
https://www.youtube.com/watch?v=2vo7ix3_jYQ
https://www.youtube.com/watch?v=FuuMOebTHGQ
https://www.youtube.com/watch?v=ZBc6NpA__FY
https://www.youtube.com/watch?v=nP4Vzfp4U4g
https://www.youtube.com/watch?v=xYW0e8Vjpb4
https://www.youtube.com/watch?v=YMEov5dJ3Ac
https://www.youtube.com/watch?v=2DYzzjwoqNM
https://www.youtube.com/watch?v=wO89RZgChCU
https://www.youtube.com/watch?v=hK4mdtUs9ZE
https://www.youtube.com/watch?v=QdlEzZKJFqc
https://www.youtube.com/watch?v=Fu0E9CLeHhU
https://www.youtube.com/watch?v=XJ_UDNWXYas
https://www.youtube.com/watch?v=2MYqr8atWKQ
https://www.youtube.com/watch?v=GjhNPl6S3cs
https://www.youtube.com/watch?v=GffJ3Yibndo
https://www.youtube.com/watch?v=SoIAPMCaDHI
https://www.youtube.com/watch?v=2IM9mIWhB0E
https://www.youtube.com/watch?v=qSS_73nlO1s
https://www.youtube.com/watch?v=S7RktiWIF5A
https://www.youtube.com/watch?v=eRGC_P3YV2s
https://www.youtube.com/watch?v=etnv3-g0aHk
https://www.youtube.com/watch?v=shW4hoJwg5c
https://www.youtube.com/watch?v=PokWotcGy8g
https://www.youtube.com/watch?v=xzvv4FnuSok
https://www.youtube.com/watch?v=bX5b98wVpoU
https://www.youtube.com/watch?v=51i8HuB2stg
https://www.youtube.com/watch?v=G2jFSnScYKs
https://www.youtube.com/watch?v=I6ImYrILAHU
https://www.youtube.com/watch?v=VbIR9XG6EFk
https://www.youtube.com/watch?v=1j__nRqiSmg
https://www.youtube.com/watch?v=TdGi6CgQuW8
https://www.youtube.com/watch?v=1gQ99t8InuA
https://www.youtube.com/watch?v=Y-8nqbbFUWQ
https://www.youtube.com/watch?v=P9aHdSdql94
https://www.youtube.com/watch?v=Omb0ipW_Ojo
https://www.youtube.com/watch?v=SKgjRyfg69Y
https://www.youtube.com/watch?v=h1iIlc-bdeM
https://www.youtube.com/watch?v=Bemw7-CrC-Q
https://www.youtube.com/watch?v=kc0VX29APG8
https://www.youtube.com/watch?v=bN7Si1lY_Oo
https://www.youtube.com/watch?v=2NrgIhlGMss
https://www.youtube.com/watch?v=9nQ_Qs8gilE
https://www.youtube.com/watch?v=sFvJOjgA4bA
https://www.youtube.com/watch?v=w1biOJ2oKQw
https://www.youtube.com/watch?v=zF5cN9P5_aU
https://www.youtube.com/watch?v=_cqBSZPGwfw
https://www.youtube.com/watch?v=2T7hTrXKQIM
https://www.youtube.com/watch?v=icaEyu5gfbI
https://www.youtube.com/watch?v=6tIMyrKyupM
https://www.youtube.com/watch?v=lM4tb6fQ_nU
https://www.youtube.com/watch?v=_go14KzQA8A
https://www.youtube.com/watch?v=QgQFc4DL_yg
https://www.youtube.com/watch?v=BixPQs8sCuc
https://www.youtube.com/watch?v=s3jUI9QawCQ
https://www.youtube.com/watch?v=h31v70v8Usw
https://www.youtube.com/watch?v=QKbHkqK1gnk
https://www.youtube.com/watch?v=zGOQhVS8q_I
https://www.youtube.com/watch?v=W-DwBR0wHD8
https://www.youtube.com/watch?v=en4Lom0HTVQ
https://www.youtube.com/watch?v=6-TM3WHpXk8
https://www.youtube.com/watch?v=l-a1tra_LJY
https://www.youtube.com/watch?v=xEheck5jDss
https://www.youtube.com/watch?v=pxOjDtsSaPo
https://www.youtube.com/watch?v=ESDlq4Uza68
https://www.youtube.com/watch?v=l3599LD9ot0
https://www.youtube.com/watch?v=jo1Gcx33xg8
https://www.youtube.com/watch?v=DcADJQWk9AE
https://www.youtube.com/watch?v=DV0mS4OiPv8
https://www.youtube.com/watch?v=Lk_A8heCZUI
https://www.youtube.com/watch?v=PxMAWsZMmSQ
https://www.youtube.com/watch?v=6lZiUc4LLA8
https://www.youtube.com/watch?v=Sa9-Is51Wn8
https://www.youtube.com/watch?v=1RHu2Vfw3v0
https://www.youtube.com/watch?v=aac2UFmgA-Y
https://www.youtube.com/watch?v=lrzdn6syTrM
https://www.youtube.com/watch?v=xUaZZ6Yr6KI
https://www.youtube.com/watch?v=Njm0h38ljqs
https://www.youtube.com/watch?v=npIwOKtj2yM
https://www.youtube.com/watch?v=sX45wetKJq4
https://www.youtube.com/watch?v=E1wRBaIFY3c
https://www.youtube.com/watch?v=akmjJAfy0xM
https://www.youtube.com/watch?v=kB62dvTOFhA
https://www.youtube.com/watch?v=LX3VH1I9Qg4
https://www.youtube.com/watch?v=wSod3-xDe90
https://www.youtube.com/watch?v=xFHN_xIN-eU
https://www.youtube.com/watch?v=LTtc10Iom6o
https://www.youtube.com/watch?v=-bKFpzTM-MA
https://www.youtube.com/watch?v=BjTO2n_c5eQ
https://www.youtube.com/watch?v=G1dNIRpqZJg
https://www.youtube.com/watch?v=xr_Wgs0BHY8
https://www.youtube.com/watch?v=zpvmWlkl74s
https://www.youtube.com/watch?v=-dNDoy2sA1c
https://www.youtube.com/watch?v=q55uhizppEk
https://www.youtube.com/watch?v=ms2xZYmhN7E
https://www.youtube.com/watch?v=I1mXIlzbTNQ
https://www.youtube.com/watch?v=OLgjdd4VYCU
https://www.youtube.com/watch?v=flBo_rqbRqc
https://www.youtube.com/watch?v=vgEke8PTzWo
https://www.youtube.com/watch?v=d9Npvs3YUEA
https://www.youtube.com/watch?v=nB8UexTjlts
https://www.youtube.com/watch?v=1YkCgyAttcY
https://www.youtube.com/watch?v=pNLXjgQbKVA
https://www.youtube.com/watch?v=UI9Ay3sP-Ic
https://www.youtube.com/watch?v=fAMtvFi6JBQ
https://www.youtube.com/watch?v=RSjVB0h5TD8
https://www.youtube.com/watch?v=6PZAVZ4EZVE
https://www.youtube.com/watch?v=1enPX90IDjU
https://www.youtube.com/watch?v=PRHYrtgdz70
https://www.youtube.com/watch?v=qkP5pjASL3o
https://www.youtube.com/watch?v=37zGf0w1Dug
https://www.youtube.com/watch?v=-2SoH9C0tbo
https://www.youtube.com/watch?v=P_1iWnR-tkA
https://www.youtube.com/watch?v=y4dQ6DTWURU
https://www.youtube.com/watch?v=4mPP8uWwxr8
https://www.youtube.com/watch?v=7q8aLzdUXd8
https://www.youtube.com/watch?v=CGoej1jmNbQ
https://www.youtube.com/watch?v=rHCqTxAEOOM
https://www.youtube.com/watch?v=LcGqzVfw85M
https://www.youtube.com/watch?v=iTxzkv8kEWI
https://www.youtube.com/watch?v=j1cRaYMlJmQ
https://www.youtube.com/watch?v=U_bg7CNrAZc
https://www.youtube.com/watch?v=rNUFnOO2rXs
https://www.youtube.com/watch?v=voi8bgF1Ijw
https://www.youtube.com/watch?v=3Xbyc8sbpCI
https://www.youtube.com/watch?v=W61OfSeoWBI
https://www.youtube.com/watch?v=RpcYEFBZMnY
https://www.youtube.com/watch?v=UdATWjLvZ10
https://www.youtube.com/watch?v=1sPiqba_w-c
https://www.youtube.com/watch?v=NGLASxoC4SA
https://www.youtube.com/watch?v=LqYJRx81E6w
https://www.youtube.com/watch?v=kVQqM2w8sm4
https://www.youtube.com/watch?v=rP4AjhcVIMQ
https://www.youtube.com/watch?v=Ptpl65Z_Q0M
https://www.youtube.com/watch?v=aVnv7Iy_QtE
https://www.youtube.com/watch?v=Kl7o4k6CfXw
https://www.youtube.com/watch?v=M6G1BAa8E9c
https://www.youtube.com/watch?v=fkNCVh4HIBA
https://www.youtube.com/watch?v=8gbRco5_2rg
https://www.youtube.com/watch?v=b2UIhsD2Cvg
https://www.youtube.com/watch?v=tqdQ6W-01FM
https://www.youtube.com/watch?v=Li4PMmYFjOg
https://www.youtube.com/watch?v=LM2_eKw2nw4
https://www.youtube.com/watch?v=k8Z3_QVcxQA
https://www.youtube.com/watch?v=Sfo6fW-QJys
https://www.youtube.com/watch?v=c06bh8Gjtk8
https://www.youtube.com/watch?v=mRx56SFQ8QA
https://www.youtube.com/watch?v=6QocZNp2Djo
https://www.youtube.com/watch?v=BbhrHJH4KRI
https://www.youtube.com/watch?v=lNCy7BO37hg
https://www.youtube.com/watch?v=ZF4t_BtIARA
https://www.youtube.com/watch?v=l3A4dHyW0Hk
https://www.youtube.com/watch?v=N5w-HfJdOIE
https://www.youtube.com/watch?v=vWttLvxA-08
https://www.youtube.com/watch?v=TEGB8sgm5S4
https://www.youtube.com/watch?v=dOB9cKSCXZo
https://www.youtube.com/watch?v=7fYHm-70kCs
https://www.youtube.com/watch?v=6do1PrNrhks
https://www.youtube.com/watch?v=lz2hNSk2Trc
https://www.youtube.com/watch?v=awCoKx8VNx0
https://www.youtube.com/watch?v=_IC0X1jsWQg
https://www.youtube.com/watch?v=BfWfOZ8-4Vk
https://www.youtube.com/watch?v=5JxfZDHexes
https://www.youtube.com/watch?v=QKv2dlecwE4
https://www.youtube.com/watch?v=bbFqfVHlOQs
https://www.youtube.com/watch?v=7FwB8-UtSTY
https://www.youtube.com/watch?v=Z6UZ2Est_Rk
https://www.youtube.com/watch?v=lsXrjvmhyGc
https://www.youtube.com/watch?v=yQmff118iG0
https://www.youtube.com/watch?v=5zZCVYX46oA
https://www.youtube.com/watch?v=hakUmmty_Ls
https://www.youtube.com/watch?v=gO7qwdewGL8
https://www.youtube.com/watch?v=N4bChET_zcE
https://www.youtube.com/watch?v=km06-0psWps
https://www.youtube.com/watch?v=vYG3_0iwY6Q
https://www.youtube.com/watch?v=NfW7ugCXq-E
https://www.youtube.com/watch?v=-N6YZYPjyG4
https://www.youtube.com/watch?v=MTqLhhYDQsg
https://www.youtube.com/watch?v=iNnrMwskCt0
https://www.youtube.com/watch?v=cy1hAIofwTA
https://www.youtube.com/watch?v=ssPwz0YCqsA
https://www.youtube.com/watch?v=nPG8_5O1urE
https://www.youtube.com/watch?v=M6sy9m93Kcc
https://www.youtube.com/watch?v=69m-cyniT0Y
https://www.youtube.com/watch?v=JPy5s_vVTFs
https://www.youtube.com/watch?v=KfrQlDAog3Q
https://www.youtube.com/watch?v=x-7MZ-iGoQk
https://www.youtube.com/watch?v=H7IlCQnvU0I
https://www.youtube.com/watch?v=Yy19s2yBsVU
https://www.youtube.com/watch?v=2G5XeSGDPyc
https://www.youtube.com/watch?v=B1oYCHvADZY
https://www.youtube.com/watch?v=fSH7eaRQuJU
https://www.youtube.com/watch?v=95rnBvsZbZU
https://www.youtube.com/watch?v=pKRvuwn9GfI
https://www.youtube.com/watch?v=il0j0nQnK20
https://www.youtube.com/watch?v=x3aishDTjeI
https://www.youtube.com/watch?v=1oGsN1mVgsM
https://www.youtube.com/watch?v=mBVYIXRAkHM
https://www.youtube.com/watch?v=6Z51tXq54zc
https://www.youtube.com/watch?v=-_gyejedS14
https://www.youtube.com/watch?v=FF5ttCY898w
https://www.youtube.com/watch?v=xCGaVzXoBvo
https://www.youtube.com/watch?v=qt7lOLl8GAc
https://www.youtube.com/watch?v=WrjgF6hjfxY
https://www.youtube.com/watch?v=7AyxprXTsIg
https://www.youtube.com/watch?v=SyR5yRwWtNo
https://www.youtube.com/watch?v=-WIOayHCDPo
https://www.youtube.com/watch?v=y0JtoZERkhQ
https://www.youtube.com/watch?v=lqdbxTKrTS8
https://www.youtube.com/watch?v=KUuJH5WSN_c
https://www.youtube.com/watch?v=vGUHmGWFgiA
https://www.youtube.com/watch?v=zDuvYbU6giQ
https://www.youtube.com/watch?v=D-KvhXV9qdM
https://www.youtube.com/watch?v=-NAcOMzY2qk
https://www.youtube.com/watch?v=Dysjq3qtI1c
https://www.youtube.com/watch?v=U-zJ0YVOeac
https://www.youtube.com/watch?v=50F6NBek_uE
https://www.youtube.com/watch?v=yNpIqZkaGXs
https://www.youtube.com/watch?v=CrbeAD8S8hU
https://www.youtube.com/watch?v=b4dHqUw9s98
https://www.youtube.com/watch?v=RxRwInWoNCA
https://www.youtube.com/watch?v=QO8aek7VgOw
https://www.youtube.com/watch?v=Yk1y0z0Lz-Q
https://www.youtube.com/watch?v=fndst7rrz90
https://www.youtube.com/watch?v=_sROrMglc7s
https://www.youtube.com/watch?v=NT8WIkntDBQ
https://www.youtube.com/watch?v=8xO0nrxYhtU
https://www.youtube.com/watch?v=VZJSRmgH7Ww

@@ -1,241 +0,0 @@
https://www.youtube.com/watch?v=nsejs-Vj6VA
https://www.youtube.com/watch?v=_cRazf-kuh0
https://www.youtube.com/watch?v=p_JrAn10AsE
https://www.youtube.com/watch?v=8fDFFLoY08s
https://www.youtube.com/watch?v=TKfE2xHLcJU
https://www.youtube.com/watch?v=zY1UHAhvZNY
https://www.youtube.com/watch?v=eJZKJUvWPV0
https://www.youtube.com/watch?v=0REjPqvjFwc
https://www.youtube.com/watch?v=oMVoj6hkt7Q
https://www.youtube.com/watch?v=gog3F-MRaBA
https://www.youtube.com/watch?v=qdDWjoy4g4o
https://www.youtube.com/watch?v=fzVtPI0WTgQ
https://www.youtube.com/watch?v=dceFJd55ZRs
https://www.youtube.com/watch?v=fAUydLncMlQ
https://www.youtube.com/watch?v=6hcoparO0AY
https://www.youtube.com/watch?v=R-OXwsnZkhg
https://www.youtube.com/watch?v=HMyZGhCNWEs
https://www.youtube.com/watch?v=sATiSokDMsE
https://www.youtube.com/watch?v=XrghP79kpjU
https://www.youtube.com/watch?v=Bgo5eBUIG70
https://www.youtube.com/watch?v=iCA8zfIS57s
https://www.youtube.com/watch?v=H1TZCRXBus4
https://www.youtube.com/watch?v=quMnDQBXzA8
https://www.youtube.com/watch?v=dbkiHJyEWEU
https://www.youtube.com/watch?v=NBp4QUN77eg
https://www.youtube.com/watch?v=HZyOQzDS678
https://www.youtube.com/watch?v=teOOvPRSuc8
https://www.youtube.com/watch?v=rC9-woyHDKw
https://www.youtube.com/watch?v=CzDZv3upREs
https://www.youtube.com/watch?v=xkz6y4-I538
https://www.youtube.com/watch?v=hP6-1XpmvW4
https://www.youtube.com/watch?v=nHOMo4L4j_Y
https://www.youtube.com/watch?v=TH2UrUpWakY
https://www.youtube.com/watch?v=Qs1mQA4TRbg
https://www.youtube.com/watch?v=jn-N9NEZybs
https://www.youtube.com/watch?v=3BZsAQC6Prw
https://www.youtube.com/watch?v=i90tbQDY3eo
https://www.youtube.com/watch?v=vmyi7L6xDdU
https://www.youtube.com/watch?v=Yd7P73qLD3c
https://www.youtube.com/watch?v=zOUsPwjhph8
https://www.youtube.com/watch?v=37yUKFtz_BI
https://www.youtube.com/watch?v=WU8iL0D2nY4
https://www.youtube.com/watch?v=MTrIygjBiWk
https://www.youtube.com/watch?v=pNp85sYhcEs
https://www.youtube.com/watch?v=M72QlvVS39I
https://www.youtube.com/watch?v=xQy30Iikl9g
https://www.youtube.com/watch?v=TQq6CE-_zF4
https://www.youtube.com/watch?v=bStpz3_WPuU
https://www.youtube.com/watch?v=lC51HTZjQeY
https://www.youtube.com/watch?v=_gljvpWvjdY
https://www.youtube.com/watch?v=KZUFuALvKSc
https://www.youtube.com/watch?v=-Kr4Ft2fCDc
https://www.youtube.com/watch?v=KcwYIQ8AAOs
https://www.youtube.com/watch?v=c5hjPFgf1DU
https://www.youtube.com/watch?v=w7aabeoAIns
https://www.youtube.com/watch?v=rb5NKtj2fn0
https://www.youtube.com/watch?v=aJFnftIgpVg
https://www.youtube.com/watch?v=0poCn5r1wDc
https://www.youtube.com/watch?v=ygfFlVvBu4o
https://www.youtube.com/watch?v=MSI6bKsLH0s
https://www.youtube.com/watch?v=HbJd4DGdRXo
https://www.youtube.com/watch?v=0u2UXvR4yVI
https://www.youtube.com/watch?v=0TIqoSR1Mfk
https://www.youtube.com/watch?v=nlYkvu2-8_8
https://www.youtube.com/watch?v=y7uOkbMqEjI
https://www.youtube.com/watch?v=V5UrsV1PU3Y
https://www.youtube.com/watch?v=kmw1YaOkfjE
https://www.youtube.com/watch?v=dOuyoV-9aHQ
https://www.youtube.com/watch?v=E0NbvW2TFLg
https://www.youtube.com/watch?v=q-jUCA5Rm28
https://www.youtube.com/watch?v=YrueIOPs5L0
https://www.youtube.com/watch?v=IHkP6Syj82s
https://www.youtube.com/watch?v=iIFl1qX7CoY
https://www.youtube.com/watch?v=0cKhemqjgQ8
https://www.youtube.com/watch?v=dcHvwaI5bAg
https://www.youtube.com/watch?v=Pezn6Ru5fi0
https://www.youtube.com/watch?v=Wf_BnBmT5_E
https://www.youtube.com/watch?v=34Uc06b-yQ4
https://www.youtube.com/watch?v=lnbpEfRQwyc
https://www.youtube.com/watch?v=Ilh90vut7jo
https://www.youtube.com/watch?v=CHc1Bu8A9QM
https://www.youtube.com/watch?v=73Ie5z5aBw8
https://www.youtube.com/watch?v=k8PJC1YTelY
https://www.youtube.com/watch?v=iGZ-i6c2OGU
https://www.youtube.com/watch?v=Tj96vlZBPUs
https://www.youtube.com/watch?v=KDE2s9B1eDo
https://www.youtube.com/watch?v=d2E2x-BSKO0
https://www.youtube.com/watch?v=Vvnq-f43v20
https://www.youtube.com/watch?v=OP0TPyBjq7c
https://www.youtube.com/watch?v=sjZ-dbd7FQ0
https://www.youtube.com/watch?v=Z1Z6KPzy0p4
https://www.youtube.com/watch?v=vg0Ko4LnYh4
https://www.youtube.com/watch?v=f0djA_P2rQ8
https://www.youtube.com/watch?v=cC3q0aHIYCU
https://www.youtube.com/watch?v=LHcJTxccNXE
https://www.youtube.com/watch?v=JA7VdwocB6I
https://www.youtube.com/watch?v=Nhmlm15C4eA
https://www.youtube.com/watch?v=rY_suqSxWV8
https://www.youtube.com/watch?v=pI29BFeSmuI
https://www.youtube.com/watch?v=KOgfzQxYIsI
https://www.youtube.com/watch?v=0D5Tc8Op_n4
https://www.youtube.com/watch?v=CxcEWMHNePE
https://www.youtube.com/watch?v=obijefFzq7A
https://www.youtube.com/watch?v=WU5ULpORl3A
https://www.youtube.com/watch?v=IHZwGNq5tb4
https://www.youtube.com/watch?v=EKFqRJUEjEA
https://www.youtube.com/watch?v=I4eb7P9atas
https://www.youtube.com/watch?v=lLxGNWraT4s
https://www.youtube.com/watch?v=zgr98z06KBA
https://www.youtube.com/watch?v=125ox0n1NeQ
https://www.youtube.com/watch?v=fH22LI57SHY
https://www.youtube.com/watch?v=sGTrqDst1Dc
https://www.youtube.com/watch?v=OBg_Li5ErIU
https://www.youtube.com/watch?v=sQIt0HHZ8fw
https://www.youtube.com/watch?v=M6zNwBAYKSM
https://www.youtube.com/watch?v=Qspv6ZH29FA
https://www.youtube.com/watch?v=WuHPgV2Yxbs
https://www.youtube.com/watch?v=hu8khi3clIY
https://www.youtube.com/watch?v=ds5tz7yWBCk
https://www.youtube.com/watch?v=A7xqGo4oeec
https://www.youtube.com/watch?v=FFJDWDhFybQ
https://www.youtube.com/watch?v=YPHA89-RDqg
https://www.youtube.com/watch?v=bUvul182gWs
https://www.youtube.com/watch?v=3gyX7S8dwq0
https://www.youtube.com/watch?v=4wmE8_0odtY
https://www.youtube.com/watch?v=FpTyGD4M-Cs
https://www.youtube.com/watch?v=NinU97Irqi8
https://www.youtube.com/watch?v=m3YF4PTVOWI
https://www.youtube.com/watch?v=IgytzDONMS8
https://www.youtube.com/watch?v=ccOx041__PE
https://www.youtube.com/watch?v=Go_H4eFwDXk
https://www.youtube.com/watch?v=nw2U2V7o_cg
https://www.youtube.com/watch?v=idtYG-SAps4
https://www.youtube.com/watch?v=O8tSaYoR_40
https://www.youtube.com/watch?v=wFeSbJySSfA
https://www.youtube.com/watch?v=BaToTsaDkl4
https://www.youtube.com/watch?v=Qnytk6loeJQ
https://www.youtube.com/watch?v=vr_mAzDD_fA
https://www.youtube.com/watch?v=Pc_EupD2jSA
https://www.youtube.com/watch?v=hku9k2Xvl6E
https://www.youtube.com/watch?v=tovKGVUwfcA
https://www.youtube.com/watch?v=uVW458DyeOM
https://www.youtube.com/watch?v=0Yijhh3Krb4
https://www.youtube.com/watch?v=jLywttFVo54
https://www.youtube.com/watch?v=LNI382U5WH0
https://www.youtube.com/watch?v=4iGXG127GyE
https://www.youtube.com/watch?v=jEec64BueAw
https://www.youtube.com/watch?v=JQT8tGBCX3U
https://www.youtube.com/watch?v=gFqUAiDSLKc
https://www.youtube.com/watch?v=yO-hte4ubig
https://www.youtube.com/watch?v=OaCaeatGiHg
https://www.youtube.com/watch?v=9lKTASGr6aI
https://www.youtube.com/watch?v=_868vk4OPPA
https://www.youtube.com/watch?v=Z97QM0Yx6uk
https://www.youtube.com/watch?v=jVLhMwjkbf0
https://www.youtube.com/watch?v=4oascqjiSJw
https://www.youtube.com/watch?v=Unf3oPkNIzw
https://www.youtube.com/watch?v=-G_-x2tOFCA
https://www.youtube.com/watch?v=pbc5kBSATt0
https://www.youtube.com/watch?v=1MEbuepe0-I
https://www.youtube.com/watch?v=jWqjpi7hcSQ
https://www.youtube.com/watch?v=WWz-VmWIizU
https://www.youtube.com/watch?v=osNDWp5Lxs4
https://www.youtube.com/watch?v=GRr_a8MjmMc
https://www.youtube.com/watch?v=sBgB3iMc0fU
https://www.youtube.com/watch?v=h7SGCGAwSY0
https://www.youtube.com/watch?v=AxUVWd86vLA
https://www.youtube.com/watch?v=Wc_G3GAFyqA
https://www.youtube.com/watch?v=Cxj3ZRJ7NLY
https://www.youtube.com/watch?v=D_jP3ib6XQE
https://www.youtube.com/watch?v=ouFJwOQrPuM
https://www.youtube.com/watch?v=_xpQyzECcCY
https://www.youtube.com/watch?v=Z7k81eWYy_Y
https://www.youtube.com/watch?v=zj4X_Fyxc2o
https://www.youtube.com/watch?v=P30NDHxamLw
https://www.youtube.com/watch?v=VEa0RiU5aeU
https://www.youtube.com/watch?v=4QaomQeGxoo
https://www.youtube.com/watch?v=FSc6ZUeihNw
https://www.youtube.com/watch?v=b4Rc4ReDb0E
https://www.youtube.com/watch?v=Lpe9IxDT7Mg
https://www.youtube.com/watch?v=GOi9s_z1F1I
https://www.youtube.com/watch?v=Uo64UC9rTcE
https://www.youtube.com/watch?v=ddHwQHOtKZc
https://www.youtube.com/watch?v=S-L86KKvXCQ
https://www.youtube.com/watch?v=JjX-TRYRu4c
https://www.youtube.com/watch?v=2rrPI0swTN4
https://www.youtube.com/watch?v=6cqdOXZ-p1k
https://www.youtube.com/watch?v=WgA9Mp5g3Y4
https://www.youtube.com/watch?v=U0HUrZYx6ac
https://www.youtube.com/watch?v=IBcTbZEV4Fo
https://www.youtube.com/watch?v=AG8OpUezp7c
https://www.youtube.com/watch?v=ItFoZ46tqxQ
https://www.youtube.com/watch?v=GaxaPI3Gu28
https://www.youtube.com/watch?v=lKT2fxbijRY
https://www.youtube.com/watch?v=cVA0grbN7hQ
https://www.youtube.com/watch?v=fHe2-hRyTmY
https://www.youtube.com/watch?v=U8rSmAOKTcQ
https://www.youtube.com/watch?v=aNnFRhE4j9Y
https://www.youtube.com/watch?v=sVnFzhkmtHk
https://www.youtube.com/watch?v=S3iZ3Tc-Hnc
https://www.youtube.com/watch?v=2GtgoycXGvY
https://www.youtube.com/watch?v=oWK6Udr2Nh4
https://www.youtube.com/watch?v=kF5t6C41LEk
https://www.youtube.com/watch?v=s9Zp4qz4Vu0
https://www.youtube.com/watch?v=9EOzw3C1BrM
https://www.youtube.com/watch?v=go17ulKugwQ
https://www.youtube.com/watch?v=f0XhiUgMPfA
https://www.youtube.com/watch?v=F73sFEJferQ
https://www.youtube.com/watch?v=OHI6BTcdMBw
https://www.youtube.com/watch?v=AichQ8Wk4ac
https://www.youtube.com/watch?v=CFx6yBhkciU
https://www.youtube.com/watch?v=VjLtkPPksUY
https://www.youtube.com/watch?v=ft07E4HZJ7Q
https://www.youtube.com/watch?v=mcF-bO_Odq4
https://www.youtube.com/watch?v=64y2g7ahof4
https://www.youtube.com/watch?v=aDsKWg_YM7s
https://www.youtube.com/watch?v=0fRpmAYWgJE
https://www.youtube.com/watch?v=Ddp9uGyjtt4
https://www.youtube.com/watch?v=dx-9vznjBSg
https://www.youtube.com/watch?v=mQ7b1Q_KJfU
https://www.youtube.com/watch?v=0tJR95pgM3o
https://www.youtube.com/watch?v=iY3MxLzEQKI
https://www.youtube.com/watch?v=sSXyzOrkdfs
https://www.youtube.com/watch?v=s4RJntKPU1I
https://www.youtube.com/watch?v=zgqUFmzI54E
https://www.youtube.com/watch?v=woiJ0Pbo1b0
https://www.youtube.com/watch?v=KgT9XrH94Nk
https://www.youtube.com/watch?v=oNxu5lt2efw
https://www.youtube.com/watch?v=E2l9L0m1EUA
https://www.youtube.com/watch?v=ZhJE6t9PlZM
https://www.youtube.com/watch?v=qQD13r8kMOQ
https://www.youtube.com/watch?v=oKe6LVmuan4
https://www.youtube.com/watch?v=XJtLW-hPD7w
https://www.youtube.com/watch?v=q3vqX_pVTZs
https://www.youtube.com/watch?v=GjmR9wnd_Dw
https://www.youtube.com/watch?v=bhKcw0w8fNo
https://www.youtube.com/watch?v=2-p0IXWzGrc
https://www.youtube.com/watch?v=IQRoXoiJKQ4
https://www.youtube.com/watch?v=gJl8gVq6Gpo
https://www.youtube.com/watch?v=oXAszTqnnYk
https://www.youtube.com/watch?v=HfihwpdJjBU

@@ -1,230 +0,0 @@
https://www.youtube.com/watch?v=r6dI7depuDc
https://www.youtube.com/watch?v=9Us8X93IpHI
https://www.youtube.com/watch?v=eP9ivGVw7kI
https://www.youtube.com/watch?v=A0deapksgiA
https://www.youtube.com/watch?v=ZLpoV5OTtZk
https://www.youtube.com/watch?v=k4jWWF4BetA
https://www.youtube.com/watch?v=35KOQ4uRIRs
https://www.youtube.com/watch?v=gJ7ROX5Y8hc
https://www.youtube.com/watch?v=tHbkzGWUZog
https://www.youtube.com/watch?v=WWd3g6tHso8
https://www.youtube.com/watch?v=0663OUVIskw
https://www.youtube.com/watch?v=oy-JVwHPaUI
https://www.youtube.com/watch?v=y6G8S8lOUrA
https://www.youtube.com/watch?v=W78no98uQ-U
https://www.youtube.com/watch?v=BA2PP-MGGwE
https://www.youtube.com/watch?v=tTDpFJ1uuPg
https://www.youtube.com/watch?v=0BfC_7PcJB0
https://www.youtube.com/watch?v=0h02m6TUzf8
https://www.youtube.com/watch?v=siHWunTXQ0A
https://www.youtube.com/watch?v=BYC1R04F2Hk
https://www.youtube.com/watch?v=7w-ozZaEyP8
https://www.youtube.com/watch?v=o7dKrpL7BpM
https://www.youtube.com/watch?v=IQyYk7FFCbw
https://www.youtube.com/watch?v=f_Gqgl6cFr0
https://www.youtube.com/watch?v=G3QblS_YOms
https://www.youtube.com/watch?v=DoLGVksNpdM
https://www.youtube.com/watch?v=TT4yzCS4PeY
https://www.youtube.com/watch?v=RIGkw6nxLcc
https://www.youtube.com/watch?v=cWJgvi3UelE
https://www.youtube.com/watch?v=crtRlS9I3ro
https://www.youtube.com/watch?v=6dvBUPqj59c
https://www.youtube.com/watch?v=IfI6GnYGM0I
https://www.youtube.com/watch?v=QnLmgFuVg6w
https://www.youtube.com/watch?v=reouaReX7C4
https://www.youtube.com/watch?v=s1pLvWNsh44
https://www.youtube.com/watch?v=smmhlt3_lws
https://www.youtube.com/watch?v=QgTKDo-NbU8
https://www.youtube.com/watch?v=rOvksymiKqs
https://www.youtube.com/watch?v=l1rVxcAzh1M
https://www.youtube.com/watch?v=_fFuQXF-t3k
https://www.youtube.com/watch?v=BH7R_yuIFKQ
https://www.youtube.com/watch?v=-s89hDTLSjs
https://www.youtube.com/watch?v=4IMfpToOWSo
https://www.youtube.com/watch?v=B0oMlBTEh1U
https://www.youtube.com/watch?v=ylNM15gPhsk
https://www.youtube.com/watch?v=M8ONaAxZ3MU
https://www.youtube.com/watch?v=V51CE9jO4O0
https://www.youtube.com/watch?v=R1Lo5BZtlQw
https://www.youtube.com/watch?v=uqceJK_Iao4
https://www.youtube.com/watch?v=XeSUzWSVk9o
https://www.youtube.com/watch?v=bhenb5eG--I
https://www.youtube.com/watch?v=dpTjA8MrLhI
https://www.youtube.com/watch?v=G2yTkeufjds
https://www.youtube.com/watch?v=o-R_DGU9hpM
https://www.youtube.com/watch?v=L2KUBIDfscI
https://www.youtube.com/watch?v=uLr35eZdFTA
https://www.youtube.com/watch?v=2zIvsCI88hI
https://www.youtube.com/watch?v=zOsscfwC0Mg
https://www.youtube.com/watch?v=c2kh0n2dD7Q
https://www.youtube.com/watch?v=U2nd48Sl3T4
https://www.youtube.com/watch?v=WD99krTKdF4
https://www.youtube.com/watch?v=gfcFr3jruGg
https://www.youtube.com/watch?v=fsnTn1XqBic
https://www.youtube.com/watch?v=SwyLoyzuKm0
https://www.youtube.com/watch?v=pDBToCyX1JE
https://www.youtube.com/watch?v=JNnDcvOlWJc
https://www.youtube.com/watch?v=7FVXJ9nSJhw
https://www.youtube.com/watch?v=j_ndVX1nTWo
https://www.youtube.com/watch?v=twN8USkFwaI
https://www.youtube.com/watch?v=N4P7vW2JA80
https://www.youtube.com/watch?v=YzOTPqp0DQM
https://www.youtube.com/watch?v=aMpYQZTwIi8
https://www.youtube.com/watch?v=H_lzWZl-VEk
https://www.youtube.com/watch?v=OXVeAoTdjHA
https://www.youtube.com/watch?v=17Oin61Mqc4
https://www.youtube.com/watch?v=Cd0ciobBDcM
https://www.youtube.com/watch?v=rgU_5zaM0GU
https://www.youtube.com/watch?v=ll9LATC7_Ec
https://www.youtube.com/watch?v=K89lQsMwKpk
https://www.youtube.com/watch?v=mWcuuWJxLBs
https://www.youtube.com/watch?v=sc9q7JKLAmk
https://www.youtube.com/watch?v=bsu_n3Kljrw
https://www.youtube.com/watch?v=AWT5Itr4iIM
https://www.youtube.com/watch?v=TUbEiHEhahQ
https://www.youtube.com/watch?v=85CB_SgFNE8
https://www.youtube.com/watch?v=rZeEP1suY7Q
https://www.youtube.com/watch?v=aNzVTdaHlpY
https://www.youtube.com/watch?v=QhWVwhe088c
https://www.youtube.com/watch?v=Rrjgb8ug144
https://www.youtube.com/watch?v=ZS1VzG8pMz0
https://www.youtube.com/watch?v=dwqX1IOBO1s
https://www.youtube.com/watch?v=gzq_ipnKzHc
https://www.youtube.com/watch?v=nrbiBE-GOVE
https://www.youtube.com/watch?v=DiPGzyMGw_A
https://www.youtube.com/watch?v=E827rlTAs3o
https://www.youtube.com/watch?v=sZ7HVucYRFM
https://www.youtube.com/watch?v=DJObJpK_wwU
https://www.youtube.com/watch?v=gXj3a_Ldagc
https://www.youtube.com/watch?v=8K5-CDvnefA
https://www.youtube.com/watch?v=AmphO8KrvZU
https://www.youtube.com/watch?v=7cDa5ZVXyRo
https://www.youtube.com/watch?v=NrFI656OE04
https://www.youtube.com/watch?v=-dfb8v4dyts
https://www.youtube.com/watch?v=XHSL4tSuT-0
https://www.youtube.com/watch?v=LUWic9zqq1M
https://www.youtube.com/watch?v=9X04dHWyu3Y
https://www.youtube.com/watch?v=FZmutKzxPhM
https://www.youtube.com/watch?v=Qznqxnn6YM0
https://www.youtube.com/watch?v=CJzeNfEkEdE
https://www.youtube.com/watch?v=Rc2SS3dIFkk
https://www.youtube.com/watch?v=fYNqL3tgPj0
https://www.youtube.com/watch?v=VC42A4FZSWw
https://www.youtube.com/watch?v=tew6A1xC5G4
https://www.youtube.com/watch?v=06x_heN_39g
https://www.youtube.com/watch?v=cPOXcOs2cGg
https://www.youtube.com/watch?v=PyRvVKbbO68
https://www.youtube.com/watch?v=ggOvUskvz9k
https://www.youtube.com/watch?v=BozqO_Ogy5w
https://www.youtube.com/watch?v=wmdb0RzvG7c
https://www.youtube.com/watch?v=FaxJYMPJ-pg
https://www.youtube.com/watch?v=ZpMzEGafoNw
https://www.youtube.com/watch?v=MP5VOBvP1Fg
https://www.youtube.com/watch?v=9EHHxVYRjiY
https://www.youtube.com/watch?v=0ApIbLKs-JU
https://www.youtube.com/watch?v=TWGRgM4fk4M
https://www.youtube.com/watch?v=eevPwoV7RvA
https://www.youtube.com/watch?v=brisSYFS31M
https://www.youtube.com/watch?v=SazhBlQTUhU
https://www.youtube.com/watch?v=3fscCoTP1iU
https://www.youtube.com/watch?v=pVwrZXsiCgE
https://www.youtube.com/watch?v=ztYTv43GvOg
https://www.youtube.com/watch?v=gPnyId2TCyQ
https://www.youtube.com/watch?v=RugwiGqJD60
https://www.youtube.com/watch?v=DAdf7ikSE2M
https://www.youtube.com/watch?v=zHlWqwuktK8
https://www.youtube.com/watch?v=t1JHMHPLJ54
https://www.youtube.com/watch?v=iZC1L2uf-2I
https://www.youtube.com/watch?v=gnyv8uapleQ
https://www.youtube.com/watch?v=HH-oQrhhbgY
https://www.youtube.com/watch?v=8rOfqKXa7F4
https://www.youtube.com/watch?v=yOGlDAJZH2M
https://www.youtube.com/watch?v=JngRtwK7gy4
https://www.youtube.com/watch?v=zZ-oreEX2qo
https://www.youtube.com/watch?v=j690eWDthBo
https://www.youtube.com/watch?v=HBqG4BtYxRo
https://www.youtube.com/watch?v=q2NzF5hj1yw
https://www.youtube.com/watch?v=_Pnm5SJA4E0
https://www.youtube.com/watch?v=mWKP6lmKtew
https://www.youtube.com/watch?v=ktCKwqFdaUU
https://www.youtube.com/watch?v=V4x5yOm9BFk
https://www.youtube.com/watch?v=uEIDIppWvAY
https://www.youtube.com/watch?v=R5KOBpyJmVs
https://www.youtube.com/watch?v=JFzm9o9l-4Y
https://www.youtube.com/watch?v=uf4ybmScHUQ
https://www.youtube.com/watch?v=fjLNYgfoYqs
https://www.youtube.com/watch?v=IWVscbhcsho
https://www.youtube.com/watch?v=zDSLWV9KypM
https://www.youtube.com/watch?v=vZyB77TU1zU
https://www.youtube.com/watch?v=608BY6toahw
https://www.youtube.com/watch?v=WHGPRC2K7mY
https://www.youtube.com/watch?v=3pEd0wcg_C4
https://www.youtube.com/watch?v=AtzcGUWPwII
https://www.youtube.com/watch?v=ePUebx75iRQ
https://www.youtube.com/watch?v=IWdM9uq3ikk
https://www.youtube.com/watch?v=TeYCOtEecYU
https://www.youtube.com/watch?v=ncOWfpeYXFY
https://www.youtube.com/watch?v=x1RUjs9k-1c
https://www.youtube.com/watch?v=hS5VMbTEd1c
https://www.youtube.com/watch?v=XkMM9BQBDCQ
https://www.youtube.com/watch?v=zh-rpPHJWG4
https://www.youtube.com/watch?v=bnVm7K8e1WY
https://www.youtube.com/watch?v=o8nHQ3JXnaY
https://www.youtube.com/watch?v=OLC4IeYEEjI
https://www.youtube.com/watch?v=2sE2osCi4JU
https://www.youtube.com/watch?v=lHFz0A8uiec
https://www.youtube.com/watch?v=H4zDTRO9Z58
https://www.youtube.com/watch?v=IiDRZtiSqiM
https://www.youtube.com/watch?v=Ao7U1fB_yP8
https://www.youtube.com/watch?v=qe6vAkiGXac
https://www.youtube.com/watch?v=QWsUmDesiJc
https://www.youtube.com/watch?v=ZLMHYYFGT4g
https://www.youtube.com/watch?v=dXKBgnRpfj4
https://www.youtube.com/watch?v=CFyw49Fk_4U
https://www.youtube.com/watch?v=qd0Ksf1gO14
https://www.youtube.com/watch?v=taUnDYQ6UI8
https://www.youtube.com/watch?v=L7HY6r3C9T0
https://www.youtube.com/watch?v=sTl6GtA20_M
https://www.youtube.com/watch?v=diL-9EB0TyM
https://www.youtube.com/watch?v=ymIOTE1PjZg
https://www.youtube.com/watch?v=_ZbtbsVaOPU
https://www.youtube.com/watch?v=9sGDtfUiktk
https://www.youtube.com/watch?v=vlVzSCef55I
https://www.youtube.com/watch?v=Y7tG3GrQvJ4
https://www.youtube.com/watch?v=9JyI1FWv5yM
https://www.youtube.com/watch?v=obKqJaSnQK0
https://www.youtube.com/watch?v=XR426H2wVgU
https://www.youtube.com/watch?v=Vu2C1pbIFWA
https://www.youtube.com/watch?v=XA4Ile920PA
https://www.youtube.com/watch?v=6qHXBy9EJvQ
https://www.youtube.com/watch?v=0WdmMfk0e6Q
https://www.youtube.com/watch?v=andVPsILYqw
https://www.youtube.com/watch?v=4Ttoi0W-wAM
https://www.youtube.com/watch?v=TID0TW7QRm0
https://www.youtube.com/watch?v=p4L5_JfbL7w
https://www.youtube.com/watch?v=DUj29F5ZhEE
https://www.youtube.com/watch?v=9T8dkxDC42c
https://www.youtube.com/watch?v=ECCQhQTZ1SE
https://www.youtube.com/watch?v=JfrYNOJMvRg
https://www.youtube.com/watch?v=FJcrRD8yI4E
https://www.youtube.com/watch?v=N85iiQTKsXw
https://www.youtube.com/watch?v=Yyc6vE-0OgI
https://www.youtube.com/watch?v=-z9Kye8DBM0
https://www.youtube.com/watch?v=ulSEa7ZOhWA
https://www.youtube.com/watch?v=SE52l6y7slI
https://www.youtube.com/watch?v=oErU3HKAilo
https://www.youtube.com/watch?v=NEYsomAIa1g
https://www.youtube.com/watch?v=4H5fsY5_MiA
https://www.youtube.com/watch?v=Lo5z_qGuQSI
https://www.youtube.com/watch?v=V2jfRLJsMCE
https://www.youtube.com/watch?v=aZxQruhKMDY
https://www.youtube.com/watch?v=ZMRcCmN25sA
https://www.youtube.com/watch?v=O-XsiSCIK7c
https://www.youtube.com/watch?v=-wl21uhUEx4
https://www.youtube.com/watch?v=WfbrXc-EW54
https://www.youtube.com/watch?v=M0yCTg5daZI
https://www.youtube.com/watch?v=UhT2sev4HK8
https://www.youtube.com/watch?v=4v1AraoTz3w
https://www.youtube.com/watch?v=zTTU1MEiIbY
https://www.youtube.com/watch?v=pHb2ZMXtAiY
https://www.youtube.com/watch?v=mDUf4T5lv8E

@@ -1,237 +0,0 @@
https://www.youtube.com/watch?v=-OYTrvsnRSQ
https://www.youtube.com/watch?v=HR-iLG6B8ds
https://www.youtube.com/watch?v=Gjtg8xAMbdU
https://www.youtube.com/watch?v=AKWX1wR4sy4
https://www.youtube.com/watch?v=TTmAZRt8nK0
https://www.youtube.com/watch?v=o242B-tnea8
https://www.youtube.com/watch?v=wg7AyXZYbrs
https://www.youtube.com/watch?v=VSbWDAER9_A
https://www.youtube.com/watch?v=k20mrsFg9-Y
https://www.youtube.com/watch?v=vtkwDMx-tV8
https://www.youtube.com/watch?v=4dvawM7D0hI
https://www.youtube.com/watch?v=eD0JzemrL20
https://www.youtube.com/watch?v=etqVGZkJ2Bg
https://www.youtube.com/watch?v=LEactpOVdSM
https://www.youtube.com/watch?v=IAro_PZ6BQo
https://www.youtube.com/watch?v=RbW6o64ROvo
https://www.youtube.com/watch?v=RRoinZOWSS4
https://www.youtube.com/watch?v=KQCfiswxjZI
https://www.youtube.com/watch?v=WqXw1Fpqpg8
https://www.youtube.com/watch?v=WdueCBIyAfQ
https://www.youtube.com/watch?v=r1KdHltNgaY
https://www.youtube.com/watch?v=RryMChxFH58
https://www.youtube.com/watch?v=A8wuUQt1Wik
https://www.youtube.com/watch?v=rCof-kGXOhk
https://www.youtube.com/watch?v=XX8ZxKFqoms
https://www.youtube.com/watch?v=T28iUbhDyZc
https://www.youtube.com/watch?v=F94negmd_Zg
https://www.youtube.com/watch?v=GjKANaIYvio
https://www.youtube.com/watch?v=XHI4mfWv0NU
https://www.youtube.com/watch?v=IKF_ZKhrxd4
https://www.youtube.com/watch?v=al6SGQh0e0w
https://www.youtube.com/watch?v=SSCnx2HXSSw
https://www.youtube.com/watch?v=wSL896yNi_g
https://www.youtube.com/watch?v=XHugXebbTkw
https://www.youtube.com/watch?v=TtWI3cg-97I
https://www.youtube.com/watch?v=CIi5Khk0w-E
https://www.youtube.com/watch?v=5zitlGXm7i4
https://www.youtube.com/watch?v=RTGPniQnj-4
https://www.youtube.com/watch?v=PygTOgWy9NM
https://www.youtube.com/watch?v=08jdDCM1Jt4
https://www.youtube.com/watch?v=s5mWxWlfmpE
https://www.youtube.com/watch?v=Oz5TobEFC_M
https://www.youtube.com/watch?v=jbdsSlo6EeE
https://www.youtube.com/watch?v=sUeJ4XDEQPA
https://www.youtube.com/watch?v=-QNIsfH1W6o
https://www.youtube.com/watch?v=khefdNkdrUA
https://www.youtube.com/watch?v=RerdE3DEPZk
https://www.youtube.com/watch?v=axSJE2GJCMY
https://www.youtube.com/watch?v=TalhxWW_AWo
https://www.youtube.com/watch?v=wlYmRMxqrKs
https://www.youtube.com/watch?v=tQtalMb-yi0
https://www.youtube.com/watch?v=N2zw3_qroes
https://www.youtube.com/watch?v=f2ul_dsl4YM
https://www.youtube.com/watch?v=Dn-l3hv54dg
https://www.youtube.com/watch?v=Tmk-kKInF1Y
https://www.youtube.com/watch?v=dN6FD2RQXxQ
https://www.youtube.com/watch?v=BZKnqQYvwcE
https://www.youtube.com/watch?v=YB0P9HLgQuI
https://www.youtube.com/watch?v=LCP6qXwwOM4
https://www.youtube.com/watch?v=c7xf3Ojj7KU
https://www.youtube.com/watch?v=gUg5WFE1UQg
https://www.youtube.com/watch?v=oG4j-rydlt0
https://www.youtube.com/watch?v=NqKi-3E9W1Y
https://www.youtube.com/watch?v=mcFkcokKCNo
https://www.youtube.com/watch?v=U9pvsdkU3OE
https://www.youtube.com/watch?v=irL__2bmVBI
https://www.youtube.com/watch?v=aBwkS_wA9nM
https://www.youtube.com/watch?v=JBM5DEEyL-o
https://www.youtube.com/watch?v=H5rMXNG3gyc
https://www.youtube.com/watch?v=Y-03XzLAUDw
https://www.youtube.com/watch?v=xsHtpbobVkM
https://www.youtube.com/watch?v=hITnd7BSG-U
https://www.youtube.com/watch?v=LdKGVfUmpYk
https://www.youtube.com/watch?v=QtBGTQZYNyk
https://www.youtube.com/watch?v=BNF8-3G8vas
https://www.youtube.com/watch?v=B6aT-u__gJo
https://www.youtube.com/watch?v=oIGTEmHwjLg
https://www.youtube.com/watch?v=0VmIV7q34fM
https://www.youtube.com/watch?v=0dSCNkc1va4
https://www.youtube.com/watch?v=98lsZ-wo1ek
https://www.youtube.com/watch?v=8U8nVbfNxlg
https://www.youtube.com/watch?v=9VVVNWk92d4
https://www.youtube.com/watch?v=Ci04JrlD4us
https://www.youtube.com/watch?v=mx-AwWwdQKQ
https://www.youtube.com/watch?v=miNPNWSCQE0
https://www.youtube.com/watch?v=FaJshsUefyQ
https://www.youtube.com/watch?v=TdvGEDxdsv4
https://www.youtube.com/watch?v=1Hrb0g9LTsY
https://www.youtube.com/watch?v=1gERK4_AMgU
https://www.youtube.com/watch?v=6F36ChawCh8
https://www.youtube.com/watch?v=7fCT8l5PA-0
https://www.youtube.com/watch?v=35qUGqnd3nE
https://www.youtube.com/watch?v=SCQenAiVYBA
https://www.youtube.com/watch?v=MhGh_mCqrJU
https://www.youtube.com/watch?v=bOdjMBx60R0
https://www.youtube.com/watch?v=N8gNk6u7nYQ
https://www.youtube.com/watch?v=Y0_-D9Ox9ng
https://www.youtube.com/watch?v=1GPTxwVbwvY
https://www.youtube.com/watch?v=Ewd0UuOEjPY
https://www.youtube.com/watch?v=Qw5oiYJFuSk
https://www.youtube.com/watch?v=55EaCTJ1pmE
https://www.youtube.com/watch?v=cuSZk8Pa8OI
https://www.youtube.com/watch?v=Xmu8YF0Bjyw
https://www.youtube.com/watch?v=ObyY3uEp7pA
https://www.youtube.com/watch?v=SB2092RkzhQ
https://www.youtube.com/watch?v=Xt-GvsySsOM
https://www.youtube.com/watch?v=FdyYRbOf0mQ
https://www.youtube.com/watch?v=9j4iaA9FcMg
https://www.youtube.com/watch?v=9Qi5sQpZGME
https://www.youtube.com/watch?v=d5qyho0ensI
https://www.youtube.com/watch?v=smlSGLoB8eI
https://www.youtube.com/watch?v=fJeDDk9Nzpk
https://www.youtube.com/watch?v=LfXLNUNqCOY
https://www.youtube.com/watch?v=ZEEY7AEA3z8
https://www.youtube.com/watch?v=lEXGzNQtVGw
https://www.youtube.com/watch?v=MXv1fBNlMB0
https://www.youtube.com/watch?v=7p5MNHXFTik
https://www.youtube.com/watch?v=d_4APkAy8Zk
https://www.youtube.com/watch?v=l6DpX3Bszh8
https://www.youtube.com/watch?v=rc01sE8tfjo
https://www.youtube.com/watch?v=DY8BBLB_OuY
https://www.youtube.com/watch?v=NaJxE3R6fZU
https://www.youtube.com/watch?v=TeWVHkxx4C4
https://www.youtube.com/watch?v=vbfYhTkX2FE
https://www.youtube.com/watch?v=TJmm555VGaU
https://www.youtube.com/watch?v=aunY-e-Jim8
https://www.youtube.com/watch?v=HnLAm7OeJZc
https://www.youtube.com/watch?v=BTihVgB3j5U
https://www.youtube.com/watch?v=5Fh8bkdgIvM
https://www.youtube.com/watch?v=T9aj6Hc7tCI
https://www.youtube.com/watch?v=bJNjCcAhajI
https://www.youtube.com/watch?v=7dfLVKKRQ2U
https://www.youtube.com/watch?v=zXZSnUKaQt8
https://www.youtube.com/watch?v=i6ZSMTAD1fQ
https://www.youtube.com/watch?v=koczyZ9jaGA
https://www.youtube.com/watch?v=uatoQB05WBY
https://www.youtube.com/watch?v=Xk1cUTg-1CY
https://www.youtube.com/watch?v=kGZS4pyqzOM
https://www.youtube.com/watch?v=jbyfwRF3qt8
https://www.youtube.com/watch?v=i8VG_QT81cI
https://www.youtube.com/watch?v=0j8h1RiYMRA
https://www.youtube.com/watch?v=bhAej3dGUf8
https://www.youtube.com/watch?v=KVMWvlI_Clg
https://www.youtube.com/watch?v=1zZwJRC7MRU
https://www.youtube.com/watch?v=e3nXSZAqVuM
https://www.youtube.com/watch?v=g_z_q-ylqBs
https://www.youtube.com/watch?v=TUklrg9ecVU
https://www.youtube.com/watch?v=uwwGeFOwabw
https://www.youtube.com/watch?v=zNyTvA0rMVs
https://www.youtube.com/watch?v=1xCZZGySdjY
https://www.youtube.com/watch?v=WoDEdNe_JDg
https://www.youtube.com/watch?v=-r01Nyjmltc
https://www.youtube.com/watch?v=ilN94X3Qmug
https://www.youtube.com/watch?v=3Zi2-g42lio
https://www.youtube.com/watch?v=MiLjhVl2FCY
https://www.youtube.com/watch?v=oZ9bdUqbKIU
https://www.youtube.com/watch?v=1mCjN-v4jvo
https://www.youtube.com/watch?v=oiRQ8weGi88
https://www.youtube.com/watch?v=IVYU6O6uFJQ
https://www.youtube.com/watch?v=IGZGomDqxCE
https://www.youtube.com/watch?v=qlc7mEH_BHs
https://www.youtube.com/watch?v=GBcCT2bKgjY
https://www.youtube.com/watch?v=Gd3OZTuPeuc
https://www.youtube.com/watch?v=UQftIucKJyE
https://www.youtube.com/watch?v=viPomNeDwb4
https://www.youtube.com/watch?v=eL3kOZqhl1Y
https://www.youtube.com/watch?v=0qo2q_lJpyc
https://www.youtube.com/watch?v=aNQS7XwI4uw
https://www.youtube.com/watch?v=lTa_0IrgL0k
https://www.youtube.com/watch?v=Nqvu51Z_uLc
https://www.youtube.com/watch?v=QEZ30dVgqyE
https://www.youtube.com/watch?v=G_4jcOwjjOI
https://www.youtube.com/watch?v=EXs553jIi-M
https://www.youtube.com/watch?v=e56_vnN12G4
https://www.youtube.com/watch?v=uBfhO0pMiv0
https://www.youtube.com/watch?v=ehG4FOaLqS0
https://www.youtube.com/watch?v=tQ6qgJwN1m8
https://www.youtube.com/watch?v=Qs8kAzVmaCA
https://www.youtube.com/watch?v=8ucB_qu1Inw
https://www.youtube.com/watch?v=OuiJhSc7r74
https://www.youtube.com/watch?v=o4ozyDZugJc
https://www.youtube.com/watch?v=io4Ym8HqI2o
https://www.youtube.com/watch?v=bpstDSeSls4
https://www.youtube.com/watch?v=vn9I2-twWKE
https://www.youtube.com/watch?v=-LZeoEcLXlk
https://www.youtube.com/watch?v=-I1qtCjwaBE
https://www.youtube.com/watch?v=AR3YQYvmMwE
https://www.youtube.com/watch?v=eQT4GZ_EqhQ
https://www.youtube.com/watch?v=i7voHoNZM9M
https://www.youtube.com/watch?v=Op1UJ5zciPk
https://www.youtube.com/watch?v=wMVX8tpg4nA
https://www.youtube.com/watch?v=izjTgLIYYs0
https://www.youtube.com/watch?v=Pu1nRr-iZ6A
https://www.youtube.com/watch?v=PGex6tbXQPE
https://www.youtube.com/watch?v=H7HR796PVWg
https://www.youtube.com/watch?v=WZJccvWFoNo
https://www.youtube.com/watch?v=JLibrVW9_T4
https://www.youtube.com/watch?v=7K1FnxElCug
https://www.youtube.com/watch?v=_iZ47vuEtaY
https://www.youtube.com/watch?v=5aafY8hGrUc
https://www.youtube.com/watch?v=3oxXxruuuuM
https://www.youtube.com/watch?v=eXRuojc_GLA
https://www.youtube.com/watch?v=d_qfc4o1RUI
https://www.youtube.com/watch?v=pvB1S3KVFhE
https://www.youtube.com/watch?v=_1X7caqbvkA
https://www.youtube.com/watch?v=XmjibdYAvLo
https://www.youtube.com/watch?v=aGsl9NGHh-E
https://www.youtube.com/watch?v=R_Z4sTgCK4k
https://www.youtube.com/watch?v=v7KHGsg6w4s
https://www.youtube.com/watch?v=aX6aGQufM3w
https://www.youtube.com/watch?v=yQZ94f7WMhc
https://www.youtube.com/watch?v=6i975zYY6eE
https://www.youtube.com/watch?v=RbbFCphAh0c
https://www.youtube.com/watch?v=wN_S8q03iOI
https://www.youtube.com/watch?v=GlhNWhIxzOE
https://www.youtube.com/watch?v=N7GlgQjNEOw
https://www.youtube.com/watch?v=Dyo7vQvXW_w
https://www.youtube.com/watch?v=1vHGsrpdRT8
https://www.youtube.com/watch?v=hQhAIi1TT3s
https://www.youtube.com/watch?v=uJ5DmRIYGNo
https://www.youtube.com/watch?v=zT7ScFhOymY
https://www.youtube.com/watch?v=8SP-XcTntTI
https://www.youtube.com/watch?v=Wh8UZ0DNmmc
https://www.youtube.com/watch?v=IMo-p2hQVcc
https://www.youtube.com/watch?v=yHJ50ZRTN3E
https://www.youtube.com/watch?v=25JPZbxyfn4
https://www.youtube.com/watch?v=BzqkagVVqN8
https://www.youtube.com/watch?v=IBdX5dleUMg
https://www.youtube.com/watch?v=nAqV1-LO8n0
https://www.youtube.com/watch?v=SFFe8JThkeg
https://www.youtube.com/watch?v=Kr5xy7Pqe9s
https://www.youtube.com/watch?v=r1kBEMb3wx8
https://www.youtube.com/watch?v=Nc-iL85MTs8
https://www.youtube.com/watch?v=ULC2AExXG4k
https://www.youtube.com/watch?v=kPoBOD3qbR8
https://www.youtube.com/watch?v=_lZ91R7KZpQ
https://www.youtube.com/watch?v=rCpUpTozlbE

@@ -1,233 +0,0 @@
https://www.youtube.com/watch?v=6xG55KCOw9c
https://www.youtube.com/watch?v=THIhR42o5Yo
https://www.youtube.com/watch?v=PueGS2ovb1k
https://www.youtube.com/watch?v=3ULU8ygv2uI
https://www.youtube.com/watch?v=vwmzA7puaag
https://www.youtube.com/watch?v=REdLuU2aKlE
https://www.youtube.com/watch?v=MmrsXGdjCN4
https://www.youtube.com/watch?v=xkwPcTLBNYM
https://www.youtube.com/watch?v=-IBaWMv16RU
https://www.youtube.com/watch?v=Sq9QEsAOTkM
https://www.youtube.com/watch?v=V2EGLGgqYkI
https://www.youtube.com/watch?v=QDafrUdxXpM
https://www.youtube.com/watch?v=LOP7GVSzlcY
https://www.youtube.com/watch?v=uZoyBTh10S8
https://www.youtube.com/watch?v=tX8jPSql8YU
https://www.youtube.com/watch?v=hiOy1Gf-3qY
https://www.youtube.com/watch?v=7piyNpPpESg
https://www.youtube.com/watch?v=zbLPgrDB3yI
https://www.youtube.com/watch?v=bIZIXRHyqoo
https://www.youtube.com/watch?v=JUv89Ehkoj0
https://www.youtube.com/watch?v=Hi2L2OXSPMQ
https://www.youtube.com/watch?v=CiIFLJPAzx4
https://www.youtube.com/watch?v=EFvv5hl9Cbo
https://www.youtube.com/watch?v=6SybA6YL2Dk
https://www.youtube.com/watch?v=diesIlZaYwg
https://www.youtube.com/watch?v=f9fQ-3ZKMW4
https://www.youtube.com/watch?v=g3gIt_8pNFw
https://www.youtube.com/watch?v=WOOlnWuo5cc
https://www.youtube.com/watch?v=6saf_WUItTs
https://www.youtube.com/watch?v=yDTQvqANLwA
https://www.youtube.com/watch?v=2rTMHKBYkak
https://www.youtube.com/watch?v=tT3W6GP6cCA
https://www.youtube.com/watch?v=tz9GT0P0ryA
https://www.youtube.com/watch?v=eojre2rUMqw
https://www.youtube.com/watch?v=0Zzl3HzhYz0
https://www.youtube.com/watch?v=vTp1auScNMk
https://www.youtube.com/watch?v=oBWZjKR9ZeQ
https://www.youtube.com/watch?v=Kk1xwtIV8vs
https://www.youtube.com/watch?v=v80YwdDGoeA
https://www.youtube.com/watch?v=RTwCoLuVEkk
https://www.youtube.com/watch?v=O_eVwq5srVs
https://www.youtube.com/watch?v=4y6sP0QP9fw
https://www.youtube.com/watch?v=8175ZvSPjtI
https://www.youtube.com/watch?v=p5uEXSimNHw
https://www.youtube.com/watch?v=tx_ezJgB-1U
https://www.youtube.com/watch?v=d61Y1IQjREI
https://www.youtube.com/watch?v=rjA6vEh2VcM
https://www.youtube.com/watch?v=M922Dq0aAMo
https://www.youtube.com/watch?v=kr__8j-30GI
https://www.youtube.com/watch?v=O9QntgWyKMw
https://www.youtube.com/watch?v=X3T-yVZnSwE
https://www.youtube.com/watch?v=cu32CcPzNG4
https://www.youtube.com/watch?v=EFksUEbPBSg
https://www.youtube.com/watch?v=b_ZTwMM5wU0
https://www.youtube.com/watch?v=g5QLW6LeJ3Q
https://www.youtube.com/watch?v=IxhhDb0Ap1g
https://www.youtube.com/watch?v=Hk0rRNw6Hao
https://www.youtube.com/watch?v=Zsm80SgJRBQ
https://www.youtube.com/watch?v=DpWBCMbE7aE
https://www.youtube.com/watch?v=6LWXnVr-9t8
https://www.youtube.com/watch?v=8qWVLCpRShE
https://www.youtube.com/watch?v=AbHEP5JHusQ
https://www.youtube.com/watch?v=rbRn3rUhX4E
https://www.youtube.com/watch?v=8b5_qEwmUu8
https://www.youtube.com/watch?v=gsKssuz8tPA
https://www.youtube.com/watch?v=pwEtEXQ6PVw
https://www.youtube.com/watch?v=v7rFBX9QOpE
https://www.youtube.com/watch?v=oxxRgi8a120
https://www.youtube.com/watch?v=LRtEUdjrQqg
https://www.youtube.com/watch?v=unkTCzy1qrA
https://www.youtube.com/watch?v=Cd7912woWsE
https://www.youtube.com/watch?v=w5LMl9o3Ofs
https://www.youtube.com/watch?v=_dqILnERIXQ
https://www.youtube.com/watch?v=s9RnwZ7IaaA
https://www.youtube.com/watch?v=ftksxfcG4V0
https://www.youtube.com/watch?v=ZWr0tbXySlg
https://www.youtube.com/watch?v=e-BCNcMv2j0
https://www.youtube.com/watch?v=mewfu8lafy8
https://www.youtube.com/watch?v=VWG7L3c_5J8
https://www.youtube.com/watch?v=JXZXNFLQMHE
https://www.youtube.com/watch?v=fb3zbGCVL6M
https://www.youtube.com/watch?v=uN4p8E-AZF0
https://www.youtube.com/watch?v=SgFtJaJoX8Y
https://www.youtube.com/watch?v=WvAs0s7DVYg
https://www.youtube.com/watch?v=t799a5XUSAU
https://www.youtube.com/watch?v=ljyY1LjK_ZE
https://www.youtube.com/watch?v=ArD55SLC62E
https://www.youtube.com/watch?v=cvpGOFUsVnI
https://www.youtube.com/watch?v=SZXg328Mzsk
https://www.youtube.com/watch?v=-BoZ1gEAC6g
https://www.youtube.com/watch?v=VQzU4Sy-cQQ
https://www.youtube.com/watch?v=73lYz3r3XZY
https://www.youtube.com/watch?v=JFrABFhjAMk
https://www.youtube.com/watch?v=sN6qndqz0KQ
https://www.youtube.com/watch?v=s0_MmWg2m8A
https://www.youtube.com/watch?v=I0GtLj2q5kQ
https://www.youtube.com/watch?v=3tJ0S7ciHRg
https://www.youtube.com/watch?v=hc_7bu1togM
https://www.youtube.com/watch?v=gFbL3pzkNEM
https://www.youtube.com/watch?v=I8R94gDqiGE
https://www.youtube.com/watch?v=TdErMRIxRqY
https://www.youtube.com/watch?v=49JqAHear9E
https://www.youtube.com/watch?v=Y6IbKjhHFSg
https://www.youtube.com/watch?v=-DluaoC73Oo
https://www.youtube.com/watch?v=6GAc1HfTQX8
https://www.youtube.com/watch?v=zdGkKCiKYuE
https://www.youtube.com/watch?v=1oUH8LWmM0I
https://www.youtube.com/watch?v=TNf_8J8LoM0
https://www.youtube.com/watch?v=kXcjZ2BkE2Y
https://www.youtube.com/watch?v=dgK8hbT2D3Y
https://www.youtube.com/watch?v=H1sdanVzblg
https://www.youtube.com/watch?v=Y4HSgvsz-AI
https://www.youtube.com/watch?v=YvBfHVPpBmw
https://www.youtube.com/watch?v=sSxhG5qH38Q
https://www.youtube.com/watch?v=x4zz7xAa-fM
https://www.youtube.com/watch?v=nOPm3XAlxZg
https://www.youtube.com/watch?v=7Yor7ci8noM
https://www.youtube.com/watch?v=BnJ4BDxlgSM
https://www.youtube.com/watch?v=j4otcsHfzrE
https://www.youtube.com/watch?v=lIHDioyC4Q4
https://www.youtube.com/watch?v=e40TGdOO1JU
https://www.youtube.com/watch?v=iKnbegCExns
https://www.youtube.com/watch?v=ao1TeLrOPPQ
https://www.youtube.com/watch?v=Lx_UnKWJyLE
https://www.youtube.com/watch?v=nDtgGczwIPY
https://www.youtube.com/watch?v=0bJ2mfxtmjg
https://www.youtube.com/watch?v=cbG1GetVqyw
https://www.youtube.com/watch?v=gIONn9bp8_w
https://www.youtube.com/watch?v=vePMmBM-IVU
https://www.youtube.com/watch?v=xP9cwmJ5dog
https://www.youtube.com/watch?v=uxYeke3bLMk
https://www.youtube.com/watch?v=eCze8sLvmcE
https://www.youtube.com/watch?v=EcBx2S-x3Hs
https://www.youtube.com/watch?v=zO3rfSDuBFg
https://www.youtube.com/watch?v=0RtavDSBnt4
https://www.youtube.com/watch?v=8EojjO04ahA
https://www.youtube.com/watch?v=UbWPWyl8xWQ
https://www.youtube.com/watch?v=Ocwj3RfhTwI
https://www.youtube.com/watch?v=8EOo1DTKT98
https://www.youtube.com/watch?v=7Rh7MjD6LLQ
https://www.youtube.com/watch?v=_C3FNmY1IxI
https://www.youtube.com/watch?v=yvQXwo4w3_0
https://www.youtube.com/watch?v=PNVWUdB653k
https://www.youtube.com/watch?v=-DWQIrKbrEc
https://www.youtube.com/watch?v=1MgfOJ9ILCo
https://www.youtube.com/watch?v=og-mP2_bjcE
https://www.youtube.com/watch?v=Gar0-Yc_gpY
https://www.youtube.com/watch?v=OAyBcaFI56I
https://www.youtube.com/watch?v=5T1AD2jE6Kg
https://www.youtube.com/watch?v=sECZvLhjvPQ
https://www.youtube.com/watch?v=f1YdQbcFe3k
https://www.youtube.com/watch?v=B00d1ZrHlgc
https://www.youtube.com/watch?v=s05OP79dB28
https://www.youtube.com/watch?v=6e-ID8Wxbjo
https://www.youtube.com/watch?v=64Uihh-m0LE
https://www.youtube.com/watch?v=hDc6FPsH7h4
https://www.youtube.com/watch?v=HDiJLqx3Luk
https://www.youtube.com/watch?v=wcy3iNJXAJs
https://www.youtube.com/watch?v=rV39f1xY040
https://www.youtube.com/watch?v=A8oTnrChwoc
https://www.youtube.com/watch?v=xmjPzr1PQG8
https://www.youtube.com/watch?v=SlYjBVi4LeM
https://www.youtube.com/watch?v=m7hq3HU1x1c
https://www.youtube.com/watch?v=Ycdfq5SVgYs
https://www.youtube.com/watch?v=Lgy5IT6V1II
https://www.youtube.com/watch?v=ivPrbp3Ef0M
https://www.youtube.com/watch?v=xS8T1nXqibQ
https://www.youtube.com/watch?v=DWXBC9Ud32Y
https://www.youtube.com/watch?v=ZQdxYJkbD6s
https://www.youtube.com/watch?v=xkB7ra_ZcCM
https://www.youtube.com/watch?v=ugCKpZwt7-Q
https://www.youtube.com/watch?v=CHXI5J4V4AE
https://www.youtube.com/watch?v=Jt7_FFHd1Uw
https://www.youtube.com/watch?v=3aKlZpFAI3w
https://www.youtube.com/watch?v=i_Y7of6rGy8
https://www.youtube.com/watch?v=YUp3RSlqBnk
https://www.youtube.com/watch?v=zRmm0-IMV-0
https://www.youtube.com/watch?v=9IlBbSIQQes
https://www.youtube.com/watch?v=LhaDO3MwY08
https://www.youtube.com/watch?v=GIMPGEcvats
https://www.youtube.com/watch?v=tJjwfD-adPE
https://www.youtube.com/watch?v=tCyYgZr6CHA
https://www.youtube.com/watch?v=O2IuSn7abLQ
https://www.youtube.com/watch?v=vWAIK55GrSQ
https://www.youtube.com/watch?v=X0Pwphld4l8
https://www.youtube.com/watch?v=Men_VTPKzRk
https://www.youtube.com/watch?v=4N1QO35qeAw
https://www.youtube.com/watch?v=mDyz1zC5mV4
https://www.youtube.com/watch?v=IwOpp57oQa8
https://www.youtube.com/watch?v=rpBONirrpFY
https://www.youtube.com/watch?v=1nPRqTVWcXo
https://www.youtube.com/watch?v=qflzvf0P8Go
https://www.youtube.com/watch?v=TbLnKnd55KI
https://www.youtube.com/watch?v=jA3nz1RGhoQ
https://www.youtube.com/watch?v=mqx4w9RxyC8
https://www.youtube.com/watch?v=mqoJr3qL98Q
https://www.youtube.com/watch?v=KTxnalxOBq4
https://www.youtube.com/watch?v=ghOcpZ_n3ck
https://www.youtube.com/watch?v=dReCYPaWB28
https://www.youtube.com/watch?v=XY79-cpbFME
https://www.youtube.com/watch?v=jwVwgGtdoAc
https://www.youtube.com/watch?v=wpGu13Xt_w0
https://www.youtube.com/watch?v=5NRyHwCPfgY
https://www.youtube.com/watch?v=zTYxYSTDBjM
https://www.youtube.com/watch?v=QQj4uFa05N4
https://www.youtube.com/watch?v=1GMgjlp4Yps
https://www.youtube.com/watch?v=kUo37zZhNxE
https://www.youtube.com/watch?v=Jz5CGd0dsaU
https://www.youtube.com/watch?v=iLQS_HPWO9c
https://www.youtube.com/watch?v=_lApBJu9gUY
https://www.youtube.com/watch?v=M1XwLmDpElY
https://www.youtube.com/watch?v=JT5HaX5yVPc
https://www.youtube.com/watch?v=PmQc2_9cux8
https://www.youtube.com/watch?v=wQMkJ47gTto
https://www.youtube.com/watch?v=arYXHRmVUSQ
https://www.youtube.com/watch?v=hC_KKWCju34
https://www.youtube.com/watch?v=dYi51VQyqWM
https://www.youtube.com/watch?v=tN_qiCQU8mE
https://www.youtube.com/watch?v=ilm532-pJ6k
https://www.youtube.com/watch?v=5uo1Ir6X_r8
https://www.youtube.com/watch?v=-M5YASO4Qo8
https://www.youtube.com/watch?v=Jg0yPEiD5uk
https://www.youtube.com/watch?v=GIgOpPFcNlU
https://www.youtube.com/watch?v=ttJtmEsjeik
https://www.youtube.com/watch?v=IuoT4lj5320
https://www.youtube.com/watch?v=0_zqq0BXwfk
https://www.youtube.com/watch?v=sK6VuV8mgPk
https://www.youtube.com/watch?v=z0JQaxqXlic
https://www.youtube.com/watch?v=W06m5BF4bZA
https://www.youtube.com/watch?v=tGZJWiETVto
https://www.youtube.com/watch?v=09u2R0LZ1YU
https://www.youtube.com/watch?v=_OqomCppv30
https://www.youtube.com/watch?v=wf8tUgUCPac

@@ -1,236 +0,0 @@
https://www.youtube.com/watch?v=lczFvGhUfts
https://www.youtube.com/watch?v=kFx3-0pNPWc
https://www.youtube.com/watch?v=JZfuvcVU91E
https://www.youtube.com/watch?v=xT5JEhTo7Rc
https://www.youtube.com/watch?v=mTinH8GAflM
https://www.youtube.com/watch?v=3srlxHfZLm4
https://www.youtube.com/watch?v=clv601ldGA4
https://www.youtube.com/watch?v=bHPY6Nn9QMs
https://www.youtube.com/watch?v=9y5aeZ33w_4
https://www.youtube.com/watch?v=yhUxI5phmO8
https://www.youtube.com/watch?v=VbE2CU9nA1k
https://www.youtube.com/watch?v=It-PLAJt_zQ
https://www.youtube.com/watch?v=9SekDfrN-X4
https://www.youtube.com/watch?v=T0pnbDrW1CI
https://www.youtube.com/watch?v=XBYyXyvW6tE
https://www.youtube.com/watch?v=XFep6Dhx-Fs
https://www.youtube.com/watch?v=W8iTykaPSLM
https://www.youtube.com/watch?v=z3YrrXDQDyY
https://www.youtube.com/watch?v=9KU33u28EtI
https://www.youtube.com/watch?v=kvDMgzLZR2k
https://www.youtube.com/watch?v=_xWION4vOgM
https://www.youtube.com/watch?v=70zAbdwD9yY
https://www.youtube.com/watch?v=59ZG0Hjf5Us
https://www.youtube.com/watch?v=ZnI-KMjlrgA
https://www.youtube.com/watch?v=0-HtNtzR3HY
https://www.youtube.com/watch?v=G5UwFimUH_k
https://www.youtube.com/watch?v=UQ9YjPBUgII
https://www.youtube.com/watch?v=TtO1czEGERI
https://www.youtube.com/watch?v=s_1O1UJtMtQ
https://www.youtube.com/watch?v=Cq9XGcbsNRA
https://www.youtube.com/watch?v=dWsO8_oHeAo
https://www.youtube.com/watch?v=xF8HGuSfoYE
https://www.youtube.com/watch?v=nIuZdHv6chU
https://www.youtube.com/watch?v=pfbdEHVtBz4
https://www.youtube.com/watch?v=OJWP4kft7NI
https://www.youtube.com/watch?v=1ZRrg52B81w
https://www.youtube.com/watch?v=cPtg_qRa59w
https://www.youtube.com/watch?v=W4WZjoUiKr8
https://www.youtube.com/watch?v=70niBZ6GLSs
https://www.youtube.com/watch?v=auTIwWeByuw
https://www.youtube.com/watch?v=VaXHkSUlEdI
https://www.youtube.com/watch?v=PbelNpfEJQc
https://www.youtube.com/watch?v=R2VIfDPxX3M
https://www.youtube.com/watch?v=aTv5h7kqPp0
https://www.youtube.com/watch?v=A7Vh8ZemNkc
https://www.youtube.com/watch?v=zn7dx98XomI
https://www.youtube.com/watch?v=DyLVTThUXBc
https://www.youtube.com/watch?v=zdNxMHSVkFg
https://www.youtube.com/watch?v=MXp2LFunRFc
https://www.youtube.com/watch?v=gE1RoN6ChBs
https://www.youtube.com/watch?v=cFcnz9rCD-o
https://www.youtube.com/watch?v=JueaSBXghD0
https://www.youtube.com/watch?v=03de7Gcpz_4
https://www.youtube.com/watch?v=Ij33PqNmbro
https://www.youtube.com/watch?v=57iQZerkh0o
https://www.youtube.com/watch?v=N1b7xZdVtRw
https://www.youtube.com/watch?v=M4gLxvcaBYU
https://www.youtube.com/watch?v=yGuH6jx7xkI
https://www.youtube.com/watch?v=AeopuNMkQt8
https://www.youtube.com/watch?v=QKzE7lsCPw8
https://www.youtube.com/watch?v=uZn0PIJ_k9E
https://www.youtube.com/watch?v=bxJ4-3W6LwE
https://www.youtube.com/watch?v=2MJPj6oWIKo
https://www.youtube.com/watch?v=yujj2p2dGWk
https://www.youtube.com/watch?v=-bXdzATNQKw
https://www.youtube.com/watch?v=xN3nXJX9vJs
https://www.youtube.com/watch?v=zwgm4a5OugI
https://www.youtube.com/watch?v=NBNN0lqZ8tU
https://www.youtube.com/watch?v=5e460FHKV64
https://www.youtube.com/watch?v=hsbCYJIvq3o
https://www.youtube.com/watch?v=_qsfagfYAVs
https://www.youtube.com/watch?v=ZNl4FVK9-yQ
https://www.youtube.com/watch?v=2-TYgeGNTQk
https://www.youtube.com/watch?v=Zin6ZIfdTAY
https://www.youtube.com/watch?v=sn7XKfA4W-0
https://www.youtube.com/watch?v=gmRYrsutXEo
https://www.youtube.com/watch?v=1jhOFw6GrLg
https://www.youtube.com/watch?v=PxzOVCQU-4U
https://www.youtube.com/watch?v=BlHNshJh9zM
https://www.youtube.com/watch?v=LVmH60yo0JI
https://www.youtube.com/watch?v=o-Tzlp_nG70
https://www.youtube.com/watch?v=njuIKmM2xDM
https://www.youtube.com/watch?v=4zwQ88zMyio
https://www.youtube.com/watch?v=E3Wr6GWkRzA
https://www.youtube.com/watch?v=aypLuo7UXWE
https://www.youtube.com/watch?v=TANbVyW6x1k
https://www.youtube.com/watch?v=oNcdsp12pag
https://www.youtube.com/watch?v=qv7QFzRqf34
https://www.youtube.com/watch?v=cYK9aTSRtbE
https://www.youtube.com/watch?v=7kmaoLaQlF8
https://www.youtube.com/watch?v=st1zEAlNPic
https://www.youtube.com/watch?v=2XQnxn39yj4
https://www.youtube.com/watch?v=gUbA8Zn8YPQ
https://www.youtube.com/watch?v=Uny3t9woExo
https://www.youtube.com/watch?v=lCaIfb8B5Mo
https://www.youtube.com/watch?v=jEkQ0RZObiI
https://www.youtube.com/watch?v=Beo6jYwuZpc
https://www.youtube.com/watch?v=KILWCT_Zzoc
https://www.youtube.com/watch?v=fb2VzDSf7S0
https://www.youtube.com/watch?v=4a_oIbAhCgo
https://www.youtube.com/watch?v=eCLS2lWR1us
https://www.youtube.com/watch?v=MS01ipsrJCA
https://www.youtube.com/watch?v=3zqIIJhiyoo
https://www.youtube.com/watch?v=R3KOub_thmc
https://www.youtube.com/watch?v=ZQadO6LiP9s
https://www.youtube.com/watch?v=pzhhzPhwYh8
https://www.youtube.com/watch?v=kLkNcyM0gNA
https://www.youtube.com/watch?v=yHpVRxZJyMg
https://www.youtube.com/watch?v=DZlfy7Eeeu0
https://www.youtube.com/watch?v=b9Ao1j5bDXU
https://www.youtube.com/watch?v=vfPEVcXrO2c
https://www.youtube.com/watch?v=TK3gpctWmy8
https://www.youtube.com/watch?v=Gn9_FWSU32Y
https://www.youtube.com/watch?v=zrSrLrBZ_eA
https://www.youtube.com/watch?v=5CS4NAkSjeM
https://www.youtube.com/watch?v=bbNkX9AWRbM
https://www.youtube.com/watch?v=03Yi0nl6N8Y
https://www.youtube.com/watch?v=moANh2cUXzw
https://www.youtube.com/watch?v=X4MNSJQ3jxk
https://www.youtube.com/watch?v=Eh7IdlNdcjk
https://www.youtube.com/watch?v=HNR4pmJ_YPs
https://www.youtube.com/watch?v=QIzmK6U41yc
https://www.youtube.com/watch?v=i7Tsz0bTzv4
https://www.youtube.com/watch?v=U2Qz2Uz38Ds
https://www.youtube.com/watch?v=I6G_ojX3FO0
https://www.youtube.com/watch?v=gQOsKTlMr9w
https://www.youtube.com/watch?v=LBC0msJZofU
https://www.youtube.com/watch?v=gHRicolSA8Y
https://www.youtube.com/watch?v=v8roAb048Us
https://www.youtube.com/watch?v=P_RIqP_sjZ8
https://www.youtube.com/watch?v=e9E-fnJZObk
https://www.youtube.com/watch?v=UDRyL9EH5Bs
https://www.youtube.com/watch?v=EG77Z0p17Hc
https://www.youtube.com/watch?v=EW8OrhKu6ac
https://www.youtube.com/watch?v=swGrHB6VYF0
https://www.youtube.com/watch?v=1KSp_Hr0_8g
https://www.youtube.com/watch?v=WqyHn7p90ws
https://www.youtube.com/watch?v=8rASZi0V6hI
https://www.youtube.com/watch?v=JPX0c4vmMbk
https://www.youtube.com/watch?v=DDstXkeEY5k
https://www.youtube.com/watch?v=lIWpl1rISOU
https://www.youtube.com/watch?v=w3t-AhvtwIY
https://www.youtube.com/watch?v=bCPOPg9Jxko
https://www.youtube.com/watch?v=M4CIPaK4CzU
https://www.youtube.com/watch?v=sSWYhKIPK_w
https://www.youtube.com/watch?v=DL9orOzONFM
https://www.youtube.com/watch?v=zV1mn041-MY
https://www.youtube.com/watch?v=u9fKGIZEOZc
https://www.youtube.com/watch?v=nwW_2C1VQRA
https://www.youtube.com/watch?v=xyjIpsJ9C_E
https://www.youtube.com/watch?v=zv3JJeoNWpw
https://www.youtube.com/watch?v=yFh-pd4K8kM
https://www.youtube.com/watch?v=helJykjI5Ks
https://www.youtube.com/watch?v=O5grg4BhmwA
https://www.youtube.com/watch?v=hxqqPMdeWlk
https://www.youtube.com/watch?v=aBfY4EXizl4
https://www.youtube.com/watch?v=uXbTAyacm-o
https://www.youtube.com/watch?v=EHchyDF5mPA
https://www.youtube.com/watch?v=8hN6tFyXtMM
https://www.youtube.com/watch?v=ytaFppE2PrQ
https://www.youtube.com/watch?v=H7T79men-54
https://www.youtube.com/watch?v=wobBUFvj6vw
https://www.youtube.com/watch?v=JCkSb2SHIY8
https://www.youtube.com/watch?v=DbdV9vauV_8
https://www.youtube.com/watch?v=WY7F2paiokM
https://www.youtube.com/watch?v=7_XKe5nL3j4
https://www.youtube.com/watch?v=ImQPrZx6ZHw
https://www.youtube.com/watch?v=D4Of5F5CDLA
https://www.youtube.com/watch?v=piL1B00ekBE
https://www.youtube.com/watch?v=etOwU2jh5vo
https://www.youtube.com/watch?v=jqceDCEbuds
https://www.youtube.com/watch?v=NBJgtpZxRks
https://www.youtube.com/watch?v=Rztv9EWij4Y
https://www.youtube.com/watch?v=Mb3C2fSxLdI
https://www.youtube.com/watch?v=vK2LAKvrzPo
https://www.youtube.com/watch?v=w0JpfEd31AM
https://www.youtube.com/watch?v=mYSaT9V4324
https://www.youtube.com/watch?v=3979ONO_DOk
https://www.youtube.com/watch?v=5li6iJy_DYs
https://www.youtube.com/watch?v=wA9RT0MBe3s
https://www.youtube.com/watch?v=NEeLktfngjQ
https://www.youtube.com/watch?v=es24xo9ju7U
https://www.youtube.com/watch?v=OaGCZ8SsKG4
https://www.youtube.com/watch?v=lQsBzd952xk
https://www.youtube.com/watch?v=neD2PEqr-Po
https://www.youtube.com/watch?v=vHqIYvrKwks
https://www.youtube.com/watch?v=1A6qbKM5vgE
https://www.youtube.com/watch?v=zxPStAHlg4E
https://www.youtube.com/watch?v=jZCfL7fblMM
https://www.youtube.com/watch?v=TAccYcmvQ0k
https://www.youtube.com/watch?v=kzbyykGWN9U
https://www.youtube.com/watch?v=Jzj8TyuuvzQ
https://www.youtube.com/watch?v=ruQVatMNzto
https://www.youtube.com/watch?v=4tOse6iLVs0
https://www.youtube.com/watch?v=3l_25zyd-s8
https://www.youtube.com/watch?v=Cu0D682ezDU
https://www.youtube.com/watch?v=ZkHmWMl_rfo
https://www.youtube.com/watch?v=qcI0TSXWzVs
https://www.youtube.com/watch?v=THdAow8-nG8
https://www.youtube.com/watch?v=53FiW6nOJfQ
https://www.youtube.com/watch?v=w52Vjf2dphk
https://www.youtube.com/watch?v=O3buBB-zkw4
https://www.youtube.com/watch?v=qsRb4sFg7iE
https://www.youtube.com/watch?v=B36mO34Yaj8
https://www.youtube.com/watch?v=apgTsYIOZks
https://www.youtube.com/watch?v=uONZRzmh3xc
https://www.youtube.com/watch?v=FTY3sC1ad-8
https://www.youtube.com/watch?v=OfFoZQsS0qI
https://www.youtube.com/watch?v=SlE1uDWGMuI
https://www.youtube.com/watch?v=rDZYOsRB_DE
https://www.youtube.com/watch?v=iWUNJPVzTT0
https://www.youtube.com/watch?v=hfEwesszkvI
https://www.youtube.com/watch?v=Tk1zk4xikrk
https://www.youtube.com/watch?v=kbqKifCiUFo
https://www.youtube.com/watch?v=H4oCces6L-I
https://www.youtube.com/watch?v=dZc0Wi1l0L0
https://www.youtube.com/watch?v=_DRdlJnx2Vo
https://www.youtube.com/watch?v=uoyvZlIQ_-I
https://www.youtube.com/watch?v=gCrKVixw7Tk
https://www.youtube.com/watch?v=IVk6qntdVzk
https://www.youtube.com/watch?v=ZK3n2LCqRio
https://www.youtube.com/watch?v=ZzKNrl5kyF4
https://www.youtube.com/watch?v=Gld-GRwARlA
https://www.youtube.com/watch?v=tcqDUEhgT_g
https://www.youtube.com/watch?v=fPh0SGZHbTk
https://www.youtube.com/watch?v=rAsmd_5SfII
https://www.youtube.com/watch?v=8Dotiqbtvoo
https://www.youtube.com/watch?v=Q1bJaVlV-84
https://www.youtube.com/watch?v=CBQ5qbYV3XM
https://www.youtube.com/watch?v=pOsgKiHqKt0
https://www.youtube.com/watch?v=HC-e2wj7ePc
https://www.youtube.com/watch?v=JiBb2n5UEG8
https://www.youtube.com/watch?v=INNL2cWTgmI
https://www.youtube.com/watch?v=nEiX-U8P4H4
https://www.youtube.com/watch?v=azXdNflZL28
https://www.youtube.com/watch?v=8GxjkvKfRh4

@@ -1,237 +0,0 @@
https://www.youtube.com/watch?v=HH71IKNVVYk
https://www.youtube.com/watch?v=807yt9yhd6I
https://www.youtube.com/watch?v=El7pa-DsJ7w
https://www.youtube.com/watch?v=nIZUoHkJNH0
https://www.youtube.com/watch?v=v8EuLwLPF1I
https://www.youtube.com/watch?v=VXvqZQBGF5A
https://www.youtube.com/watch?v=tTkxj7PJ6Ss
https://www.youtube.com/watch?v=Brraz_ZKUIA
https://www.youtube.com/watch?v=BEjJRVVD0d4
https://www.youtube.com/watch?v=Kw52RfKqX5Q
https://www.youtube.com/watch?v=Tj1CYEYIZI8
https://www.youtube.com/watch?v=R6sRU0cLwcg
https://www.youtube.com/watch?v=HYN-BL55NyI
https://www.youtube.com/watch?v=4xRSfFe0aA4
https://www.youtube.com/watch?v=qcT3I4Fhod8
https://www.youtube.com/watch?v=FhR_Fc6I-Es
https://www.youtube.com/watch?v=5kQI69iDZEI
https://www.youtube.com/watch?v=eiQUC22MT0o
https://www.youtube.com/watch?v=mVNsumhm0bk
https://www.youtube.com/watch?v=5kyt-4p0qpw
https://www.youtube.com/watch?v=kdWxc8hDPbA
https://www.youtube.com/watch?v=VK8oMBYr9AE
https://www.youtube.com/watch?v=KHIe0adNnPw
https://www.youtube.com/watch?v=WSRrChHFR2I
https://www.youtube.com/watch?v=eWyU5N5oGUw
https://www.youtube.com/watch?v=OTF26cgxS5A
https://www.youtube.com/watch?v=AyOrNze3SE4
https://www.youtube.com/watch?v=0bShQ01DxnY
https://www.youtube.com/watch?v=jal3Rr0TPpM
https://www.youtube.com/watch?v=IvwSs1j6sL0
https://www.youtube.com/watch?v=rbvJbvdspoU
https://www.youtube.com/watch?v=gSqq_vuW0es
https://www.youtube.com/watch?v=EY7KAbarSnk
https://www.youtube.com/watch?v=oCZ5BJlh6Jo
https://www.youtube.com/watch?v=veVrB1DNV-c
https://www.youtube.com/watch?v=Jl0MB13-zK8
https://www.youtube.com/watch?v=qfYc2z7cvC4
https://www.youtube.com/watch?v=J8NH9qAkys4
https://www.youtube.com/watch?v=bBpcF-hnKz0
https://www.youtube.com/watch?v=3JsODEZl5lo
https://www.youtube.com/watch?v=I5CFgz1qeSc
https://www.youtube.com/watch?v=LIKcI1MBnto
https://www.youtube.com/watch?v=79wzDxJu0UE
https://www.youtube.com/watch?v=gHS0Lf8TyF8
https://www.youtube.com/watch?v=oqgrcgRkunw
https://www.youtube.com/watch?v=mWJC1eL_OR0
https://www.youtube.com/watch?v=41cWX4W1vEI
https://www.youtube.com/watch?v=8mO3yxKHU1E
https://www.youtube.com/watch?v=cR2EZ9b2AYw
https://www.youtube.com/watch?v=wyPYWkVKQyw
https://www.youtube.com/watch?v=MuUsmsfwizg
https://www.youtube.com/watch?v=9Y18qV1UCRM
https://www.youtube.com/watch?v=hd9jxasd-60
https://www.youtube.com/watch?v=WEVdhWKG2x8
https://www.youtube.com/watch?v=csq261RPA2I
https://www.youtube.com/watch?v=8pmocmq8Z_Y
https://www.youtube.com/watch?v=OFUVP1sDMhQ
https://www.youtube.com/watch?v=YZnULU8-WiY
https://www.youtube.com/watch?v=bkUEPqEqMOc
https://www.youtube.com/watch?v=fgxaU6KA4kI
https://www.youtube.com/watch?v=6XziyFQ2qb0
https://www.youtube.com/watch?v=71QrG4wEY_g
https://www.youtube.com/watch?v=wJXJTliyR98
https://www.youtube.com/watch?v=lDw7qM3lCOU
https://www.youtube.com/watch?v=ONpSx6GGlTE
https://www.youtube.com/watch?v=W3tNcsdWzv4
https://www.youtube.com/watch?v=Sq7SBO1VqKc
https://www.youtube.com/watch?v=PEmiEWUb1uo
https://www.youtube.com/watch?v=Mw1Du8gYMiE
https://www.youtube.com/watch?v=H4u0uhldLJQ
https://www.youtube.com/watch?v=fEDVDvIWfZE
https://www.youtube.com/watch?v=1tgrLHWLhBs
https://www.youtube.com/watch?v=VDgp3h8bAu8
https://www.youtube.com/watch?v=8ZcZAL3C928
https://www.youtube.com/watch?v=0Zpivy32UGU
https://www.youtube.com/watch?v=u-m-BBQQ9Tw
https://www.youtube.com/watch?v=cvHvX8mFTJE
https://www.youtube.com/watch?v=L-o5p75Q-cg
https://www.youtube.com/watch?v=ZfORukYHkGA
https://www.youtube.com/watch?v=t9sbedJUdUc
https://www.youtube.com/watch?v=zBoHq5-w5UY
https://www.youtube.com/watch?v=q50-whddP7Y
https://www.youtube.com/watch?v=D1PiHzL703U
https://www.youtube.com/watch?v=voQEUBq8q_A
https://www.youtube.com/watch?v=Ps2HyJCINyw
https://www.youtube.com/watch?v=kzco7k86Unw
https://www.youtube.com/watch?v=YE2zr2lV50M
https://www.youtube.com/watch?v=pU_MuwLZeAc
https://www.youtube.com/watch?v=PC5NoUQMOhA
https://www.youtube.com/watch?v=KGm_VKYfTMI
https://www.youtube.com/watch?v=duUThrCj7Lk
https://www.youtube.com/watch?v=n5UaEOcC5G4
https://www.youtube.com/watch?v=gjEmu6h_lpE
https://www.youtube.com/watch?v=en1KyArCG6w
https://www.youtube.com/watch?v=bRPWouCfF7I
https://www.youtube.com/watch?v=lXnIwNH8vGo
https://www.youtube.com/watch?v=xmPqK9T_yEU
https://www.youtube.com/watch?v=PYUNAbge9Xw
https://www.youtube.com/watch?v=J39cA-10XcE
https://www.youtube.com/watch?v=1BJD7Rv7iS8
https://www.youtube.com/watch?v=D6WdfQDXRsw
https://www.youtube.com/watch?v=g6-v4pwo2ik
https://www.youtube.com/watch?v=UCgHBFhO7FM
https://www.youtube.com/watch?v=ALNUXWNvEZY
https://www.youtube.com/watch?v=0Qx13_oyycg
https://www.youtube.com/watch?v=1KeCeqkwnTs
https://www.youtube.com/watch?v=JmQd29Q6-s8
https://www.youtube.com/watch?v=5eE9eTDjII0
https://www.youtube.com/watch?v=-kPpPTIYpeY
https://www.youtube.com/watch?v=n7IfbF48Jqo
https://www.youtube.com/watch?v=HXG-qjuDkPE
https://www.youtube.com/watch?v=77sKr6xPbW8
https://www.youtube.com/watch?v=w9uEnYCbmno
https://www.youtube.com/watch?v=M9B2M5rGOqE
https://www.youtube.com/watch?v=g-6eEKHMT4A
https://www.youtube.com/watch?v=0k6rcvts1FM
https://www.youtube.com/watch?v=3Glq6IgKJ-g
https://www.youtube.com/watch?v=dl_kgfUid_E
https://www.youtube.com/watch?v=RpBgeBWcw4I
https://www.youtube.com/watch?v=WiyI1U6HfJ8
https://www.youtube.com/watch?v=CvPD5zHvxpE
https://www.youtube.com/watch?v=H5aghJz-6-0
https://www.youtube.com/watch?v=hvGwiRk9qFI
https://www.youtube.com/watch?v=kP5en-KOZak
https://www.youtube.com/watch?v=Lt9z2PZxTA8
https://www.youtube.com/watch?v=ZAN2gg3Bf88
https://www.youtube.com/watch?v=dfcoKxlL9zo
https://www.youtube.com/watch?v=6G-zWuGwXwE
https://www.youtube.com/watch?v=IFOgQYDqCDg
https://www.youtube.com/watch?v=fN1ASoQVOtc
https://www.youtube.com/watch?v=nDvvyndocg0
https://www.youtube.com/watch?v=YS2do-ydhsE
https://www.youtube.com/watch?v=5IL47nns0R4
https://www.youtube.com/watch?v=A3BueGvC0dU
https://www.youtube.com/watch?v=qE5_I54Onsg
https://www.youtube.com/watch?v=1QA_KBw3bpM
https://www.youtube.com/watch?v=H2ms1CuhCAQ
https://www.youtube.com/watch?v=zh7AyRCjMK8
https://www.youtube.com/watch?v=oklOIfwaIm4
https://www.youtube.com/watch?v=2_m2D7BlCSY
https://www.youtube.com/watch?v=AOh3iLuvEx0
https://www.youtube.com/watch?v=qt6rxatB78k
https://www.youtube.com/watch?v=CCLgvay1AJg
https://www.youtube.com/watch?v=2CJsz7yfWVQ
https://www.youtube.com/watch?v=mXr5R4PybQ8
https://www.youtube.com/watch?v=FPJUY3OG1j4
https://www.youtube.com/watch?v=AxZSX649ZQM
https://www.youtube.com/watch?v=vAqqcajeLyw
https://www.youtube.com/watch?v=wT50rGqJi-A
https://www.youtube.com/watch?v=bURXdJK5yDo
https://www.youtube.com/watch?v=Ci-jKDX2Utc
https://www.youtube.com/watch?v=4jNOtUpzcsM
https://www.youtube.com/watch?v=9F1xYw3-wC0
https://www.youtube.com/watch?v=-wgG-UKu6P4
https://www.youtube.com/watch?v=RA0QPohN8OA
https://www.youtube.com/watch?v=swaAAvGIVkQ
https://www.youtube.com/watch?v=E2cttPQLTmU
https://www.youtube.com/watch?v=7u_HP6AjgNM
https://www.youtube.com/watch?v=-o7z8zDigKw
https://www.youtube.com/watch?v=2iDzZRY4nFg
https://www.youtube.com/watch?v=TvauDXRHRQM
https://www.youtube.com/watch?v=gm0CbvYTfyU
https://www.youtube.com/watch?v=aVH30g7M7-s
https://www.youtube.com/watch?v=SaRJpKcMpIc
https://www.youtube.com/watch?v=VME8ViM9p_E
https://www.youtube.com/watch?v=Ob1gwjj03-g
https://www.youtube.com/watch?v=Kx8QHsFjzdo
https://www.youtube.com/watch?v=k0MXRrhoGzw
https://www.youtube.com/watch?v=iQpIcy-iINQ
https://www.youtube.com/watch?v=iIegaxZamds
https://www.youtube.com/watch?v=nKgS3q8Eixw
https://www.youtube.com/watch?v=Vu53Y9-u6OQ
https://www.youtube.com/watch?v=A-1xFilVGKA
https://www.youtube.com/watch?v=NzXu-1jdv2w
https://www.youtube.com/watch?v=dN0sAUTPOo0
https://www.youtube.com/watch?v=KhKqvIa4KPI
https://www.youtube.com/watch?v=7EQlStjqPXk
https://www.youtube.com/watch?v=GvGYuLtTpI4
https://www.youtube.com/watch?v=socM2vLFAXw
https://www.youtube.com/watch?v=4Lf0s5sF6Ps
https://www.youtube.com/watch?v=O0SNpN9bA7M
https://www.youtube.com/watch?v=_1B1Unp8rzQ
https://www.youtube.com/watch?v=2Pxto4fr8fU
https://www.youtube.com/watch?v=-wnCY6-C_LE
https://www.youtube.com/watch?v=Jyo0daMS_FI
https://www.youtube.com/watch?v=6Mv190DEO5Q
https://www.youtube.com/watch?v=A3vqXUImxmo
https://www.youtube.com/watch?v=jTavkw3fHCU
https://www.youtube.com/watch?v=wO_IP_UBHSE
https://www.youtube.com/watch?v=eK76eyqZLuI
https://www.youtube.com/watch?v=fGq8t2S9A4Y
https://www.youtube.com/watch?v=kwgTZ7Kx1j0
https://www.youtube.com/watch?v=QpR8PIGCI6A
https://www.youtube.com/watch?v=wP6eLrWucEU
https://www.youtube.com/watch?v=c8wcjZjYUKk
https://www.youtube.com/watch?v=3jqtAd3ms6s
https://www.youtube.com/watch?v=yQJxa2y3RMU
https://www.youtube.com/watch?v=xq46z38tXds
https://www.youtube.com/watch?v=N5xZyUxRJBM
https://www.youtube.com/watch?v=seeZtr3lYz8
https://www.youtube.com/watch?v=DtoINgJb8NM
https://www.youtube.com/watch?v=Q1by2tU9Rp4
https://www.youtube.com/watch?v=X7Ho48iO098
https://www.youtube.com/watch?v=Nxs8sYQ6D4Y
https://www.youtube.com/watch?v=ePktyiVhM9Y
https://www.youtube.com/watch?v=FHAjPjwINDM
https://www.youtube.com/watch?v=iog13CSQsBE
https://www.youtube.com/watch?v=JC_7WZnqqYI
https://www.youtube.com/watch?v=qzar3W-wKdA
https://www.youtube.com/watch?v=JaLalK-3Ll0
https://www.youtube.com/watch?v=L-VBIZ1lxZ8
https://www.youtube.com/watch?v=z6LqQAbFZ5U
https://www.youtube.com/watch?v=V4sIhzXF5mw
https://www.youtube.com/watch?v=XaZskPC2CH8
https://www.youtube.com/watch?v=N-sgaV4nP_c
https://www.youtube.com/watch?v=g4S3tejwVBQ
https://www.youtube.com/watch?v=uq7Wh078eUg
https://www.youtube.com/watch?v=zSmiH1Gbpp8
https://www.youtube.com/watch?v=6WSy_8fjh3Y
https://www.youtube.com/watch?v=KRxe3BiApmU
https://www.youtube.com/watch?v=bO-OsMybcdY
https://www.youtube.com/watch?v=Tl9lNbchXKU
https://www.youtube.com/watch?v=MIGsaLyquMI
https://www.youtube.com/watch?v=clfJqj03TEs
https://www.youtube.com/watch?v=0Wpkb1L4pYw
https://www.youtube.com/watch?v=qtIP2ieotyk
https://www.youtube.com/watch?v=jm3t-hXP1Eo
https://www.youtube.com/watch?v=KU5QuYzEte0
https://www.youtube.com/watch?v=r5iLqBt3TkE
https://www.youtube.com/watch?v=8vFMbZEma7w
https://www.youtube.com/watch?v=Q6SgU9w7XbY
https://www.youtube.com/watch?v=y9nZZxnsoTw
https://www.youtube.com/watch?v=y1VjxqY8BrI
https://www.youtube.com/watch?v=cVy39nbbSB4
https://www.youtube.com/watch?v=JJDOUA_9TnU
https://www.youtube.com/watch?v=5fh0g4j0JPM
https://www.youtube.com/watch?v=4bp7st-p2Ns

View File

@ -1,235 +0,0 @@
https://www.youtube.com/watch?v=Vka_RHl8NBg
https://www.youtube.com/watch?v=C7mEj7LsxDM
https://www.youtube.com/watch?v=oU3xk3oaIWs
https://www.youtube.com/watch?v=Vc80-qcAYTI
https://www.youtube.com/watch?v=6jdO7Z0IA9s
https://www.youtube.com/watch?v=M624f51E6TU
https://www.youtube.com/watch?v=a2zb_vffseo
https://www.youtube.com/watch?v=kjJSsR7G-yA
https://www.youtube.com/watch?v=HTfUdUUB2sY
https://www.youtube.com/watch?v=clDTNsCoAt0
https://www.youtube.com/watch?v=EtlQ36lNezM
https://www.youtube.com/watch?v=LvXVwrrsMjE
https://www.youtube.com/watch?v=lmC8nB3Sx0Y
https://www.youtube.com/watch?v=UgJMzy12jEk
https://www.youtube.com/watch?v=mqrZu6fiOlA
https://www.youtube.com/watch?v=cxyHJSC6qDI
https://www.youtube.com/watch?v=yJOvKMfMy9Q
https://www.youtube.com/watch?v=74eDPvsPc58
https://www.youtube.com/watch?v=OVuJECLWkQQ
https://www.youtube.com/watch?v=V35MoqZdaKU
https://www.youtube.com/watch?v=5VVSyz8DYH8
https://www.youtube.com/watch?v=DJz78yI9zvg
https://www.youtube.com/watch?v=Qs5ynEowGp0
https://www.youtube.com/watch?v=PeRX98IZhJM
https://www.youtube.com/watch?v=GiVe7i3bY74
https://www.youtube.com/watch?v=WOsmZel1tUQ
https://www.youtube.com/watch?v=6HyYFW4KAjE
https://www.youtube.com/watch?v=T40pRAlLPFI
https://www.youtube.com/watch?v=ESJ4fEsc3E8
https://www.youtube.com/watch?v=JrmW9FXdiqg
https://www.youtube.com/watch?v=nfCY1Y2wCaE
https://www.youtube.com/watch?v=QDA8tjQeGa0
https://www.youtube.com/watch?v=VNWFVRESr9k
https://www.youtube.com/watch?v=FwzDqhHNZq4
https://www.youtube.com/watch?v=otevRZLVIns
https://www.youtube.com/watch?v=QbbaZPVWjZ0
https://www.youtube.com/watch?v=NThxpgYS54k
https://www.youtube.com/watch?v=GzqvU_H3cjs
https://www.youtube.com/watch?v=aINHgBzBAsc
https://www.youtube.com/watch?v=dFLilVIP6v0
https://www.youtube.com/watch?v=NQ_VXw5C5_g
https://www.youtube.com/watch?v=NlwOF78qdkM
https://www.youtube.com/watch?v=W8-8ZfmTIw8
https://www.youtube.com/watch?v=_rSM6kDk6jU
https://www.youtube.com/watch?v=Tr38d3eLXf8
https://www.youtube.com/watch?v=YhIXV4DWhIA
https://www.youtube.com/watch?v=-nfJpVRjRM0
https://www.youtube.com/watch?v=kosQ_XMUtDI
https://www.youtube.com/watch?v=D7OYw3vhtlQ
https://www.youtube.com/watch?v=3Xnmt2Ymii4
https://www.youtube.com/watch?v=bM2Lw4iDWMs
https://www.youtube.com/watch?v=lVeTpIpFTuI
https://www.youtube.com/watch?v=Bxb6m5jVs4k
https://www.youtube.com/watch?v=rh8Zo8ZCw8w
https://www.youtube.com/watch?v=7C0wdkJH4tw
https://www.youtube.com/watch?v=TOOHa3M_3_A
https://www.youtube.com/watch?v=P0gfEojLMmY
https://www.youtube.com/watch?v=OcS5i4b94fE
https://www.youtube.com/watch?v=6K_SzWye5tE
https://www.youtube.com/watch?v=pVlrX8hakyE
https://www.youtube.com/watch?v=zIrG0Wt6CcA
https://www.youtube.com/watch?v=EjXQbmbgxhQ
https://www.youtube.com/watch?v=MmvWYXp1PNM
https://www.youtube.com/watch?v=lLx3Z3c7mV8
https://www.youtube.com/watch?v=73ykZR11zes
https://www.youtube.com/watch?v=WHYrfpgtm6U
https://www.youtube.com/watch?v=b8xrtcAyyw0
https://www.youtube.com/watch?v=SNFeZyokO6o
https://www.youtube.com/watch?v=sre-OE94un8
https://www.youtube.com/watch?v=q28um1cqF4Q
https://www.youtube.com/watch?v=phgtPLyP97w
https://www.youtube.com/watch?v=jlUkuRkYFZU
https://www.youtube.com/watch?v=3G3m2BTXUQw
https://www.youtube.com/watch?v=te2LYx1SVBE
https://www.youtube.com/watch?v=JACfOF38Ffk
https://www.youtube.com/watch?v=ZKb43WnjoiU
https://www.youtube.com/watch?v=Qel0DXeISN0
https://www.youtube.com/watch?v=9FekdRZPIpQ
https://www.youtube.com/watch?v=L3E45I283ZU
https://www.youtube.com/watch?v=ZpKcW61Y3zI
https://www.youtube.com/watch?v=SiJmWpuQhNQ
https://www.youtube.com/watch?v=sYe2vx6p_T4
https://www.youtube.com/watch?v=eJEGgi8ZkRQ
https://www.youtube.com/watch?v=V--wYltJnB4
https://www.youtube.com/watch?v=z8EMf_9wUzc
https://www.youtube.com/watch?v=2vuVM43MRiA
https://www.youtube.com/watch?v=Sc88FJQw-8A
https://www.youtube.com/watch?v=2lB9s3y-6eA
https://www.youtube.com/watch?v=vNkYcCV7S4U
https://www.youtube.com/watch?v=F3ls751u5o0
https://www.youtube.com/watch?v=oFo8wPllZu8
https://www.youtube.com/watch?v=Ku9lKfjZU30
https://www.youtube.com/watch?v=3Yq8fNNHB1Y
https://www.youtube.com/watch?v=Sk3dVf0H9C0
https://www.youtube.com/watch?v=w99cGM_y4Og
https://www.youtube.com/watch?v=jm0Zv6ydHeo
https://www.youtube.com/watch?v=5X2g_qbEgEs
https://www.youtube.com/watch?v=MYTQew8auIY
https://www.youtube.com/watch?v=GOy_-PzLf8g
https://www.youtube.com/watch?v=YUkd_TZQZKk
https://www.youtube.com/watch?v=hzeo2pDrtLk
https://www.youtube.com/watch?v=wEabH0TyYwM
https://www.youtube.com/watch?v=SlL6VTxaCog
https://www.youtube.com/watch?v=s6ZflJj1p34
https://www.youtube.com/watch?v=TMTHkY8hCx8
https://www.youtube.com/watch?v=cRBj61eBjhQ
https://www.youtube.com/watch?v=DTL3G1h2SdQ
https://www.youtube.com/watch?v=NCI6-NWq6DY
https://www.youtube.com/watch?v=lIvSjlKUvCg
https://www.youtube.com/watch?v=6r7Ubc_dEQk
https://www.youtube.com/watch?v=IFdEerP9z4Q
https://www.youtube.com/watch?v=JbBGMq-wBHM
https://www.youtube.com/watch?v=RIYSRqBseLI
https://www.youtube.com/watch?v=2v_UzrgNwpo
https://www.youtube.com/watch?v=iX_vynlyrC8
https://www.youtube.com/watch?v=ZBjsh2p80sY
https://www.youtube.com/watch?v=AK6dg9YJz70
https://www.youtube.com/watch?v=AMAzK-IjtW0
https://www.youtube.com/watch?v=PfaI57gHTkU
https://www.youtube.com/watch?v=Cn9asaKJkP8
https://www.youtube.com/watch?v=GWDOcK-GHUo
https://www.youtube.com/watch?v=FdrM63-sL5E
https://www.youtube.com/watch?v=Ni15untE9lc
https://www.youtube.com/watch?v=ZXiNG_wc3cs
https://www.youtube.com/watch?v=a1NjiQlF_QU
https://www.youtube.com/watch?v=aKlFxKwlJCI
https://www.youtube.com/watch?v=ncyhGHY-J8s
https://www.youtube.com/watch?v=Nsl6F3KD68I
https://www.youtube.com/watch?v=y-MVI2HeAb8
https://www.youtube.com/watch?v=lDmo3DvHSoQ
https://www.youtube.com/watch?v=_nO4GwwK7a0
https://www.youtube.com/watch?v=JNV0Bs_Z2rY
https://www.youtube.com/watch?v=QCs8H3xMCCg
https://www.youtube.com/watch?v=xSlTzHlfJy0
https://www.youtube.com/watch?v=2eVxtYA-1jI
https://www.youtube.com/watch?v=a9Ma4IjuWvM
https://www.youtube.com/watch?v=8vaxMejObcY
https://www.youtube.com/watch?v=SR6dOkauvIo
https://www.youtube.com/watch?v=Pudto3Xw_NY
https://www.youtube.com/watch?v=6EIjFdz_344
https://www.youtube.com/watch?v=Q30ETlsokl0
https://www.youtube.com/watch?v=4RJNa-Dj68I
https://www.youtube.com/watch?v=KoNzY_CeeKw
https://www.youtube.com/watch?v=7xNd2lPYR68
https://www.youtube.com/watch?v=Bowfz2gZ2Sk
https://www.youtube.com/watch?v=X32vDZjnIWc
https://www.youtube.com/watch?v=GLP9CuTWUlk
https://www.youtube.com/watch?v=JDsMfCdDUfk
https://www.youtube.com/watch?v=VKynMQrBM7E
https://www.youtube.com/watch?v=ejB1lm7jDzw
https://www.youtube.com/watch?v=8B4Sg9xIGTQ
https://www.youtube.com/watch?v=_rB2YS9t63M
https://www.youtube.com/watch?v=KJyvaMvan9I
https://www.youtube.com/watch?v=ZcdhsUEd6TU
https://www.youtube.com/watch?v=NCPYSEYoF1c
https://www.youtube.com/watch?v=EgupaPnIeMM
https://www.youtube.com/watch?v=TqPRCGbHSjw
https://www.youtube.com/watch?v=Hi62GOTWGI8
https://www.youtube.com/watch?v=dHcZffZjYB0
https://www.youtube.com/watch?v=Pvnqhkywbxc
https://www.youtube.com/watch?v=UceBEWYa13w
https://www.youtube.com/watch?v=AgRH2C5yPjQ
https://www.youtube.com/watch?v=dXM5zpjqtOc
https://www.youtube.com/watch?v=a4Sb42va86Q
https://www.youtube.com/watch?v=JSZdE83PiRQ
https://www.youtube.com/watch?v=mWi5mU1bPPg
https://www.youtube.com/watch?v=ZTVH8aYSwjQ
https://www.youtube.com/watch?v=e_3WleCGJbc
https://www.youtube.com/watch?v=woMgT3A_71Q
https://www.youtube.com/watch?v=771kiAizO8g
https://www.youtube.com/watch?v=7PYkf420f9c
https://www.youtube.com/watch?v=AgE4Ke5nNoY
https://www.youtube.com/watch?v=ZD-UOCrU6Fg
https://www.youtube.com/watch?v=WnNpg4LRzHI
https://www.youtube.com/watch?v=guKqHIuxq90
https://www.youtube.com/watch?v=WZ89NCgdvII
https://www.youtube.com/watch?v=e9r2o7MRyMQ
https://www.youtube.com/watch?v=1xdaVv0qb0I
https://www.youtube.com/watch?v=l5SYopn7vSg
https://www.youtube.com/watch?v=5xiKlm6vG5w
https://www.youtube.com/watch?v=iKAtkHSVfjU
https://www.youtube.com/watch?v=xQHmDw2ayNw
https://www.youtube.com/watch?v=PsWWUiANTfo
https://www.youtube.com/watch?v=tQIIhH7A9FE
https://www.youtube.com/watch?v=6EkM3Fvirq0
https://www.youtube.com/watch?v=mQ-kLX_NRwU
https://www.youtube.com/watch?v=QR6WfdodfDU
https://www.youtube.com/watch?v=Xb4ZZ6T50vM
https://www.youtube.com/watch?v=E0HGtjMKljg
https://www.youtube.com/watch?v=tbtPMw1BNA4
https://www.youtube.com/watch?v=eFJlQBkjEqw
https://www.youtube.com/watch?v=vJ4Ue81SyQw
https://www.youtube.com/watch?v=dXkf-O-ByOQ
https://www.youtube.com/watch?v=zIfZxrswlEY
https://www.youtube.com/watch?v=A5vdyzU-0zg
https://www.youtube.com/watch?v=2ajukBAGGuU
https://www.youtube.com/watch?v=WTZ4zCezHUU
https://www.youtube.com/watch?v=XWi8rXiRq9E
https://www.youtube.com/watch?v=qpMl2erxOgU
https://www.youtube.com/watch?v=VwbO60gjWoA
https://www.youtube.com/watch?v=7IuipODNcRE
https://www.youtube.com/watch?v=HfMHDL0SgZ0
https://www.youtube.com/watch?v=yv5mcmNEwdU
https://www.youtube.com/watch?v=xma0nVpBjZo
https://www.youtube.com/watch?v=TKxNwoYM5ec
https://www.youtube.com/watch?v=hiKwAlTM1Ys
https://www.youtube.com/watch?v=KG-30LxX-qs
https://www.youtube.com/watch?v=_6Ms_SJmQn4
https://www.youtube.com/watch?v=0tdXaee9o6k
https://www.youtube.com/watch?v=J9FCafzAuaE
https://www.youtube.com/watch?v=7jTrdi5t83Y
https://www.youtube.com/watch?v=WIky8XG3SP4
https://www.youtube.com/watch?v=0iv6pKiKzL4
https://www.youtube.com/watch?v=ZfWDeOgVSQo
https://www.youtube.com/watch?v=7QdQ7z3mFr4
https://www.youtube.com/watch?v=JfjXBXIvd-8
https://www.youtube.com/watch?v=YyOK4Lau-xY
https://www.youtube.com/watch?v=klgdpFMajjY
https://www.youtube.com/watch?v=I-ar6huKQ_c
https://www.youtube.com/watch?v=vkL_zAcnkI8
https://www.youtube.com/watch?v=tgMX2SfF5lI
https://www.youtube.com/watch?v=4FrSX37DoTU
https://www.youtube.com/watch?v=ERX--LGwC10
https://www.youtube.com/watch?v=t_2kZnBl9i0
https://www.youtube.com/watch?v=YnhdkKajmRA
https://www.youtube.com/watch?v=7nfJO1rJYMo
https://www.youtube.com/watch?v=B4bd4HcRo0Q
https://www.youtube.com/watch?v=BOs7CVyGyGg
https://www.youtube.com/watch?v=OZyMlE-yy3U
https://www.youtube.com/watch?v=SvbS2IALKmo
https://www.youtube.com/watch?v=U-IdboWZNiA
https://www.youtube.com/watch?v=msjiKwDbsaM
https://www.youtube.com/watch?v=dmZ1pWgxZn0
https://www.youtube.com/watch?v=utGMKcJtuPo
https://www.youtube.com/watch?v=sGseachRqQs

View File

@ -1,230 +0,0 @@
https://www.youtube.com/watch?v=s_7soIjx_sk
https://www.youtube.com/watch?v=QiFpIIF7-4g
https://www.youtube.com/watch?v=EFCcOOuWWc8
https://www.youtube.com/watch?v=ORAwIWpC3Gw
https://www.youtube.com/watch?v=nB5njVbYF_k
https://www.youtube.com/watch?v=nfz__ppKnHM
https://www.youtube.com/watch?v=9y9YPN5mclI
https://www.youtube.com/watch?v=knEbE0conCs
https://www.youtube.com/watch?v=uuoZinaXlAs
https://www.youtube.com/watch?v=HATcwbvQEac
https://www.youtube.com/watch?v=9B5bfAzqT6A
https://www.youtube.com/watch?v=Spa5Hstqe9c
https://www.youtube.com/watch?v=RX8-5eZZH-g
https://www.youtube.com/watch?v=FhNZRjWbV88
https://www.youtube.com/watch?v=4iOXfE5mrOs
https://www.youtube.com/watch?v=v2ZKFLsJh44
https://www.youtube.com/watch?v=Im7IqqmDmwE
https://www.youtube.com/watch?v=ldQM2aFCLAY
https://www.youtube.com/watch?v=wNqXAjJtZG0
https://www.youtube.com/watch?v=etSP1c6XQF8
https://www.youtube.com/watch?v=Kz7tHkNSXnU
https://www.youtube.com/watch?v=YurbIFCKqHo
https://www.youtube.com/watch?v=UspvXAlQ26o
https://www.youtube.com/watch?v=JYvEggbevBw
https://www.youtube.com/watch?v=TJpGFfXl4Rg
https://www.youtube.com/watch?v=T59N1dRRAtw
https://www.youtube.com/watch?v=sdW6jwBctac
https://www.youtube.com/watch?v=jTWH3Rb72rI
https://www.youtube.com/watch?v=OT5UFvRabSg
https://www.youtube.com/watch?v=N7uyjA7i7SI
https://www.youtube.com/watch?v=D2z8w4MNnOI
https://www.youtube.com/watch?v=PGaSqod7xJ8
https://www.youtube.com/watch?v=W0RtLCRayZI
https://www.youtube.com/watch?v=9d_fnwE-P3g
https://www.youtube.com/watch?v=Y_nKb8qDnS4
https://www.youtube.com/watch?v=mSqdcFxU-BE
https://www.youtube.com/watch?v=dh8voXLq30Q
https://www.youtube.com/watch?v=j34tBBPPO7k
https://www.youtube.com/watch?v=Z39BEKY6r2g
https://www.youtube.com/watch?v=tV48lHlCO_E
https://www.youtube.com/watch?v=9AdxIIwBwD8
https://www.youtube.com/watch?v=sgYKkmO4Y9k
https://www.youtube.com/watch?v=g38X9lWrCvM
https://www.youtube.com/watch?v=qeZX32QWR5I
https://www.youtube.com/watch?v=t949upp1nAc
https://www.youtube.com/watch?v=FQG5G5yuhkI
https://www.youtube.com/watch?v=vqArE3Ddjp0
https://www.youtube.com/watch?v=kWD8zGU-sHs
https://www.youtube.com/watch?v=UKpJ07-Fdco
https://www.youtube.com/watch?v=yCTY7L58M0c
https://www.youtube.com/watch?v=Eg9kwoacM_Q
https://www.youtube.com/watch?v=TYh4lXx57b4
https://www.youtube.com/watch?v=KkQGpeabUmc
https://www.youtube.com/watch?v=alqtdG-lH20
https://www.youtube.com/watch?v=LhUFVnK5IBU
https://www.youtube.com/watch?v=SKxio1UECBA
https://www.youtube.com/watch?v=OkZQdhgToL4
https://www.youtube.com/watch?v=tSyp0B5KLIc
https://www.youtube.com/watch?v=X2l_P0KN4FE
https://www.youtube.com/watch?v=Rw6YwelbmPU
https://www.youtube.com/watch?v=QucIkabx540
https://www.youtube.com/watch?v=R6a2CRSeq4I
https://www.youtube.com/watch?v=S_pCLiKX1jQ
https://www.youtube.com/watch?v=B_uOj6ZaGjU
https://www.youtube.com/watch?v=pMxjV14wEaQ
https://www.youtube.com/watch?v=hSCD7O2zZqs
https://www.youtube.com/watch?v=MT7JYGXHvsg
https://www.youtube.com/watch?v=Z7eUu3-vcXA
https://www.youtube.com/watch?v=Pk2cph6j3Qk
https://www.youtube.com/watch?v=hionali1f8Q
https://www.youtube.com/watch?v=RFkfq59Q6Jc
https://www.youtube.com/watch?v=ZIZVTRYCGWM
https://www.youtube.com/watch?v=QFMoiOAY6i4
https://www.youtube.com/watch?v=6uRUgR7azzc
https://www.youtube.com/watch?v=76sAQCo9Ao0
https://www.youtube.com/watch?v=vEjnuOJadpA
https://www.youtube.com/watch?v=T5qyH3fO_NQ
https://www.youtube.com/watch?v=HIHiwfUj8_Y
https://www.youtube.com/watch?v=oMnMBFHzO2A
https://www.youtube.com/watch?v=AsRDIaqn-b0
https://www.youtube.com/watch?v=u1VSkXNGAM0
https://www.youtube.com/watch?v=ys3q0YCAtWA
https://www.youtube.com/watch?v=zCfWIxAIo2A
https://www.youtube.com/watch?v=5hgHNqbMovk
https://www.youtube.com/watch?v=JQGubPbpwp0
https://www.youtube.com/watch?v=np2aMq-duMA
https://www.youtube.com/watch?v=MsKvbsmxSEk
https://www.youtube.com/watch?v=xOOUaiwdY98
https://www.youtube.com/watch?v=gxJsp3I9PvQ
https://www.youtube.com/watch?v=cpFV2AxmEeY
https://www.youtube.com/watch?v=42mSq6e5ns0
https://www.youtube.com/watch?v=ZcMv4sje3Vw
https://www.youtube.com/watch?v=tNTzPVJoMKQ
https://www.youtube.com/watch?v=NQRZXINtEhs
https://www.youtube.com/watch?v=aTG1pNOSYro
https://www.youtube.com/watch?v=ZoTtMmX1oz8
https://www.youtube.com/watch?v=ulUenhoi__M
https://www.youtube.com/watch?v=tT-3k4barTQ
https://www.youtube.com/watch?v=QtMoPqXYXDk
https://www.youtube.com/watch?v=HEftQMgt4rg
https://www.youtube.com/watch?v=b4r4XexQLrM
https://www.youtube.com/watch?v=1yFwBLcBFRQ
https://www.youtube.com/watch?v=WKHlvJ6x1LA
https://www.youtube.com/watch?v=xSOIjsEekWA
https://www.youtube.com/watch?v=62akyr7rzxI
https://www.youtube.com/watch?v=XIIJpCoCm4Q
https://www.youtube.com/watch?v=BbtWZWOoACk
https://www.youtube.com/watch?v=ijyJWAgGWG0
https://www.youtube.com/watch?v=ACHMYMusnKo
https://www.youtube.com/watch?v=trsCBMyuyLI
https://www.youtube.com/watch?v=iSaF1n0cbuQ
https://www.youtube.com/watch?v=10j3GH59eL4
https://www.youtube.com/watch?v=mIwhMVo7GJs
https://www.youtube.com/watch?v=1cB4HhI47Jg
https://www.youtube.com/watch?v=H94hOHN2rVw
https://www.youtube.com/watch?v=rGnMrpcgUjE
https://www.youtube.com/watch?v=Tg15phZM9MA
https://www.youtube.com/watch?v=ayPZx_IUoos
https://www.youtube.com/watch?v=hKq0g55QaM4
https://www.youtube.com/watch?v=NrAW-afOBqM
https://www.youtube.com/watch?v=kQa5hKggcjw
https://www.youtube.com/watch?v=yEymLHO3CQ4
https://www.youtube.com/watch?v=0NsTlre-PPA
https://www.youtube.com/watch?v=mELSxoR-zCU
https://www.youtube.com/watch?v=yEVXuFCUjXw
https://www.youtube.com/watch?v=hMFcuOFDCcg
https://www.youtube.com/watch?v=6MeAaGtWp9s
https://www.youtube.com/watch?v=jKvNdMDukcc
https://www.youtube.com/watch?v=8KS0CHgvI1A
https://www.youtube.com/watch?v=AKvX7ah_hOw
https://www.youtube.com/watch?v=GuLbIe2cF2w
https://www.youtube.com/watch?v=sY-zWrNqkOE
https://www.youtube.com/watch?v=3iC097rvS_o
https://www.youtube.com/watch?v=wGZtL-gaazM
https://www.youtube.com/watch?v=Er1FX6IMfqQ
https://www.youtube.com/watch?v=1jvJ3DMJAlg
https://www.youtube.com/watch?v=c85Fh_WDU3A
https://www.youtube.com/watch?v=wtiG7C87QX4
https://www.youtube.com/watch?v=eACzH5r_Ma8
https://www.youtube.com/watch?v=9ypHGzE6Di8
https://www.youtube.com/watch?v=EvcvCsNF0-g
https://www.youtube.com/watch?v=zEp3EOVlRFE
https://www.youtube.com/watch?v=ZMVgFtRVSuQ
https://www.youtube.com/watch?v=YELUbjJS280
https://www.youtube.com/watch?v=ViNvarsfuNQ
https://www.youtube.com/watch?v=bITtNQSvWfc
https://www.youtube.com/watch?v=bg0YLrDhXgQ
https://www.youtube.com/watch?v=TF2weikuHEo
https://www.youtube.com/watch?v=Lw55m9XNSaQ
https://www.youtube.com/watch?v=NNCTs-K7U38
https://www.youtube.com/watch?v=lgLixSq9wS4
https://www.youtube.com/watch?v=2ZMLHS7l5NU
https://www.youtube.com/watch?v=TVfRO7hn3bs
https://www.youtube.com/watch?v=wByimVu-hFs
https://www.youtube.com/watch?v=amk_ol9sb3M
https://www.youtube.com/watch?v=S6QvzexdgfY
https://www.youtube.com/watch?v=yFvRdCOe_Ss
https://www.youtube.com/watch?v=ghvU3NQvb-4
https://www.youtube.com/watch?v=-R5HMIcBxNo
https://www.youtube.com/watch?v=xksdvTH0fA4
https://www.youtube.com/watch?v=VwlX1fGTOio
https://www.youtube.com/watch?v=luTcIoSJooo
https://www.youtube.com/watch?v=zyQoz35pRN4
https://www.youtube.com/watch?v=1ijOjx54_8k
https://www.youtube.com/watch?v=Q57suljQVtE
https://www.youtube.com/watch?v=f6Wpgyakg4Q
https://www.youtube.com/watch?v=SmZwenV25hI
https://www.youtube.com/watch?v=438S8whO7sM
https://www.youtube.com/watch?v=GJwGamIG_2o
https://www.youtube.com/watch?v=RBEjeQf3eUA
https://www.youtube.com/watch?v=0Bm0VrVWTVM
https://www.youtube.com/watch?v=ELO3tyx76R0
https://www.youtube.com/watch?v=2RjdVnvMtZk
https://www.youtube.com/watch?v=6zyz3lHLlh0
https://www.youtube.com/watch?v=gAjmL2hgjVA
https://www.youtube.com/watch?v=UxD-eeezwz8
https://www.youtube.com/watch?v=FQ1Qj5tiAfU
https://www.youtube.com/watch?v=ApE2rPgtQ04
https://www.youtube.com/watch?v=wp-d1_jUCS8
https://www.youtube.com/watch?v=6Y22VyY5h1A
https://www.youtube.com/watch?v=CQgucWUHzS8
https://www.youtube.com/watch?v=5_7mObtXbvc
https://www.youtube.com/watch?v=xMAkXgaG1LY
https://www.youtube.com/watch?v=haNnYV72rKw
https://www.youtube.com/watch?v=DELqUJkPXPg
https://www.youtube.com/watch?v=pX7s8Xw5YCc
https://www.youtube.com/watch?v=9vr04dqyX98
https://www.youtube.com/watch?v=dYLPqsSQpcU
https://www.youtube.com/watch?v=ljQZiwqYqi8
https://www.youtube.com/watch?v=JASbtU-NIYE
https://www.youtube.com/watch?v=DrTj3YdT_S8
https://www.youtube.com/watch?v=3E_M5GC5me0
https://www.youtube.com/watch?v=Wz74VDYVpO4
https://www.youtube.com/watch?v=BseyjmC39x8
https://www.youtube.com/watch?v=IHaqsz_LDBE
https://www.youtube.com/watch?v=X9y2thI8n-g
https://www.youtube.com/watch?v=Tvt1-4bA0ZU
https://www.youtube.com/watch?v=PX-Z916sh_Q
https://www.youtube.com/watch?v=k0pCbTE11jE
https://www.youtube.com/watch?v=Rut5u78laS8
https://www.youtube.com/watch?v=6e9y8HP9-Qs
https://www.youtube.com/watch?v=lpfomwG5Jb0
https://www.youtube.com/watch?v=hJVfabvgg7g
https://www.youtube.com/watch?v=ot5N-kaB86g
https://www.youtube.com/watch?v=I8W2eJ6lMv8
https://www.youtube.com/watch?v=iK489f6qZhw
https://www.youtube.com/watch?v=7m7GdAaDUJk
https://www.youtube.com/watch?v=b9-QhEhWCMg
https://www.youtube.com/watch?v=1NPHHtZobWA
https://www.youtube.com/watch?v=md4EimJIMxU
https://www.youtube.com/watch?v=0cxc5iVzavo
https://www.youtube.com/watch?v=EShwg_-8gBQ
https://www.youtube.com/watch?v=jBNU3d1APAk
https://www.youtube.com/watch?v=X5s3aQD2vTg
https://www.youtube.com/watch?v=pg4PFn1a3w4
https://www.youtube.com/watch?v=j25u9KdduTI
https://www.youtube.com/watch?v=1G2a1FOdBX4
https://www.youtube.com/watch?v=cnAz3Tz6KM0
https://www.youtube.com/watch?v=x4RzM8MpYQs
https://www.youtube.com/watch?v=1zan33JDOaU
https://www.youtube.com/watch?v=e2qGGGcaNbE
https://www.youtube.com/watch?v=KxKh-ey1anM
https://www.youtube.com/watch?v=sY33ut-4q5U
https://www.youtube.com/watch?v=Yhy_9B-W1Bc
https://www.youtube.com/watch?v=HVjLcXoWdr0
https://www.youtube.com/watch?v=EDk4aURqdt0
https://www.youtube.com/watch?v=f69aszkNSrA
https://www.youtube.com/watch?v=ArP-iJnxr3g
https://www.youtube.com/watch?v=cSMlgnD9Uf4
https://www.youtube.com/watch?v=VvpHCEq3ETg

View File

@ -1,235 +0,0 @@
https://www.youtube.com/watch?v=Uh3-23diDuQ
https://www.youtube.com/watch?v=0h5fjDk4Fxk
https://www.youtube.com/watch?v=w729MixxpsY
https://www.youtube.com/watch?v=2JQv7_ijvHs
https://www.youtube.com/watch?v=lmBbDkhNtfU
https://www.youtube.com/watch?v=HdCoCj7DNlM
https://www.youtube.com/watch?v=LikXRzpPYEw
https://www.youtube.com/watch?v=tU9heYibc_4
https://www.youtube.com/watch?v=PXbkzMxmd_Q
https://www.youtube.com/watch?v=4Wrgvzh9oF8
https://www.youtube.com/watch?v=jBY7nHRdFuY
https://www.youtube.com/watch?v=WFxITA-cPn0
https://www.youtube.com/watch?v=iZXtblrH3E0
https://www.youtube.com/watch?v=21n1QM1E5Tg
https://www.youtube.com/watch?v=ore2ZKBFVIE
https://www.youtube.com/watch?v=DJPjz4TM-r8
https://www.youtube.com/watch?v=Uz13MyjVlI0
https://www.youtube.com/watch?v=M734Drp7DEk
https://www.youtube.com/watch?v=nG_dAIdROnA
https://www.youtube.com/watch?v=5S5i0RcG4JU
https://www.youtube.com/watch?v=nhP6k9XeJj8
https://www.youtube.com/watch?v=T00fMHYd4xg
https://www.youtube.com/watch?v=5btdrrWIPj8
https://www.youtube.com/watch?v=-0Dy7ZB5Tp0
https://www.youtube.com/watch?v=cbRqV5Nczs0
https://www.youtube.com/watch?v=wnWfEALwfrw
https://www.youtube.com/watch?v=hrMmDB735d8
https://www.youtube.com/watch?v=-T5viJt_NeY
https://www.youtube.com/watch?v=NbifCHxb1kU
https://www.youtube.com/watch?v=xUJKjeIknXY
https://www.youtube.com/watch?v=rGUN31cV2Hg
https://www.youtube.com/watch?v=2HRQDPQkntU
https://www.youtube.com/watch?v=QuidE5bynlg
https://www.youtube.com/watch?v=OzTDb4Vslkg
https://www.youtube.com/watch?v=u-t0Dm4Jx6s
https://www.youtube.com/watch?v=QQn7WjVq35M
https://www.youtube.com/watch?v=UcsqZ0r43xQ
https://www.youtube.com/watch?v=hcIdB-l3c_U
https://www.youtube.com/watch?v=4KF3dnURKaU
https://www.youtube.com/watch?v=km_2AcH-76A
https://www.youtube.com/watch?v=GG1lxGKvFZo
https://www.youtube.com/watch?v=4gM4ngPf-Ug
https://www.youtube.com/watch?v=CQrUmMTaCss
https://www.youtube.com/watch?v=fgkJpaPp6Ho
https://www.youtube.com/watch?v=Sa1h5lFB2oo
https://www.youtube.com/watch?v=JPxdXpn8pU8
https://www.youtube.com/watch?v=OiDz8w9nSC8
https://www.youtube.com/watch?v=Ka9PvuYAdm0
https://www.youtube.com/watch?v=_qr5dg7jIN0
https://www.youtube.com/watch?v=_PYpZwH1Goo
https://www.youtube.com/watch?v=lk9E49BqUkQ
https://www.youtube.com/watch?v=rcreLiAZGL0
https://www.youtube.com/watch?v=ieic-zav9Fk
https://www.youtube.com/watch?v=CwCpNXBF6js
https://www.youtube.com/watch?v=LlvC2Kj2pFo
https://www.youtube.com/watch?v=prErUmVPO3A
https://www.youtube.com/watch?v=StNr4Qo5QxI
https://www.youtube.com/watch?v=DwXPj__Wm5E
https://www.youtube.com/watch?v=z3tA2nrhIuk
https://www.youtube.com/watch?v=oKGM6yhysMw
https://www.youtube.com/watch?v=bXNjW_I_1Qo
https://www.youtube.com/watch?v=LPmoYOBnVR0
https://www.youtube.com/watch?v=GyI9lJELbEk
https://www.youtube.com/watch?v=_C6t5mf4lbo
https://www.youtube.com/watch?v=OiGxqjZfbJc
https://www.youtube.com/watch?v=KCPW6EXa8-o
https://www.youtube.com/watch?v=W8yWWK0C8yg
https://www.youtube.com/watch?v=_obpK8FSh0M
https://www.youtube.com/watch?v=IChe3vDfRiI
https://www.youtube.com/watch?v=Bl_ph6AMXz8
https://www.youtube.com/watch?v=xReRVPK97SE
https://www.youtube.com/watch?v=w3UZJ21mpS8
https://www.youtube.com/watch?v=50ilwSLjqPQ
https://www.youtube.com/watch?v=k1gJ787wdR4
https://www.youtube.com/watch?v=6onGbYzhrks
https://www.youtube.com/watch?v=JzdOP-4lSAM
https://www.youtube.com/watch?v=25ey_nTjFXM
https://www.youtube.com/watch?v=JHwghzfAkNM
https://www.youtube.com/watch?v=c2N-wMntv7o
https://www.youtube.com/watch?v=wvTkHp8yWQA
https://www.youtube.com/watch?v=ekx12gwN35o
https://www.youtube.com/watch?v=3Dmwk7vH5aE
https://www.youtube.com/watch?v=37oU7XZqHKQ
https://www.youtube.com/watch?v=p7cRbQwQLIU
https://www.youtube.com/watch?v=O5WZy3lgUwk
https://www.youtube.com/watch?v=xwQ7heyQDU0
https://www.youtube.com/watch?v=Ioc2DzR36eg
https://www.youtube.com/watch?v=_Ws5CLQAZiY
https://www.youtube.com/watch?v=0eWGo4d061o
https://www.youtube.com/watch?v=w0gwOfOspWM
https://www.youtube.com/watch?v=kuBIKagXlD0
https://www.youtube.com/watch?v=MxUVYfjSKUo
https://www.youtube.com/watch?v=Gm2rAs40jCU
https://www.youtube.com/watch?v=5_jbqEeWdqY
https://www.youtube.com/watch?v=_vCLPKftFsk
https://www.youtube.com/watch?v=sLzyP0g3Tz0
https://www.youtube.com/watch?v=6_ALpSHwsyU
https://www.youtube.com/watch?v=iyChl-zsg8I
https://www.youtube.com/watch?v=n-tgxts1qTQ
https://www.youtube.com/watch?v=xwIoQ7aHASA
https://www.youtube.com/watch?v=kWDWFGZcPn0
https://www.youtube.com/watch?v=Z3WZ8IYnx6o
https://www.youtube.com/watch?v=6grKBi186q0
https://www.youtube.com/watch?v=yzmSXl428lo
https://www.youtube.com/watch?v=-7aDUxmsbho
https://www.youtube.com/watch?v=2E15vQEUh3Y
https://www.youtube.com/watch?v=-gTJpdOSKdE
https://www.youtube.com/watch?v=1BgHKfpGqxQ
https://www.youtube.com/watch?v=CGG8-6y82Dc
https://www.youtube.com/watch?v=OYWFQagB5SA
https://www.youtube.com/watch?v=v2ejcHqbB9M
https://www.youtube.com/watch?v=Ui8eW_bQI-c
https://www.youtube.com/watch?v=Df43IR-Y2pg
https://www.youtube.com/watch?v=8SxFtbjJLIY
https://www.youtube.com/watch?v=_CzzsdbwVvU
https://www.youtube.com/watch?v=1zQ_pVzAWmQ
https://www.youtube.com/watch?v=Zx7l88BnZq4
https://www.youtube.com/watch?v=8D7cgnCRg9M
https://www.youtube.com/watch?v=1mmOlk_6KiY
https://www.youtube.com/watch?v=f9yuepxqoI8
https://www.youtube.com/watch?v=WzwdRsbyrsE
https://www.youtube.com/watch?v=O0PK3YdZ6Gs
https://www.youtube.com/watch?v=y5sBj8J1zXE
https://www.youtube.com/watch?v=Erg0E8MkI8g
https://www.youtube.com/watch?v=V2XpuzU9qKc
https://www.youtube.com/watch?v=iO6Td9WZl8Q
https://www.youtube.com/watch?v=Ve6hXGghGDU
https://www.youtube.com/watch?v=XQkWto6o-zE
https://www.youtube.com/watch?v=jU7HHOjHIHI
https://www.youtube.com/watch?v=y8U0G8Yp5As
https://www.youtube.com/watch?v=hK3dOOeJYeQ
https://www.youtube.com/watch?v=3XwUV_IIeAY
https://www.youtube.com/watch?v=-H7oKDmsJBA
https://www.youtube.com/watch?v=ax2sbkIwsbs
https://www.youtube.com/watch?v=emBuYmGX9pA
https://www.youtube.com/watch?v=bYnSsdt-EwQ
https://www.youtube.com/watch?v=xWH-3s6ae6g
https://www.youtube.com/watch?v=bGwSw06nHlI
https://www.youtube.com/watch?v=XzfATljHtZA
https://www.youtube.com/watch?v=vHEpEVrULj8
https://www.youtube.com/watch?v=Vw9VQqB9nZY
https://www.youtube.com/watch?v=Zo8DB-WiT8o
https://www.youtube.com/watch?v=118Qmc4PW94
https://www.youtube.com/watch?v=mqjvTCW28wA
https://www.youtube.com/watch?v=COOUsA8sgzw
https://www.youtube.com/watch?v=nJjZWvoIXno
https://www.youtube.com/watch?v=Un-iP21XLcY
https://www.youtube.com/watch?v=ndqEvSH28sE
https://www.youtube.com/watch?v=ROTsUIJFFAI
https://www.youtube.com/watch?v=sjtU9ZJ1kl4
https://www.youtube.com/watch?v=-l6aB5-5IGo
https://www.youtube.com/watch?v=B2zGJdhw7Qk
https://www.youtube.com/watch?v=Hq-agpSNVvk
https://www.youtube.com/watch?v=9Kmah0OdmfQ
https://www.youtube.com/watch?v=G3pd86ahuIk
https://www.youtube.com/watch?v=Z4zFfpUWFjc
https://www.youtube.com/watch?v=uCXKbn0_LYU
https://www.youtube.com/watch?v=Mm4CsHQ7jEY
https://www.youtube.com/watch?v=gzDj9vQhM3U
https://www.youtube.com/watch?v=nYBMPcWh6io
https://www.youtube.com/watch?v=sTIi-NkM_4o
https://www.youtube.com/watch?v=Mid_00T8OzY
https://www.youtube.com/watch?v=O74uCOmq-5w
https://www.youtube.com/watch?v=5czh6hr5ZCU
https://www.youtube.com/watch?v=Seu6J2umwKg
https://www.youtube.com/watch?v=aYtahgqJzGM
https://www.youtube.com/watch?v=hOGbqy4YQbc
https://www.youtube.com/watch?v=5hBzDL7E9yk
https://www.youtube.com/watch?v=cOY9GMP_fyA
https://www.youtube.com/watch?v=S7pF2ggDLM4
https://www.youtube.com/watch?v=eTDWGdCwEsU
https://www.youtube.com/watch?v=9CyB887wkHs
https://www.youtube.com/watch?v=PnKwZK96xOQ
https://www.youtube.com/watch?v=1bnsQ6waGm8
https://www.youtube.com/watch?v=ezJJzX7F57E
https://www.youtube.com/watch?v=abgJBju9UhE
https://www.youtube.com/watch?v=Gb_CTWcoscs
https://www.youtube.com/watch?v=XK70iPIeAnY
https://www.youtube.com/watch?v=bpROcbyQOdc
https://www.youtube.com/watch?v=Pvyi1ldhlVQ
https://www.youtube.com/watch?v=a8ABcZWicB0
https://www.youtube.com/watch?v=STXjofgjHMo
https://www.youtube.com/watch?v=yKUAqlCUFx4
https://www.youtube.com/watch?v=aE3IkzploxM
https://www.youtube.com/watch?v=0BNLKVNMeuo
https://www.youtube.com/watch?v=HcoKqF60cO8
https://www.youtube.com/watch?v=kj7UFFcXVlQ
https://www.youtube.com/watch?v=E30v2guQPJg
https://www.youtube.com/watch?v=_L01csO4Nek
https://www.youtube.com/watch?v=p3uH5LA0IdM
https://www.youtube.com/watch?v=cEnSOZhGdfo
https://www.youtube.com/watch?v=P97y46gLFOk
https://www.youtube.com/watch?v=iayw8uf6DM8
https://www.youtube.com/watch?v=hrUZrC6OplE
https://www.youtube.com/watch?v=F36GPTHUyXU
https://www.youtube.com/watch?v=QPc0595_s28
https://www.youtube.com/watch?v=kl9hYdYQy0g
https://www.youtube.com/watch?v=_BucBOfiyLg
https://www.youtube.com/watch?v=PWwNGu-AaqA
https://www.youtube.com/watch?v=HQhgyMKvBSY
https://www.youtube.com/watch?v=VNCQU3dRY64
https://www.youtube.com/watch?v=bG4ByNMW5ds
https://www.youtube.com/watch?v=L8tcS2e3rfk
https://www.youtube.com/watch?v=g39DLsLdXTE
https://www.youtube.com/watch?v=4d4aeRHTdTo
https://www.youtube.com/watch?v=RgdhZCq19ZQ
https://www.youtube.com/watch?v=I9NHi6EmkTk
https://www.youtube.com/watch?v=6CcpmLOWyMk
https://www.youtube.com/watch?v=IB7E41G2CtI
https://www.youtube.com/watch?v=6O-Aef1Gn4c
https://www.youtube.com/watch?v=uxfL1LRpbW4
https://www.youtube.com/watch?v=FCpcyBHurFw
https://www.youtube.com/watch?v=CygnT11F_ZI
https://www.youtube.com/watch?v=cHVGidgy71o
https://www.youtube.com/watch?v=bGtQwp5ixTA
https://www.youtube.com/watch?v=voeKD_enQTo
https://www.youtube.com/watch?v=G05pwijaK3A
https://www.youtube.com/watch?v=B44UzfY3Xsc
https://www.youtube.com/watch?v=p8E-1M3hZSo
https://www.youtube.com/watch?v=pxWm1Pfzy1w
https://www.youtube.com/watch?v=PWN2vaiAdrE
https://www.youtube.com/watch?v=TRapAZa9UgQ
https://www.youtube.com/watch?v=JaopWs4FnpU
https://www.youtube.com/watch?v=Bm1xne0yK08
https://www.youtube.com/watch?v=UrbDu6--eyY
https://www.youtube.com/watch?v=0uxmUb8_bYQ
https://www.youtube.com/watch?v=xEPT6rceiaE
https://www.youtube.com/watch?v=OcWYreMBLHE
https://www.youtube.com/watch?v=74Hh1nJJRXs
https://www.youtube.com/watch?v=kxH_bu7oWEQ
https://www.youtube.com/watch?v=xLZQHETWRCM
https://www.youtube.com/watch?v=QLNurv0bgDs
https://www.youtube.com/watch?v=Xa_JupHC-BY
https://www.youtube.com/watch?v=5AdEUJ_bA-w
https://www.youtube.com/watch?v=CumOhS7DldM

View File

@ -1,233 +0,0 @@
https://www.youtube.com/watch?v=w4rRYaRB5T0
https://www.youtube.com/watch?v=FjvPtUvZZRE
https://www.youtube.com/watch?v=iII9kszf9-E
https://www.youtube.com/watch?v=7hyi0F6EEGg
https://www.youtube.com/watch?v=ilQMJ-kwMAU
https://www.youtube.com/watch?v=SJOP-f5pvlY
https://www.youtube.com/watch?v=LhBc8ElIf_Y
https://www.youtube.com/watch?v=w_koXhVD1uc
https://www.youtube.com/watch?v=HCoR_bmy-lA
https://www.youtube.com/watch?v=vKPezUzvTjg
https://www.youtube.com/watch?v=Gy3teXRZc3M
https://www.youtube.com/watch?v=4FqSKCGo_yQ
https://www.youtube.com/watch?v=hoY7RZV1W4Y
https://www.youtube.com/watch?v=vKyKEcq44uw
https://www.youtube.com/watch?v=g0LBa1CxhDc
https://www.youtube.com/watch?v=4Je1Q5SioSQ
https://www.youtube.com/watch?v=141sUnI0Z8o
https://www.youtube.com/watch?v=RssoSCiTvcQ
https://www.youtube.com/watch?v=gNmiJWksHV0
https://www.youtube.com/watch?v=-sht8xDsM_4
https://www.youtube.com/watch?v=q9npqzOrkXQ
https://www.youtube.com/watch?v=0e1tKLUdWc0
https://www.youtube.com/watch?v=FWa3LA5diUA
https://www.youtube.com/watch?v=iANrNIqlfEU
https://www.youtube.com/watch?v=HoQgL7JmzYE
https://www.youtube.com/watch?v=m4xFa51_qTw
https://www.youtube.com/watch?v=JEtPhrfEZTc
https://www.youtube.com/watch?v=GV6PNirTB5Q
https://www.youtube.com/watch?v=ymOwuN4Icbw
https://www.youtube.com/watch?v=z-PZegRllLE
https://www.youtube.com/watch?v=_FmUv2AX36Q
https://www.youtube.com/watch?v=NytoPxQXGFk
https://www.youtube.com/watch?v=yX05hYFyiBY
https://www.youtube.com/watch?v=JcdVC9wJX9g
https://www.youtube.com/watch?v=Zm1o6Uvqt6w
https://www.youtube.com/watch?v=1_3XNdOEHB4
https://www.youtube.com/watch?v=iERK93l5S4U
https://www.youtube.com/watch?v=bzQGeM-Adpk
https://www.youtube.com/watch?v=hEPeQWP9gq8
https://www.youtube.com/watch?v=kdxXsrO-_2o
https://www.youtube.com/watch?v=O0kH3wOwfyM
https://www.youtube.com/watch?v=7MT3LwhuYjo
https://www.youtube.com/watch?v=3rrT4hwjRiE
https://www.youtube.com/watch?v=h1Zx3W0OtAk
https://www.youtube.com/watch?v=Nor0KPVPImQ
https://www.youtube.com/watch?v=_eWaSkmUP-Q
https://www.youtube.com/watch?v=RYF5UYJC6Mk
https://www.youtube.com/watch?v=OUgecZ8n-dk
https://www.youtube.com/watch?v=PD-okljVoz8
https://www.youtube.com/watch?v=t1lDjE6zGR8
https://www.youtube.com/watch?v=QRcFSOOIR04
https://www.youtube.com/watch?v=W3E5IOtsz68
https://www.youtube.com/watch?v=qZlW5AeLKaA
https://www.youtube.com/watch?v=A8wb1LtJzbA
https://www.youtube.com/watch?v=-83bTbd6Vb4
https://www.youtube.com/watch?v=gaZ6nCFyT-E
https://www.youtube.com/watch?v=JCmlbmA9seQ
https://www.youtube.com/watch?v=66zl23CVyBE
https://www.youtube.com/watch?v=enjb-u5GS0E
https://www.youtube.com/watch?v=zQ7DZ_ZKLQ0
https://www.youtube.com/watch?v=cltTMwuk-Y0
https://www.youtube.com/watch?v=od41NVjN6Uc
https://www.youtube.com/watch?v=6XVgwxF8bU8
https://www.youtube.com/watch?v=KIAv3ZQ1gks
https://www.youtube.com/watch?v=4yzSoM9ZJoc
https://www.youtube.com/watch?v=mKM_Hve4PSM
https://www.youtube.com/watch?v=EabCNrEsgXY
https://www.youtube.com/watch?v=ObwEO_jK9_g
https://www.youtube.com/watch?v=Y19eeNT0hNQ
https://www.youtube.com/watch?v=OBvGnwg4WeY
https://www.youtube.com/watch?v=blwClcLpTZA
https://www.youtube.com/watch?v=e5FHL3Gl56k
https://www.youtube.com/watch?v=om0sYjy7eT8
https://www.youtube.com/watch?v=GG1uIMTZA30
https://www.youtube.com/watch?v=bW5lxEe9Qqg
https://www.youtube.com/watch?v=cbwxE8eZBhw
https://www.youtube.com/watch?v=kMzNX0j4NO8
https://www.youtube.com/watch?v=q5XBM4-OVJg
https://www.youtube.com/watch?v=Ip67Pcwm-8c
https://www.youtube.com/watch?v=ietAfH6O3IE
https://www.youtube.com/watch?v=0CBUwyVhssQ
https://www.youtube.com/watch?v=WocIrnJHCks
https://www.youtube.com/watch?v=Vu2sP1rBSuQ
https://www.youtube.com/watch?v=ypxZjDVLPd8
https://www.youtube.com/watch?v=v1O8jVvuDZE
https://www.youtube.com/watch?v=0gVYLElHyuU
https://www.youtube.com/watch?v=efx7G2vGiWc
https://www.youtube.com/watch?v=i907BKtWzKM
https://www.youtube.com/watch?v=Bru4dK_5cJk
https://www.youtube.com/watch?v=JSNrp0VfFxM
https://www.youtube.com/watch?v=cKvtjYtK_Zs
https://www.youtube.com/watch?v=xkWrdzoqOHc
https://www.youtube.com/watch?v=gU8Q4qhtWT8
https://www.youtube.com/watch?v=uFOs71VYSXQ
https://www.youtube.com/watch?v=9kb7VxErS64
https://www.youtube.com/watch?v=jzmHWb1uKFY
https://www.youtube.com/watch?v=g6ZEep0YEe8
https://www.youtube.com/watch?v=ARKY-HjGuio
https://www.youtube.com/watch?v=0Yr6CwKNorQ
https://www.youtube.com/watch?v=B0tA10cAXZw
https://www.youtube.com/watch?v=WsGUy4_LiEU
https://www.youtube.com/watch?v=-ndIAbqBYgk
https://www.youtube.com/watch?v=QbKhTZKEarU
https://www.youtube.com/watch?v=HOuePkn2qeI
https://www.youtube.com/watch?v=rDajxgZA5NU
https://www.youtube.com/watch?v=_0aMs9sWoeQ
https://www.youtube.com/watch?v=fPHFbYqK7kc
https://www.youtube.com/watch?v=2_wU4yl4LAY
https://www.youtube.com/watch?v=4xuBM4deE6g
https://www.youtube.com/watch?v=LmYgj8A2Tdk
https://www.youtube.com/watch?v=1WzMTIcIsJo
https://www.youtube.com/watch?v=3qCYB7CVewo
https://www.youtube.com/watch?v=GaflSBOX4Bw
https://www.youtube.com/watch?v=WzzoHmeVfEg
https://www.youtube.com/watch?v=ggY8ruNAJaM
https://www.youtube.com/watch?v=8JpXJQN3Q4U
https://www.youtube.com/watch?v=zahEfgHltAQ
https://www.youtube.com/watch?v=PidO06KgJgM
https://www.youtube.com/watch?v=ahkrZU7dbwg
https://www.youtube.com/watch?v=_6Uf0Wef5lk
https://www.youtube.com/watch?v=NtakSrWF8qw
https://www.youtube.com/watch?v=KPB550JDKvs
https://www.youtube.com/watch?v=a9b8E1cFAcg
https://www.youtube.com/watch?v=lVlxJHf9Uz0
https://www.youtube.com/watch?v=3wB7hOa6pTI
https://www.youtube.com/watch?v=4rT8sKSoHYg
https://www.youtube.com/watch?v=315qa9cu8NM
https://www.youtube.com/watch?v=eXInviPnnZA
https://www.youtube.com/watch?v=hiis-ugAa6Y
https://www.youtube.com/watch?v=vZy7LUVqK4I
https://www.youtube.com/watch?v=_Y8mWMbYcKk
https://www.youtube.com/watch?v=bIZrrGvkcaE
https://www.youtube.com/watch?v=kvgZc-q7K08
https://www.youtube.com/watch?v=4uiqIjIzJqg
https://www.youtube.com/watch?v=g2P3KlzZ_bc
https://www.youtube.com/watch?v=i6c47ojZ1gU
https://www.youtube.com/watch?v=J-WVFLD5z4E
https://www.youtube.com/watch?v=ViKKvVgHQro
https://www.youtube.com/watch?v=n-QbtsYb3ao
https://www.youtube.com/watch?v=4buYFi4RHXM
https://www.youtube.com/watch?v=h_-2AKl9mEU
https://www.youtube.com/watch?v=RKirt5pR5s4
https://www.youtube.com/watch?v=vQQDv11dLZo
https://www.youtube.com/watch?v=pw-v-Af0Zt4
https://www.youtube.com/watch?v=yyAUCuj_rfU
https://www.youtube.com/watch?v=5FSqDMjwFLQ
https://www.youtube.com/watch?v=J3m8gQKj9ls
https://www.youtube.com/watch?v=FiYgnqVWeD0
https://www.youtube.com/watch?v=35M5N22WEAw
https://www.youtube.com/watch?v=BrKHrNWU7fw
https://www.youtube.com/watch?v=CEVUU_s8Pk8
https://www.youtube.com/watch?v=p2l0QYUNsLw
https://www.youtube.com/watch?v=hGbyVDtuKlU
https://www.youtube.com/watch?v=5iPFMlxPSTc
https://www.youtube.com/watch?v=GVlVCSWeJL4
https://www.youtube.com/watch?v=WdVYt8PL0Po
https://www.youtube.com/watch?v=p2P_A9TqH78
https://www.youtube.com/watch?v=LQ-Kt4arMu8
https://www.youtube.com/watch?v=0ygsfbMtrZA
https://www.youtube.com/watch?v=5hQEI4jGEao
https://www.youtube.com/watch?v=0HG6LF7hsPk
https://www.youtube.com/watch?v=JqR_R4EvsNs
https://www.youtube.com/watch?v=YTS9r3aQ2Rw
https://www.youtube.com/watch?v=UDfjPR3XxGI
https://www.youtube.com/watch?v=eJhUqThVeTU
https://www.youtube.com/watch?v=eSFea0F4pM4
https://www.youtube.com/watch?v=Iumi423BMsY
https://www.youtube.com/watch?v=ix_lu8DoqVM
https://www.youtube.com/watch?v=YQMsHKAftA8
https://www.youtube.com/watch?v=YvdY-vk_qXo
https://www.youtube.com/watch?v=v5S1NeyK4zo
https://www.youtube.com/watch?v=T6eYlq568q8
https://www.youtube.com/watch?v=LHncOi0gLVo
https://www.youtube.com/watch?v=JzNhRNPs-zs
https://www.youtube.com/watch?v=fh1cW1xt6Hs
https://www.youtube.com/watch?v=96pUYEFNtbw
https://www.youtube.com/watch?v=3iav7GPapME
https://www.youtube.com/watch?v=ifeg0nKnYAc
https://www.youtube.com/watch?v=TSOGNwPdJC0
https://www.youtube.com/watch?v=Vdb-LAifVgw
https://www.youtube.com/watch?v=N8MzHelGeZA
https://www.youtube.com/watch?v=8lDLi6KL3NE
https://www.youtube.com/watch?v=AzesbB6B-Pw
https://www.youtube.com/watch?v=rCsByru8q80
https://www.youtube.com/watch?v=KFCIJ6HAefo
https://www.youtube.com/watch?v=gZm2T-9vQgA
https://www.youtube.com/watch?v=urdmi5PF9tk
https://www.youtube.com/watch?v=uwdnq5BHelM
https://www.youtube.com/watch?v=Npojrebk8K4
https://www.youtube.com/watch?v=hF8nxFrWEBM
https://www.youtube.com/watch?v=8ZP8Tmej3qI
https://www.youtube.com/watch?v=5LT6yyfblKU
https://www.youtube.com/watch?v=rwQdepyLhIc
https://www.youtube.com/watch?v=dJf0WhYGC-A
https://www.youtube.com/watch?v=LEADj_2kk5M
https://www.youtube.com/watch?v=rqklugKUZf0
https://www.youtube.com/watch?v=toYKzDsIlM0
https://www.youtube.com/watch?v=u8DoguzNFlE
https://www.youtube.com/watch?v=6A14Z3bVKxk
https://www.youtube.com/watch?v=Hx6V61dT9-c
https://www.youtube.com/watch?v=WbkkdqZtHhw
https://www.youtube.com/watch?v=FTTp__BIL6c
https://www.youtube.com/watch?v=2NIPuIbmeBg
https://www.youtube.com/watch?v=63Q5WLMF0lU
https://www.youtube.com/watch?v=KioksPhg_78
https://www.youtube.com/watch?v=zMJ27qdfGAI
https://www.youtube.com/watch?v=q9biOLIba28
https://www.youtube.com/watch?v=7RPWlQI1HFY
https://www.youtube.com/watch?v=Z2fPu9NX3DE
https://www.youtube.com/watch?v=_H3X_k7z7Iw
https://www.youtube.com/watch?v=0YPBfRGXejQ
https://www.youtube.com/watch?v=xUVwAIJu40g
https://www.youtube.com/watch?v=50GV67qKcSM
https://www.youtube.com/watch?v=DuVy41rot60
https://www.youtube.com/watch?v=4GXF7gItY6g
https://www.youtube.com/watch?v=mNuPUraHMMM
https://www.youtube.com/watch?v=OE5R-dxhktY
https://www.youtube.com/watch?v=wO52Mj2dOvk
https://www.youtube.com/watch?v=T10wxcDWchE
https://www.youtube.com/watch?v=qh9FHZ1-ihg
https://www.youtube.com/watch?v=n5R7te2NUDo
https://www.youtube.com/watch?v=xMK-jrM9KKM
https://www.youtube.com/watch?v=iQsw6tnDWUw
https://www.youtube.com/watch?v=QzGwqKSJp1Q
https://www.youtube.com/watch?v=2aTk7l-SBjc
https://www.youtube.com/watch?v=pF3fZ9Uv-hQ
https://www.youtube.com/watch?v=XZeKA5WRGkY
https://www.youtube.com/watch?v=kLsZ4t2a6Ts
https://www.youtube.com/watch?v=Zw8SxU02ZIk
https://www.youtube.com/watch?v=OplKfLEIq6w
https://www.youtube.com/watch?v=cXyk40-MWZA
https://www.youtube.com/watch?v=wlMw3dhuicc
https://www.youtube.com/watch?v=wFyVmtInX04

File diff suppressed because it is too large

View File

@ -1,30 +0,0 @@
[
"https://www.youtube.com/watch?v=lKrVuufVMXA",
"https://www.youtube.com/watch?v=ISqDcqGdow0",
"https://www.youtube.com/watch?v=srG-WnQdZq8",
"https://www.youtube.com/watch?v=HP-KB6XFqgs",
"https://www.youtube.com/watch?v=1e13SIh51wk",
"https://www.youtube.com/watch?v=VTKG48FjSxs",
"https://www.youtube.com/watch?v=onEWAyPRm6E",
"https://www.youtube.com/watch?v=7RdrGwpZzMo",
"https://www.youtube.com/watch?v=M5uu93_AhXg",
"https://www.youtube.com/watch?v=xnkvCBfTfok",
"https://www.youtube.com/watch?v=oE9hGZyFN8E",
"https://www.youtube.com/watch?v=7LofBMRP6U4",
"https://www.youtube.com/watch?v=EDE8tyroJEE",
"https://www.youtube.com/watch?v=oLwsWGi0sUc",
"https://www.youtube.com/watch?v=a6dvhHPyFIw",
"https://www.youtube.com/watch?v=4jds773UlWE",
"https://www.youtube.com/watch?v=B6dXxqiSBSM",
"https://www.youtube.com/watch?v=9EbS6w3RSG0",
"https://www.youtube.com/watch?v=LyKONGzUANU",
"https://www.youtube.com/watch?v=sGW5kfpR6Wo",
"https://www.youtube.com/watch?v=pa4-JninkUQ",
"https://www.youtube.com/watch?v=DxXMFBWarjY",
"https://www.youtube.com/watch?v=PYQjfpCEWvc",
"https://www.youtube.com/watch?v=_jlNCjI9jiQ",
"https://www.youtube.com/watch?v=BxEC11QS3sQ",
"https://www.youtube.com/watch?v=6-qbWRzVbGA",
"https://www.youtube.com/watch?v=p3lCQvZBv_k",
"https://www.youtube.com/watch?v=67YA1CHpGrM"
]

View File

@ -1,5 +0,0 @@
[
"https://www.youtube.com/watch?v=uxiLE2Kv7wc",
"https://www.youtube.com/watch?v=Q7R0epGFnRI",
"https://www.youtube.com/watch?v=4mEmsJXKroE"
]

View File

@ -1,48 +0,0 @@
[
"https://www.youtube.com/watch?v=l700b4BpFAA",
"https://www.youtube.com/watch?v=G_JAVwwWyUM",
"https://www.youtube.com/watch?v=2LGz9nUw-XI",
"https://www.youtube.com/watch?v=7dK6a8LWAWw",
"https://www.youtube.com/watch?v=lKSZnZggcto",
"https://www.youtube.com/watch?v=Zy0ZFAMqm7U",
"https://www.youtube.com/watch?v=7UunWMHBrEE",
"https://www.youtube.com/watch?v=LPdbLCX3N-4",
"https://www.youtube.com/watch?v=-lJ5DVbkVw4",
"https://www.youtube.com/watch?v=QrRRS0RzELs",
"https://www.youtube.com/watch?v=XSty74mE1iE",
"https://www.youtube.com/watch?v=orijdeDOk5g",
"https://www.youtube.com/watch?v=27YVRo9VUE8",
"https://www.youtube.com/watch?v=p-JNgLI_8nA",
"https://www.youtube.com/watch?v=gkekjIJB_Nw",
"https://www.youtube.com/watch?v=V8QFCgOfkgw",
"https://www.youtube.com/watch?v=_GVVEsxZ_Mo",
"https://www.youtube.com/watch?v=7_zMqxK4gZE",
"https://www.youtube.com/watch?v=cwuJCb316yQ",
"https://www.youtube.com/watch?v=TIGxtvVVHak",
"https://www.youtube.com/watch?v=KhcicW2keWY",
"https://www.youtube.com/watch?v=miUJ85pFCPE",
"https://www.youtube.com/watch?v=97L4qVfSwv4",
"https://www.youtube.com/watch?v=Wk38hWQfz24",
"https://www.youtube.com/watch?v=iIU-NVWkTDE",
"https://www.youtube.com/watch?v=l89VaRof8ug",
"https://www.youtube.com/watch?v=IIkjS5MpQVM",
"https://www.youtube.com/watch?v=9XxPGKkOs0o",
"https://www.youtube.com/watch?v=_dlpve9GPZM",
"https://www.youtube.com/watch?v=He_3MjAuZNQ",
"https://www.youtube.com/watch?v=FnPEHn2NHT4",
"https://www.youtube.com/watch?v=HuSjI7HFkzo",
"https://www.youtube.com/watch?v=pBZSgVJHacs",
"https://www.youtube.com/watch?v=OgsG082zDGo",
"https://www.youtube.com/watch?v=_4sxhmPsryY",
"https://www.youtube.com/watch?v=kqU6B5rIEnI",
"https://www.youtube.com/watch?v=BEYn_ILHmBE",
"https://www.youtube.com/watch?v=qy9Zr3HV9V4",
"https://www.youtube.com/watch?v=7I1VvJZbG-M",
"https://www.youtube.com/watch?v=WOa-HA3MoVQ",
"https://www.youtube.com/watch?v=uaHI-WHwivc",
"https://www.youtube.com/watch?v=9ku8r8uZ9EQ",
"https://www.youtube.com/watch?v=XAyaDcLxwHQ",
"https://www.youtube.com/watch?v=zpc-hJGSNBc",
"https://www.youtube.com/watch?v=AGbG62y1DyE",
"https://www.youtube.com/watch?v=7rmyabL60oA"
]

View File

@ -1,18 +0,0 @@
"""
Airflow plugins initialization.
"""
import os
import logging
# Set the custom secrets masker
os.environ['AIRFLOW__LOGGING__SECRETS_MASKER_CLASS'] = 'custom_secrets_masker.CustomSecretsMasker'
# Apply Thrift patches
try:
from patch_thrift_exceptions import patch_thrift_exceptions
patch_thrift_exceptions()
except Exception as e:
logging.error(f"Error applying Thrift exception patches: {e}")
logger = logging.getLogger(__name__)
logger.info("Airflow custom configuration applied")
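The `CustomSecretsMasker` referenced above lives elsewhere in the repository; as a minimal sketch, such a class might extend Airflow's stock `SecretsMasker` logging filter and register extra values to mask. The file name, environment variable, and pattern below are illustrative assumptions, not the repo's actual implementation:

```python
# custom_secrets_masker.py -- hypothetical sketch, not the repo's actual file.
import os

from airflow.utils.log.secrets_masker import SecretsMasker


class CustomSecretsMasker(SecretsMasker):
    """Stock Airflow masking plus one project-specific secret (illustrative)."""

    def __init__(self):
        super().__init__()
        # Assumed env var; register its value so it is redacted in task logs.
        secret = os.environ.get('YTDLP_PROXY_PASSWORD')
        if secret:
            self.add_mask(secret)
```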

View File

@ -1,56 +0,0 @@
from airflow.plugins_manager import AirflowPlugin
from airflow.hooks.base import BaseHook
from airflow.configuration import conf
import uuid
import backoff


class YTDLPHook(BaseHook):
    def __init__(self, conn_id='ytdlp_default'):
        super().__init__()
        self.conn_id = conn_id
        self.connection = self.get_connection(conn_id)
        self.timeout = conf.getint('ytdlp', 'timeout', fallback=120)
        self.max_retries = conf.getint('ytdlp', 'max_retries', fallback=3)

    @backoff.on_exception(backoff.expo,
                          Exception,
                          max_tries=3,
                          max_time=300)
    def start_service(self, host, port, service_id, work_dir):
        """Start token service as a long-running process"""
        import subprocess
        import os
        from pathlib import Path

        # Get script path relative to Airflow home
        airflow_home = os.getenv('AIRFLOW_HOME', '')
        script_path = Path(airflow_home).parent / 'ytdlp_ops_server.py'

        # Ensure work directory exists
        os.makedirs(work_dir, exist_ok=True)

        # Build the direct-invocation command; note it is only logged for
        # operators -- the service itself is launched via docker-compose below.
        cmd = [
            'python', str(script_path),
            '--port', str(port),
            '--host', host,
            '--service-id', service_id,
            '--context-dir', work_dir,
            '--script-dir', str(Path(airflow_home) / 'dags' / 'scripts')
        ]
        self.log.info(f"Starting token service: {' '.join(cmd)}")

        # Start the service detached via docker-compose
        docker_cmd = [
            'docker-compose', '-f', 'docker-compose.yaml',
            'up', '-d', '--build', 'ytdlp-service'
        ]
        subprocess.run(docker_cmd, check=True)
        self.log.info(f"Token service started on {host}:{port}")
        return True


class YTDLPPlugin(AirflowPlugin):
    name = 'ytdlp_plugin'
    hooks = [YTDLPHook]
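For orientation, a minimal sketch of how a DAG task might invoke this hook; the import path, connection id, and argument values below are illustrative assumptions, not confirmed by the diff:

```python
# Hypothetical usage sketch -- names and values are assumptions.
from airflow.decorators import task
from ytdlp_plugin import YTDLPHook  # import path assumed


@task
def launch_token_service():
    hook = YTDLPHook(conn_id='ytdlp_default')  # connection assumed to exist
    hook.start_service(host='0.0.0.0', port=8080,
                       service_id='ytdlp-1', work_dir='/opt/airflow/ytdlp')
```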

View File

@ -1,331 +0,0 @@
---
- name: Check if Airflow master deployment directory exists
  stat:
    path: "{{ airflow_master_dir }}"
  register: master_dir_stat

- name: Ensure Airflow master deployment directory exists
  file:
    path: "{{ airflow_master_dir }}"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0755'
  become: yes
  when: not master_dir_stat.stat.exists

- name: Ensure Airflow master configs directory exists
  file:
    path: "{{ airflow_master_dir }}/configs"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0755'
  become: yes

- name: Ensure Airflow master config directory exists
  file:
    path: "{{ airflow_master_dir }}/config"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0755'
  become: yes

- name: Ensure Airflow operational directories exist with correct permissions
  file:
    path: "{{ airflow_master_dir }}/{{ item }}"
    state: directory
    owner: "{{ airflow_uid }}"
    group: "{{ deploy_group }}"
    mode: '0775'
  become: yes
  loop:
    - "dags"
    - "logs"
    - "plugins"
    - "downloadfiles"
    - "addfiles"
    - "inputfiles"

- name: Check if source directories exist
  stat:
    path: "../{{ item }}"
  register: source_dirs
  loop:
    - "airflow/plugins"
    - "airflow/addfiles"
    - "airflow/bgutil-ytdlp-pot-provider"

- name: "Log: Syncing Airflow core files"
  debug:
    msg: "Syncing DAGs, configs, and Python source code to the master node."

- name: Sync Airflow master files
  synchronize:
    src: "../{{ item }}"
    dest: "{{ airflow_master_dir }}/"
    archive: yes
    recursive: yes
    delete: yes
    rsync_path: "sudo rsync"
    rsync_opts: "{{ rsync_default_opts }}"
  loop:
    - "airflow/Dockerfile"
    - "airflow/Dockerfile.caddy"
    - "airflow/.dockerignore"
    - "airflow/dags"
    - "airflow/inputfiles"
    - "setup.py"
    - "yt_ops_services"
    - "thrift_model"
    - "VERSION"
    - "airflow/update-yt-dlp.sh"
    - "get_info_json_client.py"
    - "proxy_manager_client.py"
    - "utils"

- name: Copy custom Python config files to master
  copy:
    src: "../airflow/config/{{ item }}"
    dest: "{{ airflow_master_dir }}/config/{{ item }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0644'
  become: yes
  loop:
    - "custom_task_hooks.py"
    - "airflow_local_settings.py"

- name: Ensure any existing airflow.cfg directory is removed
  file:
    path: "{{ airflow_master_dir }}/config/airflow.cfg"
    state: absent
  become: yes
  ignore_errors: yes

- name: Copy airflow.cfg to master
  copy:
    src: "../airflow/airflow.cfg"
    dest: "{{ airflow_master_dir }}/config/airflow.cfg"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    mode: '0644'
  become: yes

- name: Sync Airflow master config files
  synchronize:
    src: "../airflow/configs/{{ item }}"
    dest: "{{ airflow_master_dir }}/configs/"
    archive: yes
    recursive: yes
    rsync_path: "sudo rsync"
    rsync_opts: "{{ rsync_default_opts }}"
  loop:
    - "nginx.conf"
    - "Caddyfile"

- name: Sync optional directories if they exist
  synchronize:
    src: "../{{ item.item }}/"
    dest: "{{ airflow_master_dir }}/{{ item.item | basename }}/"
    archive: yes
    recursive: yes
    delete: yes
    rsync_path: "sudo rsync"
    rsync_opts: "{{ rsync_default_opts }}"
  loop: "{{ source_dirs.results }}"
  when: item.stat.exists

- name: Sync pangramia thrift files
  synchronize:
    src: "../thrift_model/gen_py/pangramia/"
    dest: "{{ airflow_master_dir }}/pangramia/"
    archive: yes
    recursive: yes
    delete: yes
    rsync_path: "sudo rsync"
    rsync_opts: "{{ rsync_default_opts }}"

- name: Template docker-compose file for master
  template:
    src: "{{ playbook_dir }}/../airflow/configs/docker-compose-master.yaml.j2"
    dest: "{{ airflow_master_dir }}/configs/docker-compose-master.yaml"
    mode: "{{ file_permissions }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Template Redis connection file
  template:
    src: "../airflow/config/redis_default_conn.json.j2"
    dest: "{{ airflow_master_dir }}/config/redis_default_conn.json"
    mode: "{{ file_permissions }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Template Minio connection file for master
  template:
    src: "../airflow/config/minio_default_conn.json.j2"
    dest: "{{ airflow_master_dir }}/config/minio_default_conn.json"
    mode: "{{ file_permissions }}"
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Ensure config directory is group-writable for Airflow initialization
  file:
    path: "{{ airflow_master_dir }}/config"
    state: directory
    mode: '0775'
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Ensure airflow.cfg is group-writable for Airflow initialization
  file:
    path: "{{ airflow_master_dir }}/config/airflow.cfg"
    state: file
    mode: '0664'
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
  become: yes

- name: Create symlink for docker-compose.yaml
  file:
    src: "{{ airflow_master_dir }}/configs/docker-compose-master.yaml"
    dest: "{{ airflow_master_dir }}/docker-compose.yaml"
    state: link
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    force: yes
    follow: no

- name: Ensure correct permissions for build context
  file:
    path: "{{ airflow_master_dir }}"
    state: directory
    owner: "{{ ssh_user }}"
    group: "{{ deploy_group }}"
    recurse: yes
  become: yes

- name: Ensure postgres-data directory exists on master and has correct permissions
  file:
    path: "{{ airflow_master_dir }}/postgres-data"
    state: directory
    owner: "999"  # UID for the 'postgres' user in the official postgres image
    group: "999"  # GID for the 'postgres' group in the official postgres image
    mode: '0700'
  become: yes

- name: Set group-writable and setgid permissions on master logs directory contents
  shell: |
    find {{ airflow_master_dir }}/logs -type d -exec chmod g+rws {} +
    find {{ airflow_master_dir }}/logs -type f -exec chmod g+rw {} +
  become: yes

- name: Verify Dockerfile exists in build directory
stat:
path: "{{ airflow_master_dir }}/Dockerfile"
register: dockerfile_stat
- name: Fail if Dockerfile is missing
fail:
msg: "Dockerfile not found in {{ airflow_master_dir }}. Cannot build image."
when: not dockerfile_stat.stat.exists
- name: "Log: Building Airflow Docker image"
debug:
msg: "Building the main Airflow Docker image ({{ airflow_image_name }}) locally on the master node. This may take a few minutes."
- name: Build Airflow master image
community.docker.docker_image:
name: "{{ airflow_image_name }}"
build:
path: "{{ airflow_master_dir }}"
dockerfile: "Dockerfile" # Explicitly specify the Dockerfile name
source: build
force_source: true
when: not fast_deploy | default(false)
- name: "Log: Preparing assets for Caddy image"
debug:
msg: "Extracting static assets from the Airflow image to build the Caddy reverse proxy."
when: not fast_deploy | default(false)
- name: Prepare Caddy asset extraction directory
file:
path: "{{ airflow_master_dir }}/caddy_build_assets"
state: "{{ item }}"
owner: "{{ ssh_user }}"
group: "{{ deploy_group }}"
mode: '0755'
loop:
- absent
- directory
become: yes
when: not fast_deploy | default(false)
- name: Ensure subdirectories exist with correct permissions
file:
path: "{{ airflow_master_dir }}/caddy_build_assets/{{ item }}"
state: directory
owner: "{{ ssh_user }}"
group: "{{ deploy_group }}"
mode: '0755'
loop:
- "appbuilder"
- "dist"
become: yes
when: not fast_deploy | default(false)
- name: Extract static assets from Airflow image for Caddy build
shell: |
set -e
CONTAINER_ID=$(docker create {{ airflow_image_name }})
# Dynamically find paths inside the container
APPBUILDER_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, flask_appbuilder; print(os.path.join(os.path.dirname(flask_appbuilder.__file__), "static", "appbuilder"))')
AIRFLOW_DIST_PATH=$(docker run --rm --entrypoint "" {{ airflow_image_name }} python -c 'import os, airflow; print(os.path.join(os.path.dirname(airflow.__file__), "www/static/dist"))')
# Copy assets from container to host
docker cp "${CONTAINER_ID}:${APPBUILDER_PATH}/." "./caddy_build_assets/appbuilder"
docker cp "${CONTAINER_ID}:${AIRFLOW_DIST_PATH}/." "./caddy_build_assets/dist"
docker rm -f "${CONTAINER_ID}"
# Pre-compress assets
find ./caddy_build_assets/appbuilder -type f -print0 | xargs -0 gzip -k -9
find ./caddy_build_assets/dist -type f -print0 | xargs -0 gzip -k -9
args:
chdir: "{{ airflow_master_dir }}"
executable: /bin/bash
become: yes
register: asset_extraction
changed_when: asset_extraction.rc == 0
when: not fast_deploy | default(false)
- name: "Log: Building Caddy reverse proxy image"
debug:
msg: "Building the Caddy image (pangramia/ytdlp-ops-caddy:latest) to serve static assets."
- name: Build Caddy image
community.docker.docker_image:
name: "pangramia/ytdlp-ops-caddy:latest"
build:
path: "{{ airflow_master_dir }}"
dockerfile: "Dockerfile.caddy"
source: build
force_source: true
when: not fast_deploy | default(false)
- name: "Log: Starting Airflow services"
debug:
msg: "Starting Airflow core services (webserver, scheduler, etc.) on the master node using docker-compose."
- name: Start Airflow master service
community.docker.docker_compose_v2:
project_src: "{{ airflow_master_dir }}"
files:
- "configs/docker-compose-master.yaml"
state: present
remove_orphans: true
pull: "{{ 'never' if fast_deploy | default(false) else 'missing' }}"

View File

@ -1,7 +0,0 @@
#!/bin/bash
# update-yt-dlp.sh is run by the worker container before every DAG execution
set -e
echo "[$(date)] Updating yt-dlp to latest nightly master..."
python3 -m pip install -U --pre "yt-dlp[default]" --upgrade-strategy eager --force-reinstall --no-cache-dir
echo "[$(date)] yt-dlp updated to:"
yt-dlp --version
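
A minimal sketch of wiring this script into a DAG as a pre-download task, assuming a `BashOperator` and that the script is mounted at `/opt/airflow/update-yt-dlp.sh` inside the worker container (both are assumptions; the actual invocation mechanism may differ):

```python
from airflow.operators.bash import BashOperator

# Hypothetical pre-task; the script path inside the container is assumed.
# The trailing space stops Airflow from treating the ".sh"-suffixed string
# as a Jinja template file to render.
update_ytdlp = BashOperator(
    task_id='update_yt_dlp',
    bash_command='/opt/airflow/update-yt-dlp.sh ',
)
```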

ansible/.gitignore vendored
View File

@ -1 +1,4 @@
inventory.ini
group_vars/all/generated_vars.yml
host_vars/
.aider*

View File

@ -1 +0,0 @@
ytdlp-ops

View File

@ -1,9 +0,0 @@
# Migration Notes

This document tracks the process of migrating the Ansible deployment.

## Guiding Principles

- No changes to business logic or core functionality are permitted during this phase.
- The focus is solely on resolving file path issues, dependency errors, and structural inconsistencies resulting from the migration of a subset of files.
- All changes should be aimed at making the existing playbooks runnable in the new environment.

Some files were not shown because too many files have changed in this diff.